text stringlengths 1 1.05M |
|---|
<reponame>mason-fish/brim
import {createSelector} from "reselect"
import {ColumnsState} from "./types"
import {State} from "../types"
import {ViewerColumns} from "../Viewer/types"
import {createColumnSet} from "./models/columnSet"
import TableColumns from "../../models/TableColumns"
import Viewer from "../Viewer"
import activeTabSelect from "../Tab/activeTabSelect"
import {zng} from "zealot"
// Column preferences for the active tab, keyed by column-set name.
const getColumns = activeTabSelect<ColumnsState>((tab) => tab.columns)

// Builds the TableColumns model for the current viewer: merges the viewer's
// columns with the user's saved preferences for that column set, then sizes
// column widths from a sample of the records.
// NOTE: the input selectors must stay in this exact order — they map
// positionally onto the combiner's (viewerColumns, columnSettings, logs) args.
const getCurrentTableColumns = createSelector<
State,
ViewerColumns,
ColumnsState,
zng.Record[],
TableColumns
>(
Viewer.getColumns,
getColumns,
Viewer.getRecords,
(viewerColumns, columnSettings, logs) => {
const set = createColumnSet(viewerColumns)
// Preferences are stored per column-set name; may be undefined for new sets.
const prefs = columnSettings[set.getName()]
const table = new TableColumns(set.getName(), set.getUniqColumns(), prefs)
// Only sample the first 50 records to keep width measurement cheap.
table.setWidths(logs.slice(0, 50))
return table
}
)

export default {
getCurrentTableColumns,
getColumns
}
|
package notepad;
import java.awt.Color;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JOptionPane;
import javax.swing.KeyStroke;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
/**
 * A minimal Swing notepad: open/save/save-as, clipboard editing, a context
 * (right-click) menu, and an unsaved-changes prompt on exit.
 *
 * <p>Fixes over the previous revision: the save branch no longer constructs
 * (and leaks) a second {@code BufferedWriter}; readers/writers are closed via
 * try-with-resources; {@code change_flag} is reset after a successful open or
 * save (opening a file used to mark the buffer dirty); normal exits use exit
 * code 0 instead of 1; choosing "yes" in the exit dialog now actually exits
 * after saving, and saves to the current file when one is known; deprecated
 * {@code InputEvent.CTRL_MASK} replaced with {@code CTRL_DOWN_MASK}.
 */
public class Note extends JFrame implements ActionListener {

    private static final long serialVersionUID = 7453067955905534656L;

    // True when the document has modifications not yet written to disk.
    private boolean change_flag = false;
    // Path of the file currently being edited; null for an unsaved buffer.
    private String current_path = null;

    // UI widgets.
    private JTextArea ta;
    private JMenuBar menubar;
    private JMenu file;
    private JMenu edit;
    private JMenu help;
    private JMenuItem fileopen;
    private JMenuItem filesave;
    private JMenuItem filesave2;
    private JMenuItem fileexit;
    private JMenuItem copy;
    private JMenuItem copyall;
    private JMenuItem paste;
    private JMenuItem cut;
    private JMenuItem copyright;
    private JPopupMenu pmenu;
    private JMenuItem pcopy;
    private JMenuItem ppaste;
    private JMenuItem pcut;
    private JMenuItem pcopyall;

    /** Application entry point. */
    public static void main(String[] args) {
        Note note = new Note();
        note.init();
        note.setVisible(true);
    }

    /** Builds the frame, text area, menu bar, popup menu and listeners. */
    public void init() {
        this.setTitle("简易记事本");
        this.setSize(600, 600);
        this.setLocation(200, 200);
        ta = new JTextArea();
        ta.setFont(new Font("winter", Font.BOLD, 14));
        ta.setSelectionColor(new Color(255, 255, 0));
        ta.setSize(600, 600);
        // Any document change marks the buffer dirty.
        ta.getDocument().addDocumentListener(new DocumentListener() {
            public void changedUpdate(DocumentEvent e) {
                change_flag = true;
            }

            public void insertUpdate(DocumentEvent e) {
                change_flag = true;
            }

            public void removeUpdate(DocumentEvent e) {
                change_flag = true;
            }
        });
        // Menu bar: file / edit / help.
        menubar = new JMenuBar();
        file = new JMenu("文件");
        edit = new JMenu("编辑");
        help = new JMenu("帮助");
        fileopen = new JMenuItem("打开..");
        fileopen.setMnemonic('O');
        // CTRL_DOWN_MASK replaces the deprecated CTRL_MASK.
        fileopen.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, InputEvent.CTRL_DOWN_MASK));
        filesave = new JMenuItem("保存..");
        filesave.setMnemonic('S');
        filesave.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, InputEvent.CTRL_DOWN_MASK));
        filesave2 = new JMenuItem("另存为");
        filesave2.setMnemonic('K');
        fileexit = new JMenuItem("退出");
        fileexit.setMnemonic('E');
        fileopen.addActionListener(this);
        filesave.addActionListener(this);
        filesave2.addActionListener(this);
        fileexit.addActionListener(this);
        file.add(fileopen);
        file.add(filesave);
        file.add(filesave2);
        file.add(fileexit);
        copyall = new JMenuItem("全选");
        copyall.setMnemonic('A');
        copyall.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_A, InputEvent.CTRL_DOWN_MASK));
        copy = new JMenuItem("复制");
        copy.setMnemonic('C');
        copy.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_C, InputEvent.CTRL_DOWN_MASK));
        paste = new JMenuItem("粘贴");
        paste.setMnemonic('V');
        paste.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_V, InputEvent.CTRL_DOWN_MASK));
        cut = new JMenuItem("剪切");
        cut.setMnemonic('T');
        cut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_T, InputEvent.CTRL_DOWN_MASK));
        copyall.addActionListener(this);
        copy.addActionListener(this);
        cut.addActionListener(this);
        paste.addActionListener(this);
        edit.add(copyall);
        edit.add(copy);
        edit.add(paste);
        edit.add(cut);
        copyright = new JMenuItem("版权");
        copyright.setMnemonic('H');
        copyright.addActionListener(this);
        help.add(copyright);
        menubar.add(file);
        menubar.add(edit);
        menubar.add(help);
        // Context (right-click) popup menu.
        pmenu = new JPopupMenu();
        pcopy = new JMenuItem("复制");
        pcopyall = new JMenuItem("全选");
        ppaste = new JMenuItem("粘贴");
        pcut = new JMenuItem("剪切");
        pcopyall.addActionListener(this);
        pcopy.addActionListener(this);
        pcut.addActionListener(this);
        ppaste.addActionListener(this);
        pmenu.add(pcopy);
        pmenu.add(pcopyall);
        pmenu.add(pcut);
        pmenu.add(ppaste);
        this.setJMenuBar(menubar);
        this.getContentPane().add(new JScrollPane(ta));
        // Intercept the window-close button so we can prompt for unsaved edits.
        this.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent arg0) {
                exit();
            }
        });
        // Right click shows the popup menu; enable only the items that make
        // sense for the current selection state.
        ta.addMouseListener(new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                int button_type = e.getButton();
                int x = e.getX();
                int y = e.getY();
                if (button_type == MouseEvent.BUTTON3) {
                    if (ta.getSelectedText() == null) {
                        // Nothing selected: copy/cut are meaningless.
                        pcopy.setEnabled(false);
                        pcut.setEnabled(false);
                        ppaste.setEnabled(true);
                        pcopyall.setEnabled(true);
                    } else if (ta.getSelectedText().equals(ta.getText())) {
                        // Everything already selected: select-all is redundant.
                        pcopy.setEnabled(true);
                        pcut.setEnabled(true);
                        ppaste.setEnabled(true);
                        pcopyall.setEnabled(false);
                    } else {
                        pcopy.setEnabled(true);
                        pcut.setEnabled(true);
                        ppaste.setEnabled(true);
                        pcopyall.setEnabled(true);
                    }
                    // Show the popup at the click position inside the text area.
                    pmenu.show(ta, x, y);
                }
            }
        });
    }

    /**
     * Dispatches menu actions by the item's action command (the menu label).
     */
    public void actionPerformed(ActionEvent e) {
        String cmd = e.getActionCommand();
        if (cmd.equals("打开..")) {
            ta.setText("");
            JFileChooser filechooser = new JFileChooser();
            filechooser.setCurrentDirectory(new File("d:/"));
            if (filechooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
                File openfile = filechooser.getSelectedFile();
                current_path = openfile.getPath();
                // try-with-resources closes the reader even on failure.
                try (BufferedReader reader = new BufferedReader(new FileReader(openfile))) {
                    String r = reader.readLine();
                    while (r != null) {
                        ta.append(r + "\n");
                        r = reader.readLine();
                    }
                    // The appends above tripped the document listener; a
                    // freshly-loaded file has no unsaved edits.
                    change_flag = false;
                } catch (IOException ee) {
                    ee.printStackTrace();
                }
            }
        }
        if (cmd.equals("保存..")) {
            if (current_path != null) {
                // Save in place. The previous revision built two writers and
                // leaked the first FileWriter; one is enough.
                try (BufferedWriter writer = new BufferedWriter(new FileWriter(current_path))) {
                    writer.write(ta.getText());
                    change_flag = false;
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
            } else {
                // No path yet: behave like "save as".
                JFileChooser filechooser = new JFileChooser();
                filechooser.setCurrentDirectory(new File("d:/"));
                if (filechooser.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) {
                    File savefile = filechooser.getSelectedFile();
                    String savetarget = savefile.getPath() + ".txt";
                    try (BufferedWriter writer = new BufferedWriter(new FileWriter(savetarget))) {
                        writer.write(ta.getText());
                        // Only adopt the new path once the write succeeded.
                        current_path = savetarget;
                        change_flag = false;
                    } catch (IOException ee) {
                        ee.printStackTrace();
                    }
                }
            }
        }
        // "Save as" writes a copy and deliberately leaves current_path alone.
        if (cmd.equals("另存为")) {
            JFileChooser filechooser = new JFileChooser();
            if (filechooser.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) {
                File savefile = filechooser.getSelectedFile();
                String savetarget = savefile.getPath() + ".txt";
                try (BufferedWriter writer = new BufferedWriter(new FileWriter(savetarget))) {
                    writer.write(ta.getText());
                } catch (IOException ee) {
                    ee.printStackTrace();
                }
            }
        }
        if (cmd.equals("版权")) {
            JOptionPane.showMessageDialog(this, "版权归 Winter 所有!", "版权声明",
                    JOptionPane.INFORMATION_MESSAGE);
        }
        if (cmd.equals("剪切")) {
            ta.cut();
        }
        if (cmd.equals("复制")) {
            ta.copy();
        }
        if (cmd.equals("粘贴")) {
            ta.paste();
        }
        if (cmd.equals("全选")) {
            ta.selectAll();
        }
        if (cmd.equals("退出")) {
            exit();
        }
    }

    /**
     * Exits the application, prompting to save first when there are unsaved
     * changes. Exit code is 0 (normal termination) — the previous revision
     * used 1, which signals failure to the OS.
     */
    public void exit() {
        if (!change_flag) {
            System.exit(0);
            return;
        }
        int sign = JOptionPane.showConfirmDialog(this, "当前文件还没有保存\n 是否退出?",
                "提示", JOptionPane.YES_NO_CANCEL_OPTION);
        if (sign == JOptionPane.YES_OPTION) {
            // Save to the current file when known; fall back to the legacy
            // default location for never-saved buffers.
            String target = (current_path != null) ? current_path : "d:/新建文档.txt";
            try (BufferedWriter writer = new BufferedWriter(new FileWriter(target))) {
                writer.write(ta.getText());
            } catch (IOException ee) {
                ee.printStackTrace();
            }
            // Previously the app saved but forgot to exit.
            System.exit(0);
        }
        if (sign == JOptionPane.NO_OPTION) {
            System.exit(0);
        }
        // CANCEL (or dialog closed): keep running.
    }
}
<gh_stars>1-10
package com.envyful.advanced.holograms.forge.hologram.database;
import com.envyful.advanced.holograms.forge.hologram.ForgeHologram;
import com.envyful.advanced.holograms.forge.hologram.ForgeHologramTypeAdapter;
import com.envyful.holograms.api.hologram.Hologram;
import com.envyful.holograms.api.hologram.HologramBuilder;
import com.envyful.holograms.api.manager.HologramFactory;
import com.envyful.holograms.api.manager.database.HologramSaver;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.internal.LinkedTreeMap;
import java.io.*;
import java.lang.reflect.Modifier;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
*
* Json implementation of the {@link HologramSaver} interface
*
*/
/**
 * Json implementation of the {@link HologramSaver} interface.
 *
 * <p>Fix over the previous revision: {@code load()} leaked its reader on
 * every call and {@code save()} leaked its writer on exception; both streams
 * are now managed with try-with-resources.
 */
public class JsonHologramSaver implements HologramSaver {

    private static final Gson GSON = new GsonBuilder()
            .setPrettyPrinting()
            .registerTypeAdapter(ForgeHologram.class, new ForgeHologramTypeAdapter())
            .excludeFieldsWithModifiers(Modifier.TRANSIENT, Modifier.STATIC)
            .create();

    private final File file;

    /**
     * Creates the saver, ensuring the backing JSON file and its parent
     * directories exist.
     *
     * @param file path of the JSON file used for persistence
     */
    public JsonHologramSaver(String file) {
        this.file = Paths.get(file).toFile();
        try {
            if (!this.file.exists()) {
                if (!this.file.getParentFile().exists()) {
                    this.file.getParentFile().mkdirs();
                }
                this.file.createNewFile();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Loads all holograms from disk.
     *
     * @return holograms keyed by lower-cased id; empty on error or empty file
     */
    @Override
    public Map<String, Hologram> load() {
        Map<String, Hologram> holograms = Maps.newHashMap();
        try (InputStreamReader jsonReader =
                     new InputStreamReader(new FileInputStream(this.file), StandardCharsets.UTF_8)) {
            List<LinkedTreeMap<String, Object>> forgeHolograms = GSON.fromJson(jsonReader, ArrayList.class);
            // A brand-new/empty file deserializes to null.
            if (forgeHolograms == null) {
                return holograms;
            }
            forgeHolograms.forEach(data -> {
                ForgeHologram hologram = this.fromTreeMap(data);
                holograms.put(hologram.getId().toLowerCase(), hologram);
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
        return holograms;
    }

    /**
     * Rebuilds a hologram from the raw Gson tree-map representation.
     */
    private ForgeHologram fromTreeMap(LinkedTreeMap<String, Object> map) {
        LinkedTreeMap<String, Object> loc = (LinkedTreeMap<String, Object>) map.get("loc");
        List<String> lines = (List<String>) map.get("lines");
        HologramBuilder builder = HologramFactory.builder()
                .id(map.get("id").toString())
                .position((double) loc.get("x"), (double) loc.get("y"), (double) loc.get("z"))
                .world(loc.get("world").toString())
                .lines(lines.toArray(new String[0]));
        // Older saves may lack a range; Gson stores numbers as doubles.
        int range = 64;
        if (map.containsKey("range")) {
            range = (int) ((double) map.get("range"));
            if (range <= 0) {
                range = 64;
            }
        }
        return (ForgeHologram) builder.range(range).build(false);
    }

    /**
     * Persists the given holograms, silently skipping non-Forge instances.
     */
    @Override
    public void save(List<Hologram> holograms) {
        List<ForgeHologram> savedHolograms = Lists.newArrayList();
        for (Hologram hologram : holograms) {
            if (hologram instanceof ForgeHologram) {
                savedHolograms.add((ForgeHologram) hologram);
            }
        }
        try (OutputStreamWriter jsonWriter =
                     new OutputStreamWriter(new FileOutputStream(this.file), StandardCharsets.UTF_8)) {
            GSON.toJson(savedHolograms, ArrayList.class, jsonWriter);
            jsonWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
# SPDX-License-Identifier: BSD-3-Clause
source helpers.sh
# Tear down the TPM test environment; pass "no-shut-down" to skip shutdown
# (used for the initial cleanup before the tests run).
cleanup() {
    case "$1" in
        no-shut-down) ;;
        *) shut_down ;;
    esac
}
trap cleanup EXIT
start_up
cleanup "no-shut-down"
aesmodes="$(populate_algs "details['encrypting'] and details['symmetric']")"
hashalgs="$(populate_algs "details['hash'] and not details['method'] \
and not details['signing'] \
and not details['symmetric'] \
and alg is not None")"
eccmethods="$(populate_algs "details['signing'] and not details['hash'] and \"ec\" in alg")"
rsamethods="$(populate_algs "details['signing'] and not details['hash'] and \"rsa\" in alg")"
# Test that common algorithms are supported
for i in "rsa" "xor" "hmac" "ecc" "keyedhash"; do
tpm2_testparms "${i}"
done
# Test that RSA signing schemes are supported
for i in ${rsamethods}; do
echo "tpm2_testparms rsa:${i}"
tpm2_testparms "rsa:${i}"
done
# Test that ECC signing schemes are supported
for i in ${eccmethods}; do
tpm2_testparms "ecc:${i}"
done
# Test that aes modes are supported
for i in ${aesmodes}; do
tpm2_testparms "aes128${i}"
done
# Test that xor on hash algs is supported
for i in ${hashalgs}; do
tpm2_testparms "xor:${i}"
done
# Test that hmac on hash algs is supported
for i in ${hashalgs}; do
tpm2_testparms "hmac:${i}"
done
# Test that null algorithm raise an error (error from software stack)
if ! tpm2_testparms "null" 2>&1 1>/dev/null | grep -q "Invalid or unsupported by the tool : null"; then
echo "tpm2_testparms with 'null' algorithm didn't fail"
exit 1
else
true
fi
# Attempt to specify a suite that is not supported (error from TPM)
if tpm2_testparms "ecc521:ecdsa:aes256cbc" &>/dev/null; then
echo "tpm2_testparms succeeded while it shouldn't or TPM failed"
exit 1
else
true
fi
exit 0
|
#!/usr/bin/env bash
# Runs the MoreLinq test suite for each target framework and configuration,
# collecting OpenCover coverage for Debug builds, then (when Mono is
# available) runs the net451 tests on the Mono runtime.
set -e
cd "$(dirname "$0")"
# NOTE(review): $c is not set at this point (it is only a loop variable
# below), so build.sh receives no argument here — confirm whether "$1" was
# intended instead.
./build.sh $c
if [[ -z "$1" ]]; then
configs="Debug Release"
else
configs="$1"
fi
for f in netcoreapp2.1 netcoreapp3.1 net5.0; do
for c in $configs; do
if [[ "$c" == "Debug" ]]; then
# Coverage is only collected for Debug builds; NUnit and the generated
# extension/experimental wrappers are excluded from the report.
coverage_args="-p:CollectCoverage=true
-p:CoverletOutputFormat=opencover
-p:Exclude=\"[NUnit*]*,[MoreLinq]MoreLinq.Extensions.*,[MoreLinq]MoreLinq.Experimental.*\""
else
unset coverage_args
fi
dotnet test --no-build -c $c -f $f MoreLinq.Test $coverage_args
done
done
if [[ -z `which mono 2>/dev/null` ]]; then
echo>&2 NOTE! Mono does not appear to be installed so unit tests
echo>&2 against the Mono runtime will be skipped.
else
for c in $configs; do
mono MoreLinq.Test/bin/$c/net451/MoreLinq.Test.exe
done
fi
|
<gh_stars>1-10
/*
* Copyright (c) 2017 Nedelosk, Mezz
*
* This work (the MOD) is licensed under the "MIT" License, see LICENSE for details.
*/
package oreregistry.api.registry;
import javax.annotation.Nullable;
import java.util.Map;
import net.minecraft.item.ItemStack;
/**
* A resource has a type (see {@link ResourceTypes} and contains several product types (see {@link ProductTypes}.
* This is used to unify resources from many mods so that they all use the same items.
* <p>
* Get an instance from {@link IResourceRegistry#registerResource(String)}.
*/
/**
 * A resource has a type (see {@link ResourceTypes}) and contains several product types (see {@link ProductTypes}).
 * This is used to unify resources from many mods so that they all use the same items.
 * <p>
 * Get an instance from {@link IResourceRegistry#registerResource(String)}.
 */
public interface IResource {
/**
 * The type of the resource. For examples see {@link ResourceTypes}.
 *
 * @return the resource's type identifier.
 */
String getType();
/**
 * Register a product added by your mod for this resource.
 *
 * @param productType The type of the product. For examples see {@link ProductTypes}.
 * @param product The product provided by your mod.
 * @return the product that should be used by every mod.
 */
IProduct registerProduct(String productType, ItemStack product);
/**
 * Checks whether a product has been registered for the given type.
 *
 * @param productType The type of the product. For examples see {@link ProductTypes}.
 * @return True if there is already a product registered with this type.
 */
boolean hasProduct(String productType);
/**
 * @param productType The type of the product. For examples see {@link ProductTypes}.
 * @return The product of the productType, if one is registered; null otherwise.
 */
@Nullable
IProduct getProduct(String productType);
/**
 * Returns a read-only map containing product types and their associated products.
 */
Map<String, IProduct> getRegisteredProducts();
}
|
// ESLint configuration: Vue 3 + TypeScript project with Jest tests.
module.exports = {
root: true,
env: {
node: true,
"jest/globals": true
},
extends: ["eslint:recommended", "plugin:vue/vue3-recommended", "@vue/typescript/recommended"],
parserOptions: {
ecmaVersion: 2021,
},
plugins: ["jest"],
rules: {
// Debug helpers are tolerated in development but flagged for production.
"no-alert": process.env.NODE_ENV === "production" ? "warn" : "off",
"no-console": process.env.NODE_ENV === "production" ? "warn" : "off",
"no-debugger": process.env.NODE_ENV === "production" ? "warn" : "off",
"arrow-body-style": "warn",
"arrow-parens": "warn",
eqeqeq: "error",
"generator-star-spacing": "warn",
"grouped-accessor-pairs": "warn",
"no-caller": "error",
"no-duplicate-imports": "error",
"no-else-return": "warn",
"no-eval": "error",
"no-extra-bind": "warn",
"no-implied-eval": "error",
"no-labels": "warn",
"no-lone-blocks": "warn",
"no-new-func": "error",
"no-new-wrappers": "error",
"no-return-await": "warn",
"no-template-curly-in-string": "warn",
"no-throw-literal": "error",
"no-undef-init": "warn",
"no-useless-call": "warn",
"no-useless-constructor": "warn",
"no-useless-rename": "warn",
"no-useless-return": "warn",
"no-var": "error",
"object-shorthand": "warn",
"prefer-const": "warn",
"prefer-destructuring": "warn",
"prefer-numeric-literals": "warn",
"prefer-rest-params": "warn",
"prefer-spread": "warn",
"prefer-template": "warn",
"require-atomic-updates": "warn",
"rest-spread-spacing": "warn",
// Only flag member ordering within a declaration, not declaration order.
"sort-imports": [
"warn",
{
ignoreCase: true,
ignoreDeclarationSort: true,
},
],
"template-curly-spacing": "warn",
"yield-star-spacing": "warn",
yoda: "warn",
// TypeScript/Vue-specific relaxations.
"@typescript-eslint/no-explicit-any": "off",
"vue/no-dupe-keys": "warn",
"vue/require-default-prop": "off"
},
};
|
<reponame>PhoenixLM/planonew<gh_stars>0
const Imovel = require('../models/imovel')
const Admin = require('../models/admin')
const Cliente = require('../models/cliente')
const Boleto = require('../models/boleto')
const moveTo = require('../helpers/moveToTmp')
const moveFrom = require('../helpers/moveImgFromTmp')
const moveBFrom = require('../helpers/moveBoletoFromTmp')
module.exports = (app) => {
app.get('/admin/painel', (req, res, next) => {
res.render('admin/painel')
})
app.post('/admin/media', (req, res, next) => {
if(req.files.qqfile) {
moveTo(req, (err) => {
if(err) res.send({'success': false})
else res.send({'success': true})
})
} else {
res.send('Erro, nenhum arquivo no body da request')
}
})
app.post('/admin/uploadBoleto', (req, res, next) => {
if(req.files.qqfile) {
moveTo(req, (err) => {
if(err) res.send({'success': false})
else res.send({'success': true})
})
} else {
res.send('Erro, nenhum arquivo no body da request')
}
})
/********** CRUD Imoveis **********/
app.get('/admin/imoveis', (req, res, next) => {
Imovel.find({}, (err, imovels) => {
if(err) return next(err)
res.render('admin/imoveis', {imovels : imovels})
})
})
app.get('/admin/imoveis/search/:id', (req, res, next) => {
let id = req.params.id
Imovel.findById(id, (err, imovel) => {
if(err) return next(err)
if(!imovel) return next()
res.render('admin/imovel', {imovel : imovel})
})
})
app.get('/admin/imoveis/newLocacaoC', (req, res, next) => {
res.render('admin/imovel-formLC')
})
app.get('/admin/imoveis/newLocacaoR', (req, res, next) => {
res.render('admin/imovel-formLR')
})
app.get('/admin/imoveis/newVenda', (req, res, next) => {
res.render('admin/imovel-formV')
})
app.post('/admin/imoveis/new', (req, res, next) => {
let imovel = new Imovel(req.body)
imovel.save((err) => {
if(err) return next(err)
moveFrom(req, imovel._id, (err) => {
if(err) return next(err)
})
res.redirect('/admin/imoveis')
})
})
app.get('/admin/imoveis/delete/:id', (req, res, next) => {
let id = req.params.id
Imovel.findById(id, (err, imovel) => {
if(err) return next(err)
if(!imovel) return next()
imovel.remove((err) => {
if(err) return next(err)
console.log('Sucesso')
res.redirect('/admin/imoveis')
})
})
})
app.post('/admin/imoveis/update/:id', (req, res, next) => {
let id = req.params.id
Imovel.findById(id, (err, imovel) => {
if(err) return next(err)
if(!imovel) return next()
imovel.set(req.body)
imovel.save((err) => {
if(err) return next(err)
moveFrom(req, imovel._id, (err) => {
if(err) return next(err)
})
res.redirect('/admin/imoveis')
})
})
})
/********** CRUD Admins **********/
app.get('/admin/admins', (req, res, next) => {
Admin.find({}, (err, admins) => {
if(err) return next(err)
res.render('admin/admins', {admins : admins})
})
})
app.get('/admin/admins/search/:id', (req, res, next) => {
let id = req.params.id
Admin.findById(id, (err, admin) => {
if(err) return next(err)
if(!admin) return next()
res.render('admin/admin', {admin : admin})
})
})
app.get('/admin/admins/new', (req, res, next) => {
res.render('admin/admin-form')
})
app.post('/admin/admins/new', (req, res, next) => {
let admin = new Admin(req.body)
admin.save((err) => {
if(err) return next(err)
res.redirect('/admin/admins')
})
})
app.get('/admin/admins/delete/:id', (req, res, next) => {
let id = req.params.id
Admin.findById(id, (err, admin) => {
if(err) return next(err)
if(!admin) return next()
admin.remove((err) => {
if(err) return next(err)
res.redirect('/admin/admins')
})
})
})
app.post('/admin/admins/update/:id', (req, res, next) => {
let id = req.params.id
Admin.findById(id, (err, admin) => {
if(err) return next(err)
if(!admin) return next()
admin.set(req.body)
admin.save((err) => {
if(err) return next(err)
res.redirect('/admin/admins')
})
})
})
/********** CRUD Clientes **********/
app.get('/admin/clientes', (req, res, next) => {
Cliente.find({}, (err, clientes) => {
if(err) return next(err)
res.render('admin/clientes', {clientes : clientes})
})
})
app.get('/admin/clientes/search/:id', (req, res, next) => {
let id = req.params.id
Cliente.findById(id, (err, cliente) => {
if(err) return next(err)
if(!cliente) return next()
res.render('admin/cliente', {cliente : cliente})
})
})
app.get('/admin/clientes/new', (req, res, next) => {
res.render('admin/cliente-form')
})
app.post('/admin/clientes/new', (req, res, next) => {
let cliente = new Cliente(req.body)
cliente.save((err) => {
if(err) return next(err)
res.redirect('/admin/clientes')
})
})
app.get('/admin/clientes/delete/:id', (req, res, next) => {
let id = req.params.id
Cliente.findById(id, (err, cliente) => {
if(err) return next(err)
if(!cliente) return next()
cliente.remove((err) => {
if(err) return next(err)
res.redirect('/admin/clientes')
})
})
})
app.post('/admin/clientes/update/:id', (req, res, next) => {
let id = req.params.id
Cliente.findById(id, (err, cliente) => {
if(err) return next(err)
if(!cliente) return next()
cliente.set(req.body)
cliente.save((err) => {
if(err) return next(err)
res.redirect('/admin/clientes')
})
})
})
/********** CRUD Boletos **********/
app.get('/admin/boletos', (req, res, next) => {
Boleto.find({}, (err, boletos) => {
if(err) return next(err)
res.render('admin/boletos', {boletos : boletos})
})
})
app.get('/admin/boletos/search/:id', (req, res, next) => {
let id = req.params.id
Boleto.findById(id, (err, boleto) => {
if(err) return next(err)
if(!boleto) return next()
res.render('admin/boleto', {boleto : boleto})
})
})
app.get('/admin/boletos/new', (req, res, next) => {
Cliente.find({}, (err, clientes) => {
if(err) return next(err)
res.render('admin/boleto-form', {clientes : clientes})
})
})
app.post('/admin/boletos/new', (req, res, next) => {
let boleto = new Boleto(req.body)
boleto.save((err) => {
if(err) return next(err)
moveBFrom(req, boleto._id, (err) => {
if(err) return next(err)
})
res.redirect('/admin/boletos')
})
})
app.get('/admin/boletos/delete/:id', (req, res, next) => {
let id = req.params.id
Boleto.findById(id, (err, boleto) => {
if(err) return next(err)
if(!boleto) return next()
boleto.remove((err) => {
if(err) return next(err)
res.redirect('/admin/boletos')
})
})
})
app.post('/admin/boletos/update/:id', (req, res, next) => {
let id = req.params.id
Boleto.findById(id, (err, boleto) => {
if(err) return next(err)
if(!boleto) return next()
boleto.set(req.body)
boleto.save((err) => {
if(err) return next(err)
res.redirect('/admin/boletos')
})
})
})
app.get('/admin/boletos/file/:id', (req, res, next) => {
let tempFile="./public/boletos/"+req.params.id+"/boleto.pdf"
res.download(tempFile)
})
} |
<reponame>naomijub/Rubysss
#!/usr/bin/ruby
# Prints a greeting, its character count, and its two words re-joined.
$hello = "Hello Rubysss"
puts $hello
puts $hello.size
puts "#{$hello[0, 5]} #{$hello[6, 4]}"
|
import paramiko
def execute_ssh_command(vm_ip, username, password, command):
    """Run *command* on a remote host over SSH and print its stdout.

    Errors are reported to stdout rather than raised; the SSH connection is
    always closed, even when the command or connection fails.

    Args:
        vm_ip: Hostname or IP address of the target machine.
        username: SSH login name.
        password: SSH password for *username*.
        command: Shell command to execute remotely.
    """
    ssh_client = paramiko.SSHClient()
    # Auto-accept unknown host keys. NOTE(review): this disables host-key
    # verification and is vulnerable to MITM; acceptable for lab VMs only.
    ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh_client.connect(vm_ip, username=username, password=password)
        stdin, stdout, stderr = ssh_client.exec_command(command)
        output = stdout.read().decode('utf-8')
        print(output)
    except paramiko.AuthenticationException:
        print("Authentication failed. Please check your username and password.")
    # NoValidConnectionsError is a subclass of SSHException, so it must be
    # caught first -- the original order made this handler unreachable.
    except paramiko.ssh_exception.NoValidConnectionsError:
        print("Unable to connect to the VM. Please check the IP address and ensure SSH is enabled.")
    except paramiko.SSHException as e:
        print(f"SSH connection failed: {e}")
    except Exception as e:
        print(f"An error occurred: {e}")
    finally:
        # Always release the connection; the original leaked it on any error.
        ssh_client.close()
# Example usage
# NOTE(review): these statements execute on import as well as when run as a
# script; consider guarding them with `if __name__ == "__main__":`.
vm_ip = "192.168.1.100"
username = "user"
password = "password"
command = "ls -l"
execute_ssh_command(vm_ip, username, password, command)
import nvidia.dali.ops as ops
import nvidia.dali.types as types
import numpy as np
import nvidia.dali.pipeline as pipeline
import nvidia.dali.fn as fn
def process_and_augment_image(input_image, output_size, crop):
    """Decode, resize and optionally random-crop an image with a DALI pipeline.

    NOTE(review): several DALI API details here look wrong and need
    verification against the installed DALI version:
      - ``fn.tensor_to_array`` is not a known DALI operator (likely a bug);
      - ``fn.image_decoder`` is the legacy name (newer DALI uses
        ``fn.decoders.image``);
      - ``fn.external_source(source=input_image, ...)`` expects a callable or
        iterable source, not a single array -- confirm how this is called.

    Args:
        input_image: Source fed to the pipeline's external_source node.
        output_size: (width, height) used for resize (and crop when enabled).
        crop: If truthy, apply a random resized crop after resizing.
    Returns:
        The first pipeline output converted via ``as_array()``.
    """
    pipe = pipeline.Pipeline(batch_size=1, num_threads=1, device_id=0)
    with pipe:
        input = fn.external_source(source=input_image, layout="HWC")
        # "mixed" decodes on CPU input / GPU output.
        input = fn.image_decoder(input, device="mixed")
        input = fn.resize(input, resize_x=output_size[0], resize_y=output_size[1])
        if crop:
            input = fn.random_resized_crop(input, size=output_size)
        output = fn.tensor_to_array(input)
        pipe.set_outputs(output)
    pipe.build()
    pipe_out = pipe.run()
    augmented_image = pipe_out[0].as_array()
    return augmented_image
/**
*
* @author Ware (<NAME>)
* @license MIT
* @website https://github.com/WareBare
*
*/
module.exports = {
// CMS module state.
Forms: {},                         // registered form definitions
tplContent: {},                    // template snippets keyed by name
SpecialTags: false,                // lazily-populated; false until loaded
LibraryData: false,                // currently-edited library entry (false = none)
GroupData: false,                  // available coloring groups (false = none)
CurrentPackageNameInput: ``,       // live value of the package-name field
bShowAdvancedOptions: false,       // toggles the advanced options section
// Returns true when the typed package name differs from the current entry's
// name AND already exists in the LibraryData store (i.e. would collide).
CheckPackageNameDupe: function(){
let bOutIsDupe = false, tempData;
// ---
if(this.CurrentPackageNameInput !== this.LibraryData.PackageName){
// Super.ReadData returns truthy when an entry with this name exists.
bOutIsDupe = Super.ReadData(tempData = {}, `LibraryData`, `PackageName`, `${this.CurrentPackageNameInput}`);
}
return bOutIsDupe;
},
// Tracks package-name edits locally (persisted later via create/update);
// empty input is ignored.
OnChangeText_LibraryPackageName: function(el){
if(el.value !== ``){
this.CurrentPackageNameInput = el.value;
wzReloadCMS(10);
}
},
// Persists display-name edits immediately; empty input is ignored.
OnChangeText_LibraryDisplayName: function(el){
if(el.value !== ``){
this.LibraryData.DisplayName = el.value;
Super.UpdateLibrary(this.contentType, this.LibraryData);
}
},
// Persists version edits immediately; empty input is ignored.
OnChangeText_LibraryVersion: function(el){
if(el.value !== ``){
this.LibraryData.Version = el.value;
Super.UpdateLibrary(this.contentType, this.LibraryData);
}
},
// Drag-and-drop handler for the group-assignment lists. Keys are encoded as
// "listName::itemName" strings; dropping between the "Default" and "Library"
// lists adds/removes the group in LibraryData.Data and persists the change.
OnDropListItem_GroupManage: function(e){
e.preventDefault();
try{
// Definitions — all three payloads use the "prefix::value" convention.
let SourceKey = e.dataTransfer.getData(`ListKey`).split(`::`)
, TargetKey = e.target.getAttribute(`wz-listKey`).split(`::`)
, ActionData = e.dataTransfer.getData(`ActionData`).split(`::`);
Log(SourceKey);
Log(TargetKey);
Log(ActionData);
// Only act when source, target and payload agree on the list family and
// the payload actually names a group.
if(SourceKey[0] === TargetKey[0] && SourceKey[0] === ActionData[0] && ActionData[1]){
// SourceKey --> TargetKey | ActionData[1] string to save.
// save to db
if(SourceKey[1] === `Default` && TargetKey[1] === `Library`){
// Assign: add the group to this library entry.
this.LibraryData.Data.push({
GroupName: ActionData[1]
});
Super.UpdateLibrary(this.contentType, this.LibraryData);
}else if(SourceKey[1] === `Library` && TargetKey[1] === `Default`){
// Unassign: remove the group from this library entry.
let dataIndex = this.LibraryData.Data.findIndex( x => x.GroupName === ActionData[1] );
if(dataIndex !== -1){
this.LibraryData.Data.splice(dataIndex, 1);
Super.UpdateLibrary(this.contentType, this.LibraryData);
}
//Super.ChangeAssignments(this.CurrentlySelectedGroupName, ActionData);
}
}
}catch(err){Log(err);}
wzReloadCMS(10);
},
// Name of the currently-selected group list item; false when none selected.
ActiveListItem: false,
// Click toggles selection: clicking the active item deselects it.
OnClickListItem_GroupManage: function(el){
if(el.innerHTML !== this.ActiveListItem){
this.ActiveListItem = el.innerHTML;
}else{
this.ActiveListItem = false;
}
wzReloadCMS(10);
},
// Moves the active group one slot up (bInMoveUp truthy) or down within
// LibraryData.Data and persists the new order. Returns false when the item
// is missing or already at the boundary.
UpdateGroupIndexInLibrary: function(bInMoveUp){
let dataIndex = this.LibraryData.Data.findIndex( x => x.GroupName === this.ActiveListItem);
if(dataIndex === -1) return false;
if(bInMoveUp){
if(dataIndex === 0) return false;
// Insert a copy one slot earlier, then remove the original (which has
// shifted to dataIndex + 1 after the insert).
this.LibraryData.Data.splice(dataIndex - 1, 0, this.LibraryData.Data[dataIndex]);
this.LibraryData.Data.splice(dataIndex + 1, 1);
}else{
if(dataIndex === this.LibraryData.Data.length - 1) return false;
// Insert a copy two slots later (i.e. after the next item), then remove
// the original, which is still at dataIndex.
this.LibraryData.Data.splice(dataIndex + 2, 0, this.LibraryData.Data[dataIndex]);
this.LibraryData.Data.splice(dataIndex, 1);
//Log(`Move DOWN`);
}
Super.UpdateLibrary(this.contentType, this.LibraryData);
//Log(this.LibraryData);
//Log(Super.UpdateLibrary(this.contentType, this.LibraryData));
},
// Shows/hides the advanced options section based on the checkbox state.
OnToggleCheckBox_ShowAdvancedOptions: function(el){
this.bShowAdvancedOptions = el.checked;
wzReloadCMS(10);
},
// Renders the "Manage Library Entry" header: the package-name/display-name/
// version fields, the advanced drag-and-drop group-assignment lists, and the
// read-only notice for locked entries. Returns an HTML string.
Content_Header: function(InLibraryData, InGroupData){
let outStr = ``
, groupList = []
, groupItemsDefault = []   // groups not yet assigned to this entry
, groupItemsLibrary = []   // groups assigned to this entry
, tempFormItemOutput = ``
, tempFormItemOutput2 = ``;
// ---
// Package-name field is always shown; flags duplicates inline.
tempFormItemOutput += Super.tplContent.TextFieldWithTip.wzReplace({
TEXT: this.CurrentPackageNameInput || `Name Required!`
, ON_CHANGE_FN: `_cms.OnChangeText_LibraryPackageName(this)`
, LABEL: `Package Name`
, TOOL_TIP: `<ul><li class="Msg_Warn">must be unique</li><li>Identifies Library Entries</li></ul>`
, SETTINGS: ` style="width: 250px;"`
, ERROR_MSG: this.CheckPackageNameDupe() ? `Must be Unique!` : ``
});
if(InLibraryData.bReadOnly){
// Read Only: explain how to clone the entry instead of editing it.
outStr += `<span class="Msg_Warn">This entry is ReadOnly, you cannot change its settings, but you can make a new one based on it.</span><br />A few simple steps on how to do make a copy of this that is customizable: <ul><li>Change "Package Name" to something that does not yet exist and is different to the current one</li><li>A new button <kbd>Create Entry</kbd> on the right will appear</li><li>Click that button and reload the page with "F5" when it's finished (this will force a reload of the new entry)</li><li><b>Note:</b> The name can be updated later with the button <kbd>Update Entry</kbd> and the entire entry deleted with <kbd>Delete Entry</kbd>.</li></ul><br />`;
}else{
// Writable: expose name/version fields and the advanced-options toggle.
tempFormItemOutput += Super.tplContent.TextFieldWithTip.wzReplace({
TEXT: this.LibraryData.DisplayName || `Name Required!`
, ON_CHANGE_FN: `_cms.OnChangeText_LibraryDisplayName(this)`
, LABEL: `Name`
, TOOL_TIP: `<ul><li>Visible Name in the list on the right.</li></ul>`
, SETTINGS: ` style="width: 250px;"`
, ERROR_MSG: ``
});
tempFormItemOutput += Super.tplContent.TextFieldWithTip.wzReplace({
TEXT: this.LibraryData.Version || `Name Required!`
, ON_CHANGE_FN: `_cms.OnChangeText_LibraryVersion(this)`
, LABEL: `Version`
, TOOL_TIP: `<ul><li>Optional</li><li>Entirely up to you, what goes in this field.</li><li>Feature for later, when you can package color codes for others.</li></ul>`
, SETTINGS: ` style="width: 50px;"`
, ERROR_MSG: ``
});
tempFormItemOutput += Super.tplContent.CheckBox.wzReplace({
LABEL: `Show Advanced Options`
, ON_CLICK_FN: `_cms.OnToggleCheckBox_ShowAdvancedOptions(this)`
, VALUE: `ShowAdvancedOptions`
, B_CHECKED: (this.bShowAdvancedOptions) ? ` CHECKED` : ``
});
/// ADVANCED OPTIONS
if(this.bShowAdvancedOptions){
// Partition all known groups into "not assigned" vs "assigned" by
// checking membership in this entry's Data array.
for(let groupKey in InGroupData){
if( InLibraryData.Data.findIndex( libData => libData[`GroupName`] == groupKey) === -1 ){
groupItemsDefault.push({
Text: `${groupKey}`
, ActionData: `manage::${groupKey}`
, bChecked: (this.ActiveListItem === groupKey)
});
}
}
for(let i = 0; i < InLibraryData.Data.length; i++){
groupItemsLibrary.push({
Text: `${InLibraryData.Data[i].GroupName}`
, ActionData: `manage::${InLibraryData.Data[i].GroupName}`
, bChecked: (this.ActiveListItem === InLibraryData.Data[i].GroupName)
, OnClick: `_cms.OnClickListItem_GroupManage(this)`
});
}
groupList.push({
Name: `manage::Default`
, Text: `Not Assigned`
, Items: groupItemsDefault
});
groupList.push({
Name: `manage::Library`
, Text: `Assigned`
, Items: groupItemsLibrary
});
// Render both lists as a drag-and-drop widget wired to
// OnDropListItem_GroupManage.
tempFormItemOutput2 += new WZ.Core.cDragDropList({
LegendName: `Manage Coloring-Groups`
, elementGroup: `groupmanage`
, OnDrop: `OnDropListItem_GroupManage`
, Lists: groupList
, SearchTerm: this.SearchTerm || ``
, Width: 300
}).create_();
if(this.ActiveListItem){
// ---
tempFormItemOutput2 += `<span class="Msg_Warn">Use Arrow Key Up/Down to move Coloring-Group.</span>`;
}
}
}
outStr += Super.tplContent.FormContainer.wzReplace({
TITLE: `Manage Library Entry`
, CONTENTS: `${tempFormItemOutput}`
});
outStr += `<div style="display: inline-block;">${tempFormItemOutput2}</div>`;
return outStr;
},
Content_ColorPicker: function(InLibraryData, InGroupData){
let outStr = ``
, outByTypes = {}
, tempFormItemOutput = ``
, tempFormItemOutput2 = ``
, curGroupInfo;
if(InLibraryData.bReadOnly){
tempFormItemOutput += `<span class="Msg_Warn">This entry is ReadOnly, you may view its colors, but not change them, but you may create a copy of this library entry and change the copy to your needs.</span><br />`;
}else if(!Super.IsUsingLocale()){
tempFormItemOutput += `<span>Any changes to "Clear" may require you to use <kbd>Remove Colors</kbd> before saving. You may also use "Auto-Delete Old Files" in settings and not worry about old files. If you're still experiencing issues reload the tool with "F5" and remove/save again.</span><br />`
}
for(let i = 0; i <= InLibraryData.Data.length - 1; i++){
// --- InLibraryData.Data[i].ColorCode
curGroupInfo = Object.assign({}, InGroupData[InLibraryData.Data[i].GroupName]);
if(InLibraryData.Data[i].ColorCode) curGroupInfo.ColorCode = InLibraryData.Data[i].ColorCode;
outByTypes[curGroupInfo.Keywords.Type[0]] = outByTypes[curGroupInfo.Keywords.Type[0]] || [];
outByTypes[curGroupInfo.Keywords.Type[0]].push(Super.MakeColorPicker(curGroupInfo.DisplayName, InLibraryData.Data[i].GroupName, curGroupInfo.ColorCode, InLibraryData.bReadOnly || false));
}
for(let keywordKey in outByTypes){
tempFormItemOutput2 = ``;
for(let i = 0; i <= outByTypes[keywordKey].length - 1; i++){
// outByTypes[keywordKey][i]
tempFormItemOutput2 += outByTypes[keywordKey][i];
}
tempFormItemOutput += Super.tplContent.CollapsibleContainer.wzReplace({
TITLE: keywordKey
, CONTENTS: tempFormItemOutput2
});
}
outStr += Super.tplContent.FormContainer.wzReplace({
TITLE: `Color per Group`
, CONTENTS: `${tempFormItemOutput}`
});
return outStr;
},
content_: function(InContentType){
	// Renders the library page: resolves which library entry ("contentType")
	// to show, loads it into this.LibraryData, persists the selection in the
	// app config, then renders the header form and the color pickers.
	let outStr = ``;
	if(!Super.IsPathCorrect()) return `<span class="Msg_Warn">You must set the correct Path in "Settings" first, if you think you got it right please check if you have an ArchiveTool.exe inside that directory.</span>`;
	this.GroupData = Super.GetClassData(`GroupData`);
	// Prepare Mastery Names to speed up color creation later.
	Super.GetMasteryNames();
	/*
	if(!this.SpecialTags){
		this.SpecialTags = Super.GetClassData(`ImportantTags`);
		this.SpecialTags.Groups = this.SpecialTags.Groups || {};
		this.SpecialTags.Tags = this.SpecialTags.Tags || {};
		//this.SpecialTags.Collapsibles = this.SpecialTags.Collapsibles || [];
	}
	*/
	// NOTE(review): this compares against the string `undefined`, not the
	// value undefined (unlike the typeof check below) — confirm intentional.
	if(InContentType !== this.contentType && this.contentType !== `undefined`){
		// Reset TagInfo when different Library is loaded, as it may change it.
		Super.ResetClassData(`TagInfoData`);
	}
	// The last used library entry is remembered in the app config.
	this.contentType = this.contentType || appConfig.get(`GrimDawn.ColorLibrary`);
	// first load used library in slot 1, then sets contentType and can use ContentType to load data.
	if(typeof this.contentType === `undefined`) {
		Super.ReadData(this.LibraryData = {}, `LibraryData`, 0);
		this.CurrentPackageNameInput = this.LibraryData.PackageName;
	}
	this.contentType = InContentType || this.contentType || this.LibraryData.PackageName;
	// Reload entry data only when a different entry than the loaded one is selected.
	if(this.LibraryData.PackageName !== this.contentType){
		Super.ReadData(this.LibraryData = {}, `LibraryData`, `PackageName`, `${this.contentType}`);
		this.CurrentPackageNameInput = this.contentType;
		this.ActiveListItem = false;
	}
	appConfig.set(`GrimDawn.ColorLibrary`, this.contentType);
	/// OUTPUT
	outStr += this.Content_Header(this.LibraryData, this.GroupData);
	outStr += this.Content_ColorPicker(this.LibraryData, this.GroupData);
	return outStr;
},
FindAndUpdateSourceEntry: function(OutSourceData, InFileName, InKeyToFind, InValueReplace){
let tempIndex = OutSourceData[InFileName].findIndex( DefKey => (DefKey.TagKey || ``).toLowerCase() === InKeyToFind.toLowerCase() )
, bOutFoundAndUpdated = false;
if(tempIndex !== -1){
OutSourceData[InFileName][tempIndex].TagValue = InValueReplace.wzReplace({
VALUE: OutSourceData[InFileName][tempIndex].TagValue
});
OutSourceData[InFileName][0].bUpdated = true;
Log(`FOUND!`);
bOutFoundAndUpdated = true;
}else{
Log(`NOT FOUND!`);
}
return bOutFoundAndUpdated;
},
UpdateSourceTag: function(OutSourceData, InKeyToFind, InTagValue, bInUpdateDef = false){
let tempIndex, bUpdated = false;
if(bInUpdateDef){
// check if updated before, only update if it wasn't before, no need to do it twice.
if(!OutSourceData[`language.def`][0].bUpdated){
this.FindAndUpdateSourceEntry(OutSourceData, `language.def`, InKeyToFind, InTagValue);
}
}else{
// first loop is through those that have been updated before.
for(let sourceIndex in OutSourceData){
if(OutSourceData[sourceIndex][0].bUpdated){
bUpdated = this.FindAndUpdateSourceEntry(OutSourceData, `language.def`, InKeyToFind, InTagValue) || bUpdated;
}
}
// second loop through those that have not been updated before, only when nothing was found before.
if(!bUpdated){
for(let sourceIndex in OutSourceData){
if(!OutSourceData[sourceIndex][0].bUpdated){
bUpdated = this.FindAndUpdateSourceEntry(OutSourceData, `language.def`, InKeyToFind, InTagValue) || bUpdated;
}
}
}
}
if(!bUpdated) Log(`Entry not found!`);
},
FetchColorFromGroup: function(InLibraryGroupEntry){
// Uses either Library ColorCode (if set) or Group Color Code (as default).
return InLibraryGroupEntry.ColorCode || this.GroupData[InLibraryGroupEntry.GroupName].ColorCode;
},
/**
* Checks if Group has a match for Tag Keyword.
* @param {string} InGroupName name of the group to check in GroupData
* @param {string} InKeywordKey key for Keywords (like Type, Classification, Group)
* @param {string} InKeywordCheck The Keyword to look for (like MI or Regular Item)
*/
DoesGroupMatchKeywords: function(InGroupName, InKeywordKey, InKeywordCheck){
// #DECLARATION
let bOutMatch = false;
// #VALIDATION
if(!this.GroupData[InGroupName]) return (InKeywordKey === `Type`) ? false : true;
if(!this.GroupData[InGroupName].Keywords[InKeywordKey]) return (InKeywordKey === `Type`) ? false : true;
// #LOGIC
bOutMatch = this.GroupData[InGroupName].Keywords[InKeywordKey].includes(InKeywordCheck[InKeywordKey]);
return bOutMatch;
},
FetchColorCodeForTag: function(InGroupKeywords){
// ---
let bFoundColor = false, colorCode = false;
for(let i = 0; i < this.LibraryData.Data.length; i++){
//bGroupMismatch = false;
bFoundColor = this.DoesGroupMatchKeywords(this.LibraryData.Data[i].GroupName, `Type`, InGroupKeywords);
if(bFoundColor) bFoundColor = this.DoesGroupMatchKeywords(this.LibraryData.Data[i].GroupName, `Classification`, InGroupKeywords);
if(bFoundColor) bFoundColor = this.DoesGroupMatchKeywords(this.LibraryData.Data[i].GroupName, `Group`, InGroupKeywords);
//if(!bGroupMismatch) bFoundColor = true;
if(!colorCode && bFoundColor){
colorCode = this.FetchColorFromGroup(this.LibraryData.Data[i]);
i = this.LibraryData.Data.length;
}
}
return colorCode;
},
IsSymbolDisabledByLibrary: function(InKeywords, InKeywordGroupName){
let bIsDisabled = false;
if(this.LibraryData.DisabledSymbols){
// #WiP
if(this.LibraryData.DisabledSymbols.includes(`${InKeywordGroupName}.${InKeywords[InKeywordGroupName]}`)){
bIsDisabled = true;
}
}
return bIsDisabled;
},
ApplyColorInSourceData: function(OutSourceData, InFileName, InIndex, InColorCode, InKeywords){
	// Rewrites one tag value in-place: builds a prefix of keyword symbols (or
	// a special-tag symbol), injects/normalizes the {^X} color marker plus an
	// optional suffix, then flags the file as updated if the value changed.
	// Returns false (early) when there is nothing to apply.
	const tagName = OutSourceData[InFileName][InIndex].TagKey
	, specialGroupName = this.SpecialTags.Tags[tagName];
	let CustomPrefix = ``
	, CustomSuffix = ``;
	if (specialGroupName) {
		// Special tags bring their own symbol and (optionally) color code.
		const groupObject = this.SpecialTags.Groups[specialGroupName];
		CustomPrefix = groupObject.Symbol;
		//InColorCode = groupObject.ColorCode;
		// only change param color code if it is not set to 'Clear'
		InColorCode = (!groupObject.ColorCode || groupObject.ColorCode === `Clear`) ? InColorCode : groupObject.ColorCode;
	}
	if (CustomPrefix === ``) {
		// No special symbol: compose the prefix from all enabled keyword symbols.
		if (!this.IsSymbolDisabledByLibrary(InKeywords, `Type`)) {
			CustomPrefix += Super.MakeSymbol(`Type`, InKeywords);
		}
		if (!this.IsSymbolDisabledByLibrary(InKeywords, `Classification`)) {
			CustomPrefix += Super.MakeSymbol(`Classification`, InKeywords);
		}
		if (!this.IsSymbolDisabledByLibrary(InKeywords, `Group`)) {
			CustomPrefix += Super.MakeSymbol(`Group`, InKeywords);
		}
		//
	}
	if (CustomSuffix === ``) {
		// Mastery skills get their mastery's display name appended.
		if (InKeywords.Type === `Skill` && InKeywords.Classification === `Mastery`) {
			CustomSuffix = ` (${Super.GetMasteryNames()[InKeywords.Group]})`;
		}
	}
	InColorCode = InColorCode || `Clear`;
	//Log(InColorCode);
	// Nothing to do when there is neither a color nor any prefix/suffix.
	if( (InColorCode === `Clear` || !InColorCode) && CustomPrefix === `` && CustomSuffix === `` ) return false;
	//Log(InColorCode);
	//Log(OutSourceData[InFileName][InIndex].TagValue);
	let newValue = OutSourceData[InFileName][InIndex].TagValue
	//, TypeSymbol = ( this.IsSymbolDisabledByLibrary(InKeywords, `Type`) ) ? `` : Super.MakeSymbol(`Type`, InKeywords)
	//, ClassificationSymbol = ( this.IsSymbolDisabledByLibrary(InKeywords, `Classification`) ) ? `` : Super.MakeSymbol(`Classification`, InKeywords)
	//, GroupSymbol = ( this.IsSymbolDisabledByLibrary(InKeywords, `Group`) ) ? `` : Super.MakeSymbol(`Group`, InKeywords)
	, colorCode = `${CustomPrefix}${(InColorCode !== `Clear`) ? `{^${InColorCode.toUpperCase()}}` : ``}`;
	// Insertion strategy, in priority order: replace a {^E} reset marker,
	// replace any existing {^X} markers, insert after a leading [tag], or
	// prepend when the value starts with regular letters.
	if(newValue.includes(`{^E}`)){
		newValue = `${newValue.replace(`{^E}`, `${colorCode}`)}{^E}`;
	}else if(newValue.match(/{\^[A-Za-z]}/g)){
		newValue = newValue.replace(/{\^[A-Za-z]}/g, colorCode);
	}else if(newValue.startsWith(`[`) || newValue.startsWith(`\$[`)){
		newValue = newValue.replace(/(\[[a-zA-Z]+])/g, `$1${colorCode}`);
		//Log(newValue);
	}else if(newValue.match(RegexGlobalLetters)){
		newValue = `${colorCode}${newValue}`;
		//Log(newValue);
	}
	// Conversion tags need a trailing color reset.
	if(OutSourceData[InFileName][InIndex].TagKey.includes(`Conversion`)){
		newValue += `{^E}`;
		//Log(newValue);
	}
	if (CustomSuffix !== ``) {
		newValue += `${CustomSuffix}`;
	}
	if(OutSourceData[InFileName][InIndex].TagValue !== newValue){
		// Only mark the file dirty on an actual change.
		OutSourceData[InFileName][0].bUpdated = true;
		OutSourceData[InFileName][InIndex].TagValue = newValue;
		//Log(OutSourceData[InFileName][InIndex].TagValue);
	}else{
		Log(OutSourceData[InFileName][InIndex].TagValue);
	}
},
FindTagAndApplyColorInSourceData: function(OutSourceData, InTagKey, InColorCode, InKeywords){
// ---
let bFoundEntry = false
, foundIndex = -1
, fileName;
//Log(`${InTagKey} --- ${InColorCode}`);
// loop files with previous changes, ignore others.
for(let fileKey in OutSourceData){
//Log(fileKey);
if(OutSourceData[fileKey][0].bUpdated && foundIndex === -1){
foundIndex = OutSourceData[fileKey].findIndex( x => x.TagKey === InTagKey );
if(foundIndex !== -1) this.ApplyColorInSourceData(OutSourceData, fileKey, foundIndex, InColorCode, InKeywords);
// #BugFix - Russian Localization
if(fileKey === `tags_gmreplacer.txt` && foundIndex !== -1){
//Log(`found match`);
foundIndex = -1;
}
//foundIndex = -1;
//if(foundIndex !== -1) fileName = fileKey;
}
}
// loopt files without previous changes if no entry found yet.
if(foundIndex === -1){
for(let fileKey in OutSourceData){
//Log(OutSourceData[fileKey][0].bUpdated);
if(!OutSourceData[fileKey][0].bUpdated && foundIndex === -1){
foundIndex = OutSourceData[fileKey].findIndex( x => x.TagKey === InTagKey );
if(foundIndex !== -1) this.ApplyColorInSourceData(OutSourceData, fileKey, foundIndex, InColorCode, InKeywords);
// #BugFix - Russian Localization
if(fileKey === `tags_gmreplacer.txt` && foundIndex !== -1){
//Log(`found match`);
foundIndex = -1;
}
//foundIndex = -1;
//if(foundIndex !== -1) fileName = fileKey;
}
}
}
},
UpdateSourceData: function(OutSourceData){
let outNewSourceData = OutSourceData
, tagInfoData = Super.GetClassData(`TagInfoData`);
//Log(tagInfoData[`tagClass01SkillName01A`]);
for(let tagKey in tagInfoData){
// Applies color to tag inside sourceData and gets the colorCode for tag from group
this.FindTagAndApplyColorInSourceData(OutSourceData, tagKey, this.FetchColorCodeForTag(tagInfoData[tagKey]), tagInfoData[tagKey]);
}
// ---
return outNewSourceData;
},
OnClick_WriteColors__DEPRECIATED: function(){
	// DEPRECATED save handler: applies all group colors to the tag source
	// files in memory, then writes the changed files out — either into the
	// localization folder (optionally repacking the locale .zip) or into
	// settings/text_en (optionally mirrored to UserData and/or zipped).
	let SourceData = Super.GetSourceData()
	, savePath;
	this.SpecialTags = Super.GetClassData(`ImportantTags`);
	this.SpecialTags.Groups = this.SpecialTags.Groups || {};
	this.SpecialTags.Tags = this.SpecialTags.Tags || {};
	// Apply colors in-memory; only files flagged bUpdated get written below.
	this.UpdateSourceData(SourceData);
	//Log(SourceData);
	//return false;
	if(Super.IsUsingLocale()){
		let zip = new JSZip();
		savePath = `${Super.GetGrimDawnPath()}/localization`;
		//SourceData[`language.def`][SourceData[`language.def`].find( )] // [C]
		// Tag the locale's language entry so the colored variant is identifiable.
		this.UpdateSourceTag(SourceData, `language`, `[C] {VALUE} ${this.contentType}`, true);
		if(appConfig.get(`Filter.bZipChanges`)){
			// Save Filter as .zip using Locale in /localization/*.zip
			let zipSuffix = `_${this.contentType}_C_`;
			// --- appConfig.get(`Filter.LocaleFileName`)
			fs.readFile(`${savePath}/${appConfig.get(`Filter.LocaleFileName`)}`, function(err, data) {
				if (err) throw err;
				// Load the existing locale archive and overwrite only changed files.
				zip.loadAsync(data).then(function (zip) {
					for(let fileKey in SourceData){
						if(SourceData[fileKey][0].bUpdated){
							zip.file(`${fileKey}`, Super.StringifyTagData(SourceData[fileKey]));
						}
					}
					zip
					.generateNodeStream({type:'nodebuffer', streamFiles:true, compression: "DEFLATE"})
					.pipe(fs.createWriteStream(`${savePath}/${appConfig.get(`Filter.LocaleFileName`).replace(`.zip`, `${zipSuffix}.zip`)}`))
					.on('finish', function () {
						// JSZip generates a readable stream with a "end" event,
						// but is piped here in a writable stream which emits a "finish" event.
						wzNotify.save(`${appConfig.get(`Filter.LocaleFileName`).replace(`.zip`, `${zipSuffix}.zip`)}`);
						//console.log("out.zip written.");
					});
				});
			});
		}else{
			// Save Filter Files using Locale in /localization/
			for(let fileKey in SourceData){
				if(SourceData[fileKey][0].bUpdated){
					wzIO.file_put_contents(`${savePath}/${fileKey}`, Super.StringifyTagData(SourceData[fileKey]), savePath);
				}
			}
		}
	}else{
		if(appConfig.get(`Filter.bEnableAutoDelete`)){
			Super.OnDeleteOldFiles();
		}
		// Save Filter Files in /settings/
		savePath = `${Super.GetGrimDawnPath()}/settings/text_en`;
		let zip = new JSZip()
		, zipName = `${this.contentType}-${(this.LibraryData.Version !== ``) ? this.LibraryData.Version : Super.GrimDawnVersion}.zip`;
		for(let fileKey in SourceData){
			if(SourceData[fileKey][0].bUpdated){
				wzIO.file_put_contents(`${savePath}/${fileKey}`, Super.StringifyTagData(SourceData[fileKey]), savePath);
				// Mirror changed files into the configured UserData settings folder.
				if(appConfig.get(`GrimDawn.Paths.UserData`) && appConfig.get(`GrimDawn.Paths.UserData`) !== ``){
					wzIO.file_put_contents(`${appConfig.get(`GrimDawn.Paths.UserData`).replace(/\\/g, `/`).replace(`/Settings`, ``)}/Settings/text_en/${fileKey}`, Super.StringifyTagData(SourceData[fileKey]), savePath);
				}
				if(appConfig.get(`Filter.bMakeZipForTextEn`)){
					zip.file(`Grim Dawn/settings/text_en/${fileKey}`, Super.StringifyTagData(SourceData[fileKey]));
				}
			}
		}
		// ---
		if(appConfig.get(`Filter.bMakeZipForTextEn`)){
			zip
			.generateNodeStream({type:'nodebuffer', streamFiles:true, compression: "DEFLATE"})
			.pipe(fs.createWriteStream(`${Super.GetGrimDawnPath()}/settings/${zipName}`))
			.on('finish', function () {
				// JSZip generates a readable stream with a "end" event,
				// but is piped here in a writable stream which emits a "finish" event.
				wzNotify.save(`settings/${zipName}`);
				//console.log("out.zip written.");
			});
		}
	}
},
OnClick_CreateLibraryEntry: function(){
// change data.
this.LibraryData.DisplayName = `${this.LibraryData.DisplayName} (NEW)`;
this.LibraryData.PackageName = this.CurrentPackageNameInput;
this.LibraryData.Version = `0.0`;
// DELETE
delete this.LibraryData.InSearchKey;
delete this.LibraryData.bReadOnly;
// CALL UPDATE
Super.UpdateLibrary(this.CurrentPackageNameInput, this.LibraryData);
// load new entry after reload.
this.contentType = this.CurrentPackageNameInput;
},
OnClick_UpdateLibraryEntry: function(){
this.LibraryData.PackageName = this.CurrentPackageNameInput;
Super.UpdateLibrary(this.contentType, this.LibraryData);
// load new entry after reload.
this.contentType = this.CurrentPackageNameInput;
},
OnClick_DeleteLibraryEntry: function(){
Super.UpdateLibrary(this.contentType);
// load new entry after reload.
Super.ReadData(this.LibraryData = {}, `LibraryData`, 0);
this.CurrentPackageNameInput = this.LibraryData.PackageName;
this.contentType = this.CurrentPackageNameInput;
},
sidebarBtns_: function(){
let outButtons = Super.sidebarBtns_();
if(!Super.IsPathCorrect()) return outButtons;
if(appConfig.get(`GrimDawn.Paths.Game`)){
/*
outButtons.push({
"ONCLICK": "_cms.OnClick_WriteColors()",
"TEXT": "Save Colors"
});
*/
if(!this.CheckPackageNameDupe() && this.contentType !== this.CurrentPackageNameInput){
outButtons.push({
"ONCLICK": "_cms.OnClick_CreateLibraryEntry()",
"TEXT": "Create Entry"
});
if(!this.LibraryData.bReadOnly){
outButtons.push({
"ONCLICK": "_cms.OnClick_UpdateLibraryEntry()",
"TEXT": "Update Entry"
});
}
}else{
if(!this.LibraryData.bReadOnly){
outButtons.push({
"ONCLICK": "_cms.OnClick_DeleteLibraryEntry()",
"TEXT": "Delete Entry"
});
}
}
}
return outButtons;
},
sidebarList_: function(){
if(!Super.IsPathCorrect()) return {};
let OutList = {}
, LibraryData = Super.GetLibraryData();
for(let libraryIndex in LibraryData){
OutList[`${LibraryData[libraryIndex].PackageName}`] = {
text: `${LibraryData[libraryIndex].DisplayName}`
}
}
//Log(LibraryData);
/*
for(let kContenType in this.FilterConfig.store){
mList[kContenType] = [];
}
*/
return OutList;
}
};
|
package main
import (
"errors"
"net/http"
"time"
"greenlight.nickherrig.com/internal/data"
"greenlight.nickherrig.com/internal/validator"
)
// registerUserHandler creates an inactive user from the JSON request body,
// stores an activation token and emails it to the user in the background.
// Responds 202 Accepted with the created user on success.
func (app *application) registerUserHandler(w http.ResponseWriter, r *http.Request) {
	var input struct {
		Name     string `json:"name"`
		Email    string `json:"email"`
		Password string `json:"password"`
	}

	err := app.readJSON(w, r, &input)
	if err != nil {
		app.badRequestResponse(w, r, err)
		return
	}

	user := &data.User{
		Name:      input.Name,
		Email:     input.Email,
		Activated: false,
	}

	err = user.Password.Set(input.Password)
	if err != nil {
		app.serverErrorResponse(w, r, err)
		return
	}

	v := validator.New()

	if data.ValidateUser(v, user); !v.Valid() {
		app.failedValidationResponse(w, r, v.Errors)
		return
	}

	err = app.storage.Users.Insert(user)
	if err != nil {
		switch {
		case errors.Is(err, data.ErrDuplicateEmail):
			// Fix: message previously read "alread exits".
			v.AddError("email", "a user with this email address already exists")
			app.failedValidationResponse(w, r, v.Errors)
		default:
			app.serverErrorResponse(w, r, err)
		}
		return
	}

	token, err := app.storage.Tokens.New(user.ID, 3*24*time.Hour, data.ScopeActivation)
	if err != nil {
		app.serverErrorResponse(w, r, err)
		// Fix: missing return — without it the handler continued, dereferenced
		// a nil token in the goroutine and wrote a second response below.
		return
	}

	app.background(func() {
		data := map[string]interface{}{
			"activationToken": token.Plaintext,
			"userID":          user.ID,
		}
		// Fix: use a goroutine-local err; assigning the handler's shared err
		// here raced with the writeJSON assignment below.
		if err := app.mailer.Send(user.Email, "welcome.tmpl", data); err != nil {
			app.logger.PrintError(err, nil)
		}
	})

	err = app.writeJSON(w, http.StatusAccepted, envelope{"user": user}, nil)
	if err != nil {
		app.serverErrorResponse(w, r, err)
	}
}
// activateUserHandler marks the user owning the supplied activation token as
// activated and invalidates all of that user's activation tokens. Responds
// 200 OK with the updated user on success.
func (app *application) activateUserHandler(w http.ResponseWriter, r *http.Request) {
	var input struct {
		TokenPlaintext string `json:"token"`
	}

	if err := app.readJSON(w, r, &input); err != nil {
		app.badRequestResponse(w, r, err)
		return
	}

	v := validator.New()
	if data.ValidateTokenPlaintext(v, input.TokenPlaintext); !v.Valid() {
		app.failedValidationResponse(w, r, v.Errors)
		return
	}

	// Resolve the token back to its owner; an unknown or expired token is a
	// validation failure rather than a server error.
	user, err := app.storage.Users.GetForToken(data.ScopeActivation, input.TokenPlaintext)
	if err != nil {
		if errors.Is(err, data.ErrRecordNotFound) {
			v.AddError("token", "invalid or expired activation token")
			app.failedValidationResponse(w, r, v.Errors)
		} else {
			app.serverErrorResponse(w, r, err)
		}
		return
	}

	user.Activated = true

	// A conflicting concurrent update surfaces as ErrEditConflict.
	if err := app.storage.Users.Update(user); err != nil {
		if errors.Is(err, data.ErrEditConflict) {
			app.editConflictResponse(w, r)
		} else {
			app.serverErrorResponse(w, r, err)
		}
		return
	}

	if err := app.storage.Tokens.DeleteAllForUser(data.ScopeActivation, user.ID); err != nil {
		app.serverErrorResponse(w, r, err)
		return
	}

	if err := app.writeJSON(w, http.StatusOK, envelope{"user": user}, nil); err != nil {
		app.serverErrorResponse(w, r, err)
	}
}
|
#!/bin/sh
set -e

# Builds the castai-cluster-controller binary and docker image, then installs
# the local helm chart into the castai-agent namespace of the current kube
# context. Required env vars: API_KEY, API_URL, CLUSTER_ID. Set SKIP_BUILD to
# reuse the already-pushed image.

# Go to git repo root.
cd "$(git rev-parse --show-toplevel)"

if [ -z "$API_KEY" ]; then
  echo "API_KEY environment variable is not defined"
  exit 1
fi

if [ -z "$API_URL" ]; then
  echo "API_URL environment variable is not defined"
  exit 1
fi

if [ -z "$CLUSTER_ID" ]; then
  echo "CLUSTER_ID environment variable is not defined"
  exit 1
fi

# Build go binary and push docker image.
IMAGE_TAG=v0.0.1
GOOS=linux GOARCH=amd64 go build -ldflags "-X main.Version=${IMAGE_TAG}" -o bin/castai-cluster-controller .
DOCKER_IMAGE_REPO="europe-west3-docker.pkg.dev/ci-master-mo3d/tilt/$USER/castai-cluster-controller"

if [ -z "$SKIP_BUILD" ]; then
  docker build -t "$DOCKER_IMAGE_REPO:$IMAGE_TAG" .
  docker push "$DOCKER_IMAGE_REPO:$IMAGE_TAG"
fi

# Install local chart and binary.
LOCAL_CHART_DIR=../gh-helm-charts/charts/castai-cluster-controller
# Quote the chart path so checkouts under paths with spaces still work.
helm upgrade -i cluster-controller "$LOCAL_CHART_DIR" \
  -f ./hack/remote/values.yaml \
  --set image.repository="${DOCKER_IMAGE_REPO}" \
  --set image.tag="${IMAGE_TAG}" \
  --set aks.enabled=false \
  --set serviceAccount.create="true" \
  --set castai.apiKey="${API_KEY}" \
  --set castai.apiURL="${API_URL}" \
  --set castai.clusterID="${CLUSTER_ID}" \
  --history-max=3 \
  -n castai-agent

kubectl rollout restart deployment castai-cluster-controller -n castai-agent
|
<!-- Photo gallery demo: AngularJS app with client-side search filtering.
     Fix: the module was never bootstrapped (no ng-app) and MainController was
     never bound (no ng-controller), so $scope.photos stayed empty. -->
<div class="app" ng-app="app" ng-controller="MainController" ng-cloak>
  <h1>Photo Gallery</h1>
  <input type="search" ng-model="query">
  <div class="gallery">
    <img ng-repeat="photo in photos | filter: query" ng-src="{{photo.url}}">
  </div>
</div>
<script>
const app = angular.module('app', []);
// Inline array annotation keeps dependency injection working after minification.
app.controller('MainController', ['$scope', '$http', ($scope, $http) => {
  $scope.photos = [];
  $http
    .get('http://exampleapi.com/photos')
    .then(response => {$scope.photos = response.data});
}]);
</script>
#!/bin/bash
# Ignores any electron-or-metadata-specific content and builds files for AMO
# release. YOU DO NOT NEED TO BUILD THE EXTENSION WHILE DEVELOPING!
# Fix: -rf so a missing ./build (fresh checkout) does not print an error.
rm -rf ./build
mkdir ./build
# process <path>: transpile .js sources (except bundled libs under
# ./source/lib) through build.js; copy everything else verbatim into ./build.
# Fix: quote "$1" so paths containing spaces do not word-split.
process () {
	mkdir -p "$(dirname "./build/$1")"
	if [[ ! $1 == ./source/lib/* ]] && [ "${1: -3}" == ".js" ]; then
		echo "Building $1"
		cat "$1" | node build.js > "./build/$1"
	else
		echo "Copying $1"
		cp "$1" "./build/$1"
	fi
}
files=$(
	find -type f |\
	grep -Ev 'build|buildscripttest|node_modules|\.zip|\.xpi|\.git|electron|package(-lock)?\.json|desktop-manifest\.js|pack.sh|tpseimporter|makeTPSE'
)
for file in $files; do
	process "$file"
done
wait
# Replace vue with the runtime build now that everything is built
rm ./build/source/lib/vue.js
mv ./build/source/lib/vue.runtime.js ./build/source/lib/vue.js
cd build
zip -r ../tetrioplus.xpi -9 -u ./*
|
/**
* The MIT License (MIT)
*
* Copyright (c) 2014 <NAME>, University of Massachusetts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef SA_REWARDS_ARRAY_H
#define SA_REWARDS_ARRAY_H
#include "sa_rewards.h"
#include "../states/state.h"
#include "../actions/action.h"
/**
 * A class for state-action rewards in an MDP-like object, internally storing
 * the rewards as a dense states x actions array of floats.
 */
class SARewardsArray : virtual public SARewards {
public:
	/**
	 * The default constructor for the SARewardsArray class. This requires the
	 * number of states and actions, which fix the dimensions of the internal
	 * rewards array.
	 * @param numStates The number of states.
	 * @param numActions The number of actions.
	 */
	SARewardsArray(unsigned int numStates, unsigned int numActions);

	/**
	 * The default deconstructor for the SARewardsArray class.
	 */
	virtual ~SARewardsArray();

	/**
	 * Set the reward for a particular state-action pair.
	 * @param state The current state of the system.
	 * @param action The action taken in the current state.
	 * @param reward The reward for the provided state-action pair.
	 */
	virtual void set(State *state, Action *action, double reward);

	/**
	 * Set the reward for a particular state-action-state triple.
	 * NOTE(review): rewards are stored per state-action pair here, so the
	 * extra arguments are presumably collapsed onto R(s, a) — confirm in the
	 * implementation.
	 * @param state The current state of the system.
	 * @param action The action taken in the current state.
	 * @param nextState The next state with which we assign the reward.
	 * @param reward The reward for the provided state-action-state triple.
	 */
	virtual void set(State *state, Action *action, State *nextState, double reward);

	/**
	 * Set the reward for a particular state-action-state-observation quadruple.
	 * NOTE(review): see the triple overload above — the extra arguments are
	 * presumably collapsed onto R(s, a); confirm in the implementation.
	 * @param state The current state of the system.
	 * @param action The action taken in the current state.
	 * @param nextState The next state with which we assign the reward.
	 * @param observation The observation made at the next state.
	 * @param reward The reward for the provided state-action-state-observation quadruple.
	 */
	virtual void set(State *state, Action *action, State *nextState,
			Observation *observation, double reward);

	/**
	 * Get the reward for the state-action pair provided.
	 * @param state The current state of the system.
	 * @param action The action taken at the current state.
	 * @return The reward from taking the given action in the given state.
	 */
	virtual double get(State *state, Action *action);

	/**
	 * Get the reward for the state-action-state triple provided.
	 * @param state The current state of the system.
	 * @param action The action taken at the current state.
	 * @param nextState The next state with which we assign the reward.
	 * @return The reward from taking the given action in the given state.
	 */
	virtual double get(State *state, Action *action, State *nextState);

	/**
	 * Get the reward for the state-action-state-observation quadruple provided.
	 * @param state The current state of the system.
	 * @param action The action taken in the current state.
	 * @param nextState The next state with which we assign the reward.
	 * @param observation The observation made at the next state.
	 * @return The reward from taking the given action in the given state.
	 */
	virtual double get(State *state, Action *action, State *nextState,
			Observation *observation);

	/**
	 * Set the entire 2-dimensional array with the one provided. This only performs a copy.
	 * @param R A pointer to the new 2-d array of raw rewards data. This must be
	 * 			an array of size numStates x numActions.
	 */
	virtual void set_rewards(const float *R);

	/**
	 * Get the memory location of the 2-dimensional array.
	 * @return A pointer to the raw rewards data.
	 */
	virtual float *get_rewards();

	/**
	 * Get the number of states used for the rewards array.
	 * @return The number of states.
	 */
	virtual unsigned int get_num_states() const;

	/**
	 * Get the number of actions used for the rewards array.
	 * @return The number of actions.
	 */
	virtual unsigned int get_num_actions() const;

	/**
	 * Get the minimal R-value.
	 * @return The minimal R-value.
	 */
	virtual double get_min() const;

	/**
	 * Get the maximal R-value.
	 * @return The maximal R-value.
	 */
	virtual double get_max() const;

	/**
	 * Reset the rewards, clearing the internal mapping.
	 */
	virtual void reset();

private:
	/**
	 * The 2-dimensional array mapping state-action to floats. Floats were
	 * used to improve speed.
	 */
	float *rewards;

	/**
	 * The number of states, which is the first dimension of the rewards array.
	 */
	unsigned int states;

	/**
	 * The number of actions, which is the second dimension of the rewards array.
	 */
	unsigned int actions;

	/**
	 * The minimum R-value.
	 */
	double Rmin;

	/**
	 * The maximum R-value.
	 */
	double Rmax;
};
#endif // SA_REWARDS_ARRAY_H
|
#include <string>
// Auto-hide modes the shelf supports. The value round-trips through the pref
// strings handled by AutoHideBehaviorFromPref()/AutoHideBehaviorToPref().
enum ShelfAutoHideBehavior {
  SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS,
  SHELF_AUTO_HIDE_BEHAVIOR_NEVER
};

// Pref string stored for the "always auto-hide" behavior.
const std::string kShelfAutoHideBehaviorAlways = "Always";
// Converts a persisted pref string to a ShelfAutoHideBehavior.
ShelfAutoHideBehavior AutoHideBehaviorFromPref(const std::string& value) {
  // Note: To maintain sync compatibility with old images of chrome/chromeos
  // the set of values that may be encountered includes the now-extinct
  // "Default" as well as "Never" and "Always". "Default" should now
  // be treated as "Never" (http://crbug.com/146773), so anything that is
  // not exactly "Always" maps to the NEVER behavior.
  return value == kShelfAutoHideBehaviorAlways
             ? SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS
             : SHELF_AUTO_HIDE_BEHAVIOR_NEVER;
}
// Converts a ShelfAutoHideBehavior back to its persisted pref string.
// "Unknown" guards against out-of-range values cast into the enum.
const char* AutoHideBehaviorToPref(ShelfAutoHideBehavior behavior) {
  if (behavior == SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS)
    return kShelfAutoHideBehaviorAlways.c_str();
  if (behavior == SHELF_AUTO_HIDE_BEHAVIOR_NEVER)
    return "Never";
  return "Unknown";
}
package weixin.tenant.entity;
import java.math.BigDecimal;
import java.util.Date;
import java.lang.String;
import java.lang.Double;
import java.lang.Integer;
import java.math.BigDecimal;
import javax.xml.soap.Text;
import java.sql.Blob;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.SequenceGenerator;
import org.jeecgframework.poi.excel.annotation.Excel;
/**
* @author onlineGenerator
* @version V1.0
* @Title: Entity
* @Description: 商户管理表
* @date 2015-04-25 21:14:50
*/
@Entity
@Table(name = "weixin_acct", schema = "")
@SuppressWarnings("serial")
public class WeixinAcctEntity implements java.io.Serializable {
/**
 * Primary key.
 */
private java.lang.String id;
/**
 * Creation date.
 */
private java.util.Date createDate;
/**
 * Modification date.
 * NOTE(review): the field is named endDate but the original comment called it
 * the modification date — confirm which meaning is intended.
 */
private java.util.Date endDate;
/**
 * Merchant name.
 */
@Excel(exportName = "商户名称")
private java.lang.String acctName;
/**
 * Company name.
 */
@Excel(exportName = "企业名称")
private java.lang.String businessName;
/**
 * Phone number.
 */
@Excel(exportName = "电话")
private java.lang.String mobilePhone;
/**
 * Email address.
 */
@Excel(exportName = "邮箱")
private java.lang.String email;
/**
 * QQ number.
 */
@Excel(exportName = "QQ")
private java.lang.String qqNumber;
/**
 * Number of mass-message (broadcast) sends.
 */
@Excel(exportName = "群发次数")
private java.lang.Integer smsnum;
/**
 * Number of rich-media (image/text) message sends.
 */
@Excel(exportName = "图文次数")
private java.lang.Integer newsnum;
/**
 * Number of requests.
 */
@Excel(exportName = "请求次数")
private java.lang.Integer requestnum;
/**
 * Number of staff accounts.
 */
@Excel(exportName = "员工账号个数")
private java.lang.Integer usernum;
/**
 * Number of official (public) accounts.
 */
@Excel(exportName = "公众号个数")
private java.lang.Integer accountnum;
/**
 * Domain URL.
 */
@Excel(exportName = "域名地址")
private java.lang.String domainurl;
/**
 * Merchant code.
 */
@Excel(exportName = "商戶編碼")
private java.lang.String acctCode;
/**
 * Parent merchant this one belongs to.
 * NOTE(review): "belogAcct" looks like a typo of "belongAcct"; kept as-is
 * because the name is part of the persistence/JavaBean contract.
 */
@Excel(exportName = "所屬商戶")
private java.lang.String belogAcct;
/**
 * Merchant type.
 */
@Excel(exportName = "商戶類型")
private java.lang.String businessType;
/**
 * Whether this merchant is an advertising agency.
 */
private String adAgency;
/**
 * Whether this merchant leases out advertising slots.
 */
private String sellAdpos;
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 商业类型
*/
@Column(name = "BUSINESS_TYPE", nullable = true, length = 30)
public String getBusinessType() {
return businessType;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 商业类型
*/
public void setBusinessType(String businessType) {
this.businessType = businessType;
}
/**
* 所屬省份
*/
@Excel(exportName = "所屬省份")
private java.lang.String province;
/**
* 所屬地市
*/
@Excel(exportName = "所屬地市")
private java.lang.String city;
/**
* 可開商戶數
*/
@Excel(exportName = "可開商戶數")
private java.lang.Integer totalAccount;
/**
* 已開商戶數
*/
@Excel(exportName = "已開商戶數")
private java.lang.Integer opendedAccount;
/**
* 商戶狀態
*/
@Excel(exportName = "商戶狀態")
private java.lang.String status;
/**
* 流量类型
*/
@Excel(exportName = "流量类型")
private java.lang.String flowtype;
/**
* 商户级别
*/
@Excel(exportName = "商户级别")
private java.lang.String acctLevel;
/**
* 商户名称
*/
@Excel(exportName = "真正的商户名称")
private java.lang.String acctForName;
private java.lang.String callPhone;
@Column(name = "CALLPHONE", nullable = true, length = 30)
public String getCallPhone() {
return callPhone;
}
public void setCallPhone(String callPhone) {
this.callPhone = callPhone;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 真正的商户名称
*/
@Column(name = "ACCTFORNAME", nullable = true, length = 30)
public String getAcctForName() {
return acctForName;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 真正的商户名称
*/
public void setAcctForName(String acctForName) {
this.acctForName = acctForName;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 商户等级
*/
@Column(name = "ACCT_LEVEL", nullable = true, length = 10)
public String getAcctLevel() {
return acctLevel;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 商户等级
*/
public void setAcctLevel(String acctLevel) {
this.acctLevel = acctLevel;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 流量类型
*/
@Column(name = "FLOWTYPE", nullable = true, length = 30)
public String getFlowtype() {
return flowtype;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 流量类型
*/
public void setFlowtype(String flowtype) {
this.flowtype = flowtype;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 商戶編碼
*/
@Column(name = "ACCT_CODE", nullable = true, length = 20)
public String getAcctCode() {
return acctCode;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 商戶編碼
*/
public void setAcctCode(String acctCode) {
this.acctCode = acctCode;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 所屬商戶
*/
@Column(name = "BELOGACCT", nullable = true, length = 20)
public String getBelogAcct() {
return belogAcct;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 所屬商戶
*/
public void setBelogAcct(String belogAcct) {
this.belogAcct = belogAcct;
}
/**
* pid
*/
@Excel(exportName = "pid")
private java.lang.String pid;
/**
* 是否加入
*/
@Excel(exportName = "joinOrNot")
private java.lang.String joinOrNot;
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 是否加入组标识
*/
@Column(name = "JOINORNOT", nullable = true, length = 2)
public String getJoinOrNot() {
return joinOrNot;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 是否加入组标识
*/
public void setJoinOrNot(String joinOrNot) {
this.joinOrNot = joinOrNot;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 父类
*/
@Column(name = "PID", nullable = true, length = 36)
public String getPid() {
return pid;
}
/**
* 方法: 取得java.lang.String
*
* @return 方法: 取得java.lang.String 父类
*/
public void setPid(String pid) {
this.pid = pid;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 主键
*/
@Id
@GeneratedValue(generator = "paymentableGenerator")
@GenericGenerator(name = "paymentableGenerator", strategy = "uuid")
@Column(name = "ID", nullable = false, length = 36)
public java.lang.String getId() {
return this.id;
}
/**
* 方法: 设置java.lang.String
*
* @param: java.lang.String 主键
*/
public void setId(java.lang.String id) {
this.id = id;
}
/**
* 方法: 取得java.util.Date
*
* @return: java.util.Date 创建日期
*/
@Column(name = "CREATE_DATE", nullable = true, length = 20)
public java.util.Date getCreateDate() {
return this.createDate;
}
/**
* 方法: 设置java.util.Date
*
* @param: java.util.Date 创建日期
*/
public void setCreateDate(java.util.Date createDate) {
this.createDate = createDate;
}
/**
* 方法: 取得java.util.Date
*
* @return: java.util.Date 修改日期
*/
@Column(name = "END_DATE", nullable = true, length = 20)
public java.util.Date getEndDate() {
return this.endDate;
}
/**
* 方法: 设置java.util.Date
*
* @param: java.util.Date 修改日期
*/
public void setEndDate(java.util.Date endDate) {
this.endDate = endDate;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 商户名称
*/
@Column(name = "ACCT_NAME", nullable = true, length = 32)
public java.lang.String getAcctName() {
return this.acctName;
}
/**
* 方法: 设置java.lang.String
*
* @param: java.lang.String 商户名称
*/
public void setAcctName(java.lang.String acctName) {
this.acctName = acctName;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 电话
*/
@Column(name = "MOBILE_PHONE", nullable = true, length = 32)
public java.lang.String getMobilePhone() {
return this.mobilePhone;
}
/**
* 方法: 设置java.lang.String
*
* @param: java.lang.String 电话
*/
public void setMobilePhone(java.lang.String mobilePhone) {
this.mobilePhone = mobilePhone;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 邮箱
*/
@Column(name = "EMAIL", nullable = true, length = 32)
public java.lang.String getEmail() {
return this.email;
}
/**
* 方法: 设置java.lang.String
*
* @param: java.lang.String 邮箱
*/
public void setEmail(java.lang.String email) {
this.email = email;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String QQ
*/
@Column(name = "QQ_NUMBER", nullable = true, length = 32)
public java.lang.String getQqNumber() {
return this.qqNumber;
}
/**
* 方法: 设置java.lang.String
*
* @param: java.lang.String QQ
*/
public void setQqNumber(java.lang.String qqNumber) {
this.qqNumber = qqNumber;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 群发次数
*/
@Column(name = "SMSNUM", nullable = true, length = 8)
public java.lang.Integer getSmsnum() {
return this.smsnum;
}
/**
* 方法: 设置java.lang.Integer
*
* @param: java.lang.Integer 群发次数
*/
public void setSmsnum(java.lang.Integer smsnum) {
this.smsnum = smsnum;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 图文次数
*/
@Column(name = "NEWSNUM", nullable = true, length = 8)
public java.lang.Integer getNewsnum() {
return this.newsnum;
}
/**
* 方法: 设置java.lang.Integer
*
* @param: java.lang.Integer 图文次数
*/
public void setNewsnum(java.lang.Integer newsnum) {
this.newsnum = newsnum;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 请求次数
*/
@Column(name = "REQUESTNUM", nullable = true, length = 8)
public java.lang.Integer getRequestnum() {
return this.requestnum;
}
/**
* 方法: 设置java.lang.Integer
*
* @param: java.lang.Integer 请求次数
*/
public void setRequestnum(java.lang.Integer requestnum) {
this.requestnum = requestnum;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 员工账号个数
*/
@Column(name = "USERNUM", nullable = true, length = 8)
public java.lang.Integer getUsernum() {
return this.usernum;
}
/**
* 方法: 设置java.lang.Integer
*
* @param: java.lang.Integer 员工账号个数
*/
public void setUsernum(java.lang.Integer usernum) {
this.usernum = usernum;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 公众号个数
*/
@Column(name = "ACCOUNTNUM", nullable = true, length = 8)
public java.lang.Integer getAccountnum() {
return this.accountnum;
}
/**
* 方法: 设置java.lang.Integer
*
* @param: java.lang.Integer 公众号个数
*/
public void setAccountnum(java.lang.Integer accountnum) {
this.accountnum = accountnum;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 域名地址
*/
@Column(name = "DOMAINURL", nullable = true, length = 32)
public java.lang.String getDomainurl() {
return this.domainurl;
}
/**
* 方法: 设置java.lang.String
*
* @param: java.lang.String 域名地址
*/
public void setDomainurl(java.lang.String domainurl) {
this.domainurl = domainurl;
}
// /**
// * 方法: 取得java.lang.String
// *
// * @return: java.lang.String 商業類型
// */
// @Column(name = "BUSINESS_TYPE", nullable = true, length = 30)
// public String getBusinessType() {
// return this.businessType;
// }
//
// /**
// * 方法: 取得java.lang.String
// *
// * @return: java.lang.String 商業類型
// */
// public void setBusinessType(java.lang.String businessType) {
// this.businessType = businessType;
// }
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 所属省份
*/
@Column(name = "PROVINCE", nullable = true, length = 30)
public String getProvince() {
return province;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 所属省份
*/
public void setProvince(String province) {
this.province = province;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 所属地市
*/
@Column(name = "CITY", nullable = true, length = 30)
public String getCity() {
return city;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 所属地市
*/
public void setCity(String city) {
this.city = city;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 可开商户数
*/
@Column(name = "TOTALACCOUNT", nullable = true, length = 11)
public Integer getTotalAccount() {
return totalAccount;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 可开商户数
*/
public void setTotalAccount(Integer totalAccount) {
this.totalAccount = totalAccount;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 已开商户数
*/
@Column(name = "OPENDEDACCOUNT", nullable = true, length = 11)
public Integer getOpendedAccount() {
return opendedAccount;
}
/**
* 方法: 取得java.lang.Integer
*
* @return: java.lang.Integer 已开商户数
*/
public void setOpendedAccount(Integer opendedAccount) {
this.opendedAccount = opendedAccount;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 商户状态
*/
@Column(name = "STATUS", nullable = true, length = 30)
public String getStatus() {
return status;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 商户状态
*/
public void setStatus(String status) {
this.status = status;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 企业名称
*/
@Column(name = "BUSINESSNAME", nullable = true, length = 20)
public String getBusinessName() {
return businessName;
}
/**
* 方法: 取得java.lang.String
*
* @return: java.lang.String 企业名称
*/
public void setBusinessName(String businessName) {
this.businessName = businessName;
}
@Column(name = "ad_agency", nullable = true, length = 20)
public String getAdAgency() {
return adAgency;
}
public void setAdAgency(String adAgency) {
this.adAgency = adAgency;
}
@Column(name = "sell_adpos", nullable = true, length = 20)
public String getSellAdpos() {
return sellAdpos;
}
public void setSellAdpos(String sellAdpos) {
this.sellAdpos = sellAdpos;
}
}
|
import datetime
def get_day_name(date):
    """Return the full weekday name (e.g. 'Monday') for a 'YYYY-MM-DD' string."""
    parsed = datetime.datetime.strptime(date, '%Y-%m-%d')
    return parsed.strftime('%A')
if __name__ == '__main__':
    # Demo: print the weekday name for a fixed sample date.
    sample_date = '2021-04-19'
    print(get_day_name(sample_date))
/// Generic wrapper around buffer contents of type `T`.
///
/// NOTE(review): this is a trimmed snippet — the "Other fields" /
/// "Clone other fields" placeholders indicate the real struct carries
/// additional state that must be propagated in `slice_custom`.
pub struct Buffer<T> {
    content: T,
    // Other fields
}
impl<T> Buffer<T> {
    /// Builds a new `Buffer` whose content is a custom sub-slice of this
    /// buffer's content, selected by the caller-supplied projection `f`.
    ///
    /// # Safety
    /// Marked `unsafe` by the upstream API; the projection must return a
    /// reference derived from its argument (presumably so the slice stays
    /// within the original storage — TODO confirm against the
    /// `::buffer::SliceCustom` trait docs, which are not visible here).
    #[inline]
    pub unsafe fn slice_custom<'a, F, R: ?Sized>(&'a self, f: F) -> Buffer<<&'a T as ::buffer::SliceCustom<R>>::Slice>
    where &'a T: ::buffer::SliceCustom<R>, R: ::buffer::Content,
    F: for<'r> FnOnce(&'r <&'a T as ::buffer::Storage>::Content) -> &'r R
    {
        Buffer {
            // Delegate the actual slicing to the trait implementation.
            content: ::buffer::SliceCustom::slice_custom(&self.content, f),
            // Clone other fields
        }
    }
}
#!/bin/bash
# Build, archive, zip and install the Fire input method.
# Fixes in review:
#  - `set -e` so a failed xcodebuild no longer removes the currently
#    installed input method and then installs nothing.
#  - the install step now copies the freshly built relative
#    `apps/Fire.app` instead of a hard-coded per-user absolute path
#    (/Users/chenqiang/...), consistent with the rest of the script.
set -e

# Clean previous build products.
rm -rf ./archive-Fire.xcarchive
rm -rf apps

# Archive a Release build of the Fire scheme.
xcodebuild archive -workspace Fire.xcworkspace -scheme Fire -archivePath ./archive-Fire -configuration Release

# Collect the .app bundle and produce a distributable zip.
mkdir apps
cp -a ./archive-Fire.xcarchive/Products/Applications/*.app apps
ditto -c -k --sequesterRsrc --keepParent apps/Fire.app apps/Fire.zip

# Install into the system input-methods directory (may require sudo).
rm -rf /Library/Input\ Methods/Fire.app
cp -r apps/Fire.app /Library/Input\ Methods/
use org\bovigo\vfs\vfsStream;
use PHPUnit\Framework\TestCase;
/**
 * Tests for the Configuration class, using vfsStream to fake the
 * filesystem layout (root/vendor/bin and root/bin).
 */
class ConfigurationTest extends TestCase
{
    /** @var Configuration (PHPUnit mock) system under test */
    protected $configuration;
    /** @var \org\bovigo\vfs\vfsStreamDirectory virtual filesystem root */
    protected $root;
    protected function setUp(): void
    {
        // Virtual filesystem so no real directories are created.
        $this->root = vfsStream::setup();
        mkdir(vfsStream::url('root/vendor/bin'), 0777, true);
        mkdir(vfsStream::url('root/bin'));
        // Constructor is disabled; methods must be stubbed per test.
        $this->configuration = $this
            ->getMockBuilder(Configuration::class)
            ->disableOriginalConstructor()
            ->getMock();
    }
    public function testGetComposerBinDirectory()
    {
        // NOTE(review): this stubs getComposerBinDirectory() on the mock and
        // then asserts the stub's own return value — it exercises the mock,
        // not the real Configuration logic. Consider testing a concrete
        // instance instead.
        $this->configuration
            ->method('getComposerBinDirectory')
            ->willReturn(vfsStream::url('root/vendor/bin'));
        $this->assertEquals(vfsStream::url('root/vendor/bin'), $this->configuration->getComposerBinDirectory());
    }
}
# Square every element of the list, preserving order.
my_list = [1, 2, 3, 4, 5]
result = [value * value for value in my_list]
#!/bin/bash
# This file is meant to be included by the parent cppbuild.sh script
# When run directly (PLATFORM unset), re-enter through the parent script,
# which sets PLATFORM, MAKEJ, download, sedinplace, etc.
if [[ -z "$PLATFORM" ]]; then
    pushd ..
    bash cppbuild.sh "$@" leptonica
    popd
    exit
fi
# Pinned versions of leptonica and its image-format dependencies.
NASM_VERSION=2.14
ZLIB=zlib-1.2.11
GIFLIB=giflib-5.1.4
LIBJPEG=libjpeg-turbo-1.5.3
LIBPNG=libpng-1.5.30 # libpng16 doesn't work on CentOS 6 for some reason
LIBTIFF=tiff-4.0.10
LIBWEBP=libwebp-1.1.0
LEPTONICA_VERSION=1.80.0
# Fetch all source tarballs (download is provided by cppbuild.sh).
download https://download.videolan.org/contrib/nasm/nasm-$NASM_VERSION.tar.gz nasm-$NASM_VERSION.tar.gz
download http://zlib.net/$ZLIB.tar.gz $ZLIB.tar.gz
download http://downloads.sourceforge.net/project/giflib/$GIFLIB.tar.gz $GIFLIB.tar.gz
download http://downloads.sourceforge.net/project/libjpeg-turbo/1.5.3/$LIBJPEG.tar.gz $LIBJPEG.tar.gz
download http://downloads.sourceforge.net/project/libpng/libpng15/1.5.30/$LIBPNG.tar.gz $LIBPNG.tar.gz
download http://download.osgeo.org/libtiff/$LIBTIFF.tar.gz $LIBTIFF.tar.gz
download http://downloads.webmproject.org/releases/webp/$LIBWEBP.tar.gz $LIBWEBP.tar.gz
download https://github.com/DanBloomberg/leptonica/releases/download/$LEPTONICA_VERSION/leptonica-$LEPTONICA_VERSION.tar.gz leptonica-$LEPTONICA_VERSION.tar.gz
# Everything is built and installed under a per-platform prefix.
mkdir -p $PLATFORM
cd $PLATFORM
INSTALL_PATH=`pwd`
echo "Decompressing archives..."
tar --totals -xzf ../nasm-$NASM_VERSION.tar.gz
tar --totals -xzf ../$ZLIB.tar.gz
tar --totals -xzf ../$GIFLIB.tar.gz
tar --totals -xzf ../$LIBJPEG.tar.gz
tar --totals -xzf ../$LIBPNG.tar.gz
tar --totals -xzf ../$LIBTIFF.tar.gz
tar --totals -xzf ../$LIBWEBP.tar.gz
tar --totals -xzf ../leptonica-$LEPTONICA_VERSION.tar.gz
# Build NASM first (needed by libjpeg-turbo) and put it on PATH.
cd nasm-$NASM_VERSION
# fix for build with GCC 8.x
sedinplace 's/void pure_func/void/g' include/nasmlib.h
./configure --prefix=$INSTALL_PATH
make -j $MAKEJ V=0
make install
export PATH=$INSTALL_PATH/bin:$PATH
cd ..
# Per-platform builds: each branch cross-compiles the static dependency
# chain (zlib, giflib, libjpeg, libpng, libwebp, libtiff) and finally
# leptonica against them.
case $PLATFORM in
    android-arm)
        # Android NDK cross toolchain; Bionic lacks S_IREAD/S_IWRITE so map
        # them to the POSIX macros via CC.
        export AR="$ANDROID_PREFIX-ar"
        export RANLIB="$ANDROID_PREFIX-ranlib"
        export CC="$ANDROID_CC -DS_IREAD=S_IRUSR -DS_IWRITE=S_IWUSR -pthread -I$INSTALL_PATH/include/ -L$INSTALL_PATH/lib/ $ANDROID_FLAGS"
        export STRIP="$ANDROID_PREFIX-strip"
        export LDFLAGS="-ldl -lm -lc"
        # zlib's configure has no --host; cross-compiling is driven by CC/--uname.
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static --uname=arm-linux
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        # NEON asm shim fails to build for NDK targets; remove and disable NEON.
        rm contrib/arm-neon/android-ndk.c || true
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux --with-sysroot="$ANDROID_ROOT" --disable-arm-neon
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        patch -Np1 < ../../../libwebp-arm.patch
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux-androideabi --with-sysroot="$ANDROID_ROOT" --disable-neon --enable-libwebpmux
        # Build only the library subdirectory, not the example programs.
        cd src
        make -j $MAKEJ
        make install
        cd ../../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux --with-sysroot="$ANDROID_ROOT" --disable-lzma --disable-zstd --without-x
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        patch -Np1 < ../../../leptonica-android.patch
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH --host=arm-linux-androideabi --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
android-arm64)
export AR="$ANDROID_PREFIX-ar"
export RANLIB="$ANDROID_PREFIX-ranlib"
export CC="$ANDROID_CC -DS_IREAD=S_IRUSR -DS_IWRITE=S_IWUSR -pthread -I$INSTALL_PATH/include/ -L$INSTALL_PATH/lib/ $ANDROID_FLAGS"
export STRIP="$ANDROID_PREFIX-strip"
export LDFLAGS="-ldl -lm -lc"
cd $ZLIB
./configure --prefix=$INSTALL_PATH --static --uname=aarch64-linux
make -j $MAKEJ
make install
cd ../$GIFLIB
./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=aarch64-linux --with-sysroot="$ANDROID_ROOT"
make -j $MAKEJ
make install
cd ../$LIBJPEG
./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=aarch64-linux --with-sysroot="$ANDROID_ROOT"
make -j $MAKEJ
make install
cd ../$LIBPNG
rm contrib/arm-neon/android-ndk.c || true
./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=aarch64-linux --with-sysroot="$ANDROID_ROOT" --disable-arm-neon
make -j $MAKEJ
make install
cd ../$LIBWEBP
patch -Np1 < ../../../libwebp-arm.patch
./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=aarch64-linux-android --with-sysroot="$ANDROID_ROOT" --disable-neon --enable-libwebpmux
cd src
make -j $MAKEJ
make install
cd ../../$LIBTIFF
./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux --with-sysroot="$ANDROID_ROOT" --disable-lzma --disable-zstd --without-x
make -j $MAKEJ
make install
cd ../leptonica-$LEPTONICA_VERSION
patch -Np1 < ../../../leptonica-android.patch
PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH --host=aarch64-linux-android --disable-programs --without-libopenjpeg
make -j $MAKEJ
make install-strip
;;
    android-x86)
        # Android NDK cross toolchain (32-bit x86).
        export AR="$ANDROID_PREFIX-ar"
        export RANLIB="$ANDROID_PREFIX-ranlib"
        export CC="$ANDROID_CC -DS_IREAD=S_IRUSR -DS_IWRITE=S_IWUSR -pthread -I$INSTALL_PATH/include/ -L$INSTALL_PATH/lib/ $ANDROID_FLAGS"
        export STRIP="$ANDROID_PREFIX-strip"
        export LDFLAGS="-ldl -lm -lc"
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static --uname=i686-linux
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        # The NEON shim is ARM-only; harmless to remove on x86 too.
        rm contrib/arm-neon/android-ndk.c || true
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        # No ARM patch / NEON flags needed on x86.
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux-android --with-sysroot="$ANDROID_ROOT" --enable-libwebpmux
        cd src
        make -j $MAKEJ
        make install
        cd ../../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux --with-sysroot="$ANDROID_ROOT" --disable-lzma --disable-zstd --without-x
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        patch -Np1 < ../../../leptonica-android.patch
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH --host=i686-linux-android --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    android-x86_64)
        # Android NDK cross toolchain (64-bit x86).
        export AR="$ANDROID_PREFIX-ar"
        export RANLIB="$ANDROID_PREFIX-ranlib"
        export CC="$ANDROID_CC -DS_IREAD=S_IRUSR -DS_IWRITE=S_IWUSR -pthread -I$INSTALL_PATH/include/ -L$INSTALL_PATH/lib/ $ANDROID_FLAGS"
        export STRIP="$ANDROID_PREFIX-strip"
        export LDFLAGS="-ldl -lm -lc"
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static --uname=x86_64-linux
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        rm contrib/arm-neon/android-ndk.c || true
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux --with-sysroot="$ANDROID_ROOT"
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux-android --with-sysroot="$ANDROID_ROOT" --enable-libwebpmux
        cd src
        make -j $MAKEJ
        make install
        cd ../../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux --with-sysroot="$ANDROID_ROOT" --disable-lzma --disable-zstd --without-x
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        patch -Np1 < ../../../leptonica-android.patch
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH --host=x86_64-linux-android --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    linux-x86)
        # Native-ish 32-bit Linux build via gcc -m32.
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        export CC="gcc -m32 -fPIC"
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=i686-linux --disable-lzma --disable-zstd
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    linux-x86_64)
        # Native 64-bit Linux build via gcc -m64.
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        export CC="gcc -m64 -fPIC"
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=x86_64-linux --disable-lzma --disable-zstd
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    linux-armhf)
        # Cross-compile for 32-bit ARM hard-float (ARMv6, VFP).
        export CFLAGS="-pthread -march=armv6 -marm -mfpu=vfp -mfloat-abi=hard -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        export CC="arm-linux-gnueabihf-gcc -fPIC"
        cd $ZLIB
        CC="arm-linux-gnueabihf-gcc -fPIC" ./configure --prefix=$INSTALL_PATH --static
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        CC="arm-linux-gnueabihf-gcc -fPIC" ./configure --prefix=$INSTALL_PATH --host=arm-linux-gnueabihf --disable-shared
        #./configure --prefix=$INSTALL_PATH --disable-shared --host=arm-linux-gnueabihf
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux-gnueabihf
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        # Remove the NDK-specific NEON shim and disable NEON for ARMv6.
        rm contrib/arm-neon/android-ndk.c || true
        CC="arm-linux-gnueabihf-gcc -fPIC" ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --disable-shared --with-pic --host=arm-linux-gnueabihf --disable-arm-neon
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        patch -Np1 < ../../../libwebp-arm.patch
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=arm-linux-gnueabihf --disable-neon --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --disable-lzma --disable-zstd --host=arm-linux-gnueabihf
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ CC="arm-linux-gnueabihf-gcc -fPIC" ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --host=arm-linux-gnueabihf --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    linux-arm64)
        # Cross-compile for 64-bit ARM (aarch64).
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        export CC="aarch64-linux-gnu-gcc -fPIC"
        cd $ZLIB
        CC="aarch64-linux-gnu-gcc -fPIC" ./configure --prefix=$INSTALL_PATH --static
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        CC="aarch64-linux-gnu-gcc -fPIC" ./configure --prefix=$INSTALL_PATH --host=aarch64-linux-gnu --disable-shared
        #./configure --prefix=$INSTALL_PATH --disable-shared --host=aarch64-linux-gnu
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=aarch64-linux-gnu
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        CC="aarch64-linux-gnu-gcc -fPIC" ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --disable-shared --with-pic --host=aarch64-linux-gnu
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --host=aarch64-linux-gnu --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --disable-lzma --disable-zstd --host=aarch64-linux-gnu
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ CC="aarch64-linux-gnu-gcc -fPIC" ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --host=aarch64-linux-gnu --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    linux-ppc64le)
        # Build for little-endian POWER; native when already on ppc64,
        # otherwise cross-compile with the powerpc64le-linux-gnu toolchain.
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        MACHINE_TYPE=$( uname -m )
        if [[ "$MACHINE_TYPE" =~ ppc64 ]]; then
            export CC="gcc -m64 -fPIC"
            export BFLAGS="--build=ppc64le-linux"
        else
            export CC="powerpc64le-linux-gnu-gcc"
            export CXX="powerpc64le-linux-gnu-g++"
            export BFLAGS="--host=powerpc64le-linux-gnu"
        fi
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic $BFLAGS
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic $BFLAGS
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic $BFLAGS
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic $BFLAGS --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic $BFLAGS --disable-lzma --disable-zstd
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        # configure probes the wrong linker emulation for LE; force elf64lppc.
        sed -i s/elf64ppc/elf64lppc/ configure
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" $BFLAGS --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    macosx-*)
        # Native macOS build with the default toolchain.
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        cd $ZLIB
        ./configure --prefix=$INSTALL_PATH --static
        make -j $MAKEJ
        make install
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --disable-lzma --disable-zstd
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        # macOS-specific build fixes.
        patch -Np1 < ../../../leptonica-macosx.patch
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/" --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    windows-x86)
        # MinGW-w64 32-bit build.
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        export CC="gcc -m32"
        cd $ZLIB
        # zlib ships a dedicated MinGW makefile instead of configure.
        make -j $MAKEJ install -fwin32/Makefile.gcc BINARY_PATH=$INSTALL_PATH/bin/ INCLUDE_PATH=$INSTALL_PATH/include/ LIBRARY_PATH=$INSTALL_PATH/lib/
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=i686-w64-mingw32
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=i686-w64-mingw32
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=i686-w64-mingw32
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=i686-w64-mingw32 --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=i686-w64-mingw32 --disable-lzma --disable-zstd
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        # Force the static deps onto the link line so the DLL resolves them.
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/ -Wl,$INSTALL_PATH/lib/libwebpmux.a -Wl,$INSTALL_PATH/lib/*.a" --build=i686-w64-mingw32 --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    windows-x86_64)
        # MinGW-w64 64-bit build.
        export CFLAGS="-pthread -I$INSTALL_PATH/include/"
        export CXXFLAGS="$CFLAGS"
        export CPPFLAGS="$CFLAGS"
        export LDFLAGS="-L$INSTALL_PATH/lib/"
        export CC="gcc -m64"
        cd $ZLIB
        make -j $MAKEJ install -fwin32/Makefile.gcc BINARY_PATH=$INSTALL_PATH/bin/ INCLUDE_PATH=$INSTALL_PATH/include/ LIBRARY_PATH=$INSTALL_PATH/lib/
        cd ../$GIFLIB
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=x86_64-w64-mingw32
        make -j $MAKEJ
        make install
        cd ../$LIBJPEG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=x86_64-w64-mingw32
        make -j $MAKEJ
        make install
        cd ../$LIBPNG
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=x86_64-w64-mingw32
        make -j $MAKEJ
        make install
        cd ../$LIBWEBP
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=x86_64-w64-mingw32 --enable-libwebpmux
        make -j $MAKEJ
        make install
        cd ../$LIBTIFF
        ./configure --prefix=$INSTALL_PATH --disable-shared --with-pic --build=x86_64-w64-mingw32 --disable-lzma --disable-zstd
        make -j $MAKEJ
        make install
        cd ../leptonica-$LEPTONICA_VERSION
        PKG_CONFIG_PATH=$INSTALL_PATH/lib/pkgconfig/ ./configure --prefix=$INSTALL_PATH CFLAGS="-pthread -I$INSTALL_PATH/include/" LDFLAGS="-L$INSTALL_PATH/lib/ -Wl,$INSTALL_PATH/lib/libwebpmux.a -Wl,$INSTALL_PATH/lib/*.a" --build=x86_64-w64-mingw32 --disable-programs --without-libopenjpeg
        make -j $MAKEJ
        make install-strip
        ;;
    *)
        # Unknown platform: report and fall through (note: does not exit non-zero).
        echo "Error: Platform \"$PLATFORM\" is not supported"
        ;;
esac

# remove broken dependency_libs from files for libtool
sedinplace '/dependency_libs/d' ../lib/*.la || true
cd ../..
import { inject, injectStyles } from "./utils";

// Most recent detection payload sent to the background page, if any.
let lastDetection: any = null;

window.addEventListener("message", ev => {
	const fromDetector =
		ev.source === window &&
		ev.data &&
		ev.data.source === "preact-devtools-detector";
	if (fromDetector) {
		lastDetection = { hasPreact: true };
		chrome.runtime.sendMessage(lastDetection);
	}
});

// Firefox seemingly doesn't always reinject content scripts
window.addEventListener("pageshow", ev => {
	if (lastDetection && ev.target === window.document) {
		chrome.runtime.sendMessage(lastDetection);
	}
});

// Only inject for HTML pages
if (document.contentType === "text/html") {
	inject(chrome.runtime.getURL("installHook.js"), "script");
	injectStyles(chrome.runtime.getURL("installHook.css"));
}
|
<?php

// Simple product catalogue: each entry has a display name and a unit price.
$products = [
    ['name' => 'Apple', 'price' => 5.00],
    ['name' => 'Orange', 'price' => 3.50],
    ['name' => 'Banana', 'price' => 2.40],
];

// Print one "Name: price" line per product. number_format() keeps two
// decimals so prices render consistently ("5.00" instead of "5"), and
// PHP_EOL separates entries, which previously ran together on one line.
foreach ($products as $product) {
    echo $product['name'] . ': ' . number_format($product['price'], 2) . PHP_EOL;
}
?>
def TreeToArrayList(tree):
    """Return the values of a binary tree in level (BFS) order.

    Args:
        tree: Root node exposing ``val``, ``left`` and ``right`` attributes,
            or ``None`` for an empty tree.

    Returns:
        list: Node values in breadth-first order (empty list for ``None``).
    """
    # Guard the empty tree: the original crashed on `None.val`.
    if tree is None:
        return []
    values = []  # renamed from `list`, which shadowed the builtin
    queue = deque([tree])  # deque gives O(1) popleft vs O(n) list.pop(0)
    while queue:
        node = queue.popleft()
        values.append(node.val)
        if node.left:
            queue.append(node.left)
        if node.right:
            queue.append(node.right)
    return values
import { ChakraProvider } from "@chakra-ui/react";
import Debug from "debug";
import React from "react";
import ReactDOM from "react-dom";
import { BrowserRouter } from "react-router-dom";
import App from "./App";
import { BobtimusRateProvider } from "./Bobtimus";
import "./index.css";
import reportWebVitals from "./reportWebVitals";
import theme from "./theme";
const webVitalsLogger = Debug("webVitals");
/**
 * Configure the `debug` localStorage flag that gates log output.
 * Existing user settings always win; otherwise production gets a hint
 * printed to the console and development enables all logs.
 */
function checkLogger() {
    if (localStorage.getItem("debug")) {
        // `debug` var set: we honor existing settings and do not overwrite them.
        return;
    }
    switch (process.env.NODE_ENV) {
        case "production":
            // `debug` not set in production: warn that logs will be hidden.
            // eslint-disable-next-line no-console
            console.log("`debug` variable not set. You won't see any logs unless you add `debug=*` to your localStorage.");
            break;
        case "development":
            // `debug` not set in development: enable all logs.
            localStorage.setItem("debug", "*");
            break;
    }
}
// Configure debug logging before anything renders.
checkLogger();

// Mount the app: StrictMode adds development-time checks; ChakraProvider
// applies the custom theme; BobtimusRateProvider and BrowserRouter wrap the
// routed application tree.
ReactDOM.render(
  <React.StrictMode>
    <ChakraProvider theme={theme}>
      <BobtimusRateProvider>
        <BrowserRouter>
          <App />
        </BrowserRouter>
      </BobtimusRateProvider>
    </ChakraProvider>
  </React.StrictMode>,
  document.getElementById("root"),
);

// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals(webVitalsLogger);
|
#! /bin/bash
# Build and push the tdoer alpine-nginx Docker image.
#
# Usage:   ./build.sh <VERSION>
# Example: ./build.sh 1.0.0
set -eo pipefail

if [ $# -ne 1 ]; then
    echo "Usage: ./build.sh <VERSION>"
    echo "Example: ./build.sh 1.0.0"
    exit 1
fi

IMAGE_TAG="tdoer/alpine-nginx1.15.8:$1"

echo "Step 1: Build image: ${IMAGE_TAG}"
# Run the command as the `if` condition so failures are reported explicitly.
# The previous `if [ $? -eq 0 ]` check was dead code: under `set -e` the
# script had already exited before the check could ever see a failure.
if docker build -t "${IMAGE_TAG}" .; then
    echo "DONE"
else
    echo "Failed"
    exit 1
fi

echo "Step 2: Push image to docker registry"
if docker push "${IMAGE_TAG}"; then
    echo "DONE"
else
    echo "Failed"
    exit 1
fi
|
<filename>vital/config/token_type_config.h<gh_stars>1-10
/*ckwg +29
* Copyright 2013-2017 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _TOKEN_TYPE_CONFIG_H_
#define _TOKEN_TYPE_CONFIG_H_
#include <vital/util/token_type.h>
#include <vital/config/vital_config_export.h>
#include <vital/config/config_block.h>
namespace kwiver {
namespace vital {
// ----------------------------------------------------------------
/** Config token type.
*
* This class implements token_expander access to a config block. The
* name of the config entry is replaced with its contents.
*
* The config entry passed to the constructor is still under the
* control of the originator and will not be deleted by this class.
*
 * When the string "$CONFIG{key}" is found in the input text, it is
 * replaced with the value in the config block specified by the key.
*
* Example:
\code
kwiver::vital::config_block block;
kwiver::vital::token_expander m_token_expander;
m_token_expander.add_token_type( new kwiver::vital::token_type_config( block ) );
\endcode
*/
class VITAL_CONFIG_EXPORT token_type_config
  : public token_type
{
public:
  /** Constructor. A token type object is created that has access to
   * the supplied config block. The ownership of this config block
   * remains with the creator.
   *
   * @param[in] blk - config block
   */
  token_type_config( kwiver::vital::config_block_sptr blk );

  virtual ~token_type_config();

  /** Lookup name in token type resolver.
   *
   * @param[in] name config key to resolve
   * @param[out] result receives the resolved value on success
   * @return presumably \b true when the key was found and \a result was
   * filled in, \b false otherwise — confirm against the implementation.
   */
  virtual bool lookup_entry (std::string const& name, std::string& result) const;

private:
  // Shared handle to the config block used for lookups; owned by the caller.
  kwiver::vital::config_block_sptr m_config;

}; // end class token_type_config
} } // end namespace
#endif /* _TOKEN_TYPE_CONFIG_H_ */
|
import { IMessage } from '../interfaces';
import { modulesSchema } from '../modules';
import { parseRawText, describeMissingArgs } from '../utils';
import { sendTypingIndicator, sendMessage, sendImage } from '../api';
/**
 * Dispatch an incoming chat message to the matching command module.
 * Shows a typing indicator while the command runs and always clears it,
 * even when the command throws (previously an error left it running).
 */
export const handleMessage = async (err: Error, message: IMessage) => {
  if (err) throw err;
  const { body, threadID } = message;
  if (body == null || threadID == null) return;

  const parsed = parseRawText(body);
  if (parsed == null) return;

  const { args, name } = parsed;
  const schema = modulesSchema(threadID);
  // Ignore messages that do not name a known command.
  if (schema[name] == null) return;

  // Start the typing indicator; `end` stops it again.
  const end = sendTypingIndicator(message.threadID);
  try {
    if (!validateArgs(args, name, threadID)) {
      // Too few arguments: explain what is missing.
      await sendMessage(
        { body: describeMissingArgs(schema[name].args, args.length) },
        threadID,
      );
    } else if (schema[name].action != null) {
      await schema[name].action(args, threadID);
    } else if (schema[name].image != null) {
      await sendImage(schema[name].image, threadID);
    }
    // Note: the former `catch (err) { throw err; }` was a no-op and was removed;
    // errors still propagate to the caller unchanged.
  } finally {
    end();
  }
};
/**
 * Check whether the user supplied enough arguments for the named command.
 * Commands that declare no args accept any input.
 */
export const validateArgs = (userArgs: string[], name: string, threadId: string) => {
  const commandSchema = modulesSchema(threadId)[name];
  if (commandSchema.args == null) return true;
  return userArgs.length >= commandSchema.args.length;
};
|
<gh_stars>1-10
'use strict';
const Loader = require('@structures/Loader');
const { requireDirectory } = require('@util/FileUtil');
const Logger = require('@util/Logger');
class ListenerLoader extends Loader {
    constructor(client) {
        super(client, true);
    }

    /**
     * Instantiate every listener module under `src/listeners` and wire it to
     * the client event named after its file. The loaded event names are
     * recorded on `client.availableLogs`.
     * @returns {Promise<boolean>} resolves true once all listeners are wired
     */
    async load() {
        const loadedEvents = [];
        await requireDirectory(
            'src/listeners',
            (Listener, fileName) => {
                const instance = new Listener(this.client);
                loadedEvents.push(fileName);
                this.client.on(fileName, (...args) => instance.on(this.client, ...args));
            },
            Logger.error,
        );
        this.client.availableLogs = loadedEvents;
        return true;
    }
}

module.exports = ListenerLoader;
|
<reponame>Tiendeo/settings_loader<filename>configuration_loader/loaders/environment.py
import os
from functools import reduce
from configuration_loader.loaders.base import Base
class EnvironmentLoader(Base):
    """Load configuration from process environment variables.

    Each variable name is split on the configured separator and expanded
    into nested dicts, e.g. with the default ``'__'`` the variable
    ``DB__HOST=x`` becomes ``{'DB': {'HOST': 'x'}}``.
    """

    # Default separator used to split variable names into nested keys.
    __SPLIT_STRING_KEY = '__'

    def __init__(self, split_string_key=None):
        """
        :param split_string_key: optional custom separator; any falsy value
            falls back to the default ``'__'`` (matches original behavior).
        """
        self.__split_string_key = split_string_key or self.__SPLIT_STRING_KEY

    def load(self):
        """Return a list with one nested-dict entry per environment variable."""
        # dict(os.environ) snapshots the environment in one call instead of
        # copying it key by key.
        return self.__split_keys(dict(os.environ))

    def __split_keys(self, config):
        """Expand each flat ``KEY__SUB`` name into nested dicts, one per entry."""
        return [
            reduce(lambda res, cur: {cur: res},
                   reversed(key.split(self.__split_string_key)),
                   value)
            for key, value in config.items()
        ]
|
<filename>iwiwi/src/agl/connectivity/connectivity.h
#pragma once
#include <queue>
#include <algorithm>
#include "graph/graph.h"
namespace agl {
template<typename GraphType>
bool is_connected(const GraphType &g) {
if (g.num_vertices() == 0) return true;
std::queue<V> que;
std::vector<bool> vis(g.num_vertices());
que.push(0);
vis[0] = true;
while (!que.empty()) {
V v = que.front();
que.pop();
for (V tv : g.neighbors(v)) {
if (vis[tv]) continue;
que.push(tv);
vis[tv] = true;
}
}
return std::find(vis.begin(), vis.end(), false) == vis.end();
}
} // namespace agl
|
import { Component, OnInit, Inject } from '@angular/core';
import { MatDialogRef } from '@angular/material/dialog';
import { UserModel } from '../../models/user-model';
import { GitService } from '../../services/git.service';
import { UserRepoModel } from '../../models/user-repo-model';
@Component({
  selector: 'app-single-user',
  templateUrl: './single-user.component.html',
  styleUrls: ['./single-user.component.scss']
})
export class SingleUserComponent implements OnInit {
  // NOTE(review): `userData` is read in ngOnInit but never assigned in this
  // class — presumably the dialog opener sets it via
  // `dialogRef.componentInstance.userData` before the dialog initialises;
  // confirm against the calling code.
  userData: UserModel;
  // Repositories fetched for `userData`; starts empty until the fetch returns.
  repoList: Array<UserRepoModel>;
  // Free-text filter term — presumably bound by the template; confirm there.
  searchTerm: string;

  constructor(
    @Inject(MatDialogRef) private dialogRef: MatDialogRef<SingleUserComponent>,
    private gitService: GitService
  ) {
    this.repoList = new Array<UserRepoModel>(0);
  }

  ngOnInit(): void {
    this.fetchReposFromServer();
  }

  // Fetch the repository list from `userData.repos_url` and replace the
  // local list only when a non-null payload comes back.
  fetchReposFromServer() {
    this.gitService.getAllReposForUsers<UserRepoModel[]>(this.userData.repos_url).subscribe(data => {
      if (data !== null) {
        this.repoList = data;
      }
    });
  }
}
|
#!/bin/bash
# WSL bootstrap: installs shell tooling, neovim, pyenv-managed Pythons,
# pipx tools, a default virtualenv and poetry, then appends the bashrc
# template.
#
# Fixes vs the previous version:
#   * A mid-script `exec $SHELL` replaced the running shell, so every line
#     after it (all pyenv installs, pip setup, ...) never executed. The
#     pyenv environment is now initialised in-place instead.
#   * apt installs use -y so the script runs unattended.
#   * Shebang is bash: the script uses `source`, which is not POSIX sh.

echo "Setting up your WSL..."
sudo apt update
sudo apt upgrade -y

# =============================================================================
# Install linux packages
# =============================================================================
sudo apt install -y tree
sudo apt install -y bat
sudo apt install -y wget ca-certificates
sh -c "$(curl -fsSL https://starship.rs/install.sh)"

# neovim
sudo apt install -y neovim
sh -c 'curl -fLo "${XDG_DATA_HOME:-$HOME/.local/share}"/nvim/site/autoload/plug.vim --create-dirs https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim'

# =============================================================================
# Python
# =============================================================================
# Install pyenv build prerequisites, then pyenv itself
sudo apt install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev
curl https://pyenv.run | bash

# Make pyenv usable in THIS shell; an `exec $SHELL` here would abort the
# remainder of the script.
export PYENV_ROOT="$HOME/.pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"
eval "$(pyenv init -)"

pyenv install 3.7.12
pyenv rehash
pyenv install 3.8.12
pyenv rehash
pyenv install 3.9.9
pyenv rehash
pyenv install 3.10.1
pyenv rehash

# Configure pyenv defaults and upgrade pip
pyenv global 3.10.1
pyenv rehash
python -m pip install --upgrade pip
pip install wheel

# pipx
pip install pipx
pipx ensurepath
pipx install black
pipx install flake8

# Create default python virtual environments (-p: idempotent on reruns)
mkdir -p ~/.virtualenvs
python -m venv ~/.virtualenvs/py310
source ~/.virtualenvs/py310/bin/activate
python -m pip install --upgrade pip
pip install wheel
pip install jupyterlab pandas dash streamlit rich typer

# Set up poetry
curl -sSL https://install.python-poetry.org | python3 -

# Copy .bashrc_template into .bashrc
echo "" >> ~/.bashrc
echo "" >> ~/.bashrc
cat .bashrc_template >> ~/.bashrc

# Complete! (echo before exec: exec never returns to this script)
echo "Complete!"
exec $SHELL
|
const router = require("express").Router()
const userController = require("../../controllers/userController")

// 👥 "/api/user" +
// POST /          — create a user
// POST /signin    — sign a user in
// POST /save      — save an image for the user
router.post("/", userController.createUser)
router.post("/signin", userController.signIn)
router.post("/save", userController.saveImage)

module.exports = router
|
#!/usr/bin/env bash
#######################################################################
# wrapper script to execute the complete benchmark in one go
#######################################################################
# Abort on the first failing step: every later stage consumes the output
# of the earlier ones, so continuing after a failure would only produce
# bogus results. (The script previously had no shebang and no error
# handling between the dependent stages.)
set -e

# delete results folder if it already exists
rm -rf ./results

# delete potentially existing log files
rm -f error.log console-overflow.log conversion-failure.log csv-parse-errors.log

# install dependencies
npm install                      # nodejs
pip install -r requirements.txt  # python

# collect metrics for all API types via RAMA CLI (list of JSON files in ./results/<api-format>)
# the script expects the RAMA CLI to be located at `../rama-cli/target/rama-cli-0.1.2.jar`
# you can use the following helper script to automatically set this up: `download-and-build-rama-cli.sh`
# if the JAR is located somewhere else, use the --cliPath argument to specify the path to it
node src/01-collect-metrics.js --format=raml
node src/01-collect-metrics.js --format=wadl
node src/01-collect-metrics.js --format=openapi

# convert JSON output files into one holistic CSV file (./result/metrics.csv)
node src/02-generate-csv.js --format=raml
node src/02-generate-csv.js --format=wadl
node src/02-generate-csv.js --format=openapi

# produce aggregated JSON results file (./results/aggregated-metrics.json) as well as diagrams (./results/diagrams)
python src/03-aggregate-metrics.py

# produce LaTex tables with statistic results (./results/latex-tables.tex); only works if all three formats have been generated
node src/04-generate-latex.js
|
<filename>pyvoltha/adapters/interface.py
#
# Copyright 2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Interface definition for Voltha Adapters
"""
from __future__ import absolute_import
from zope.interface import Interface
class IAdapterInterface(Interface):
    """
    A Voltha adapter. This interface is used by the Voltha Core to initiate
    requests towards a voltha adapter.
    """

    def adapter_descriptor():
        """
        Return the adapter descriptor object for this adapter.
        :return: voltha.Adapter grpc object (see voltha/protos/adapter.proto),
        with adapter-specific information and config extensions.
        """

    def device_types():
        """
        Return list of device types supported by the adapter.
        :return: voltha.DeviceTypes protobuf object, with optional type
        specific extensions.
        """

    def health():
        """
        Return a 3-state health status using the voltha.HealthStatus message.
        :return: Deferred or direct return with voltha.HealthStatus message
        """

    def adopt_device(device):
        """
        Make sure the adapter looks after given device. Called when a device
        is provisioned top-down and needs to be activated by the adapter.
        :param device: A voltha.Device object, with possible device-type
        specific extensions. Such extensions shall be described as part of
        the device type specification returned by device_types().
        :return: (Deferred) Shall be fired to acknowledge device ownership.
        """

    def reconcile_device(device):
        """
        Make sure the adapter looks after given device. Called when this
        device has changed ownership from another Voltha instance to
        this one (typically, this occurs when the previous voltha
        instance went down).
        :param device: A voltha.Device object, with possible device-type
        specific extensions. Such extensions shall be described as part of
        the device type specification returned by device_types().
        :return: (Deferred) Shall be fired to acknowledge device ownership.
        """

    def abandon_device(device):
        """
        Make sure the adapter no longer looks after the device. This is
        called if device ownership is taken over by another Voltha instance.
        :param device: A Voltha.Device object.
        :return: (Deferred) Shall be fired to acknowledge abandonment.
        """

    def disable_device(device):
        """
        This is called when a previously enabled device needs to be disabled
        based on a NBI call.
        :param device: A Voltha.Device object.
        :return: (Deferred) Shall be fired to acknowledge disabling the device.
        """

    def reenable_device(device):
        """
        This is called when a previously disabled device needs to be enabled
        based on a NBI call.
        :param device: A Voltha.Device object.
        :return: (Deferred) Shall be fired to acknowledge re-enabling the
        device.
        """

    def reboot_device(device):
        """
        This is called to reboot a device based on a NBI call. The admin
        state of the device will not change after the reboot.
        :param device: A Voltha.Device object.
        :return: (Deferred) Shall be fired to acknowledge the reboot.
        """

    def download_image(device, request):
        """
        This is called to request downloading a specified image into
        the standby partition of a device based on a NBI call.
        This call is expected to be non-blocking.
        :param device: A Voltha.Device object.
        :param request: A Voltha.ImageDownload object.
        :return: (Deferred) Shall be fired to acknowledge the download.
        """

    def get_image_download_status(device, request):
        """
        This is called to inquire about a requested image download
        status based on a NBI call.
        The adapter is expected to update the DownloadImage DB object
        with the query result.
        :param device: A Voltha.Device object.
        :param request: A Voltha.ImageDownload object.
        :return: (Deferred) Shall be fired to acknowledge
        """

    def cancel_image_download(device, request):
        """
        This is called to cancel a requested image download
        based on a NBI call. The admin state of the device will not
        change after the download.
        :param device: A Voltha.Device object.
        :param request: A Voltha.ImageDownload object.
        :return: (Deferred) Shall be fired to acknowledge
        """

    def activate_image_update(device, request):
        """
        This is called to activate a downloaded image from
        a standby partition into active partition.
        Depending on the device implementation, this call
        may or may not cause device reboot.
        If no reboot, then a reboot is required to make the
        activated image running on device.
        This call is expected to be non-blocking.
        :param device: A Voltha.Device object.
        :param request: A Voltha.ImageDownload object.
        :return: (Deferred) OperationResponse object.
        """

    def revert_image_update(device, request):
        """
        This is called to deactivate the specified image at
        active partition, and revert to previous image at
        standby partition.
        Depending on the device implementation, this call
        may or may not cause device reboot.
        If no reboot, then a reboot is required to make the
        previous image running on device.
        This call is expected to be non-blocking.
        :param device: A Voltha.Device object.
        :param request: A Voltha.ImageDownload object.
        :return: (Deferred) OperationResponse object.
        """

    def enable_port(device_id, port):
        """
        This is called to enable the specified port.
        Depending on the implementation, this call may or may
        not cause port enable.
        This call is expected to be non-blocking.
        :param device_id: A voltha.Device.id object.
        :param port: A voltha.Port object
        """

    def disable_port(device_id, port):
        """
        This is called to disable the specified port.
        Depending on the implementation, this call may or may
        not cause port disable.
        This call is expected to be non-blocking.
        :param device_id: A voltha.Device.id object.
        :param port: A voltha.Port object
        """

    def self_test_device(device):
        """
        This is called to self-test a device based on a NBI call.
        :param device: A Voltha.Device object.
        :return: Will return result of self test
        """

    def delete_device(device):
        """
        This is called to delete a device from the PON based on a NBI call.
        If the device is an OLT then the whole PON will be deleted.
        :param device: A Voltha.Device object.
        :return: (Deferred) Shall be fired to acknowledge the deletion.
        """

    def get_device_details(device):
        """
        This is called to get additional device details based on a NBI call.
        :param device: A Voltha.Device object.
        :return: (Deferred) Shall be fired to acknowledge the retrieval of
        additional details.
        """

    def update_flows_bulk(device, flows, groups):
        """
        Called after any flow table change, but only if the device supports
        bulk mode, which is expressed by the 'accepts_bulk_flow_update'
        capability attribute of the device type.
        :param device: A Voltha.Device object.
        :param flows: An openflow_v13.Flows object
        :param groups: An openflow_v13.Flows object
        :return: (Deferred or None)
        """

    def update_flows_incrementally(device, flow_changes, group_changes):
        """
        Called after a flow table update, but only if the device supports
        non-bulk mode, which is expressed by the 'accepts_add_remove_flow_updates'
        capability attribute of the device type.
        :param device: A Voltha.Device object.
        :param flow_changes: An openflow_v13.FlowChanges object
        :param group_changes: An openflow_v13.FlowGroupChanges object
        :return: (Deferred or None)
        """

    def update_pm_config(device, pm_configs):
        """
        Called every time a request is made to change pm collection behavior.
        :param device: A Voltha.Device object
        :param pm_configs: A Pms object describing the PM collection config
        """

    def receive_packet_out(device_id, egress_port_no, msg):
        """
        Pass a packet_out message content to adapter so that it can forward
        it out to the device. This is only called on root devices.
        :param device_id: device ID
        :param egress_port_no: egress logical port number
        :param msg: actual message
        :return: None
        """

    def suppress_alarm(filter):
        """
        Inform an adapter that all incoming alarms should be suppressed.
        :param filter: A Voltha.AlarmFilter object.
        :return: (Deferred) Shall be fired to acknowledge the suppression.
        """

    def unsuppress_alarm(filter):
        """
        Inform an adapter that all incoming alarms should resume.
        :param filter: A Voltha.AlarmFilter object.
        :return: (Deferred) Shall be fired to acknowledge the unsuppression.
        """

    def get_ofp_device_info(device):
        """
        Retrieve the OLT device info. This includes the ofp_desc and
        ofp_switch_features. The existing ofp structures can be used,
        or all the attributes get added to the Device definition or a new proto
        definition gets created. This API will allow the Core to create a
        LogicalDevice associated with this device (OLT only).
        :param device: device
        :return: Proto Message (TBD)
        """

    def get_ofp_port_info(device, port_no):
        """
        Retrieve the port info. This includes the ofp_port. The existing ofp
        structure can be used, or all the attributes get added to the Port
        definitions or a new proto definition gets created. This API will allow
        the Core to create a LogicalPort associated with this device.
        :param device: device
        :param port_no: port number
        :return: Proto Message (TBD)
        """

    def process_inter_adapter_message(msg):
        """
        Called when the adapter receives a message that was sent to it directly
        from another adapter. An adapter is automatically registered for these
        messages when creating the inter-container kafka proxy. Note that it is
        the responsibility of the sending and receiving adapters to properly encode
        and decode the message.
        :param msg: Proto Message (any)
        :return: Proto Message Response
        """
class ICoreSouthBoundInterface(Interface):
    """
    Represents a Voltha Core. This is used by an adapter to initiate async
    calls towards Voltha Core.
    """

    def get_device(device_id):
        """
        Retrieve a device using its ID.
        :param device_id: a device ID
        :return: Device Object or None
        """

    def get_child_device(parent_device_id, **kwargs):
        """
        Retrieve a child device object belonging to the specified parent
        device based on some match criteria. The first child device that
        matches the provided criteria is returned.
        :param parent_device_id: parent's device protobuf ID
        :param **kwargs: arbitrary list of match criteria where the Value
        in each key-value pair must be a protobuf type
        :return: Child Device Object or None
        """

    def get_ports(device_id, port_type):
        """
        Retrieve all the ports of a given type of a Device.
        :param device_id: a device ID
        :param port_type: type of port
        :return Ports object
        """

    def get_child_devices(parent_device_id):
        """
        Get all child devices given a parent device id.
        :param parent_device_id: The parent device ID
        :return: Devices object
        """

    def get_child_device_with_proxy_address(proxy_address):
        """
        Get a child device based on its proxy address. Proxy address is
        defined as {parent id, channel_id}.
        :param proxy_address: A Device.ProxyAddress object
        :return: Device object or None
        """

    def device_state_update(device_id,
                            oper_status=None,
                            connect_status=None):
        """
        Update a device state.
        :param device_id: The device ID
        :param oper_status: Operational state of device
        :param connect_status: Connection state of device
        :return: None
        """

    def child_device_detected(parent_device_id,
                              parent_port_no,
                              child_device_type,
                              channel_id,
                              **kw):
        """
        A child device has been detected. Core will create the device along
        with its unique ID.
        :param parent_device_id: The parent device ID
        :param parent_port_no: The parent port number
        :param child_device_type: The child device type
        :param channel_id: A unique identifier for that child device within
        the parent device (e.g. vlan_id)
        :param kw: A list of key-value pair where the value is a protobuf
        message
        :return: None
        """

    def device_update(device):
        """
        Event corresponding to a device update.
        :param device: Device Object
        :return: None
        """

    def child_device_removed(parent_device_id, child_device_id):
        """
        Event indicating a child device has been removed from a parent.
        :param parent_device_id: Device ID of the parent
        :param child_device_id: Device ID of the child
        :return: None
        """

    def child_devices_state_update(parent_device_id,
                                   oper_status=None,
                                   connect_status=None,
                                   admin_status=None):
        """
        Event indicating the status of all child devices have been changed.
        :param parent_device_id: Device ID of the parent
        :param oper_status: Operational status
        :param connect_status: Connection status
        :param admin_status: Admin status
        :return: None
        """

    def child_devices_removed(parent_device_id):
        """
        Event indicating all child devices have been removed from a parent.
        :param parent_device_id: Device ID of the parent device
        :return: None
        """

    def device_pm_config_update(device_pm_config, init=False):
        """
        Event corresponding to a PM config update of a device.
        :param device_pm_config: a PmConfigs object
        :param init: True indicates initializing stage
        :return: None
        """

    def port_created(device_id, port):
        """
        A port has been created and needs to be added to a device.
        :param device_id: a device ID
        :param port: Port object
        :return None
        """

    def port_removed(device_id, port):
        """
        A port has been removed and it needs to be removed from a Device.
        :param device_id: a device ID
        :param port: a Port object
        :return None
        """

    def ports_enabled(device_id):
        """
        All ports on that device have been re-enabled. The Core will change
        the admin state to ENABLED and operational state to ACTIVE for all
        ports on that device.
        :param device_id: a device ID
        :return: None
        """

    def ports_disabled(device_id):
        """
        All ports on that device have been disabled. The Core will change the
        admin status to DISABLED and operational state to UNKNOWN for all
        ports on that device.
        :param device_id: a device ID
        :return: None
        """

    def ports_oper_status_update(device_id, oper_status):
        """
        The operational status of all ports of a Device has been changed.
        The Core will update the operational status for all ports on the
        device.
        :param device_id: a device ID
        :param oper_status: operational Status
        :return None
        """

    def image_download_update(img_dnld):
        """
        Event corresponding to an image download update.
        :param img_dnld: a ImageDownload object
        :return: None
        """

    def image_download_deleted(img_dnld):
        """
        Event corresponding to the deletion of a downloaded image. The
        references of this image needs to be removed from the Core.
        :param img_dnld: a ImageDownload object
        :return: None
        """

    def packet_in(device_id, egress_port_no, packet):
        """
        Sends a packet to the SDN controller via voltha Core.
        :param device_id: The OLT device ID
        :param egress_port_no: The port number representing the ONU (cvid)
        :param packet: The actual packet
        :return: None
        """
|
<reponame>herpan/ssi_cpc<gh_stars>0
// Socket.IO connection to the backend; the extra header is a fixed
// key/value pair the server expects on the handshake.
const socket = io("https://172.16.58.3", {
    withCredentials: true,
    extraHeaders: { "xDPEAOEEssANz4gFAAAB": "UlCKearSRj0DgEfIAAAB" }
});

/**
 * Emit a `new_update` event carrying the given value.
 * @param {*} to_bank value forwarded to the server in the event payload
 */
function update_data(to_bank) {
    socket.emit("new_update", { to_bank: to_bank });
}
#!/bin/bash
# Train CIFAR-100 ResNet once per requested activation function, then
# archive the produced model directories into a single tarball.
#
# Usage: ./run.sh <model_dir> <resnet_size> <train_epochs> <batch_size> <activation|all>
model_dir=$1
resnet_size=$2
train_epochs=$3
batch_size=$4
activation=$5

# "all" expands to the full set of supported activation functions.
if [ "$activation" == "all" ];
then
    declare -a functions=("relu" "lrelu" "tanh" "elu" "swish")
else
    declare -a functions=("$activation")
fi

for func in "${functions[@]}"
do
    modelpath="$model_dir/cifar100_resnet-${resnet_size}_train_epochs-${train_epochs}_batch_size-${batch_size}_activation-${func}"
    logpath="$model_dir/log_cifar100_resnet-${resnet_size}_train_epochs-${train_epochs}_batch_size-${batch_size}_activation-${func}"
    # FIX: the echo previously announced cifar10_main.py while the command
    # below actually runs cifar100_main.py; the two now agree.
    echo "Running--------------" python cifar100_main.py --model_dir "$modelpath" --resnet_size "$resnet_size" \
        --train_epochs "$train_epochs" --batch_size "$batch_size" --activation "$func" --data_format channels_first
    time python cifar100_main.py --model_dir "$modelpath" --resnet_size "$resnet_size" \
        --train_epochs "$train_epochs" --batch_size "$batch_size" --activation "$func" --data_format channels_first >> "$logpath"
done

# Bundle every produced model directory (one per activation) into one archive.
name="cifar100_resnet-${resnet_size}_train_epochs-${train_epochs}_batch_size-${batch_size}_activation-*"
name2="cifar100_resnet-${resnet_size}_train_epochs-${train_epochs}_batch_size-${batch_size}"
cd "$model_dir"
find . -maxdepth 1 -type d -name "$name" -print0 | tar -czvf "$name2".tar.gz --null -T -
exit 0
<gh_stars>0
package com.github.infobarbosa.kafka;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
/**
 * Consumes String records from the {@code teste} topic over an
 * SSL-authenticated connection, logging key/value/timestamp for each record
 * and committing offsets asynchronously (auto-commit is disabled).
 */
public class SslAuthenticationConsumer {

    private static final Logger logger = LoggerFactory.getLogger(SslAuthenticationConsumer.class.getName());

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "brubeck:9093");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumer-tutorial");
        properties.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "100");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer-tutorial-group");
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // SSL client authentication. NOTE(review): the passwords are
        // placeholders and should be supplied via configuration, not source.
        properties.put("security.protocol", "SSL");
        properties.put("ssl.truststore.location", "/tmp/ssl/kafka.client.truststore.jks");
        properties.put("ssl.truststore.password", "<PASSWORD>");
        properties.put("ssl.keystore.location", "/tmp/ssl/kafka.client.keystore.jks");
        properties.put("ssl.keystore.password", "<PASSWORD>");
        properties.put("ssl.key.password", "<PASSWORD>");

        final String topic = "teste";

        // try-with-resources guarantees the consumer is closed (releasing its
        // group membership) even when polling throws; previously it leaked.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            consumer.subscribe(Arrays.asList(topic));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    final String key = record.key();
                    final String value = record.value();
                    final long timestamp = record.timestamp();
                    // Async commit; the callback logs the record once the
                    // broker acknowledges the offsets, and now also reports
                    // commit failures instead of silently ignoring them.
                    consumer.commitAsync(new OffsetCommitCallback() {
                        @Override
                        public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
                            if (exception != null) {
                                logger.error("commit falhou", exception);
                                return;
                            }
                            logger.info("K: " + key
                                    + "; V: " + value
                                    + "; TS: " + timestamp
                            );
                        }
                    });
                    // Throttle between records.
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag and stop consuming so the
                        // interruption is not silently swallowed.
                        Thread.currentThread().interrupt();
                        logger.error("problemas durante o sono.", e);
                        return;
                    }
                }
            }
        } catch (Exception e) {
            logger.error("Problemas durante o consumo", e);
        }
    }
}
|
import { Response } from "../runtime";
import { Logger } from "./Logger.js";
/**
 * Map of event name → payload shape carried on the bus.
 */
export interface Events {
  // TODO: Temporarily here until moved to function builder
  "function.requested": {
    localID: string;
    request: {
      event: any;
      context: any;
    };
  };
  "function.responded": {
    localID: string;
    request: {
      event: any;
      context: any;
    };
    response: Response;
  };
}

// Union of all event names declared on Events.
type EventTypes = keyof Events;

// Envelope delivered to subscribers: the event name plus its payload.
type EventPayload<Type extends EventTypes> = {
  type: Type;
  properties: Events[Type];
};

// Handle returned by subscribe(); pass it back to unsubscribe().
type Subscription = {
  type: EventTypes;
  cb: (payload: any) => void;
};

// Bus instance type, inferred from the createBus factory below.
export type Bus = ReturnType<typeof createBus>;
/**
 * Creates an in-process pub/sub event bus over the {@link Events} map.
 * Subscribers are stored per event type; publishing is synchronous.
 */
export function createBus() {
  // Registered subscriptions, keyed by event type.
  const subscriptions: Record<string, Subscription[]> = {};

  // Return (creating on demand) the subscriber list for a type.
  function subscribers(type: EventTypes) {
    let arr = subscriptions[type];
    if (!arr) {
      arr = [];
      subscriptions[type] = arr;
    }
    return arr;
  }

  return {
    /** Deliver `properties` synchronously to every subscriber of `type`. */
    publish<Type extends EventTypes>(type: Type, properties: Events[Type]) {
      Logger.print("debug", `Publishing event`, type, properties);
      const payload: EventPayload<Type> = {
        type,
        properties
      };
      for (const sub of subscribers(type)) sub.cb(payload);
    },
    /** Remove a subscription previously returned by `subscribe`. */
    unsubscribe(sub: Subscription) {
      const arr = subscribers(sub.type);
      const index = arr.indexOf(sub);
      // BUG FIX: `if (!index) return` bailed out when the subscription was at
      // index 0 (never removing the first subscriber) and fell through when it
      // was absent (index -1), where splice(-1, 1) removed the LAST element.
      // Only skip removal when the subscription is genuinely not registered.
      if (index === -1) return;
      arr.splice(index, 1);
    },
    /** Register `cb` for `type`; returns the handle used to unsubscribe. */
    subscribe<Type extends EventTypes>(
      type: Type,
      cb: (payload: EventPayload<Type>) => void
    ) {
      const sub: Subscription = {
        type,
        cb
      };
      subscribers(type).push(sub);
      return sub;
    }
  };
}
|
<gh_stars>1-10
// Copyright 2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.secmgr.config;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import java.util.Arrays;
/**
* Unit test utilities for config.
*/
public final class ConfigTestUtil {

  /** Static utilities only; never instantiated. */
  private ConfigTestUtil() {
    throw new UnsupportedOperationException();
  }

  /** Builds a JSON array literal from pre-rendered element strings. */
  public static String jsonArray(String... elts) {
    return jsonArray(Arrays.asList(elts));
  }

  /** Builds a JSON array literal from pre-rendered element strings. */
  public static String jsonArray(Iterable<String> elts) {
    return commaSeparated("[", "]", elts);
  }

  /** Builds a JSON object literal from pre-rendered key:value bindings. */
  public static String jsonObject(String... bindings) {
    return jsonObject(Arrays.asList(bindings));
  }

  /** Builds a JSON object literal from pre-rendered key:value bindings. */
  public static String jsonObject(Iterable<String> bindings) {
    return commaSeparated("{", "}", bindings);
  }

  /**
   * Joins elements with commas between the given delimiters,
   * e.g. ("[", "]", [a, b]) → "[a,b]".
   */
  private static String commaSeparated(String prefix, String suffix, Iterable<String> elts) {
    if (Iterables.isEmpty(elts)) {
      return prefix + suffix;
    }
    StringBuilder out = new StringBuilder(prefix);
    boolean first = true;
    for (String elt : elts) {
      if (!first) {
        out.append(",");
      }
      first = false;
      out.append(elt);
    }
    out.append(suffix);
    return out.toString();
  }

  /** Renders one key:value binding with the key JSON-quoted. */
  public static String jsonBinding(String key, String value) {
    return jsonQuote(key) + ":" + value;
  }

  public static String jsonBinding(String key, boolean value) {
    return jsonQuote(key) + ":" + value;
  }

  public static String jsonBinding(String key, int value) {
    return jsonQuote(key) + ":" + value;
  }

  public static String jsonBinding(String key, long value) {
    return jsonQuote(key) + ":" + value;
  }

  /** Builds a JSON array of quoted strings. */
  public static String jsonStringArray(String... strings) {
    return jsonStringArray(Arrays.asList(strings));
  }

  /** Builds a JSON array of quoted strings. */
  public static String jsonStringArray(Iterable<String> strings) {
    return jsonArray(
        Iterables.transform(strings,
            new Function<String, String>() {
              public String apply(String string) {
                return jsonQuote(string);
              }
            }));
  }

  /**
   * Double-quotes a string for JSON, backslash-escaping only '\' and '"'.
   * A null input renders as the bare JSON literal {@code null}.
   */
  public static String jsonQuote(String string) {
    if (string == null) {
      return "null";
    }
    StringBuilder out = new StringBuilder("\"");
    for (char c : string.toCharArray()) {
      if (c == '\\' || c == '"') {
        out.append('\\');
      }
      out.append(c);
    }
    return out.append('"').toString();
  }

  /** Renders an int as its bare JSON numeric literal. */
  public static String jsonQuote(int n) {
    return Integer.toString(n);
  }

  /** Renders a boolean as its bare JSON literal. */
  public static String jsonQuote(boolean b) {
    return Boolean.toString(b);
  }
}
|
#!/bin/bash
# Run lc_resample over the three skysim5000 grp21 redshift shells in order.
PY=/soft/libraries/anaconda-unstable/bin/python
BASE=params_lc_resamp/skysim5000/skysim5000_v1.1.1_grp21
for zr in z_0_1 z_1_2 z_2_3; do
    "$PY" ./lc_resample.py "${BASE}/skysim5000_v1.1.1_${zr}_hpx:grp21.param"
done
|
import { Component, OnInit } from '@angular/core';
import { UsersService } from './users.service';
// Root component for the Users module: loads the user list on init and
// exposes it to the template as ListUsers.
@Component ({
selector: 'my-app',
templateUrl: 'app/Modules/Users/users.component.html',
providers: [ UsersService]
})
export class AppUsers implements OnInit {
// Users fetched from the service; bound in the template.
ListUsers: any[];
// NOTE(review): the property name `UsersService` shadows the injected class
// name — confirm template usage before renaming it.
constructor(public UsersService: UsersService) { }
ngOnInit() {
// Load the user list once on init; the subscription completes on its own.
this.UsersService.getListUser().subscribe((data) => this.ListUsers = data)
}
}
import { isEqual as _isEqual } from '@collectable/map';
import { HashSetStructure } from '../internals';
/**
 * Determines whether two hash sets contain exactly the same members by
 * delegating to the underlying map's structural equality check.
 */
export function isEqual<T> (set: HashSetStructure<T>, other: HashSetStructure<T>): boolean {
  const left = set._map;
  const right = other._map;
  return _isEqual(left, right);
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-3152-1
#
# Security announcement date: 2016-12-05 00:00:00 UTC
# Script generation date: 2017-02-02 21:04:28 UTC
#
# Operating System: Ubuntu 16.10
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - linux-image-4.8.0-30-generic:4.8.0-30.32
# - linux-source-4.8.0:4.8.0-30.32
# - linux-doc:4.8.0-30.32
# - linux-headers-4.8.0-30:4.8.0-30.32
# - linux-libc-dev:4.8.0-30.32
# - linux-tools-common:4.8.0-30.32
# - linux-tools-4.8.0-30:4.8.0-30.32
# - linux-cloud-tools-common:4.8.0-30.32
# - linux-cloud-tools-4.8.0-30:4.8.0-30.32
# - linux-image-extra-4.8.0-30-generic:4.8.0-30.32
# - linux-headers-4.8.0-30-generic:4.8.0-30.32
# - linux-tools-4.8.0-30-generic:4.8.0-30.32
# - linux-cloud-tools-4.8.0-30-generic:4.8.0-30.32
# - linux-image-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-headers-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-tools-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-cloud-tools-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-image-generic:4.8.0.30.39
# - linux-source:4.8.0.30.39
# - linux-crashdump:4.8.0.30.39
# - linux-headers-generic:4.8.0.30.39
# - linux-tools-generic:4.8.0.30.39
# - linux-cloud-tools-generic:4.8.0.30.39
# - linux-generic:4.8.0.30.39
# - linux-headers-lowlatency:4.8.0.30.39
# - linux-image-lowlatency:4.8.0.30.39
# - linux-tools-lowlatency:4.8.0.30.39
# - linux-cloud-tools-lowlatency:4.8.0.30.39
# - linux-lowlatency:4.8.0.30.39
# - linux-signed-image-generic:4.8.0.30.39
# - linux-signed-generic:4.8.0.30.39
# - linux-signed-image-lowlatency:4.8.0.30.39
# - linux-signed-lowlatency:4.8.0.30.39
# - linux-image-hwe-generic-trusty:4.8.0.30.39
# - linux-hwe-generic-trusty:4.8.0.30.39
#
# Last versions recommended by the security team:
# - linux-image-4.8.0-30-generic:4.8.0-30.32
# - linux-source-4.8.0:4.8.0-37.39
# - linux-doc:4.8.0-37.39
# - linux-headers-4.8.0-30:4.8.0-30.32
# - linux-libc-dev:4.8.0-37.39
# - linux-tools-common:4.8.0-37.39
# - linux-tools-4.8.0-30:4.8.0-30.32
# - linux-cloud-tools-common:4.8.0-37.39
# - linux-cloud-tools-4.8.0-30:4.8.0-30.32
# - linux-image-extra-4.8.0-30-generic:4.8.0-30.32
# - linux-headers-4.8.0-30-generic:4.8.0-30.32
# - linux-tools-4.8.0-30-generic:4.8.0-30.32
# - linux-cloud-tools-4.8.0-30-generic:4.8.0-30.32
# - linux-image-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-headers-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-tools-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-cloud-tools-4.8.0-30-lowlatency:4.8.0-30.32
# - linux-image-generic:4.8.0.37.46
# - linux-source:4.8.0.37.46
# - linux-crashdump:4.8.0.37.46
# - linux-headers-generic:4.8.0.37.46
# - linux-tools-generic:4.8.0.37.46
# - linux-cloud-tools-generic:4.8.0.37.46
# - linux-generic:4.8.0.37.46
# - linux-headers-lowlatency:4.8.0.37.46
# - linux-image-lowlatency:4.8.0.37.46
# - linux-tools-lowlatency:4.8.0.37.46
# - linux-cloud-tools-lowlatency:4.8.0.37.46
# - linux-lowlatency:4.8.0.37.46
# - linux-signed-image-generic:4.8.0.37.46
# - linux-signed-generic:4.8.0.37.46
# - linux-signed-image-lowlatency:4.8.0.37.46
# - linux-signed-lowlatency:4.8.0.37.46
# - linux-image-hwe-generic-trusty:4.8.0.37.46
# - linux-hwe-generic-trusty:4.8.0.37.46
#
# CVE List:
# - CVE-2016-8655
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade each affected package to its fixed version, in the original order.
for pkg in \
    linux-image-4.8.0-30-generic=4.8.0-30.32 \
    linux-source-4.8.0=4.8.0-37.39 \
    linux-doc=4.8.0-37.39 \
    linux-headers-4.8.0-30=4.8.0-30.32 \
    linux-libc-dev=4.8.0-37.39 \
    linux-tools-common=4.8.0-37.39 \
    linux-tools-4.8.0-30=4.8.0-30.32 \
    linux-cloud-tools-common=4.8.0-37.39 \
    linux-cloud-tools-4.8.0-30=4.8.0-30.32 \
    linux-image-extra-4.8.0-30-generic=4.8.0-30.32 \
    linux-headers-4.8.0-30-generic=4.8.0-30.32 \
    linux-tools-4.8.0-30-generic=4.8.0-30.32 \
    linux-cloud-tools-4.8.0-30-generic=4.8.0-30.32 \
    linux-image-4.8.0-30-lowlatency=4.8.0-30.32 \
    linux-headers-4.8.0-30-lowlatency=4.8.0-30.32 \
    linux-tools-4.8.0-30-lowlatency=4.8.0-30.32 \
    linux-cloud-tools-4.8.0-30-lowlatency=4.8.0-30.32 \
    linux-image-generic=4.8.0.37.46 \
    linux-source=4.8.0.37.46 \
    linux-crashdump=4.8.0.37.46 \
    linux-headers-generic=4.8.0.37.46 \
    linux-tools-generic=4.8.0.37.46 \
    linux-cloud-tools-generic=4.8.0.37.46 \
    linux-generic=4.8.0.37.46 \
    linux-headers-lowlatency=4.8.0.37.46 \
    linux-image-lowlatency=4.8.0.37.46 \
    linux-tools-lowlatency=4.8.0.37.46 \
    linux-cloud-tools-lowlatency=4.8.0.37.46 \
    linux-lowlatency=4.8.0.37.46 \
    linux-signed-image-generic=4.8.0.37.46 \
    linux-signed-generic=4.8.0.37.46 \
    linux-signed-image-lowlatency=4.8.0.37.46 \
    linux-signed-lowlatency=4.8.0.37.46 \
    linux-image-hwe-generic-trusty=4.8.0.37.46 \
    linux-hwe-generic-trusty=4.8.0.37.46
do
    sudo apt-get install --only-upgrade "$pkg" -y
done
|
#include <stdio.h>
#include <string.h>
/* Demonstrates converting an int to its decimal string form. */
int main(void){
    /* integer to convert to a string */
    int x = 123;

    /* Buffer sized for the widest 32-bit int ("-2147483648") plus the NUL
       terminator; the original char[10] could overflow for large/negative
       values. */
    char str[12];

    /* snprintf bounds the write to the buffer, unlike sprintf. */
    snprintf(str, sizeof str, "%d", x);

    printf("Integer %d has been converted to string %s. \n", x, str);
    return 0;
}
<reponame>MichalTuleja/chat-node-js<filename>public_html/js/viewmodel/msg_panel.js
define(['knockout', 'view/chatbox'], function(ko, WindowResizer) {
    // View-model for the message panel: holds the observable message list
    // and appends incoming chat messages as they arrive on the signal hub.
    return function() {
        var self = this;

        // Observable list of message objects rendered by the view.
        this.messages = ko.observableArray();

        var windowResizer = new WindowResizer();

        // Seed messages shown at startup (empty by default).
        var msgObjArr = [];

        var initialize = function() {
            self.addMsg('System', 'Welcome to the chat application!');
            // `signal` is assumed to be a global event hub — TODO confirm it is
            // defined before this module loads.
            signal.msgReceived.add(msgReceived);
            // BUG FIX: `k` was an undeclared loop variable (implicit global);
            // declare it locally with `var`.
            for (var k = 0; k < 1; k++) {
                for (var i = 0; i < msgObjArr.length; i++) {
                    self.addMsg(msgObjArr[i].user, msgObjArr[i].msg);
                }
            }
            setTimeout(self.scrollToBottom, 100);
        };

        // Appends a timestamped message and keeps the view scrolled to the end.
        this.addMsg = function(user, msg) {
            var date = new Date();
            // BUG FIX: the original literal declared `msg` twice
            // ({msg: true, ..., msg: msg}); the later key won, so only the
            // meaningful `msg: msg` entry is kept.
            self.messages.push({user: user, time: date, formattedTime: formatDate(date), msg: msg});
            windowResizer.scrollToBottom();
        };

        // Handles a raw incoming message: JSON payloads carry user+text,
        // plain strings are shown as system messages.
        var msgReceived = function(msgData) {
            try {
                msgData = JSON.parse(msgData);
            } catch(e) {
                console.log('Warning: incoming message is not in JSON format.');
            }
            try {
                if(typeof msgData === 'object') {
                    var user = msgData.user.name;
                    var msg = msgData.msg.text;
                    self.addMsg(user, msg);
                }
                else if(typeof msgData === 'string') {
                    self.addMsg('System', msgData);
                }
            } catch(e) {
                console.log('Broken message received.');
            }
        };

        // Formats a Date as "H:MM" with zero-padded minutes.
        var formatDate = function(date) {
            var min = date.getMinutes().toString();
            if (min.length === 1) {
                min = "0" + min;
            }
            return date.getHours() + ':' + min;
        };

        initialize();
    };
});
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed so the run is
# reproducible:
#   -d 9          select one test by its dieharder test number
#   -g 45         select the generator under test by its generator id
#   -S 1437067766 fix the RNG seed
# (Exact test/generator meanings depend on the installed dieharder version —
# confirm with `dieharder -l` / `dieharder -g -1`.)
dieharder -d 9 -g 45 -S 1437067766
#!/bin/bash -l
# Strict mode: exit on error, fail pipelines on any stage, trace commands.
set -eox pipefail
# Absolute directory containing this script.
CWDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# NOTE(review): common.bash presumably defines install_gpdb, make_cluster and
# run_test used by _main below — confirm.
source "${CWDIR}/common.bash"
# Writes /opt/run_test.sh, the behave entry point executed later by run_test.
# The heredoc delimiter is unquoted, so ${BEHAVE_TAGS}/${BEHAVE_FLAGS} are
# expanded NOW (baked into the generated file), while every \${...} escape is
# deferred until the generated script actually runs.
function gen_env(){
cat > /opt/run_test.sh <<-EOF
BEHAVE_TAGS="${BEHAVE_TAGS}"
BEHAVE_FLAGS="${BEHAVE_FLAGS}"
source /usr/local/greenplum-db-devel/greenplum_path.sh
cd "\${1}/gpdb_src/gpAux"
source gpdemo/gpdemo-env.sh
cd "\${1}/gpdb_src/gpMgmt/"
if [ ! -z "\${BEHAVE_TAGS}" ]; then
make -f Makefile.behave behave tags=\${BEHAVE_TAGS}
else
flags="\${BEHAVE_FLAGS}" make -f Makefile.behave behave
fi
EOF
# Make the generated runner executable for all users.
chmod a+x /opt/run_test.sh
}
function gpcheck_setup() {
    # gpcheck verifies specific system settings as a prerequisite.
    # Containers normally lack root access when running as gpadmin, so the
    # required settings are written here before the test runs. Note that for
    # this test we only edit the conf files: gpcheck inspects the conf files,
    # not the live runtime settings.
    echo "
xfs_mount_options = rw,noatime,inode64,allocsize=16m
kernel.shmmax = 500000000
kernel.shmmni = 4096
kernel.shmall = 4000000000
kernel.sem = 250 512000 100 2048
kernel.sysrq = 1
kernel.core_uses_pid = 1
kernel.msgmnb = 65536
kernel.msgmax = 65536
kernel.msgmni = 2048
net.ipv4.tcp_syncookies = 1
net.ipv4.ip_forward = 0
net.ipv4.conf.default.accept_source_route = 0
net.ipv4.tcp_tw_recycle = 1
net.ipv4.tcp_max_syn_backlog = 4096
net.ipv4.conf.all.arp_filter = 1
net.ipv4.ip_local_port_range = 1025 65535
net.core.netdev_max_backlog = 10000
vm.overcommit_memory = 2" >> /etc/sysctl.conf
    # Raise per-user file-descriptor and process limits as gpcheck expects.
    echo "
* soft nofile 65536
* hard nofile 65536
* soft nproc 131072
* hard nproc 131072" >> /etc/security/limits.conf
}
# Entry point: validate required env vars, build the cluster, then run behave.
function _main() {
    # At least one behave selector must be provided.
    if [[ -z "${BEHAVE_TAGS}" && -z "${BEHAVE_FLAGS}" ]]; then
        echo "FATAL: BEHAVE_TAGS or BEHAVE_FLAGS not set"
        exit 1
    fi

    time install_gpdb
    time ./gpdb_src/concourse/scripts/setup_gpadmin_user.bash
    time make_cluster
    time gen_env

    # Optionally pre-populate the conf files that gpcheck inspects.
    if [[ "${GPCHECK_SETUP}" == "true" ]]; then
        time gpcheck_setup
    fi

    time run_test
}

_main "$@"
|
#!/bin/sh
# Build the premake release configuration inside the debian_mta container.
# The repository root (two directories up) is mounted at /mnt; any extra
# arguments are forwarded to make via $@.
docker run -it -v "$PWD/../..:/mnt" debian_mta /usr/bin/make -C /mnt/premake-build config=release $@
|
<reponame>SowahCore/PacMan
import java.awt.event.*;
import javax.swing.*;
/** Controller for the game's menu bar: routes menu clicks to game actions. */
public class ControlMenu extends Controller implements ActionListener {

    /** Registers this controller as the view's menu handler. */
    public ControlMenu(Model model, Vue f) {
        super(model, f);
        f.setMenuControler(this);
    }

    /** Dispatches each menu item to its action: new game, about box, or character choice. */
    public void actionPerformed(ActionEvent e) {
        final Object source = e.getSource();
        if (source == f.getNouvellePartie()) {
            super.newGame();
        }
        if (source == f.getAPropos()) {
            f.creerDialog("PacMan realiser par : \n\n- <NAME> (chef de projet)\n- <NAME> (manoeuvre de haute performance) \n (5 ou 6 étudiants : he he he, quoi ?)");
        }
        if (source == f.getPacmanM()) {
            super.selectPerso("pacman");
        }
        if (source == f.getPikachuM()) {
            super.selectPerso("pikachu");
        }
        if (source == f.getSonicM()) {
            super.selectPerso("sonic");
        }
        if (source == f.getVoltaliM()) {
            super.selectPerso("voltali");
        }
        if (source == f.getMegamanM()) {
            super.selectPerso("megaman");
        }
    }
}
|
<gh_stars>0
// Pure selector: returns the "turn" message while the game is active,
// otherwise the "ending" message.
export default function selectMessage({ isActive, messages }) {
  if (isActive) {
    return messages.turn;
  }
  return messages.ending;
}
|
package com.nexusdevs.shoppersdeal.admin.dto;
/**
 * Data-transfer object for a product category in the admin API.
 * Plain mutable bean: no validation or behavior beyond accessors.
 */
public class Category {
    // Storage identifier of this record.
    private String id;
    // Business identifier of the category.
    private String categoryId;
    // Display name of the category.
    private String category;
    // Creation timestamp (epoch-based long; unit not shown here — TODO confirm millis).
    private long createTime;
    // Last-update timestamp (same unit as createTime).
    private long updateTime;
    // Soft-delete flag.
    private boolean deleted;
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getCategoryId() {
        return categoryId;
    }
    public void setCategoryId(String categoryId) {
        this.categoryId = categoryId;
    }
    public String getCategory() {
        return category;
    }
    public void setCategory(String category) {
        this.category = category;
    }
    public long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(long createTime) {
        this.createTime = createTime;
    }
    public long getUpdateTime() {
        return updateTime;
    }
    public void setUpdateTime(long updateTime) {
        this.updateTime = updateTime;
    }
    public boolean isDeleted() {
        return deleted;
    }
    public void setDeleted(boolean deleted) {
        this.deleted = deleted;
    }
}
|
#!/bin/bash
# Build and upload (to GitHub) for all platforms for version $1.
set -e

# Resolve paths relative to this script; $(...) and quoting make the script
# robust when it lives in a directory containing spaces.
DIR=$(dirname "$0")
ROOT="$DIR/.."

# Require the version argument instead of silently releasing an empty version.
VSN="$1"
if [ -z "$VSN" ]; then
    echo "usage: $0 <version>" >&2
    exit 1
fi

"$ROOT/scripts/release.sh" "cidr2range" "$VSN"
|
<filename>src/components/old_components/PopupMenu/styledComponents/index.js
import styled from 'styled-components';
// Absolutely-positioned popup container. The theme supplies the text color
// (darkFontColor); the drop shadow lifts the popup above surrounding content.
export const PopupWrapper = styled.div`
  position: absolute;
  left: 5px;
  padding: 10px 15px;
  min-width: 100px;
  background-color: #fff;
  border-radius: 5px;
  color: ${props => props.theme.darkFontColor};
  box-shadow: 2px 0px 24px -6px rgba(0,0,0,0.75);
  cursor: default;
`;
|
#!/usr/bin/env bash
#
# Copyright (c) 2018-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
# When cross-running under QEMU user emulation, replace every test binary
# with a tiny shell shim that invokes the original through $QEMU_USER_CMD.
if [ -n "$QEMU_USER_CMD" ]; then
BEGIN_FOLD wrap-qemu
# Generate all binaries, so that they can be wrapped
DOCKER_EXEC make $MAKEJOBS -C src/secp256k1 VERBOSE=1
DOCKER_EXEC make $MAKEJOBS -C src/univalue VERBOSE=1
for b_name in {"${BASE_OUTDIR}/bin"/*,src/secp256k1/*tests,src/univalue/{no_nul,test_json,unitester,object}}; do
# shellcheck disable=SC2044
for b in $(find "${BASE_ROOT_DIR}" -executable -type f -name $(basename $b_name)); do
echo "Wrap $b ..."
# Move the real binary aside, then write a shim in its place. The escaped
# quotes survive DOCKER_EXEC's shell so the generated script reads:
#   $QEMU_USER_CMD "<binary>_orig" "$@"
DOCKER_EXEC mv "$b" "${b}_orig"
DOCKER_EXEC echo "\#\!/usr/bin/env bash" \> "$b"
DOCKER_EXEC echo "$QEMU_USER_CMD \\\"${b}_orig\\\" \\\"\\\$@\\\"" \>\> "$b"
DOCKER_EXEC chmod +x "$b"
done
done
END_FOLD
fi
# Optionally wrap binaries to run under valgrind instead.
if [ -n "$USE_VALGRIND" ]; then
BEGIN_FOLD wrap-valgrind
DOCKER_EXEC "${BASE_ROOT_DIR}/ci/test/wrap-valgrind.sh"
END_FOLD
fi
# Unit tests; CI_WAIT prints keep-alive output so CI does not time out.
if [ "$RUN_UNIT_TESTS" = "true" ]; then
BEGIN_FOLD unit-tests
bash -c "${CI_WAIT}" & # Print dots in case the unit tests take a long time to run
DOCKER_EXEC LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib make $MAKEJOBS check VERBOSE=1
END_FOLD
fi
# Functional (integration) test suite.
if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
BEGIN_FOLD functional-tests
DOCKER_EXEC test/functional/test_runner.py --ci $MAKEJOBS --tmpdirprefix "${BASE_SCRATCH_DIR}/test_runner/" --ansi --combinedlogslen=4000 ${TEST_RUNNER_EXTRA} --quiet --failfast
END_FOLD
fi
# Fuzz targets against the seed corpus in DIR_FUZZ_IN.
if [ "$RUN_FUZZ_TESTS" = "true" ]; then
BEGIN_FOLD fuzz-tests
DOCKER_EXEC test/fuzz/test_runner.py -l DEBUG ${DIR_FUZZ_IN}
END_FOLD
fi
|
def common_items(dict1, dict2):
    """Return the key/value pairs present with equal values in both dicts.

    Args:
        dict1: First mapping.
        dict2: Second mapping.

    Returns:
        A new dict containing every key that appears in both inputs with the
        same associated value. Empty inputs yield an empty dict.
    """
    # Dict comprehension replaces the manual accumulator loop.
    return {key: value for key, value in dict1.items()
            if key in dict2 and dict2[key] == value}
<filename>src/login/constants.ts
// Redux action-type constants for the login flow. The string values are the
// action wire format — renaming them would break persisted/observed actions.
export const REQUEST = 'login/request';
export const REQUEST_ERROR = 'login/requestError';
export const REQUEST_SUCCESS = 'login/requestSuccess';
export const SUBMIT_IDENTITY = 'login/submitIdentity';
export const VERIFY_ACCOUNT = 'login/verifyAccount';
// NOTE(review): 'login/VERIFIED' breaks the camelCase convention of the other
// values — confirm with consumers before normalizing the string.
export const VERIFIED = 'login/VERIFIED';
|
#!/bin/bash
# Run_PeakSpeed2.sh
# Runs the peakSpeed2 benchmark ELF on the Imperas ISS as a single
# little-endian Cortex-A57 core; extra command-line args pass through via "$@"
# and 500000000 is handed to the guest program as its argv.
# Check Environment
if [ -z ${IMPERAS_HOME} ]; then
echo "IMPERAS_HOME not set. Please check environment setup."
exit
fi
${IMPERAS_ISS} --verbose --output imperas.log \
    --program ../../../Applications/peakSpeed2/peakSpeed2.AARCH64-O0-g.elf \
    --processorvendor arm.ovpworld.org --processorname arm --variant Cortex-A57MPx1 \
    --numprocessors 1 \
    --parameter endian=little \
    "$@" \
    -argv 500000000
|
import React, {PropTypes}from 'react';
import {connect} from 'react-redux';
import {Table, Image} from 'react-bootstrap';
import '../../styles/CandidateManagerPage/candidate-manager-icon.css';
class InterviewerCandidateTable extends React.Component {
constructor(props) {
super(props);
this.checkNull = this.checkNull.bind(this);
this.setStatusColor = this.setStatusColor.bind(this);
}
setStatusColor(status) {
if(status == "未通过") {
return (<label style={{color:"#FF0080"}}>未通过</label>);
}
else if(status == "通过") {
return (<label style={{color:"#28FF28"}}>通过</label>);
}
else {
return (<label style={{color:"##000000"}}>未面试</label>);
}
}
checkNull() {
if(this.props.candidateManager.length) {
return (<tbody>{this.props.candidateManager.map(candidate =>
<tr key={candidate.id}>
<td>{candidate.name}</td>
<td>{candidate.email}</td>
<td>{candidate.phone}</td>
<td>{this.props.rooms.find(room => room.id === candidate.roomId) && this.props.rooms.find(room => room.id === candidate.roomId).name}</td>
<td className="icon">
<a href={candidate.record && candidate.record.video} target="_blank"><Image src="../../images/1.png" width={17} height={17} /></a>
<a href={candidate.record && candidate.record.chat} target="_blank"><Image src="../../images/2.png" width={17} height={17} /></a>
<a href={candidate.record && candidate.record.code} target="_blank"><Image src="../../images/3.png" width={13} height={13} /></a>
<a href={candidate.record && candidate.record.board} target="_blank"><Image src="../../images/4.png" width={13} height={13} /></a>
<a href={candidate.record && candidate.record.report} target="_blank"><Image src="../../images/5.png" width={15} height={15} /></a>
</td>
<td>{this.setStatusColor(candidate.status)}</td>
</tr>)}</tbody>);
}
return (<tbody><label>暂无候选人</label></tbody>);
}
render() {
return (
<Table responsive>
<thead>
<tr>
<th>姓名</th>
<th>邮箱</th>
<th>手机</th>
<th>房间</th>
<th>面试记录</th>
<th>面试状态</th>
</tr>
</thead>
{this.checkNull()}
</Table>
);
}
}
// Runtime prop contracts: both the candidate list and the room list are required.
InterviewerCandidateTable.propTypes = {
  candidateManager: PropTypes.arrayOf(PropTypes.object).isRequired,
  rooms: PropTypes.arrayOf(PropTypes.object).isRequired,
};
// Select the candidates and rooms out of the redux store for this component.
function mapStateToProps(state) {
  return {
    candidateManager: state.candidatesStates.candidates,
    rooms: state.roomsStates.rooms
  };
}
// Connected component (no dispatch props needed).
export default connect(mapStateToProps)(InterviewerCandidateTable);
|
#!/bin/sh
# wal-g integration test setup: initialize a PostgreSQL 10 cluster whose WAL
# archiving pushes segments to a local MinIO S3 endpoint via wal-g.
set -e -x
# Create a fresh data directory.
/usr/lib/postgresql/10/bin/initdb ${PGDATA}
# Enable WAL archiving; the archive_command below runs wal-g wal-push with
# inline (test-only) credentials, bounded by a 10-minute timeout.
echo "archive_mode = on" >> /var/lib/postgresql/10/main/postgresql.conf
echo "archive_command = '\
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=3 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
/usr/bin/timeout 600 /usr/bin/wal-g wal-push %p'" >> /var/lib/postgresql/10/main/postgresql.conf
echo "archive_timeout = 600" >> /var/lib/postgresql/10/main/postgresql.conf
# Start the server and wait until it accepts connections.
/usr/lib/postgresql/10/bin/pg_ctl -D ${PGDATA} -w start
/tmp/scripts/wait_while_pg_not_ready.sh
# Start from a clean bucket: delete all backups of any kind.
#delete all backups of any
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=3 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g delete everything FORCE --confirm
# Push four backups while pgbench churns data in the background; the third
# backup is marked --permanent and its dump is saved for later comparison.
# push permanent and impermanent delta backups
for i in 1 2 3 4
do
pgbench -i -s 1 postgres &
sleep 1
# The third iteration takes the permanent backup and records its state.
if [ $i -eq 3 ]
then
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=3 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-push --permanent ${PGDATA}
pg_dumpall -f /tmp/dump1
else
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=3 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-push ${PGDATA}
fi
done
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=3 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-list --detail
# delete backups by pushing a full backup and running `delete retain 1`
# this should only delete the last impermanent delta backup
pgbench -i -s 1 postgres &
sleep 1
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-push ${PGDATA}
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-list --detail
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g delete retain 1 --confirm
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-list
# restore the backup and compare with previous state
/tmp/scripts/drop_pg.sh
first_backup_name=`\
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-list | sed '2q;d' | cut -f 1 -d " "`
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
wal-g backup-fetch ${PGDATA} $first_backup_name
echo "restore_command = 'echo \"WAL file restoration: %f, %p\"&& \
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=0 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
WALG_USE_WAL_DELTA=true \
/usr/bin/wal-g wal-fetch \"%f\" \"%p\"'" > ${PGDATA}/recovery.conf
# Every wal-g invocation in this test shares the same credentials and
# storage configuration.  Factor them into one helper so the settings
# live in a single place.  `env` scopes the variables to the single
# wal-g process, exactly like the original per-command VAR=... prefixes.
walg() {
    env \
        AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
        AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
        AWS_ENDPOINT=http://s3:9000 \
        AWS_S3_FORCE_PATH_STYLE=true \
        WALG_COMPRESSION_METHOD=brotli \
        WALG_DELTA_MAX_STEPS=0 \
        WALG_UPLOAD_CONCURRENCY=10 \
        WALG_DISK_RATE_LIMIT=41943040 \
        WALG_NETWORK_RATE_LIMIT=10485760 \
        PGSSLMODE=allow \
        PGDATABASE=postgres \
        PGHOST=/var/run/postgresql \
        WALE_S3_PREFIX=s3://deletebeforepermanentdeltabucket \
        WALG_USE_WAL_DELTA=true \
        wal-g "$@"
}

# Restart the restored cluster and verify the dump matches the original.
/usr/lib/postgresql/10/bin/pg_ctl -D ${PGDATA} -w start
pg_dumpall -f /tmp/dump2
diff /tmp/dump1 /tmp/dump2
walg backup-list --detail
# delete all backups after previous tests
walg delete everything FORCE --confirm
# make impermanent base backup
walg backup-push ${PGDATA}
# Remember the impermanent backup's 24-hex-digit id.
imperm_backup=$(walg backup-list | egrep -o "[0-9A-F]{24}")
# make permanent base backup
walg backup-push --permanent ${PGDATA}
walg backup-list --detail
# check that nothing changed when permanent backups exist
walg backup-list > /tmp/dump1
# Without FORCE, "delete everything" must refuse while a permanent
# backup exists, so tolerate the non-zero exit status.
walg delete everything --confirm || true
walg backup-list > /tmp/dump2
diff /tmp/dump1 /tmp/dump2
rm /tmp/dump2
touch /tmp/dump2
# delete all backups
walg delete everything FORCE --confirm
walg backup-list 2> /tmp/2 1> /tmp/1
# check that stdout does not include any backup id
! cat /tmp/1 | egrep -o "[0-9A-F]{24}" > /tmp/dump1
diff /tmp/dump1 /tmp/dump2
# check that stderr does not include any backup id
# stderr should be "INFO: ... No backups found"
! cat /tmp/2 | egrep -o "[0-9A-F]{24}" > /tmp/dump1
diff /tmp/dump1 /tmp/dump2
/tmp/scripts/drop_pg.sh
echo "Delete before permanent delta success!!!!!!"
|
#!/bin/bash
# Run the dineroIV cache simulator over each benchmark trace for a range
# of L1 block sizes, appending all results to $REPO/test2.txt.
declare -a traces=("164.gzip.f2b" "176.gcc.m2b" "177.mesa.f2b" "254.gap.m2b")
declare -a tfiles=("gzip_f2b" "gcc_m2b" "mesa_f2b" "gap_m2b")
declare -a bsizes=(2 4 8 16 32)
export REPO="/home/ubuntu/code/MC733/exercicio2"
# Guard the cd: if it fails, the unguarded original would have run
# dineroIV from whatever the current directory happened to be.
cd /home/ubuntu/benchmarks/traces || exit 1
#cd /opt/mc723/benchmarks/traces
for ((i = 0; i < ${#traces[@]}; ++i))
do
    cd "${traces[i]}" || exit 1
    for l in "${bsizes[@]}"
    do
        # Quote expansions so nothing in the trace names can word-split.
        dineroIV -informat s -trname "${tfiles[i]}" -maxtrace 101 -l1-isize 32K -l1-dsize 32K -l1-ibsize "$l" -l1-dbsize "$l" >> "$REPO/test2.txt"
    done
    cd ../
done
|
/**
 * Return the longest palindromic substring of `str`.
 *
 * Uses expand-around-center: O(n^2) time, O(1) extra space, versus the
 * original O(n^3) check of every substring via split/reverse/join.
 * Ties are broken in favor of the earliest (leftmost) palindrome, which
 * matches the original's strict `maxLength <` comparison.  The original
 * also shadowed the function name with a local variable; that is gone.
 *
 * @param {string} str input string (empty string returns '')
 * @returns {string} the longest palindromic substring
 */
function longestPalindrome(str) {
  // Expand outward from lo/hi while the characters match; return the
  // half-open palindromic span [start, end).
  const expand = (lo, hi) => {
    while (lo >= 0 && hi < str.length && str[lo] === str[hi]) {
      lo--;
      hi++;
    }
    return [lo + 1, hi];
  };

  let best = '';
  for (let center = 0; center < str.length; center++) {
    // Odd-length palindromes centered on `center`.
    const [s1, e1] = expand(center, center);
    if (e1 - s1 > best.length) best = str.substring(s1, e1);
    // Even-length palindromes centered between `center` and `center + 1`.
    const [s2, e2] = expand(center, center + 1);
    if (e2 - s2 > best.length) best = str.substring(s2, e2);
  }
  return best;
}
const result = longestPalindrome('myracecar');
console.log(result);
#!/bin/bash
# CI entry point for the Smokey test suite: installs gems, then runs
# cucumber against the platform named by $TARGET_PLATFORM.
# Echo each command as it runs so the CI log shows exactly what executed.
set -x
# Vendor gems per CI job under $HOME/bundles to isolate concurrent jobs.
bundle install --path "${HOME}/bundles/${JOB_NAME}" --deployment
# Capture rest-client HTTP traffic for post-run debugging.
export RESTCLIENT_LOG="log/smokey-rest-client.log"
export ENVIRONMENT=${TARGET_PLATFORM}
# Build the cucumber argument list as an array so values containing
# spaces (e.g. the "not @benchmarking" tag expression) survive intact.
FLAGS=(--profile "${TARGET_PLATFORM}")
FLAGS+=(--strict-undefined)
FLAGS+=(-t "not @benchmarking")
# Optionally narrow the run to a single application's tagged scenarios.
if [ -n "${TARGET_APPLICATION}" ]; then
FLAGS+=(-t "@app-${TARGET_APPLICATION}")
fi
govuk_setenv smokey bundle exec cucumber "${FLAGS[@]}"
|
<reponame>xoxox4dev/madedit
#ifndef BOOST_PP_IS_ITERATING
///////////////////////////////////////////////////////////////////////////////
/// \file call.hpp
/// Contains definition of the call<> transform.
//
// Copyright 2008 <NAME>. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_PROTO_TRANSFORM_CALL_HPP_EAN_11_02_2007
#define BOOST_PROTO_TRANSFORM_CALL_HPP_EAN_11_02_2007
#include <boost/xpressive/proto/detail/prefix.hpp>
#include <boost/preprocessor/cat.hpp>
#include <boost/preprocessor/iteration/iterate.hpp>
#include <boost/preprocessor/repetition/enum.hpp>
#include <boost/preprocessor/repetition/repeat.hpp>
#include <boost/preprocessor/repetition/enum_params.hpp>
#include <boost/preprocessor/repetition/enum_trailing_params.hpp>
#include <boost/utility/result_of.hpp>
#include <boost/xpressive/proto/proto_fwd.hpp>
#include <boost/xpressive/proto/traits.hpp>
#include <boost/xpressive/proto/detail/dont_care.hpp>
#include <boost/xpressive/proto/detail/as_lvalue.hpp>
#include <boost/xpressive/proto/detail/suffix.hpp>
namespace boost { namespace proto
{
namespace transform
{
namespace detail
{
    using proto::detail::uncv;
    using proto::detail::as_lvalue;
    using proto::detail::dont_care;

    // Pre-C++11 sizeof-based SFINAE used below to detect, at compile
    // time, whether Fun is callable with 0, 1, or 2 arguments.
    typedef char (&yes_type)[2];
    typedef char no_type;

    // A "poison" type no user function object returns.  Its comma
    // operator propagates itself through the `(fun(args), 0)` trial
    // expression, so check_fun_arity resolves to the no_type overload
    // when (and only when) the surrogate-call fallback was selected.
    struct private_type_
    {
        private_type_ const &operator ,(int) const;
    };

    template<typename T>
    yes_type check_fun_arity(T const &);
    no_type check_fun_arity(private_type_ const &);

    // Each callableN_wrap inherits Fun's operator() and adds a
    // conversion to a function pointer returning private_type_.  If
    // Fun's own operator() is not viable for N arguments, overload
    // resolution falls back to the surrogate call through that pointer,
    // which yields private_type_ (detected above).
    template<typename Fun>
    struct callable0_wrap : Fun
    {
        callable0_wrap();
        typedef private_type_ const &(*pfun0)();
        operator pfun0() const;
    };

    template<typename Fun>
    struct callable1_wrap : Fun
    {
        callable1_wrap();
        typedef private_type_ const &(*pfun1)(dont_care);
        operator pfun1() const;
    };

    template<typename Fun>
    struct callable2_wrap : Fun
    {
        callable2_wrap();
        typedef private_type_ const &(*pfun2)(dont_care, dont_care);
        operator pfun2() const;
    };

    // arityN::value is N when Fun accepts N arguments, otherwise 3
    // (meaning "treat Fun as a 3-argument transform").  The statics are
    // declared but never defined: they exist only inside sizeof().
    template<typename Fun>
    struct arity0
    {
        static callable0_wrap<Fun> &fun;
        static int const value =
            sizeof(yes_type) == sizeof(check_fun_arity((fun(), 0)))
          ? 0
          : 3;
    };

    template<typename Fun, typename A0>
    struct arity1
    {
        static callable1_wrap<Fun> &fun;
        static A0 &a0;
        static int const value =
            sizeof(yes_type) == sizeof(check_fun_arity((fun(a0), 0)))
          ? 1
          : 3;
    };

    template<typename Fun, typename A0, typename A1>
    struct arity2
    {
        static callable2_wrap<Fun> &fun;
        static A0 &a0;
        static A1 &a1;
        static int const value =
            sizeof(yes_type) == sizeof(check_fun_arity((fun(a0, a1), 0)))
          ? 2
          : 3;
    };

    // Fallback invoker: call Fun as a 3-argument transform with the
    // full (expr, state, visitor) triple.
    template<typename Fun, typename Expr, typename State, typename Visitor>
    struct call3
    {
        typedef typename boost::result_of<Fun(Expr, State, Visitor)>::type type;

        template<typename Expr2, typename State2, typename Visitor2>
        static type call(Expr2 &expr, State2 &state, Visitor2 &visitor)
        {
            Fun f;
            return f(expr, state, visitor);
        }
    };

    // callN: invoke Fun with N arguments when its detected arity is N,
    // otherwise fall back to the 3-argument call3 form.  The primary
    // template handles the fallback; the specialization on the exact
    // arity performs the N-argument call.
    template<typename Fun, typename Expr, typename State, typename Visitor
      , int Arity = arity0<Fun>::value>
    struct call0
      : call3<Fun, Expr, State, Visitor>
    {};

    template<typename Fun, typename Expr, typename State, typename Visitor>
    struct call0<Fun, Expr, State, Visitor, 0>
    {
        typedef typename boost::result_of<Fun()>::type type;

        template<typename Expr2, typename State2, typename Visitor2>
        static type call(Expr2 &, State2 &, Visitor2 &)
        {
            Fun f;
            return f();
        }
    };

    template<typename Fun, typename Expr, typename State, typename Visitor
      , int Arity = arity1<Fun, Expr>::value>
    struct call1
      : call3<Fun, Expr, State, Visitor>
    {};

    template<typename Fun, typename Expr, typename State, typename Visitor>
    struct call1<Fun, Expr, State, Visitor, 1>
    {
        typedef typename boost::result_of<Fun(Expr)>::type type;

        template<typename Expr2, typename State2, typename Visitor2>
        static type call(Expr2 &expr, State2 &, Visitor2 &)
        {
            Fun f;
            return f(expr);
        }
    };

    template<typename Fun, typename Expr, typename State, typename Visitor
      , int Arity = arity2<Fun, Expr, State>::value>
    struct call2
      : call3<Fun, Expr, State, Visitor>
    {};

    template<typename Fun, typename Expr, typename State, typename Visitor>
    struct call2<Fun, Expr, State, Visitor, 2>
    {
        typedef typename boost::result_of<Fun(Expr, State)>::type type;

        template<typename Expr2, typename State2, typename Visitor2>
        static type call(Expr2 &expr, State2 &state, Visitor2 &)
        {
            Fun f;
            return f(expr, state);
        }
    };
} // namespace detail
/// \brief Wrap \c PrimitiveTransform so that <tt>when\<\></tt> knows
/// it is callable. Requires that the parameter is actually a
/// PrimitiveTransform.
///
/// This form of <tt>call\<\></tt> is useful for annotating an
/// arbitrary PrimitiveTransform as callable when using it with
/// <tt>when\<\></tt>. Consider the following transform, which
/// is parameterized with another transform.
///
/// \code
/// template<typename Grammar>
/// struct Foo
/// : when<
/// posit<Grammar>
/// , Grammar(_arg) // May or may not work.
/// >
/// {};
/// \endcode
///
/// The problem with the above is that <tt>when\<\></tt> may or
/// may not recognize \c Grammar as callable, depending on how
/// \c Grammar is implemented. (See <tt>is_callable\<\></tt> for
/// a discussion of this issue.) The above code can guard against
/// the issue by wrapping \c Grammar in <tt>call\<\></tt>, such
/// as:
///
/// \code
/// template<typename Grammar>
/// struct Foo
/// : when<
/// posit<Grammar>
/// , call<Grammar>(_arg) // OK, this works
/// >
/// {};
/// \endcode
///
/// The above could also have been written as:
///
/// \code
/// template<typename Grammar>
/// struct Foo
/// : when<
/// posit<Grammar>
/// , call<Grammar(_arg)> // OK, this works, too
/// >
/// {};
/// \endcode
template<typename PrimitiveTransform>
struct call : PrimitiveTransform
{
    // Inherit the transform's behavior wholesale; the macro below only
    // marks the wrapper as callable so when<> treats it as such.
    BOOST_PROTO_CALLABLE()
};

/// \brief Either call the PolymorphicFunctionObject with 0
/// arguments, or invoke the PrimitiveTransform with 3
/// arguments.
template<typename Fun>
struct call<Fun()> : proto::callable
{
    template<typename Sig>
    struct result;

    template<typename This, typename Expr, typename State, typename Visitor>
    struct result<This(Expr, State, Visitor)>
    {
        /// If \c Fun is a nullary PolymorphicFunctionObject, \c type is a typedef
        /// for <tt>boost::result_of\<Fun()\>::::type</tt>. Otherwise, it is
        /// a typedef for <tt>boost::result_of\<Fun(Expr, State, Visitor)\>::::type</tt>.
        typedef
            typename detail::call0<
                Fun
              , Expr
              , State
              , Visitor
            >::type
        type;
    };

    /// Either call the PolymorphicFunctionObject \c Fun with 0 arguments; or
    /// invoke the PrimitiveTransform \c Fun with 3 arguments: the current
    /// expression, state, and visitor.
    ///
    /// If \c Fun is a nullary PolymorphicFunctionObject, return <tt>Fun()()</tt>.
    /// Otherwise, return <tt>Fun()(expr, state, visitor)</tt>.
    ///
    /// \param expr The current expression
    /// \param state The current state
    /// \param visitor An arbitrary visitor
    template<typename Expr, typename State, typename Visitor>
    typename result<void(Expr, State, Visitor)>::type
    operator ()(Expr const &expr, State const &state, Visitor &visitor) const
    {
        // detail::call0 dispatches on Fun's detected arity (0 vs. 3).
        typedef
            detail::call0<
                Fun
              , Expr
              , State
              , Visitor
            >
        impl;

        return impl::call(expr, state, visitor);
    }
};

/// \brief Either call the PolymorphicFunctionObject with 1
/// argument, or invoke the PrimitiveTransform with 3
/// arguments.
template<typename Fun, typename A0>
struct call<Fun(A0)> : proto::callable
{
    template<typename Sig>
    struct result;

    template<typename This, typename Expr, typename State, typename Visitor>
    struct result<This(Expr, State, Visitor)>
    {
        /// Let \c x be <tt>when\<_, A0\>()(expr, state, visitor)</tt> and \c X
        /// be the type of \c x.
        /// If \c Fun is a unary PolymorphicFunctionObject that accepts \c x,
        /// then \c type is a typedef for <tt>boost::result_of\<Fun(X)\>::::type</tt>.
        /// Otherwise, it is a typedef for <tt>boost::result_of\<Fun(X, State, Visitor)\>::::type</tt>.
        typedef
            typename detail::call1<
                Fun
              , typename when<_, A0>::template result<void(Expr, State, Visitor)>::type
              , State
              , Visitor
            >::type
        type;
    };

    /// Either call the PolymorphicFunctionObject with 1 argument:
    /// the result of applying the \c A0 transform; or
    /// invoke the PrimitiveTransform with 3 arguments:
    /// result of applying the \c A0 transform, the state, and the
    /// visitor.
    ///
    /// Let \c x be <tt>when\<_, A0\>()(expr, state, visitor)</tt>.
    /// If \c Fun is a unary PolymorphicFunctionObject that accepts \c x,
    /// then return <tt>Fun()(x)</tt>. Otherwise, return
    /// <tt>Fun()(x, state, visitor)</tt>.
    ///
    /// \param expr The current expression
    /// \param state The current state
    /// \param visitor An arbitrary visitor
    template<typename Expr, typename State, typename Visitor>
    typename result<void(Expr, State, Visitor)>::type
    operator ()(Expr const &expr, State const &state, Visitor &visitor) const
    {
        // detail::call1 dispatches on Fun's detected arity (1 vs. 3).
        typedef
            detail::call1<
                Fun
              , typename when<_, A0>::template result<void(Expr, State, Visitor)>::type
              , State
              , Visitor
            >
        impl;

        // as_lvalue keeps the transformed temporary bindable by reference.
        return impl::call(
            detail::as_lvalue(when<_, A0>()(expr, state, visitor))
          , state
          , visitor
        );
    }
};

/// \brief Either call the PolymorphicFunctionObject with 2
/// arguments, or invoke the PrimitiveTransform with 3
/// arguments.
template<typename Fun, typename A0, typename A1>
struct call<Fun(A0, A1)> : proto::callable
{
    template<typename Sig>
    struct result;

    template<typename This, typename Expr, typename State, typename Visitor>
    struct result<This(Expr, State, Visitor)>
    {
        /// Let \c x be <tt>when\<_, A0\>()(expr, state, visitor)</tt> and \c X
        /// be the type of \c x.
        /// Let \c y be <tt>when\<_, A1\>()(expr, state, visitor)</tt> and \c Y
        /// be the type of \c y.
        /// If \c Fun is a binary PolymorphicFunction object that accepts \c x
        /// and \c y, then \c type is a typedef for
        /// <tt>boost::result_of\<Fun(X, Y)\>::::type</tt>. Otherwise, it is
        /// a typedef for <tt>boost::result_of\<Fun(X, Y, Visitor)\>::::type</tt>.
        typedef
            typename detail::call2<
                Fun
              , typename when<_, A0>::template result<void(Expr, State, Visitor)>::type
              , typename when<_, A1>::template result<void(Expr, State, Visitor)>::type
              , Visitor
            >::type
        type;
    };

    /// Either call the PolymorphicFunctionObject with 2 arguments:
    /// the result of applying the \c A0 transform, and the
    /// result of applying the \c A1 transform; or invoke the
    /// PrimitiveTransform with 3 arguments: the result of applying
    /// the \c A0 transform, the result of applying the \c A1
    /// transform, and the visitor.
    ///
    /// Let \c x be <tt>when\<_, A0\>()(expr, state, visitor)</tt>.
    /// Let \c y be <tt>when\<_, A1\>()(expr, state, visitor)</tt>.
    /// If \c Fun is a binary PolymorphicFunction object that accepts \c x
    /// and \c y, return <tt>Fun()(x, y)</tt>. Otherwise, return
    /// <tt>Fun()(x, y, visitor)</tt>.
    ///
    /// \param expr The current expression
    /// \param state The current state
    /// \param visitor An arbitrary visitor
    template<typename Expr, typename State, typename Visitor>
    typename result<void(Expr, State, Visitor)>::type
    operator ()(Expr const &expr, State const &state, Visitor &visitor) const
    {
        // detail::call2 dispatches on Fun's detected arity (2 vs. 3).
        typedef
            detail::call2<
                Fun
              , typename when<_, A0>::template result<void(Expr, State, Visitor)>::type
              , typename when<_, A1>::template result<void(Expr, State, Visitor)>::type
              , Visitor
            >
        impl;

        return impl::call(
            detail::as_lvalue(when<_, A0>()(expr, state, visitor))
          , detail::as_lvalue(when<_, A1>()(expr, state, visitor))
          , visitor
        );
    }
};

/// \brief Call the PolymorphicFunctionObject or the
/// PrimitiveTransform with the current expression, state
/// and visitor, transformed according to \c A0, \c A1, and
/// \c A2, respectively.
template<typename Fun, typename A0, typename A1, typename A2>
struct call<Fun(A0, A1, A2)> : proto::callable
{
    template<typename Sig>
    struct result;

    template<typename This, typename Expr, typename State, typename Visitor>
    struct result<This(Expr, State, Visitor)>
    {
        // With 3 (or more) arguments there is no arity ambiguity: Fun is
        // always invoked with all transformed arguments directly.
        typedef typename when<_, A0>::template result<void(Expr, State, Visitor)>::type a0;
        typedef typename when<_, A1>::template result<void(Expr, State, Visitor)>::type a1;
        typedef typename when<_, A2>::template result<void(Expr, State, Visitor)>::type a2;
        typedef typename boost::result_of<Fun(a0, a1, a2)>::type type;
    };

    /// Let \c x be <tt>when\<_, A0\>()(expr, state, visitor)</tt>.
    /// Let \c y be <tt>when\<_, A1\>()(expr, state, visitor)</tt>.
    /// Let \c z be <tt>when\<_, A2\>()(expr, state, visitor)</tt>.
    /// Return <tt>Fun()(x, y, z)</tt>.
    ///
    /// \param expr The current expression
    /// \param state The current state
    /// \param visitor An arbitrary visitor
    template<typename Expr, typename State, typename Visitor>
    typename result<void(Expr, State, Visitor)>::type
    operator ()(Expr const &expr, State const &state, Visitor &visitor) const
    {
        Fun f;
        return f(
            detail::as_lvalue(when<_, A0>()(expr, state, visitor))
          , detail::as_lvalue(when<_, A1>()(expr, state, visitor))
          , detail::uncv(when<_, A2>()(expr, state, visitor)) // HACK
        );
    }
};
#if BOOST_PROTO_MAX_ARITY > 3
#define BOOST_PP_ITERATION_PARAMS_1 (3, (4, BOOST_PROTO_MAX_ARITY, <boost/xpressive/proto/transform/call.hpp>))
#include BOOST_PP_ITERATE()
#endif
}
/// INTERNAL ONLY
///
// Tells the proto dispatch machinery that call<Fun> wrappers are always
// callable, which is the entire purpose of the call<> annotation.
template<typename Fun>
struct is_callable<transform::call<Fun> >
  : mpl::true_
{};
}}
#endif
#else
// This section is expanded once for each N in [4, BOOST_PROTO_MAX_ARITY]
// by the BOOST_PP_ITERATE() self-include above.
#define N BOOST_PP_ITERATION()

    /// \brief Call the PolymorphicFunctionObject \c Fun with the
    /// current expression, state and visitor, transformed according
    /// to \c A0 through \c AN.
    template<typename Fun BOOST_PP_ENUM_TRAILING_PARAMS(N, typename A)>
    struct call<Fun(BOOST_PP_ENUM_PARAMS(N, A))> : proto::callable
    {
        template<typename Sig>
        struct result;

        template<typename This, typename Expr, typename State, typename Visitor>
        struct result<This(Expr, State, Visitor)>
        {
            // For each argument slot M, apply the when<_, AM> transform
            // and name its result type aM.
            #define TMP(Z, M, DATA)                                         \
                typedef                                                     \
                    typename when<_, BOOST_PP_CAT(A, M)>                    \
                        ::template result<void(Expr, State, Visitor)>       \
                    ::type                                                  \
                BOOST_PP_CAT(a, M);                                         \
                /**/
            BOOST_PP_REPEAT(N, TMP, ~)
            #undef TMP

            typedef
                typename boost::result_of<
                    Fun(BOOST_PP_ENUM_PARAMS(N, a))
                >::type
            type;
        };

        /// Let \c ax be <tt>when\<_, Ax\>()(expr, state, visitor)</tt>
        /// for each \c x in <tt>[0,N]</tt>.
        /// Return <tt>Fun()(a0, a1,... aN)</tt>.
        ///
        /// \param expr The current expression
        /// \param state The current state
        /// \param visitor An arbitrary visitor
        template<typename Expr, typename State, typename Visitor>
        typename result<void(Expr, State, Visitor)>::type
        operator ()(Expr const &expr, State const &state, Visitor &visitor) const
        {
            Fun f;
            // Expand to one transformed argument per slot.
            #define TMP(Z, M, DATA) when<_, BOOST_PP_CAT(A, M)>()(expr, state, visitor)
            return f(BOOST_PP_ENUM(N, TMP, ~));
            #undef TMP
        }
    };

#undef N
#endif
|
<gh_stars>0
// Name of coder : <NAME>
// Roll No : 1801CS16
// Date created : 19/02/2019
// Brief objective of the program : Program that completes all the tasks in Assignment 6
#include <stdio.h>
#include <math.h>
// Function to check perfect square
int is_perfect_square(int y)
{
if (sqrt(y) == (int)(sqrt(y)))
return 1;
else
return 0;
}
// Main
// Main: reads n, the array a[0..n-1], and two evaluation points k1, k2,
// then prints (space-separated):
//   1. the distinct Fibonacci numbers found in a[], in increasing order
//   2. c[i] = a[i] with b[i] appended after the decimal point (b = a reversed)
//   3. the polynomial value  sum(a[i] * k1^i)
//   4. its derivative        sum(i * a[i] * k2^(i-1))
int main()
{
    // Prompt the user for input
    int n, k1, k2;
    scanf("%d", &n);
    // VLAs: a = input, b = a reversed, d = digit counts, f = Fibonacci
    // entries (-1 marks "not Fibonacci"), c = combined decimal values.
    int a[n], b[n], d[n], f[n];
    float c[n];
    int A = 0, A1 = 0;
    // Check whether array element is fibonacci or not:
    // x is Fibonacci iff 5x^2 - 4 or 5x^2 + 4 is a perfect square.
    for (int i = 0; i < n; i++)
    {
        scanf("%d", &a[i]);
        if ( is_perfect_square(5*a[i]*a[i] - 4) || is_perfect_square(5*a[i]*a[i] + 4) )
        {
            f[i] = a[i];
        }
        else
        {
            f[i] = -1;
        }
    }
    scanf("%d", &k1);
    scanf("%d", &k2);
    // Reverse a[n] and store in b[n]
    for (int i = 0; i < n; i++)
    {
        b[i] = a[(n-i)-1];
    }
    // Bubble-sort the Fibonacci markers in increasing order; the -1
    // sentinels sink to the front and are skipped when printing.
    int swap = -1;
    while (swap != 0)
    {
        swap = 0;
        for (int i = 0; i < n - 1; i++)
        {
            if (f[i+1] < f[i])
            {
                int temp = f[i];
                f[i] = f[i+1];
                f[i+1] = temp;
                swap++;
            }
        }
    }
    // Print the Fibonacci elements, skipping sentinels and adjacent
    // duplicates (the sort put equal values next to each other).
    for (int i = 0; i < n; i++)
    {
        if (i==0 && f[i]!=-1)
        {
            printf("%d ", f[i]);
        }
        if (i>0 && f[i]!=-1 && f[i]!=f[i-1])
        {
            printf("%d ", f[i]);
        }
    }
    printf(" ");
    // Print C: c[i] = a[i] + b[i] / 10^(number of digits in a[i]),
    // i.e. b[i] written after the decimal point of a[i].
    for (int i = 0; i < n; i++)
    {
        int l = a[i];
        int j = 0;
        // Count the decimal digits of a[i] (0 digits when a[i] == 0).
        for (j = 0; l != 0; j++)
        {
            l = l/10;
        }
        d[i] = j;
        c[i] = (float)a[i] + (float)b[i]/pow(10, d[i]);
        printf("%.2f ", c[i]);
    }
    printf(" ");
    // Value of Polynomial at x=k1
    for (int i = 0; i < n; i++)
    {
        A = A + a[i]*pow(k1,i);
    }
    printf("%d ", A);
    // Value of derivative of polynomial at x=k2
    for (int i = 0; i < n; i++)
    {
        A1 = A1 + i*a[i]*pow(k2,i-1);
    }
    printf("%d\n", A1);
    return 0;
}
|
<reponame>crimsonmoon9/PTTChatOnYoutube
import { InitApp } from '../../app/appindex.js'
import { ChangeLog } from '../../ChangeLog.js'
import { ThemeCheck } from '../../library.js'
// Entry point for the Holodex multiview integration: injects a PTT chat
// panel beside the video grid and wires up the buttons that boot/toggle
// it.  `messageposter` is forwarded unchanged to InitApp.
export function InitHD (messageposter) {
  // Check Theme: true when the page background is the light (250,250,250) theme.
  const WhiteTheme = ThemeCheck('html', '250, 250, 250')
  // run app instance loop — only activate on the multiview page.
  const watchcheck = /https:\/\/holodex\.net\/multiview/.exec(window.location.href)
  let pttBootBtnCount = 0; let count = 0
  let isPTTInstalled = false
  if (watchcheck) {
    // Poll until the toolbar exists, then inject the "P" toggle button once.
    const t = setInterval(() => {
      if ($('#PTTSwitch').length === 1) {
        // Button already injected — stop polling.
        clearInterval(t)
      } else {
        const parent = $('.vue-grid-layout').parent()
        // Chat panel width in px, persisted via userscript storage (default 400).
        const pluginwidth = GM_getValue('PluginWidth', 400)
        const pluginwidth0 = '0'
        const liveControls = $('.flex-grow-1.justify-end.d-flex.mv-toolbar-btn.align-center.no-btn-text')
        // Re-parent the video grid and the chat panel into a flex row so
        // the chat can be collapsed/expanded by changing its flex-basis.
        const fakeparent = $('<div id="fakeparent" class="d-flex flex-row"></div>')
        const defaultVideoHandler = $('<div id="holotoolsvideohandler" style="flex:1 1 auto;"></div>')
        const PTTChatHandler = $('<div id="pttchatparent" class="p-0 d-flex" style="flex:0 0 0px;position:relative;"></div>')
        // const defaultVideo = $(`.vue-grid-layout`).parent();
        const defaultVideo = $('.vue-grid-layout')
        const iconPTT = $('<button type="button" id="PTTSwitch" title="切換為舊版PTT顯示模式\n(嵌入模式故障時可使用,使用前請先重新整理)" style="height: 36px; width: 36px; margin: 0px 4px 0px 4px; font-size: 21px;">P</button>')
        liveControls.prepend(iconPTT)
        // now: current flex-basis of the chat panel ('0' = collapsed).
        let now = pluginwidth0
        // Track the bootstrap collapse animation state so clicks that
        // arrive mid-animation are ignored.
        let collapseStart = false
        let collapseEnd = true
        let isChatOnen = false
        iconPTT.on('click', function () {
          // First click: create the chat app lazily.
          if ($('#PTTChat').length === 0) {
            InitApp(PTTChatHandler, WhiteTheme, true, messageposter, true)
            ChangeLog()
          }
          // The legacy side-panel toggle only applies while the embedded
          // mode (installPTT) has not taken over.
          if (!isPTTInstalled) {
            if (collapseEnd || !collapseStart) {
              if (now === '0') {
                $('#PTTMainBtn').css('display', 'block')
                $('#PTTMain').collapse('show')
              } else {
                $('#PTTMainBtn').css('display', 'none')
                $('#PTTMain').collapse('hide')
              }
              // Toggle the panel width between 0 and the configured width.
              now = (now === pluginwidth0 ? pluginwidth : pluginwidth0)
              $('#pttchatparent').css('flex', '0 0 ' + now + 'px')
              isChatOnen = !isChatOnen
            }
          }
        })
        $(document).on('show.bs.collapse hide.bs.collapse', '#PTTMain', function () { collapseStart = true; collapseEnd = false })
        $(document).on('shown.bs.collapse hidden.bs.collapse', '#PTTMain', function () { collapseStart = false; collapseEnd = true })
        parent.append(fakeparent)
        fakeparent.append(defaultVideoHandler)
        defaultVideoHandler.append(defaultVideo)
        PTTChatHandler.css('z-index', '5')
        fakeparent.append(PTTChatHandler)
      }
    }, 200)
    // Poll for newly added grid cells; for each unprocessed cell, clone
    // one of its buttons into a purple "boot PTT here" button.
    setInterval(() => {
      const btnParentSet = $('.px-0.d-flex.flex-grow-1.align-stretch.mb-1')
      if (btnParentSet.length > pttBootBtnCount) {
        btnParentSet.each(function (index) {
          const btnParent = btnParentSet.eq(index)
          // Skip cells already processed (they start with a .d-flex wrapper).
          if (!(btnParent.children().eq(0).hasClass('d-flex'))) {
            btnParent.css('flex-direction', 'column')
            btnParent.children().eq(1).clone().attr({ id: 'pttBootBtn', ins: `${count}` }).appendTo(btnParent).attr('style', 'background-color: rgb(150, 0, 180) !important').css({ 'margin-top': '15px', 'flex-basis': '100%', padding: '6px 0px', 'max-width': '608px' })
            // Replace the cloned button's SVG icon with a "P" glyph path.
            btnParent.children().eq(2).find('path').attr('d', 'M13 3H6v18h4v-6h3c3.31 0 6-2.69 6-6s-2.69-6-6-6zm.2 8H10V7h3.2c1.1 0 2 .9 2 2s-.9 2-2 2z')
            btnParent.prepend($('<div class="d-flex"></div>').prepend(btnParent.children().eq(0), btnParent.children().eq(1)))
            console.log(count)
            const currentBtn = $(`#pttBootBtn[ins="${count}"]`)
            currentBtn.on('click', function () {
              console.log('click')
              if ($('#PTTChat').length === 0) {
                // Remember which grid cell will host the chat, activate the
                // cell's own chat first, then embed PTT into it.
                const gridIndex = currentBtn.parents().eq(4).index()
                btnParent.children().eq(0).children().eq(1).trigger('click')
                installPTT(gridIndex)
              }
            })
            count++
          }
        })
      }
      pttBootBtnCount = btnParentSet.length
    }, 1000)
  }
  // Embed the PTT chat app inside the grid cell at gridIndex, layered
  // over that cell's native chat.
  function installPTT (gridIndex) {
    const t = setInterval(() => {
      if ($('#PTTChat').length === 0) {
        const gridParent = $('.vue-grid-layout').children().eq(gridIndex)
        if (gridParent.has($('.cell-content'))) {
          gridParent.children().children().eq(1).css('position', 'relative').prepend($('<div style="height: 100%; width: 100%; position: absolute;"></div>'))
          InitApp(gridParent.children().children().eq(1).children().eq(0), WhiteTheme, true, messageposter, true)
          ChangeLog()
          isPTTInstalled = true
          const chatBtn = gridParent.find($('span:contains("Chat")')).parent()
          $('#PTTChat').addClass('h-100').css('background', 'transparent')
          $('#PTTMain').removeClass('position-absolute')
          $('#PTTChat-contents').css('height', '')
          // NOTE(review): presumably this activates the cell's native chat
          // tab when it is not already selected — confirm the color check.
          if (chatBtn.css('background-color') !== 'rgb(39, 39, 39)') {
            chatBtn.trigger('click')
          }
          $('#PTTMainBtn').on('click', function () {
            const originChat = gridParent.children().children().eq(1).children().eq(1)
            const h = $('#PTTChat').parent().height()
            // Toggle between PTT chat and the native chat by pushing the
            // native chat behind (z-index -1) or in front (0).
            if (originChat.css('z-index') === '-1') {
              $('#PTTMain').removeClass('h-100').css('max-height', h + 'px')
              $('#PTTChat-app').removeClass('h-100').css('max-height', h + 'px')
              originChat.css('z-index', '0')
            } else {
              $('#PTTMain').css('max-height', h + 'px')
              $('#PTTChat-app').css('max-height', h + 'px')
              $('#PTTMain').on('shown.bs.collapse', function () {
                $('#PTTMain').addClass('h-100').css('max-height', '')
                $('#PTTChat-app').addClass('h-100').css('max-height', '')
              })
              originChat.css('z-index', '-1')
            }
          })
          listenEditBtn(gridParent)
          $('#PTTMainBtn').trigger('click')
          clearInterval(t)
        }
      }
    }, 200)
  }
  // Watch the cell's edit-layout button: while editing, shrink the PTT
  // overlay so it does not cover the cell padding, then arm the cell
  // control buttons.
  function listenEditBtn (gridParent) {
    const t = setInterval(() => {
      const editBtn = gridParent.find($('path[d="M3,6H21V8H3V6M3,11H21V13H3V11M3,16H21V18H3V16Z"]')).parents().eq(3)
      if (editBtn) {
        editBtn.on('click', function () {
          const cellContent = gridParent.children().children().eq(1)
          const px = cellContent.css('padding-left')
          const pt = cellContent.css('padding-top')
          const pb = cellContent.css('padding-bottom')
          cellContent.children().eq(0).css({ height: `calc(100% - ${pt} - ${pb})`, width: `calc(100% - ${px} - ${px})` })
          listenCtrlBtn(gridParent)
        })
        clearInterval(t)
      }
    }, 200)
  }
  // While the cell is in edit mode: disable the first control button
  // (marked "locked: PTT running") and restore the overlay size when the
  // second control ends editing.
  function listenCtrlBtn (gridParent) {
    const t = setInterval(() => {
      const btnParent = gridParent.find($('.cell-control')).children()
      if (btnParent) {
        btnParent.children().eq(0).attr({ disabled: 'true', title: '鎖定:PTT運行中', style: 'background-color: rgb(150, 150, 150) !important' })
        btnParent.children().eq(1).on('click', function () {
          gridParent.children().children().eq(1).children().eq(0).css({ height: '100%', width: '100%' })
          listenEditBtn(gridParent)
        })
        clearInterval(t)
      }
    }, 200)
  }
}
|
<filename>modules/dbschema.py
#!/usr/bin/env python
##############################################################################
#
# dbschema.py
#
# Abstracts representation of a database's structure. Allows modules to
# define entire schemas in a compact representation, and then build the
# appropriate queries to create, synchronize, backup, and restore the
# structure of a database.
#
# Schemas are free-form node graphs represented by nested dictionaries in
# Python, and stored as JSON files.
#
# The target database API for the generated queries is sqlite3.
#
##############################################################################
# Minimal example of the schema document structure
_example_schema = """
{
"serial" : 20130612,
"tables" : [
{
"name" : "example",
"columns" : [
{
"name" : "id",
"type" : "integer",
"null" : false
},
{
"name" : "name",
"type" : "text",
"default" : "",
"null" : false
},
{
"name" : "age",
"type" : "integer"
},
{
"name" : "compkey",
"type" : "integer"
}
],
"indexes" : [
[ "name" ],
[ "age", "compkey" ]
],
"initdata" : [
[ 1, "adam", 11, 8 ],
[ 2, "baker", 12, 5 ],
[ 3, "charlie", 13, 4 ]
]
}
]
}
"""
#=============================================================================
class dbdict( dict ):
    """
    Extends the basic dictionary type to enable auto-loading existing
    dictionaries, and dot-style element access.

    Note: because __setattr__ is aliased to __setitem__, attribute
    assignment on instances (and subclasses) stores dictionary items,
    not real attributes; __getattr__ raises KeyError (not
    AttributeError) for missing names.
    """

    # Statically assign attribute access methods to dictionary access methods.
    __getattr__ = dict.__getitem__
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__

    #=========================================================================
    def __init__( self, data = None ):
        """
        Initialize the dbdict instance.

        @param data Optional mapping whose items seed the dictionary.
                    (Previously declared as `data = {}` — a mutable
                    default argument; replaced with None for safety.)
        """
        super( dbdict, self ).__init__( data if data is not None else {} )
#=============================================================================
class dbcolumn( dbdict ):
    """
    Represents one column declaration from a table's schema document.

    Note: dbdict aliases __setattr__ to dict.__setitem__, so the
    "attribute" assignments in __init__ actually store 'table' and
    'prikey' as dictionary items alongside the schema fields.
    """
    #=========================================================================
    def __init__( self, table, schema, prikey = False ):
        """
        Initialize the dbcolumn from its owning table and its schema dict.

        @param table  The owning dbtable instance.
        @param schema Column schema dict ("name", "type", optional
                      "null" and "default").
        @param prikey True only for the table's first column, which
                      dbtable designates as the primary key.
        """
        super( dbcolumn, self ).__init__( schema )
        self.table = table
        self.prikey = prikey
    #=========================================================================
    def __str__( self ):
        """
        Render this column's DDL fragment, e.g.
        "id integer not null primary key".
        """
        s = '%s %s' % ( self.name, self.type )
        # An explicit "null" : false in the schema adds a NOT NULL clause.
        if ( 'null' in self ) and ( self.null == False ):
            s += ' not null'
        if 'default' in self:
            s += " default '%s'" % self.default
        if self.prikey == True:
            s += ' primary key'
        return s
#=============================================================================
class dbindex( list ):
    """
    Represents a single (possibly composite) index on a table as the
    ordered list of column names it covers.
    """
    #=========================================================================
    def __init__( self, table, schema ):
        """
        Initialize the dbindex from its owning table and the list of
        column names taken from the schema document.
        """
        super( dbindex, self ).__init__( schema )
        # Owning dbtable; used to qualify the index DDL below.
        self.table = table
    #=========================================================================
    def __str__( self ):
        """
        Render the "create index" DDL statement for this index.
        """
        index_name  = '_'.join( self )
        column_list = ','.join( self )
        return 'create index %s_i on %s (%s)' % (
            index_name, self.table.name, column_list
        )
#=============================================================================
class dbtable( dbdict ):
    """
    Represents a single table definition from the schema document.

    __init__ requires the "columns" and "indexes" keys; "name" is used
    by the DDL/query builders, and "initdata" only by get_init_data
    (a KeyError is raised if a needed key is absent).  Because dbdict
    aliases __setattr__ to dict.__setitem__, 'dbcolumns' and 'dbindexes'
    below become dictionary items, not real attributes.
    """
    #=========================================================================
    def __init__( self, schema ):
        """
        Initialize the dbtable and wrap each column/index declaration in
        its typed helper class.
        """
        super( dbtable, self ).__init__( schema )
        self.dbcolumns = []
        self.dbindexes = []
        # The first declared column is flagged as the primary key.
        first = True
        for c in self.columns:
            self.dbcolumns.append( dbcolumn( self, c, first ) )
            first = False
        for i in self.indexes:
            self.dbindexes.append( dbindex( self, i ) )
    #=========================================================================
    def __str__( self ):
        """
        Render the "create table" DDL statement for this table.
        """
        return 'create table %s (\n    %s\n)' % (
            self.name,
            ',\n    '.join( [ str( c ) for c in self.dbcolumns ] )
        )
    #=========================================================================
    def get_column_names( self ):
        """
        Return the declared column names, in declaration order.
        """
        return [ c.name for c in self.dbcolumns ]
    #=========================================================================
    def get_columns( self ):
        """
        Return the list of dbcolumn wrappers.
        """
        return self.dbcolumns
    #=========================================================================
    def get_indexes( self ):
        """
        Return the list of dbindex wrappers.
        """
        return self.dbindexes
    #=========================================================================
    def get_init_data( self ):
        """
        Build a parameterized insert statement (one "?" placeholder per
        column) and return it with the schema's "initdata" rows as
        { 'query' : ..., 'values' : ... }.
        """
        markers = ','.join( [ '?' ] * len( self.columns ) )
        q = 'insert into %s values (%s)' % ( self.name, markers )
        return { 'query' : q, 'values' : self.initdata }
#=============================================================================
class dbschema( dbdict ):
    """Top-level database schema dictionary with table lookup helpers."""
    #=========================================================================
    def __init__( self, schema ):
        """Initialize the schema from a raw schema dictionary.
        @param schema The parsed (JSON) schema dictionary
        """
        # Bug fix: super() must name THIS class so dbdict.__init__ runs.
        # The previous super( dbdict, self ) skipped dbdict's initializer,
        # inconsistent with every sibling class in this module.
        super( dbschema, self ).__init__( schema )
    #=========================================================================
    def get_table_by_name( self, name ):
        """Look up a table by name.
        @param name The table name to search for
        @return A dbtable for the first match, or None if not found
        """
        for t in self.tables:
            if t[ 'name' ] == name:
                return dbtable( t )
        return None
#=============================================================================
def main( argv ):
""" Script execution entry point """
import argparse
import json
# Create and configure an argument parser
parser = argparse.ArgumentParser(
description = 'Development and testing script for dbschema module.'
)
parser.add_argument(
'-j', '--json', default = None,
help = 'Load schema from JSON file for testing'
)
parser.add_argument(
'-t', '--table', default = None,
help = 'Specify table to display'
)
# The parser only wants the arguments (not the program "argument")
args = parser.parse_args( argv[ 1 : ] )
# See if the user wants to check out their own JSON schema
if args.json != None:
schema = json.load( arts.json )
else:
schema = json.loads( _example_schema )
# Initialize the schema object
dbs = dbschema( schema )
# Check for a table to display
if args.table != None:
table_name = args.table
else:
table_name = 'example'
# Demonstrate query generation
table = dbs.get_table_by_name( table_name )
print str( table )
indexes = table.get_indexes()
for i in indexes:
print str( i )
data = table.get_init_data()
print data[ 'query' ]
for d in data[ 'values' ]:
print ' %s' % ','.join( [ str( v ) for v in d ] )
# Return success.
return 0
#=============================================================================
if __name__ == "__main__":
    # Run the development/test entry point when executed as a script;
    # propagate its return value as the process exit status.
    import sys
    sys.exit( main( sys.argv ) )
|
<reponame>heremaps/here-olp-edge-sdk-cpp<gh_stars>10-100
/*
* Copyright (C) 2019-2021 HERE Europe B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
* License-Filename: LICENSE
*/
#pragma once
#include <chrono>
#include <memory>
#include <string>
#include <utility>
#include <olp/authentication/AuthenticationCredentials.h>
#include <olp/authentication/ErrorResponse.h>
#include <olp/authentication/Settings.h>
#include <olp/authentication/Types.h>
#include <olp/core/client/CancellationContext.h>
#include <olp/core/client/OauthToken.h>
#include <olp/core/http/HttpStatusCode.h>
namespace olp {
namespace authentication {
/// The default minimum validity period of a token, in seconds (5 minutes).
static constexpr auto kDefaultMinimumValidity = 300ll;
/// The same default validity expressed as `std::chrono::seconds`.
static constexpr auto kDefaultMinimumValiditySeconds =
    std::chrono::seconds(kDefaultMinimumValidity);
/// A zero-second validity period; by its name, used to request a forced
/// token refresh.
static constexpr auto kForceRefresh = std::chrono::seconds(0);
namespace internal {
class TokenProviderPrivate;
/// An implementation of `TokenProvider`.
/// @note This is a private implementation class for internal use only, and not
/// bound to any API stability promises. Please do not use directly.
class AUTHENTICATION_API TokenProviderImpl {
 public:
  /**
   * @brief Creates the `TokenProviderImpl` instance.
   *
   * @param settings The `Settings` object that is used to customize
   * the `TokenEndpoint` instance.
   * @param minimum_validity Sets the minimum validity period of
   * the token in seconds.
   */
  TokenProviderImpl(Settings settings, std::chrono::seconds minimum_validity);

  /// @copydoc TokenProvider::operator()()
  std::string operator()() const;

  /// @copydoc TokenProvider::operator()(client::CancellationContext&)
  client::OauthTokenResponse operator()(
      client::CancellationContext& context) const;

  /// @copydoc TokenProvider::GetErrorResponse()
  ErrorResponse GetErrorResponse() const;

  /// @copydoc TokenProvider::GetHttpStatusCode()
  int GetHttpStatusCode() const;

  /// @copydoc TokenProvider::GetResponse()(client::CancellationContext&)
  TokenResponse GetResponse(client::CancellationContext& context) const;

  /// @copydoc TokenProvider::IsTokenResponseOK()
  bool IsTokenResponseOK() const;

 private:
  /// Pointer to the actual implementation; `shared_ptr` means copies of this
  /// wrapper share the same underlying state.
  std::shared_ptr<TokenProviderPrivate> impl_;
};
} // namespace internal
/**
* @brief Provides the authentication tokens if the HERE platform
* user credentials are valid.
*
* @tparam MinimumValidity The minimum token validity time (in seconds).
* To use the default `MinimumValidity` value, use the `TokenProviderDefault`
* typedef.
*
* @see `TokenProviderDefault`
*/
template <uint64_t MinimumValidity>
class TokenProvider {
 public:
  /**
   * @brief Creates the `TokenProvider` instance with the `settings` parameter.
   *
   * @param settings The settings that can be used to configure
   * the `TokenEndpoint` instance.
   */
  explicit TokenProvider(Settings settings)
      : impl_(std::make_shared<internal::TokenProviderImpl>(
            std::move(settings), std::chrono::seconds(MinimumValidity))) {}

  /// A default copy constructor.
  TokenProvider(const TokenProvider& other) = default;

  /// A default move constructor.
  TokenProvider(TokenProvider&& other) noexcept = default;

  /// A default copy assignment operator.
  TokenProvider& operator=(const TokenProvider& other) = default;

  /// A default move assignment operator.
  TokenProvider& operator=(TokenProvider&& other) noexcept = default;

  /**
   * @brief Casts the `TokenProvider` instance to the `bool` type.
   *
   * Returns true if the previous token request was successful.
   *
   * @returns True if the previous token request was successful; false
   * otherwise.
   */
  operator bool() const { return impl_->IsTokenResponseOK(); }

  /**
   * @brief Casts the `TokenProvider` instance to the `std::string` type.
   *
   * Returns the access token string if the response is successful. Otherwise,
   * returns an empty string.
   *
   * @returns The access token string if the response is successful; an empty
   * string otherwise.
   *
   * @deprecated Will be removed by 10.2022. Use the operator with
   * `CancellationContext` instead.
   */
  OLP_SDK_DEPRECATED(
      "Will be removed by 10.2022. Use the operator with `CancellationContext` "
      "instead.")
  std::string operator()() const { return impl_->operator()(); }

  /**
   * @brief Returns the access token or an error.
   *
   * @param context Used to cancel the pending token request.
   *
   * @returns An `OauthTokenResponse` if the response is successful; an
   * `ApiError` otherwise.
   */
  client::OauthTokenResponse operator()(
      client::CancellationContext& context) const {
    return impl_->operator()(context);
  }

  /**
   * @brief Allows the `olp::client::ApiError` object associated
   * with the last request to be accessed if the token request is unsuccessful.
   *
   * @returns An error if the last token request failed.
   */
  ErrorResponse GetErrorResponse() const { return impl_->GetErrorResponse(); }

  /**
   * @brief Gets the HTTP status code of the last request.
   *
   * @returns The HTTP code of the last token request if it was successful.
   * Otherwise, returns the HTTP 503 Service Unavailable server error.
   */
  int GetHttpStatusCode() const { return impl_->GetHttpStatusCode(); }

 private:
  /// Shared implementation object; copies of a `TokenProvider` share it.
  std::shared_ptr<internal::TokenProviderImpl> impl_;
};
/// Provides the authentication tokens using the default minimum token
/// validity.
using TokenProviderDefault = TokenProvider<kDefaultMinimumValidity>;
} // namespace authentication
} // namespace olp
|
<gh_stars>0
import request from '@/utils/request'
const baseAPI = process.env.VUE_APP_BASE_API_2
// Device classification settings API.
/**
 * Fetch a paginated list of device info records matching a keyword.
 */
export function findDeviceInfoListByKeyWord4Pagination(data) {
  const config = {
    baseURL: baseAPI,
    method: 'post',
    url: '/deviceClassificationSettings/findDeviceInfoListByKeyWord4Pagination',
    data
  }
  return request(config)
}
// Fetch every available device type.
export function querySttps() {
  return request({ method: 'get', url: '/statistics/querySttps', baseURL: baseAPI })
}
// Fetch all projects.
export function queryComprojects(data) {
  return request({ method: 'post', url: '/iotComProj/getComProject', baseURL: baseAPI, data })
}
// Fetch all thing models (device products).
export function queryProducts(data) {
  return request({
    baseURL: baseAPI,
    url: '/model/queryDevice',
    // NOTE(review): this is a GET request carrying a `data` body; axios does
    // not serialize `data` into the query string for GET, so the payload is
    // most likely dropped in transit. Confirm whether `params: data` was
    // intended before changing behavior.
    method: 'get',
    data: data
  })
}
// Query device-classification tree node data.
export function findNodeFun(data) {
  return request({ method: 'post', url: '/neilaw/queryDeviceTypeById', baseURL: baseAPI, data })
}
// Save device classification info.
export function saveFun(data) {
  return request({ method: 'post', url: '/oper/saveDeviceCate', baseURL: baseAPI, data })
}
// Save device classifications in bulk.
export function batchSaveDeviceCate(data) {
  return request({ method: 'post', url: '/oper/batchSaveDeviceCate', baseURL: baseAPI, data })
}
// Look up classification dictionary info by device code (used to re-populate forms).
export function queryDeviceCateInfoByDeviceIdFun(data) {
  return request({ method: 'post', url: '/oper/queryDeviceCateInfos', baseURL: baseAPI, data })
}
// Fetch the list of business dictionary types (children of root node 0).
export function getBizDictList(params) {
  const config = {
    baseURL: baseAPI,
    method: 'POST',
    url: 'iotBizDict/getIotBizDictByPid/0',
    data: params
  }
  return request(config)
}
package me.insidezhou.southernquiet.auth;
import org.springframework.core.annotation.AliasFor;
import java.lang.annotation.*;
/**
 * Authorization check. A method or class carrying this annotation may only be
 * accessed by users holding the specified permissions.
 * When both a class and one of its methods are annotated, each {@code Auth}
 * is evaluated with its own match mode, and every {@code Auth} must pass.
 */
@Documented
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Auth {
    /**
     * Permission identifiers. An empty array means any permission set passes
     * verification, including an empty one.
     */
    @AliasFor("value")
    String[] permissions() default {};

    /** Alias for {@link #permissions()}. */
    @AliasFor("permissions")
    String[] value() default {};

    /** How multiple listed permissions are matched; defaults to {@code All}. */
    MatchMode mode() default MatchMode.All;

    /**
     * Match mode used when several permissions are listed.
     */
    enum MatchMode {
        /**
         * Every listed permission must match.
         */
        All,
        /**
         * Any one of the listed permissions may match.
         */
        Any
    }
}
|
<filename>dummy/temporarystore/store_test.go
package temporarystore
import (
"context"
"reflect"
"squirreldb/types"
"testing"
"time"
"github.com/prometheus/client_golang/prometheus"
)
const (
MetricIDTest1 = 1
MetricIDTest2 = 2
MetricIDTest3 = 3
)
// TestAppend verifies that Store.Append merges incoming points into the
// existing per-metric state and reports the resulting point counts.
func TestAppend(t *testing.T) {
	// Small constructors keep the fixture tables compact.
	mp := func(ts int64, v float64) types.MetricPoint {
		return types.MetricPoint{Timestamp: ts, Value: v}
	}
	md := func(id types.MetricID, ttl int64, pts ...types.MetricPoint) types.MetricData {
		return types.MetricData{ID: id, Points: pts, TimeToLive: ttl}
	}
	cases := []struct {
		name      string
		stored    map[types.MetricID]storeData
		input     []types.MetricData
		want      []int
		wantState map[types.MetricID]storeData
	}{
		{
			name: "store_filled",
			stored: map[types.MetricID]storeData{
				MetricIDTest1: {MetricData: md(MetricIDTest1, 150, mp(0, 10), mp(10000, 20))},
				MetricIDTest2: {MetricData: md(MetricIDTest2, 2400, mp(0, 50), mp(20000, 100))},
			},
			input: []types.MetricData{
				md(MetricIDTest1, 300, mp(20000, 30), mp(30000, 40), mp(40000, 50), mp(50000, 60), mp(60000, 70)),
				md(MetricIDTest2, 1200, mp(40000, 150), mp(60000, 200), mp(80000, 250), mp(100000, 300), mp(120000, 350)),
			},
			want: []int{7, 7},
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: {MetricData: md(MetricIDTest1, 300,
					mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40),
					mp(40000, 50), mp(50000, 60), mp(60000, 70))},
				MetricIDTest2: {MetricData: md(MetricIDTest2, 2400,
					mp(0, 50), mp(20000, 100), mp(40000, 150), mp(60000, 200),
					mp(80000, 250), mp(100000, 300), mp(120000, 350))},
			},
		},
		{
			name:   "store_empty",
			stored: make(map[types.MetricID]storeData),
			input: []types.MetricData{
				md(MetricIDTest1, 300, mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40), mp(40000, 50)),
				md(MetricIDTest2, 1200, mp(0, 50), mp(20000, 100), mp(40000, 150), mp(60000, 200), mp(80000, 250)),
			},
			want: []int{5, 5},
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: {MetricData: md(MetricIDTest1, 300,
					mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40), mp(40000, 50))},
				MetricIDTest2: {MetricData: md(MetricIDTest2, 1200,
					mp(0, 50), mp(20000, 100), mp(40000, 150), mp(60000, 200), mp(80000, 250))},
			},
		},
		{
			name: "store_filled_metrics_empty",
			stored: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData:     md(MetricIDTest1, 150, mp(0, 10), mp(10000, 20)),
					expirationTime: time.Unix(400, 0),
				},
				MetricIDTest2: {
					MetricData:     md(MetricIDTest2, 2400, mp(0, 50), mp(20000, 100)),
					expirationTime: time.Unix(400, 0),
				},
			},
			input: nil,
			want:  nil,
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData:     md(MetricIDTest1, 150, mp(0, 10), mp(10000, 20)),
					expirationTime: time.Unix(400, 0),
				},
				MetricIDTest2: {
					MetricData:     md(MetricIDTest2, 2400, mp(0, 50), mp(20000, 100)),
					expirationTime: time.Unix(400, 0),
				},
			},
		},
		{
			name:      "store_empty_metrics_empty",
			stored:    make(map[types.MetricID]storeData),
			input:     nil,
			want:      nil,
			wantState: make(map[types.MetricID]storeData),
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			s := &Store{
				metricsStore: tc.stored,
				metrics:      newMetrics(prometheus.NewRegistry()),
			}
			got, err := s.Append(context.Background(), tc.input)
			if err != nil {
				t.Errorf("Append() error = %v", err)
			}
			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("Append() = %v, want %v", got, tc.want)
			}
			if !reflect.DeepEqual(s.metricsStore, tc.wantState) {
				t.Errorf("Append() metrics = %v, want %v", s.metricsStore, tc.wantState)
			}
		})
	}
}
func TestStore_expire(t *testing.T) {
type fields struct {
metrics map[types.MetricID]storeData
}
type args struct {
now time.Time
}
tests := []struct {
args args
fields fields
want map[types.MetricID]storeData
name string
}{
{
name: "no_expire",
fields: fields{
metrics: map[types.MetricID]storeData{
MetricIDTest1: {
MetricData: types.MetricData{},
expirationTime: time.Unix(800, 0),
},
MetricIDTest2: {
MetricData: types.MetricData{},
expirationTime: time.Unix(1600, 0),
},
},
},
args: args{
now: time.Unix(600, 0),
},
want: map[types.MetricID]storeData{
MetricIDTest1: {
MetricData: types.MetricData{},
expirationTime: time.Unix(800, 0),
},
MetricIDTest2: {
MetricData: types.MetricData{},
expirationTime: time.Unix(1600, 0),
},
},
},
{
name: "expire",
fields: fields{
metrics: map[types.MetricID]storeData{
MetricIDTest1: {
MetricData: types.MetricData{},
expirationTime: time.Unix(800, 0),
},
MetricIDTest2: {
MetricData: types.MetricData{},
expirationTime: time.Unix(1600, 0),
},
},
},
args: args{
now: time.Unix(1200, 0),
},
want: map[types.MetricID]storeData{
MetricIDTest2: {
MetricData: types.MetricData{},
expirationTime: time.Unix(1600, 0),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := &Store{
metricsStore: tt.fields.metrics,
metrics: newMetrics(prometheus.NewRegistry()),
}
s.expire(tt.args.now)
if !reflect.DeepEqual(s.metricsStore, tt.want) {
t.Errorf("expire() metrics = %v, want %v", s.metricsStore, tt.want)
}
})
}
}
// TestStoreReadPointsAndOffset verifies that reading by metric ID returns the
// stored data and write offset, with zero values for unknown IDs.
func TestStoreReadPointsAndOffset(t *testing.T) {
	mp := func(ts int64, v float64) types.MetricPoint {
		return types.MetricPoint{Timestamp: ts, Value: v}
	}
	// anon builds a MetricData without an ID, matching the stored fixtures.
	anon := func(ttl int64, pts ...types.MetricPoint) types.MetricData {
		return types.MetricData{Points: pts, TimeToLive: ttl}
	}
	cases := []struct {
		name       string
		state      map[types.MetricID]storeData
		ids        []types.MetricID
		want       []types.MetricData
		wantOffset []int
		wantErr    bool
	}{
		{
			name:       "store_empty",
			state:      make(map[types.MetricID]storeData),
			ids:        []types.MetricID{MetricIDTest1},
			want:       make([]types.MetricData, 1),
			wantOffset: []int{0},
			wantErr:    false,
		},
		{
			name: "store_filled",
			state: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData: anon(300,
						mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40), mp(40000, 50)),
					WriteOffset:    1,
					expirationTime: time.Unix(800, 0),
				},
				MetricIDTest2: {
					MetricData: anon(1200,
						mp(0, 50), mp(20000, 100), mp(40000, 150), mp(60000, 200), mp(80000, 250)),
					WriteOffset:    0,
					expirationTime: time.Unix(800, 0),
				},
			},
			ids: []types.MetricID{MetricIDTest1},
			want: []types.MetricData{
				anon(300, mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40), mp(40000, 50)),
			},
			wantOffset: []int{1},
			wantErr:    false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			s := &Store{
				metricsStore: tc.state,
				metrics:      newMetrics(prometheus.NewRegistry()),
			}
			got, gotOffset, err := s.ReadPointsAndOffset(context.Background(), tc.ids)
			if (err != nil) != tc.wantErr {
				t.Errorf("ReadPointsAndOffset() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("ReadPointsAndOffset() got = %v, want %v", got, tc.want)
			}
			if !reflect.DeepEqual(gotOffset, tc.wantOffset) {
				t.Errorf("ReadPointsAndOffset() gotOffset = %v, want %v", gotOffset, tc.wantOffset)
			}
		})
	}
}
// TestStoreGetSetPointsAndOffset verifies that getSetPointsAndOffset swaps in
// the new points/offsets, returns the previously stored data, and stamps the
// default expiration relative to "now".
func TestStoreGetSetPointsAndOffset(t *testing.T) {
	mp := func(ts int64, v float64) types.MetricPoint {
		return types.MetricPoint{Timestamp: ts, Value: v}
	}
	md := func(id types.MetricID, ttl int64, pts ...types.MetricPoint) types.MetricData {
		return types.MetricData{ID: id, Points: pts, TimeToLive: ttl}
	}
	// anon builds a MetricData without an ID, matching pre-existing fixtures.
	anon := func(ttl int64, pts ...types.MetricPoint) types.MetricData {
		return types.MetricData{Points: pts, TimeToLive: ttl}
	}
	cases := []struct {
		name      string
		state     map[types.MetricID]storeData
		points    []types.MetricData
		offsets   []int
		now       time.Time
		want      []types.MetricData
		wantState map[types.MetricID]storeData
	}{
		{
			name: "store_filled",
			state: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData:     anon(150, mp(0, 10), mp(10000, 20)),
					expirationTime: time.Unix(800, 0),
				},
				MetricIDTest2: {
					MetricData:     anon(2400, mp(0, 50), mp(20000, 100)),
					expirationTime: time.Unix(800, 0),
				},
			},
			points: []types.MetricData{
				md(MetricIDTest1, 300, mp(20000, 30), mp(30000, 40), mp(40000, 50), mp(50000, 60), mp(60000, 70)),
				md(MetricIDTest2, 1200, mp(40000, 150), mp(60000, 200), mp(80000, 250), mp(100000, 300), mp(120000, 350)),
			},
			offsets: []int{1, 3},
			now:     time.Unix(400, 0),
			want: []types.MetricData{
				anon(150, mp(0, 10), mp(10000, 20)),
				anon(2400, mp(0, 50), mp(20000, 100)),
			},
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData: md(MetricIDTest1, 300,
						mp(20000, 30), mp(30000, 40), mp(40000, 50), mp(50000, 60), mp(60000, 70)),
					expirationTime: time.Unix(400, 0).Add(defaultTTL),
					WriteOffset:    1,
				},
				MetricIDTest2: {
					MetricData: md(MetricIDTest2, 1200,
						mp(40000, 150), mp(60000, 200), mp(80000, 250), mp(100000, 300), mp(120000, 350)),
					expirationTime: time.Unix(400, 0).Add(defaultTTL),
					WriteOffset:    3,
				},
			},
		},
		{
			name:  "store_empty",
			state: make(map[types.MetricID]storeData),
			points: []types.MetricData{
				md(MetricIDTest1, 300, mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40), mp(40000, 50)),
				md(MetricIDTest2, 1200, mp(0, 50), mp(20000, 100), mp(40000, 150), mp(60000, 200), mp(80000, 250)),
			},
			offsets: []int{1, 0},
			now:     time.Unix(200, 0),
			want:    make([]types.MetricData, 2),
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData: md(MetricIDTest1, 300,
						mp(0, 10), mp(10000, 20), mp(20000, 30), mp(30000, 40), mp(40000, 50)),
					expirationTime: time.Unix(200, 0).Add(defaultTTL),
					WriteOffset:    1,
				},
				MetricIDTest2: {
					MetricData: md(MetricIDTest2, 1200,
						mp(0, 50), mp(20000, 100), mp(40000, 150), mp(60000, 200), mp(80000, 250)),
					expirationTime: time.Unix(200, 0).Add(defaultTTL),
					WriteOffset:    0,
				},
			},
		},
		{
			name: "store_filled_metrics_empty",
			state: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData:     anon(150, mp(0, 10), mp(10000, 20)),
					expirationTime: time.Unix(400, 0),
				},
				MetricIDTest2: {
					MetricData:     anon(2400, mp(0, 50), mp(20000, 100)),
					expirationTime: time.Unix(400, 0),
				},
			},
			points:  nil,
			offsets: nil,
			now:     time.Unix(200, 0),
			want:    nil,
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: {
					MetricData:     anon(150, mp(0, 10), mp(10000, 20)),
					expirationTime: time.Unix(400, 0),
				},
				MetricIDTest2: {
					MetricData:     anon(2400, mp(0, 50), mp(20000, 100)),
					expirationTime: time.Unix(400, 0),
				},
			},
		},
		{
			name:      "store_empty_metrics_empty",
			state:     make(map[types.MetricID]storeData),
			points:    nil,
			offsets:   nil,
			now:       time.Unix(200, 0),
			want:      nil,
			wantState: make(map[types.MetricID]storeData),
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			s := &Store{
				metricsStore: tc.state,
				knownMetrics: make(map[types.MetricID]interface{}),
				metrics:      newMetrics(prometheus.NewRegistry()),
			}
			got, err := s.getSetPointsAndOffset(tc.points, tc.offsets, tc.now)
			if err != nil {
				t.Errorf("getSetPointsAndOffset() error = %v", err)
			}
			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("getSetPointsAndOffset() = %v, want %v", got, tc.want)
			}
			if !reflect.DeepEqual(s.metricsStore, tc.wantState) {
				t.Errorf("getSetPointsAndOffset() metrics = %v, want %v", s.metricsStore, tc.wantState)
			}
		})
	}
}
// TestStore_markToExpire verifies that markToExpire rewrites the expiration
// time of exactly the requested metric IDs to now+ttl.
func TestStore_markToExpire(t *testing.T) {
	// entry builds a storeData keyed only by the fields this test inspects.
	entry := func(id types.MetricID, offset int, exp time.Time) storeData {
		return storeData{
			MetricData:     types.MetricData{ID: id},
			WriteOffset:    offset,
			expirationTime: exp,
		}
	}
	cases := []struct {
		name      string
		state     map[types.MetricID]storeData
		ids       []types.MetricID
		ttl       time.Duration
		now       time.Time
		wantState map[types.MetricID]storeData
	}{
		{
			name: "simple",
			state: map[types.MetricID]storeData{
				MetricIDTest1: entry(MetricIDTest1, 1, time.Unix(900000, 0)),
				MetricIDTest2: entry(MetricIDTest2, 2, time.Unix(900000, 0)),
			},
			ids: []types.MetricID{MetricIDTest2},
			ttl: 30 * time.Second,
			now: time.Unix(600, 0),
			wantState: map[types.MetricID]storeData{
				MetricIDTest1: entry(MetricIDTest1, 1, time.Unix(900000, 0)),
				MetricIDTest2: entry(MetricIDTest2, 2, time.Unix(600+30, 0)),
			},
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			s := &Store{
				metricsStore: tc.state,
				metrics:      newMetrics(prometheus.NewRegistry()),
			}
			if err := s.markToExpire(tc.ids, tc.ttl, tc.now); err != nil {
				t.Errorf("Store.markToExpire() error = %v", err)
			}
			if !reflect.DeepEqual(s.metricsStore, tc.wantState) {
				t.Errorf("Store.markToExpire() metrics = %v, want %v", s.metricsStore, tc.wantState)
			}
		})
	}
}
func TestStore_GetSetFlushDeadline(t *testing.T) {
tests := []struct {
state map[types.MetricID]storeData
args map[types.MetricID]time.Time
want map[types.MetricID]time.Time
wantState map[types.MetricID]storeData
name string
}{
{
name: "simple",
state: map[types.MetricID]storeData{
MetricIDTest1: {
flushDeadline: time.Unix(42, 0),
},
MetricIDTest2: {
flushDeadline: time.Unix(1337, 0),
},
},
args: map[types.MetricID]time.Time{
MetricIDTest2: time.Unix(200, 0),
},
want: map[types.MetricID]time.Time{
MetricIDTest2: time.Unix(1337, 0),
},
wantState: map[types.MetricID]storeData{
MetricIDTest1: {
flushDeadline: time.Unix(42, 0),
},
MetricIDTest2: {
flushDeadline: time.Unix(200, 0),
},
},
},
{
name: "no-state",
state: map[types.MetricID]storeData{},
args: map[types.MetricID]time.Time{
MetricIDTest2: time.Unix(200, 0),
},
want: map[types.MetricID]time.Time{
MetricIDTest2: {},
},
wantState: map[types.MetricID]storeData{
MetricIDTest2: {
flushDeadline: time.Unix(200, 0),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := &Store{
metricsStore: tt.state,
metrics: newMetrics(prometheus.NewRegistry()),
}
got, err := s.GetSetFlushDeadline(context.Background(), tt.args)
if err != nil {
t.Errorf("Store.GetSetFlushDeadline() error = %v", err)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("Store.GetSetFlushDeadline() = %v, want %v", got, tt.want)
}
if !reflect.DeepEqual(s.metricsStore, tt.wantState) {
t.Errorf("Store.GetSetFlushDeadline() flushDeadlines = %v, want %v", s.metricsStore, tt.wantState)
}
})
}
}
func TestStore_GetTransfert(t *testing.T) {
type fields struct {
metrics map[types.MetricID]storeData
transfertMetrics []types.MetricID
}
tests := []struct {
want map[types.MetricID]time.Time
name string
fields fields
wantState []types.MetricID
args int
}{
{
name: "empty-nil",
fields: fields{
metrics: nil,
transfertMetrics: nil,
},
args: 50,
want: map[types.MetricID]time.Time{},
wantState: nil,
},
{
name: "empty",
fields: fields{
metrics: nil,
transfertMetrics: []types.MetricID{},
},
args: 50,
want: map[types.MetricID]time.Time{},
wantState: []types.MetricID{},
},
{
name: "less-than-requested",
fields: fields{
metrics: map[types.MetricID]storeData{
MetricIDTest1: {
flushDeadline: time.Unix(0, 0),
},
MetricIDTest2: {
flushDeadline: time.Unix(42, 0),
},
},
transfertMetrics: []types.MetricID{
MetricIDTest1,
MetricIDTest2,
MetricIDTest3,
},
},
args: 50,
want: map[types.MetricID]time.Time{
MetricIDTest1: time.Unix(0, 0),
MetricIDTest2: time.Unix(42, 0),
MetricIDTest3: {},
},
wantState: []types.MetricID{},
},
{
name: "more-than-requested",
fields: fields{
metrics: map[types.MetricID]storeData{
MetricIDTest1: {
flushDeadline: time.Unix(0, 0),
},
MetricIDTest2: {
flushDeadline: time.Unix(42, 0),
},
MetricIDTest3: {
flushDeadline: time.Unix(1337, 0),
},
},
transfertMetrics: []types.MetricID{
MetricIDTest1,
MetricIDTest2,
MetricIDTest3,
},
},
args: 2,
want: map[types.MetricID]time.Time{
MetricIDTest1: time.Unix(0, 0),
MetricIDTest2: time.Unix(42, 0),
},
wantState: []types.MetricID{
MetricIDTest3,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := &Store{
metricsStore: tt.fields.metrics,
transfertMetrics: tt.fields.transfertMetrics,
metrics: newMetrics(prometheus.NewRegistry()),
}
got, err := s.GetTransfert(context.Background(), tt.args)
if err != nil {
t.Errorf("Store.GetTransfert() error = %v", err)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("Store.GetTransfert() = %v, want %v", got, tt.want)
}
if !reflect.DeepEqual(s.transfertMetrics, tt.wantState) {
t.Errorf("Store.GetTransfert() = %v, want %v", s.transfertMetrics, tt.wantState)
}
})
}
}
// TestStore_GetAllKnownMetrics walks a metric through its lifecycle
// (write -> deadline update -> mark-to-expire) and checks the known-metrics
// map after each step.
func TestStore_GetAllKnownMetrics(t *testing.T) {
	store := New(prometheus.NewRegistry())
	checkKnown := func(want map[types.MetricID]time.Time) {
		got, _ := store.GetAllKnownMetrics(context.Background())
		if !reflect.DeepEqual(got, want) {
			t.Errorf("GetAllKnownMetrics() = %v, want %v", got, want)
		}
	}
	// Seed the store with one metric; it becomes known with a zero deadline.
	seed := []types.MetricData{
		{
			ID:         MetricIDTest1,
			TimeToLive: 42,
			Points:     []types.MetricPoint{{Timestamp: 10000}},
		},
	}
	if _, err := store.getSetPointsAndOffset(seed, []int{0}, time.Unix(10, 0)); err != nil {
		t.Fatal(err)
	}
	checkKnown(map[types.MetricID]time.Time{MetricIDTest1: {}})
	// Updating the flush deadline must be reflected in the known map.
	_, err := store.GetSetFlushDeadline(context.Background(), map[types.MetricID]time.Time{
		MetricIDTest1: time.Unix(42, 42),
	})
	if err != nil {
		t.Fatal(err)
	}
	checkKnown(map[types.MetricID]time.Time{MetricIDTest1: time.Unix(42, 42)})
	// Marking the metric to expire removes it from the known set.
	err = store.markToExpire(
		[]types.MetricID{MetricIDTest1},
		time.Minute,
		time.Unix(10, 0),
	)
	if err != nil {
		t.Fatal(err)
	}
	checkKnown(map[types.MetricID]time.Time{})
}
|
package tictactoe;
import tictactoe.ai.*;
import tictactoe.engine.Game;
import tictactoe.engine.Board;
import java.util.Map;
import java.util.Scanner;
import java.util.regex.Pattern;
public final class Application implements Runnable {
    /** Accepts "exit" or "start" followed by exactly two player kinds. */
    private static final Pattern PATTERN_COMMAND =
            Pattern.compile("(exit|start( easy| medium| hard| user){2})");
    /** Maps the player-kind keyword to its constructor reference. */
    private static final Map<String, Player> players = Map.of("user", User::new,
            "easy", Easy::new, "medium", Medium::new, "hard", Hard::new);

    /** Reads commands in a loop, launching a game per "start" until "exit". */
    public void run() {
        while (true) {
            final String[] tokens = readCommand().split(" ");
            if ("exit".equals(tokens[0])) {
                return;
            }
            new Game(
                    new Board(),
                    players.get(tokens[1]),
                    players.get(tokens[2])
            ).start();
        }
    }

    /** Prompts repeatedly until the input matches the command grammar. */
    private String readCommand() {
        final Scanner scanner = new Scanner(System.in);
        while (true) {
            System.out.println("Input command: ");
            final String line = scanner.nextLine().toLowerCase();
            if (PATTERN_COMMAND.matcher(line).matches()) {
                return line;
            }
            System.out.println("Bad parameters!");
        }
    }
}
|
<gh_stars>10-100
// Rule fixture: a custom rule whose stated purpose is to always produce an error.
exports.name = "Generate Errors - custom";
exports.description = "Always report an error";
// NOTE(review): `myReport` is not defined anywhere in this file. If the test
// harness does not inject it, invoking this rule throws a ReferenceError —
// which may be exactly the intent ("Always report an error"). Confirm against
// the harness before "fixing" this.
exports.rule = function(report) {
    myReport(report, "error");
};
package uk.joshiejack.husbandry.entity.traits.lifestyle;
import net.minecraft.entity.AgeableEntity;
import net.minecraft.entity.MobEntity;
import net.minecraft.entity.SpawnReason;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.tags.ITag;
import net.minecraft.tags.ItemTags;
import net.minecraft.util.Hand;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.server.ServerWorld;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import net.minecraftforge.common.util.Lazy;
import uk.joshiejack.husbandry.Husbandry;
import uk.joshiejack.husbandry.api.HusbandryAPI;
import uk.joshiejack.husbandry.api.IMobStats;
import uk.joshiejack.husbandry.api.trait.IDataTrait;
import uk.joshiejack.husbandry.api.trait.IIconTrait;
import uk.joshiejack.husbandry.api.trait.IInteractiveTrait;
import uk.joshiejack.husbandry.api.trait.INewDayTrait;
import uk.joshiejack.husbandry.item.HusbandryItems;
import uk.joshiejack.penguinlib.util.icon.Icon;
import uk.joshiejack.penguinlib.util.icon.ItemIcon;
/**
 * Trait for mammalian mobs: they can be made pregnant with a tagged item,
 * count down a gestation period one day at a time, and give birth to one or
 * more offspring. Implements data persistence (IDataTrait), right-click
 * interaction (IInteractiveTrait), daily ticking (INewDayTrait) and status
 * icon display (IIconTrait).
 */
public class MammalTrait implements IDataTrait, IInteractiveTrait, INewDayTrait, IIconTrait {
    /** Icon shown while pregnant (rendered from the miracle potion item). */
    public static final Lazy<Icon> ICON = Lazy.of(() -> new ItemIcon(HusbandryItems.MIRACLE_POTION.get()));
    /** Item tag listing everything that can impregnate a mammal. */
    public static final ITag.INamedTag<Item> IMPREGNATES_MAMMALS = ItemTags.createOptional(new ResourceLocation(Husbandry.MODID, "impregnates_mammals"));
    private int gestation; // Days of pregnancy remaining; counts down in onNewDay
    private boolean pregnant; // Whether the mob is currently pregnant

    /** Returns the pregnancy icon while pregnant, otherwise an empty icon. */
    @OnlyIn(Dist.CLIENT)
    @Override
    public Icon getIcon(IMobStats<?> stats) {
        return pregnant ? ICON.get() : ItemIcon.EMPTY;
    }

    /** Advances gestation by one day; triggers birth when it reaches zero. */
    @Override
    public void onNewDay(IMobStats<?> stats) {
        if (pregnant) {
            gestation--;
            if (gestation <= 0) {
                pregnant = false;
                giveBirth(stats);
            }
        }
    }

    /**
     * Right-click handling: an item in the IMPREGNATES_MAMMALS tag starts a
     * pregnancy (consuming one item) if the mob is not already pregnant and
     * its species has a non-zero gestation length.
     *
     * @return true when the interaction was consumed.
     */
    @Override
    public boolean onRightClick(IMobStats<?> stats, PlayerEntity player, Hand hand) {
        ItemStack held = player.getItemInHand(hand);
        if (IMPREGNATES_MAMMALS.contains(held.getItem())
                && !pregnant && stats.getSpecies().getDaysToBirth() != 0) {
            pregnant = true;
            gestation = stats.getSpecies().getDaysToBirth();
            held.shrink(1);
            return true;
        }
        return false;
    }

    public boolean isPregnant() {
        return pregnant;
    }

    /**
     * Spawns 1-3 offspring: 1% chance of triplets (chance >= 99), 9% chance of
     * twins (90-98), otherwise a single baby. The mother gains happiness and
     * each baby starts with half of the mother's happiness.
     */
    private void giveBirth(IMobStats<?> stats) {
        MobEntity entity = stats.getEntity();
        stats.increaseHappiness(100); //Happy to have a child
        int chance = entity.level.random.nextInt(100);
        int offspring = chance >= 99 ? 3 : chance >= 90 ? 2 : 1;
        for (int i = 0; i < offspring; i++) {
            if (entity instanceof AgeableEntity) {
                AgeableEntity ageable = ((AgeableEntity) entity).getBreedOffspring((ServerWorld) entity.level, (AgeableEntity) entity);
                if (ageable != null) {
                    // NOTE(review): -Integer.MAX_VALUE keeps the offspring a
                    // baby effectively forever — presumably so growth is driven
                    // by this mod rather than vanilla aging; confirm.
                    ageable.setAge(-Integer.MAX_VALUE);
                    // NOTE(review): xo/yo/zo are the entity's previous-tick
                    // coordinates — confirm this (vs getX()/getY()/getZ()) is intended.
                    ageable.setPos(entity.xo, entity.yo, entity.zo);
                    IMobStats<?> babyStats = HusbandryAPI.instance.getStatsForEntity(ageable);
                    if (babyStats != null)
                        babyStats.increaseHappiness(stats.getHappiness() / 2);
                    entity.level.addFreshEntity(ageable);
                }
            } else
                // Non-ageable mobs: spawn a fresh entity of the same type instead.
                entity.getType().spawn((ServerWorld) entity.level, null, null, null, entity.blockPosition(), SpawnReason.BREEDING, true, true);
        }
    }

    /** Persists pregnancy state to NBT. */
    @Override
    public void save(CompoundNBT tag) {
        tag.putInt("Gestation", gestation);
        tag.putBoolean("Pregnant", pregnant);
    }

    /** Restores pregnancy state from NBT. */
    @Override
    public void load(CompoundNBT nbt) {
        gestation = nbt.getInt("Gestation");
        pregnant = nbt.getBoolean("Pregnant");
    }
}
|
import React, { Component } from 'react'
import { LinkContainer } from 'react-router-bootstrap';
// all compoenent from react bootsrap
import Container from 'react-bootstrap/Container';
import Row from 'react-bootstrap/Row';
import Col from 'react-bootstrap/Col';
import Form from 'react-bootstrap/Form'
import Button from 'react-bootstrap/Button';
import Image from 'react-bootstrap/Image';
// scss for array
import style from './DefaultLogin.module.scss';
// Logo for Array
import Logo from '../Image/Logo.svg';
// icon
import FacebookIcon from "../Image/Facebookicon.svg";
import TwiterIcon from "../Image/twitericon.svg";
import GoogleIcon from "../Image/googleicon.svg";
export default class DefaultLogin extends Component {
submithander(event){
event.preventDefault();
}
render() {
return (
<>
<Container fluid={true} >
<Row>
<Col sm={12} xl={6} xs={12} md={6} lg={6} className={style.bg}>
<Container fluid={true} className={style.ownhight} >
<Image className='mt-5' src={Logo} alt="logo for Array " fluid />
<div className={style.ourcontainer}>
<h2 className={style.tittle} > Welcome Back !</h2>
<p className={style.text} > Contect with us Enter your Personal Information</p>
<LinkContainer to="/signup">
<Button className={style.btnprimary} type="submit">
Sign up
</Button>
</LinkContainer>
</div>
</Container>
</Col>
<Col sm={12} xl={6} xs={12} md={6} lg={6} >
<Container fluid={true} className={style.ownhight} >
<div className='m-5 p-3'>
<h2 className={style.texttittle}> Login Here</h2>
<div className=' d-flex flex-row justify-content-center'>
<Image className='mr-2' src={FacebookIcon} alt="icon for facebook " fluid/>
<Image className='mr-2' src={GoogleIcon} alt="icon for Google " fluid/>
<Image src={TwiterIcon} alt="icon for twiter " fluid/>
</div>
<h2 className={style.textor}> or </h2>
<Form className='m-3 p-2' onSubmit={this.submithander} >
<Form.Group controlId="formBasicEmail">
{/* use autocomplete attribute */}
<Form.Control autoComplete='current-email' className={style.formControl} type="email" placeholder="Enter email" />
</Form.Group>
<Form.Group controlId="formBasicPassword">
{/* use autocomplete html attribute */}
<Form.Control autoComplete='current-password' className={style.formControl} type="password" placeholder="Password" />
</Form.Group>
<LinkContainer to="/">
<Button className={style.btnsubmit} type="submit">
Submit
</Button>
</LinkContainer>
</Form>
</div>
</Container>
</Col>
</Row>
</Container>
</>
)
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package simplertsplugin;
import com.rma.model.Project;
import hec2.plugin.AbstractPlugin;
import hec2.rts.plugin.SimpleRtsPlugin;
import hec2.rts.plugin.RtsPluginManager;
import rma.swing.RmaImage;
import com.rma.client.BrowserAction;
import javax.swing.Icon;
import javax.swing.Action;
import com.rma.client.Browser;
import hec2.rts.client.RtsFrame;
import javax.swing.JOptionPane;
//
// @author q0hecemt
//
/**
 * Minimal demonstration plugin for the RTS framework. On construction it
 * registers itself with the {@link RtsPluginManager} and installs a button in
 * the Tools menu and Programs toolbar that invokes {@link #displayApplication()}.
 * Most project-lifecycle callbacks are intentionally left unimplemented.
 */
public class SimpleRTSPluginMain extends AbstractPlugin implements SimpleRtsPlugin {
    public static final String PLUGINNAME = "Simple RTS Plugin";
    public static final String PluginShortName = "Simple";
    private static final String PLUGIN_VERSION = "0.1.0";

    public static void main (String[] args) {
        SimpleRTSPluginMain p = new SimpleRTSPluginMain();
    }

    public SimpleRTSPluginMain(){
        super();
        setName(PluginShortName);
        RtsPluginManager.register(this);
        addToToolsToolbar();
    }

    /**
     * Installs a menu item and toolbar button whose action invokes
     * {@link #displayApplication()} reflectively (via the method-name string).
     */
    protected void addToToolsToolbar(){
        Icon i = RmaImage.getImageIcon("Images/anchor.png");
        // BrowserAction(name, icon, methodContainer, methodName): the fourth
        // argument names the method invoked when the action fires.
        BrowserAction a = new BrowserAction(PluginShortName, i , this , "displayApplication" );
        // Tooltip text for the button/menu item.
        a.putValue(Action.SHORT_DESCRIPTION, getName());
        // Insert the menu item in the Tools menu at the third position.
        Browser.getBrowserFrame().getToolsMenu().insert(a,3);
        // The browser frame is an RtsFrame; add the action to its Programs toolbar.
        ((RtsFrame)Browser.getBrowserFrame()).getProgramToolbar().add(a);
    }

    @Override
    public boolean createProject(Project prjct) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public boolean openProject(Project prjct) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public boolean close(boolean bln) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public String getProjectName() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public boolean saveProject() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public String getLogfile() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /** Shows a placeholder dialog; wired to the toolbar button above. */
    @Override
    public boolean displayApplication() {
        JOptionPane.showMessageDialog(Browser.getBrowserFrame(), "Sorry, this button does not do anything...yet\nYou should implement it!", "Sorry",JOptionPane.PLAIN_MESSAGE);
        return true;
    }

    /**
     * Returns the plugin version. Fix: previously threw
     * UnsupportedOperationException even though PLUGIN_VERSION was declared
     * (and otherwise unused).
     */
    @Override
    public String getVersion() {
        return PLUGIN_VERSION;
    }

    @Override
    public String getDirectory() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
}
|
# Rewrite placeholders in every file of the current directory, in place:
#   "master" -> $1   (presumably the master-node hostname — confirm with caller)
#   hard-coded Hadoop install path -> $2
#   hard-coded JDK install path    -> $3
# '#' is used as the sed delimiter because the replacements contain '/'.
sed -i "s#master#${1}#g" ./*
sed -i "s#/home/wangqg/socialite/hadoop-2.6.5#${2}#g" ./*
sed -i "s#/home/wangqg/socialite/jdk1.8.0_161#${3}#g" ./*
def sort_strings(list_of_strings):
    """Return a new list with the strings sorted case-insensitively.

    Args:
        list_of_strings: iterable of strings; the input is left unmodified.

    Returns:
        A new list sorted alphabetically, ignoring case.
    """
    # Use the unbound method str.lower directly as the key; the previous
    # lambda named its parameter `str`, shadowing the builtin.
    return sorted(list_of_strings, key=str.lower)

result = sort_strings(['apple', 'orange', 'pear'])
print(result)
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Usage: $ ./up.sh [--build] [--seed]
set -e
# Print usage text. Does not exit: callers choose the exit status, so that
# --help can succeed (status 0) while bad flags still fail (status 1).
# Previously usage() itself exited 1, making the --help branch's `exit 0`
# unreachable and reporting help as an error.
usage() {
    echo "usage: ./$(basename -- ${0}) [--build] [--seed]"
}

# Change working directory to project root
project_root=$(git rev-parse --show-toplevel)
cd "${project_root}"

compose_files="-f docker-compose.yml"
args="-V --force-recreate"

# Parse flags: --build adds the dev compose file and rebuilds images,
# --seed adds the seed compose file.
while [ $# -gt 0 ]; do
    case $1 in
        '--build'|-b)
            BUILD='true'
            ;;
        '--seed'|-s)
            SEED='true'
            ;;
        '--help'|-h)
            usage
            exit 0
            ;;
        *)
            usage
            exit 1
            ;;
    esac
    shift
done

if [[ "${BUILD}" = "true" ]]; then
    compose_files+=" -f docker-compose.dev.yml"
    args+=" --build"
fi

if [[ "${SEED}" = "true" ]]; then
    compose_files+=" -f docker-compose.seed.yml"
fi

docker-compose $compose_files up $args
|
# Build-time environment: the layer name and the Node.js runtime version it
# bundles. NOTE(review): presumably sourced by a Lambda-layer build script —
# confirm the consumer before renaming these variables.
export LAYER_NAME=nodejs12
export NODE_VERSION=12.16.1
|
-- | Arithmetic mean of a list of Ints, as a Float; 0 for the empty list.
average :: [Int] -> Float
average [] = 0
average xs = (fromIntegral (sum xs)) / (fromIntegral (length xs))

-- Fix: the file previously ended with the bare expression
-- `average [1,2,3,4,5]`, which is a parse error in a compiled module
-- (it only works interactively in GHCi). Bind the demo to `main` so the
-- file compiles and still evaluates the same expression.
main :: IO ()
main = print (average [1,2,3,4,5])
import React from 'react';
import PropTypes from 'prop-types';
import { Route, Redirect, Switch } from 'react-router-dom';
import ExampleList from './ExampleList';
import Basic from '../scripts/example01-basic';
import ResizableCols from '../scripts/example02-resizable-cols';
import FrozenCols from '../scripts/example03-frozen-cols';
import Editable from '../scripts/example04-editable';
import CustomFormatters from '../scripts/example05-custom-formatters';
import BuiltInEditors from '../scripts/example06-built-in-editors';
import SortableCols from '../scripts/example08-sortable-cols';
import FilterableGrid from '../scripts/example09-filterable-grid';
import OneMillionRows from '../scripts/example10-one-million-rows';
import ImmutableData from '../scripts/example11-immutable-data';
import CustomRowRenderer from '../scripts/example12-customRowRenderer';
import AllFeatures from '../scripts/example13-all-features';
import AllFeaturesImmutable from '../scripts/example14-all-features-immutable';
import EmptyRows from '../scripts/example15-empty-rows';
import CellDragDown from '../scripts/example16-cell-drag-down';
import FilterableSortableGrid from '../scripts/example16-filterable-sortable-grid';
import RowSelect from '../scripts/example16-row-select';
import GridEvents from '../scripts/example17-grid-events';
import ContextMenu from '../scripts/example18-context-menu';
import ColumnEvents from '../scripts/example19-column-events';
import CellNavigation from '../scripts/example20-cell-navigation';
import CellSelectionEvents from '../scripts/example21-cell-selection-events';
import Grouping from '../scripts/example21-grouping';
import CustomFilters from '../scripts/example22-custom-filters';
import ImmutableDataGrouping from '../scripts/example23-immutable-data-grouping';
import RowOrdering from '../scripts/example23-row-reordering';
import DraggableHeader from '../scripts/example24-draggable-header';
import TreeView from '../scripts/example25-tree-view';
import TreeViewNoAddDelete from '../scripts/example26-tree-view-no-add-delete';
import CellActions from '../scripts/example27-cell-actions';
import ScrollToRowIndex from '../scripts/example28-scroll-to-row-index';
import DescendingFirstSortable from '../scripts/example29-descendingFirstSortable';
import SelectionRangeEvents from '../scripts/example30-selection-range-events';
import IsScrolling from '../scripts/example31-isScrolling';
/**
 * Route table for the react-data-grid example pages: a sidebar listing every
 * example plus a <Switch> mapping each /examples/* path to its demo component.
 * Unmatched paths under the mount point redirect to the all-features demo.
 */
export default function Examples({ match }) {
  return (
    <div className="container-fluid top-space">
      <div className="row">
        {/* Sidebar with links to each example */}
        <div className="col-md-2 top-space" role="complementary">
          <nav id="sidebar" className="bs-docs-sidebar hidden-print hidden-xs hidden-sm" data-spy="affix" data-offset-top="0" data-offset-bottom="200">
            <div id="grid-examples-div">
              <ExampleList className="nav bs-docs-sidenav" />
            </div>
          </nav>
        </div>
        <div className="col-md-10">
          <div>
            <h1 className="page-header">React Data Grid Examples</h1>
            <Switch>
              <Route path="/examples/basic" component={Basic} />
              <Route path="/examples/resizable-cols" component={ResizableCols} />
              <Route path="/examples/frozen-cols" component={FrozenCols} />
              <Route path="/examples/editable" component={Editable} />
              <Route path="/examples/custom-formatters" component={CustomFormatters} />
              <Route path="/examples/built-in-editors" component={BuiltInEditors} />
              <Route path="/examples/sortable-cols" component={SortableCols} />
              <Route path="/examples/filterable-grid" component={FilterableGrid} />
              <Route path="/examples/one-million-rows" component={OneMillionRows} />
              <Route path="/examples/immutable-data" component={ImmutableData} />
              <Route path="/examples/customRowRenderer" component={CustomRowRenderer} />
              <Route path="/examples/all-features" component={AllFeatures} />
              <Route path="/examples/all-features-immutable" component={AllFeaturesImmutable} />
              <Route path="/examples/empty-rows" component={EmptyRows} />
              <Route path="/examples/cell-drag-down" component={CellDragDown} />
              <Route path="/examples/filterable-sortable-grid" component={FilterableSortableGrid} />
              <Route path="/examples/row-select" component={RowSelect} />
              <Route path="/examples/grid-events" component={GridEvents} />
              <Route path="/examples/context-menu" component={ContextMenu} />
              <Route path="/examples/column-events" component={ColumnEvents} />
              <Route path="/examples/cell-navigation" component={CellNavigation} />
              <Route path="/examples/cell-selection-events" component={CellSelectionEvents} />
              <Route path="/examples/grouping" component={Grouping} />
              <Route path="/examples/custom-filters" component={CustomFilters} />
              <Route path="/examples/immutable-data-grouping" component={ImmutableDataGrouping} />
              <Route path="/examples/row-reordering" component={RowOrdering} />
              <Route path="/examples/draggable-header" component={DraggableHeader} />
              <Route path="/examples/tree-view" component={TreeView} />
              <Route path="/examples/tree-view-no-add-delete" component={TreeViewNoAddDelete} />
              <Route path="/examples/cell-actions" component={CellActions} />
              <Route path="/examples/scroll-to-row-index" component={ScrollToRowIndex} />
              <Route path="/examples/descendingFirstSortable" component={DescendingFirstSortable} />
              <Route path="/examples/selection-range-events" component={SelectionRangeEvents} />
              <Route path="/examples/isScrolling" component={IsScrolling} />
              {/* Fallback: anything else under the mount point goes to all-features */}
              <Redirect from={`${match.url}`} to={`${match.url}/all-features`} />
            </Switch>
          </div>
        </div>
      </div>
    </div>
  );
}

// `match` is injected by react-router; only `url` is read (for the redirect).
Examples.propTypes = {
  match: PropTypes.shape({
    url: PropTypes.string
  })
};
|
"""
Copyright (C) 2020 ETH Zurich. All rights reserved.
Author: <NAME>, ETH Zurich
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from os.path import dirname, abspath
import cv2
import h5py
import numpy as np
from pybf.pybf.io_interfaces import ImageLoader
from pybf.pybf.visualization import log_compress
def make_video(dataset_file_path,
               db_range=50,
               video_fps=60,
               save_path=None):
    """Render every high-resolution frame of a pybf image dataset into an AVI.

    Each frame is log-compressed into `db_range` dB, scaled to uint8 and
    resized so the output keeps the dataset's physical aspect ratio.

    Args:
        dataset_file_path: path to the HDF5 image dataset file.
        db_range: dynamic range in dB used for log compression.
        video_fps: frame rate of the output video.
        save_path: output directory; defaults to the dataset's own folder.
    """
    # Create image loader object
    image_loader = ImageLoader(dataset_file_path)
    try:
        # Get sorted list of available frame indices
        frames_list = image_loader.frame_indices
        frames_list.sort()
        # Get image shape in pixels
        img_shape = image_loader.get_high_res_image(frames_list[0]).shape
        # Get x and z ranges and define aspect ratio
        pixels_coords = image_loader.get_pixels_coords()
        x_range = pixels_coords[0,:].max() - pixels_coords[0,:].min()
        z_range = pixels_coords[1,:].max() - pixels_coords[1,:].min()
        aspect_ratio = x_range / z_range
        print("Aspect ratio: ", aspect_ratio)
        # Define output image shape
        img_shape_out = (img_shape[0], int(img_shape[0]/aspect_ratio))
        print("Output image resolution: ", img_shape_out)
        # Construct save path (save to dataset folder by default)
        if save_path is None:
            len_to_cut = len(dataset_file_path.split('/')[-1])
            save_path = dataset_file_path[:-1 - len_to_cut]
        video = cv2.VideoWriter(save_path + '/' + 'video.avi', cv2.VideoWriter_fourcc(*"MJPG"), int(video_fps), img_shape_out, 0)
        try:
            for n_frame in frames_list:
                # Take absolute value of high resolution frames
                frame_data = np.abs(image_loader.get_high_res_image(n_frame))
                # Log-compress into [-db_range, 0], then shift to [0, db_range]
                frame_data_log = log_compress(frame_data, db_range)
                frame_data_log = frame_data_log + db_range
                # Convert to uint8_t
                frame_data_uint8 = np.uint8(frame_data_log/np.amax(frame_data_log) * 255)
                frame_final = cv2.resize(frame_data_uint8, img_shape_out)
                video.write(frame_final.astype('uint8'))
        finally:
            # Release the video even if a frame fails to convert or resize.
            video.release()
    finally:
        # Fix: the hdf5 file was previously leaked if any step above raised.
        image_loader.close_file()
    return
if __name__ == '__main__':
    # Fix: argparse was used below but never imported, so running this script
    # directly raised NameError. Import locally — it is only needed for the
    # CLI entry point.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--dataset_file_path',
        type=str,
        default=' ',
        help='Path to the image dataset file.')
    parser.add_argument(
        '--db_range',
        type=float,
        default=50,
        help='Db range of images')
    parser.add_argument(
        '--video_fps',
        type=float,
        default=60,
        help='FPS of output video')
    FLAGS, unparsed = parser.parse_known_args()
    # Run main function
    make_video(FLAGS.dataset_file_path,
               db_range=FLAGS.db_range,
               video_fps=FLAGS.video_fps)
<reponame>tmkn/packageanalyzer
import * as path from "path";
import { promises as fs } from "fs";
import { PassThrough } from "stream";
import { cli } from "../../src/cli/cli";
import { createMockNpmServer, IMockServer } from "../server";
import { DependencyDumperCommand } from "../../src/cli/dependencyDumpCommand";
import { TestWritable } from "../common";
// Integration tests for the `dependencydump` CLI command against a mock npm
// registry. Each test wipes the output folder, runs the command with a
// captured stderr stream, and asserts on the dump result / error output.
describe(`Dependency Dumper`, () => {
    let server: IMockServer;
    const outputFolder = path.join(process.cwd(), `tmp`, `dump`);

    beforeAll(async () => {
        server = await createMockNpmServer();
    });

    // Happy path: dumping react@16.8.1 creates 7 files and writes no errors.
    test(`works`, async () => {
        const command = cli.process([
            `dependencydump`,
            `--package`,
            `react@16.8.1`,
            `--folder`,
            outputFolder,
            `--registry`,
            `http://localhost:${server.port}`
        ]);
        expect(command).toBeInstanceOf(DependencyDumperCommand);

        // Start from a clean, non-existent output folder.
        await fs.rm(outputFolder, { recursive: true, force: true });
        await expect(fs.readdir(outputFolder)).rejects.toThrow();

        const stderr = new TestWritable();
        command.context = {
            stdin: process.stdin,
            stdout: new PassThrough(),
            stderr: stderr
        };
        await command.execute();

        const folder = await fs.readdir(outputFolder);
        expect(folder.length).toEqual(7);
        expect(stderr.lines.length).toEqual(0);
    });

    // Unreachable registry host: the command should report errors on stderr
    // rather than throwing. (10s timeout for the network failure.)
    test(`fails on dumping`, async () => {
        const command = cli.process([
            `dependencydump`,
            `--package`,
            `react@16.8.1`,
            `--folder`,
            outputFolder,
            `--registry`,
            `http://unknown:${server.port}`
        ]);
        expect(command).toBeInstanceOf(DependencyDumperCommand);

        await fs.rm(outputFolder, { recursive: true, force: true });
        await expect(fs.readdir(outputFolder)).rejects.toThrow();

        const stderr = new TestWritable();
        command.context = {
            stdin: process.stdin,
            stdout: new PassThrough(),
            stderr: stderr
        };
        await command.execute();
        expect(stderr.lines.length).toBeGreaterThan(0);
    }, 10000);

    // Same CLI args as the happy path, but the parsed `package` option is
    // forced to undefined after parsing to exercise the validation branch.
    test(`fails on undefined --package`, async () => {
        const command = cli.process([
            `dependencydump`,
            `--package`,
            `react@16.8.1`,
            `--folder`,
            outputFolder,
            `--registry`,
            `http://localhost:${server.port}`
        ]);
        expect(command).toBeInstanceOf(DependencyDumperCommand);

        await fs.rm(outputFolder, { recursive: true, force: true });
        await expect(fs.readdir(outputFolder)).rejects.toThrow();

        const stderr = new TestWritable();
        command.context = {
            stdin: process.stdin,
            stdout: new PassThrough(),
            stderr: stderr
        };
        (command as any).package = undefined;
        await command.execute();
        expect(stderr.lines.length).toBeGreaterThan(0);
    });

    afterAll(() => server.close());
});
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
def process_annotations(coco_dts, coco_gts):
    """Build matplotlib patches for COCO detections and ground truths.

    Args:
        coco_dts: list of COCO detection annotation dicts.
        coco_gts: list of COCO ground-truth annotation dicts; assumed to carry
            'bbox', 'image_id', 'id' and 'num_keypoints' keys — TODO confirm
            against the caller.

    Returns:
        (patches_list, title): rectangle patches to draw, and the title string
        (non-empty only in the "one GT, no detections" case below).
    """
    patches_list = []
    title = ""
    # Special case: exactly one ground truth with no detections — draw its
    # bbox in orange and set a "[image_id][ann_id][num_keypoints]" title.
    if len(coco_dts) == 0 and len(coco_gts) == 1:
        bbox = coco_gts[0]['bbox']
        # bbox presumably follows the COCO [x, y, width, height] convention — confirm.
        rect = patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], fill=False, edgecolor=[1, .6, 0], linewidth=3)
        patches_list.append(rect)
        title = "[%d][%d][%d]" % (coco_gts[0]['image_id'], coco_gts[0]['id'], coco_gts[0]['num_keypoints'])
        plt.title(title, fontsize=20)
    for ann in coco_dts:
        # Random color per detection with channels in [0.4, 1.0).
        c = (np.random.random((1, 3)) * 0.6 + 0.4).tolist()[0]
        sks = np.array(ann['skeleton'])
        # Process sks as needed
    return patches_list, title
<gh_stars>1-10
package io.github.spair.service.services.impl;
import io.github.spair.repository.repositories.SequenceRepository;
import io.github.spair.service.services.SequenceService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class SequenceServiceImpl implements SequenceService {

    /** Repository that produces the sequence values. */
    @Autowired
    private SequenceRepository sequenceRepository;

    /**
     * Returns the next value of the named sequence, delegating directly to
     * the repository.
     *
     * @param seqName name of the sequence to advance
     * @return the next sequence value
     */
    @Override
    public Long getNextSequence(String seqName) {
        return sequenceRepository.getNextSequence(seqName);
    }
}
|
// Map each string in `arr` to an object pairing the string with its length.
const stringLengths = (arr) =>
  arr.map((str) => ({ str, length: str.length }));

const arr = ["Hello", "World", "Foo", "Bar"]
const output = stringLengths(arr);
console.log(output);
// Expected:
// [ { str: 'Hello', length: 5 },
//   { str: 'World', length: 5 },
//   { str: 'Foo', length: 3 },
//   { str: 'Bar', length: 3 } ]
function findPair(arr, target) {
arr.sort();
let low = 0;
let high = arr.length - 1;
while (low < high) {
let sum = arr[low] + arr[high];
if (sum === target) {
return [arr[low], arr[high]];
} else if (sum < target) {
low ++;
} else {
high --;
}
}
return [];
}
let arr = [4, 5, 7, 11, 9, 13];
let target = 16;
console.log(findPair(arr, target));
// Output: [7, 9] |
<filename>node_modules/googleapis/build/src/apis/slides/index.d.ts
/*! THIS FILE IS AUTO-GENERATED */
import { AuthPlus } from 'googleapis-common';
import { slides_v1 } from './v1';
export declare const VERSIONS: {
'v1': typeof slides_v1.Slides;
};
export declare function slides(version: 'v1'): slides_v1.Slides;
export declare function slides(options: slides_v1.Options): slides_v1.Slides;
declare const auth: AuthPlus;
export { auth };
|
import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin
class SimpleCrawler:
    """Minimal crawler that records every absolute URL reachable from a start URL.

    Fix: traversal is now iterative (explicit work list) instead of recursive,
    so deep link chains can no longer raise RecursionError. The public
    interface (crawl / get_unique_urls / _crawl_url) is unchanged.
    """

    def __init__(self, url):
        self.start_url = url        # URL where crawling begins
        self.visited_urls = set()   # URLs fetched successfully (HTTP 200)

    def crawl(self):
        """Crawl from start_url, following every absolute http(s) link found."""
        self._crawl_url(self.start_url)

    def get_unique_urls(self):
        """Return the visited URLs as a list (order unspecified)."""
        return list(self.visited_urls)

    def _crawl_url(self, url):
        """Visit `url` and everything transitively linked from it."""
        pending = [url]
        while pending:
            current = pending.pop()
            if current in self.visited_urls:
                continue
            try:
                response = requests.get(current)
            except requests.RequestException:
                # Best-effort: skip unreachable pages, keep crawling the rest.
                continue
            if response.status_code != 200:
                continue
            self.visited_urls.add(current)
            soup = BeautifulSoup(response.content, 'html.parser')
            for link in soup.find_all('a'):
                href = link.get('href')
                # Only follow absolute http(s) links, as before.
                if href and href.startswith('http'):
                    pending.append(urljoin(current, href))
# Usage example: crawl a site and print every unique URL discovered.
# NOTE(review): executes real network requests when this module is run.
crawler = SimpleCrawler('https://example.com')
crawler.crawl()
unique_urls = crawler.get_unique_urls()
print(unique_urls)
#!/bin/bash

if test "$#" -ne 1; then
    echo "################################"
    echo "Usage:"
    echo "./01_setup.sh <voice_name>"
    echo ""
    echo "Give a voice name eg., slt_arctic"
    echo "################################"
    exit 1
fi

setup_data=true

# For demo purpose we use the data of 10 speakers only!
# Build an average voice model (AVM) with 9 speakers
avg_voice="p225 p226 p227 p228 p229 p230 p231 p232 p233"

# Speakers to adapt the AVM towards; each gets its own database_<spkid> tree.
# FIX: this variable was previously never defined, so the adaptation loop
# below silently copied nothing. It is declared (empty) here so the omission
# is explicit. TODO: set the intended adaptation speaker id(s), e.g. "p234".
adapt_voice=""

# setup directory structure and copy the data
if [ "$setup_data" = true ]; then
    # copy the average-voice speakers into the shared database
    mkdir -p database
    mkdir -p database/wav
    mkdir -p database/txt
    for spkid in $avg_voice; do
        echo "copying the speaker $spkid data to database"
        cp VCTK-Corpus/wav48/$spkid/*.wav database/wav
        cp VCTK-Corpus/txt/$spkid/*.txt database/txt
    done
    # create a per-speaker database for each adaptation speaker
    for spkid in $adapt_voice; do
        mkdir -p database_$spkid
        mkdir -p database_$spkid/wav
        mkdir -p database_$spkid/txt
        echo "copying the speaker $spkid data to database_$spkid"
        cp VCTK-Corpus/wav48/$spkid/*.wav database_$spkid/wav
        cp VCTK-Corpus/txt/$spkid/*.txt database_$spkid/txt
    done
fi
# Derive paths: this script lives three levels below the Merlin root.
current_working_dir=$(pwd)
merlin_dir=$(dirname $(dirname $(dirname $current_working_dir)))
experiments_dir=${current_working_dir}/experiments
data_dir=${current_working_dir}/database

voice_name=$1
voice_dir=${experiments_dir}/${voice_name}

acoustic_dir=${voice_dir}/acoustic_model
duration_dir=${voice_dir}/duration_model
synthesis_dir=${voice_dir}/test_synthesis

# Create the experiment directory tree for this voice.
mkdir -p ${data_dir}
mkdir -p ${experiments_dir}
mkdir -p ${voice_dir}
mkdir -p ${acoustic_dir}
mkdir -p ${duration_dir}
mkdir -p ${synthesis_dir}
mkdir -p ${acoustic_dir}/data
mkdir -p ${duration_dir}/data
mkdir -p ${synthesis_dir}/txt

### create some test files ###
echo "Hello world." > ${synthesis_dir}/txt/test_001.txt
echo "Hi, this is a demo voice from Merlin." > ${synthesis_dir}/txt/test_002.txt
echo "Hope you guys enjoy free open-source voices from Merlin." > ${synthesis_dir}/txt/test_003.txt
printf "test_001\ntest_002\ntest_003" > ${synthesis_dir}/test_id_list.scp

# Generate the global settings file consumed by the later pipeline steps.
# (First echo uses > to truncate; all following lines append with >>.)
global_config_file=conf/global_settings.cfg

### default settings ###
echo "######################################" > $global_config_file
echo "############# PATHS ##################" >> $global_config_file
echo "######################################" >> $global_config_file
echo "" >> $global_config_file
echo "MerlinDir=${merlin_dir}" >> $global_config_file
echo "WorkDir=${current_working_dir}" >> $global_config_file
echo "" >> $global_config_file
echo "######################################" >> $global_config_file
echo "############# PARAMS #################" >> $global_config_file
echo "######################################" >> $global_config_file
echo "" >> $global_config_file
echo "Voice=${voice_name}" >> $global_config_file
echo "Labels=state_align" >> $global_config_file
echo "QuestionFile=questions-radio_dnn_416.hed" >> $global_config_file
echo "Vocoder=WORLD" >> $global_config_file
echo "SamplingFreq=48000" >> $global_config_file
echo "SilencePhone='sil'" >> $global_config_file
echo "FileIDList=file_id_list.scp" >> $global_config_file
echo "" >> $global_config_file
echo "######################################" >> $global_config_file
echo "######### No. of files ###############" >> $global_config_file
echo "######################################" >> $global_config_file
echo "" >> $global_config_file
echo "Train=3258" >> $global_config_file
echo "Valid=50" >> $global_config_file
echo "Test=50" >> $global_config_file
echo "" >> $global_config_file
echo "######################################" >> $global_config_file
echo "############# TOOLS ##################" >> $global_config_file
echo "######################################" >> $global_config_file
echo "" >> $global_config_file
# NOTE(review): the bundled-tools paths are commented out in favour of
# machine-specific absolute paths below — confirm these exist on the target host.
#echo "ESTDIR=${merlin_dir}/tools/speech_tools" >> $global_config_file
#echo "FESTDIR=${merlin_dir}/tools/festival" >> $global_config_file
#echo "FESTVOXDIR=${merlin_dir}/tools/festvox" >> $global_config_file
echo "ESTDIR=/l/SRC/speech_tools/bin" >> $global_config_file
echo "FESTDIR=/l/SRC/festival_2_4/festival" >> $global_config_file
echo "FESTVOXDIR=/l/SRC/festvox/" >> $global_config_file
echo "" >> $global_config_file
#echo "HTKDIR=${merlin_dir}/tools/bin/htk" >> $global_config_file
echo "HTKDIR=/l/SRC/htk-3.5/bin" >> $global_config_file
echo "" >> $global_config_file

echo "Step 1:"
echo "Merlin default voice settings configured in \"$global_config_file\""
echo "Modify these params as per your data..."
echo "eg., sampling frequency, no. of train files etc.,"
echo "setup done...!"
|
#!/bin/bash
#
# Build the 1D photo-ionisation convergence driver and run the constant
# density, no-recombination test matrix (nh{0..3}ss1e7_n1k grids).
g++ -Wall -g -DSERIAL parallelionisation.cc ../../testing/global.cc ../../testing/uniformGrid.cc ../../testing/dataio.cc -lreadline -lcfitsio
#
# The 16 copy-pasted invocations are generated by loops; run order matches
# the original expansion exactly: density nh0..nh3 outer, timestep
# refinement dt1..dt4 inner.  The 4th a.out argument grows with dt
# (1,1,8,50) -- presumably an output-sampling step; TODO confirm against
# parallelionisation.cc.
for nh in 0 1 2 3; do
  for dt in 1 2 3 4; do
    case $dt in
      1|2) step=1 ;;
      3)   step=8 ;;
      *)   step=50 ;;
    esac
    base="nh${nh}ss1e7_n1k_dt${dt}_acc3sv2"
    ./a.out "d1_const_parallel/${base}" "../../results/PE1dcart_parallel_${base}" 0 $step 5
  done
done
exit
############################################################
# Monday 26/5/08 #
# Analysis for 1D photo-ionisation problems, w/recombs. #
############################################################
# NOTE(review): this section sits below the `exit` above and never runs.
# It is kept verbatim as a record of the 26/5/08 analysis runs: with
# recombinations (RR* data, n1c grids), solvers acc{3,4} x {hard,simple},
# densities nh0..nh3 (ss scales 2e6..2e12), timesteps dt1..dt4.
# a.out args appear to be: <infile-base> <outfile-base> 0 <step> 5 --
# TODO confirm meanings against parallelionisation.cc.
g++ -Wall -g -DSERIAL parallelionisation.cc ../../testing/global.cc ../../testing/uniformGrid.cc ../../testing/dataio.cc -lreadline -lcfitsio
#############
## nh=1
# t/dt=10
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt1_acc3hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt1_acc3h 0 1 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt1_acc3simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt1_acc3s 0 1 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt1_acc4hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt1_acc4h 0 1 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt1_acc4simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt2_acc3hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt2_acc3h 0 1 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt2_acc3simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt2_acc3s 0 1 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt2_acc4hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt2_acc4h 0 1 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt2_acc4simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt3_acc3hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt3_acc3h 0 8 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt3_acc3simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt3_acc3s 0 8 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt3_acc4hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt3_acc4h 0 8 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt3_acc4simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt4_acc3hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt4_acc3h 0 50 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt4_acc3simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt4_acc3s 0 50 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt4_acc4hard ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt4_acc4h 0 50 5
./a.out d1_const_parallel/RRnh0ss2e6_n1c_dt4_acc4simple ../../results/PE1dcart_parallel_RRnh0ss2e6_n1c_dt4_acc4s 0 50 5
#############
## nh=10
# t/dt=10
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt1_acc3hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt1_acc3h 0 1 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt1_acc3simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt1_acc3s 0 1 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt1_acc4hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt1_acc4h 0 1 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt1_acc4simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt2_acc3hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt2_acc3h 0 1 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt2_acc3simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt2_acc3s 0 1 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt2_acc4hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt2_acc4h 0 1 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt2_acc4simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt3_acc3hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt3_acc3h 0 8 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt3_acc3simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt3_acc3s 0 8 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt3_acc4hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt3_acc4h 0 8 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt3_acc4simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt4_acc3hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt4_acc3h 0 50 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt4_acc3simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt4_acc3s 0 50 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt4_acc4hard ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt4_acc4h 0 50 5
./a.out d1_const_parallel/RRnh1ss2e8_n1c_dt4_acc4simple ../../results/PE1dcart_parallel_RRnh1ss2e8_n1c_dt4_acc4s 0 50 5
#############
## nh=100
# t/dt=10
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt1_acc3hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt1_acc3h 0 1 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt1_acc3simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt1_acc3s 0 1 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt1_acc4hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt1_acc4h 0 1 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt1_acc4simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt2_acc3hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt2_acc3h 0 1 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt2_acc3simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt2_acc3s 0 1 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt2_acc4hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt2_acc4h 0 1 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt2_acc4simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt3_acc3hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt3_acc3h 0 8 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt3_acc3simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt3_acc3s 0 8 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt3_acc4hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt3_acc4h 0 8 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt3_acc4simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt4_acc3hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt4_acc3h 0 50 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt4_acc3simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt4_acc3s 0 50 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt4_acc4hard ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt4_acc4h 0 50 5
./a.out d1_const_parallel/RRnh2ss2e10_n1c_dt4_acc4simple ../../results/PE1dcart_parallel_RRnh2ss2e10_n1c_dt4_acc4s 0 50 5
#############
## nh=1000
# t/dt=10
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt1_acc3hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt1_acc3h 0 1 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt1_acc3simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt1_acc3s 0 1 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt1_acc4hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt1_acc4h 0 1 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt1_acc4simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt2_acc3hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt2_acc3h 0 1 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt2_acc3simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt2_acc3s 0 1 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt2_acc4hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt2_acc4h 0 1 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt2_acc4simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt3_acc3hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt3_acc3h 0 8 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt3_acc3simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt3_acc3s 0 8 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt3_acc4hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt3_acc4h 0 8 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt3_acc4simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt4_acc3hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt4_acc3h 0 50 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt4_acc3simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt4_acc3s 0 50 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt4_acc4hard ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt4_acc4h 0 50 5
./a.out d1_const_parallel/RRnh3ss2e12_n1c_dt4_acc4simple ../../results/PE1dcart_parallel_RRnh3ss2e12_n1c_dt4_acc4s 0 50 5
exit
############################################################
# Monday 19/5/08 #
# Analysis for 1D photo-ionisation problems, no recombs. #
############################################################
# NOTE(review): this section is also unreachable (below an `exit`); kept
# verbatim as a record of the 19/5/08 runs: no recombinations
# (nh{0..3}ss1e7_n1k grids), solvers acc{3,4} x {hard,simple},
# timesteps dt1..dt4, same a.out argument pattern as above.
g++ -Wall -g -DSERIAL parallelionisation.cc ../../testing/global.cc ../../testing/uniformGrid.cc ../../testing/dataio.cc -lreadline -lcfitsio
#############
## nh=1
# t/dt=10
./a.out d1_const_parallel/nh0ss1e7_n1k_dt1_acc3hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt1_acc3h 0 1 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt1_acc3simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt1_acc3s 0 1 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt1_acc4hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt1_acc4h 0 1 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt1_acc4simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/nh0ss1e7_n1k_dt2_acc3hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt2_acc3h 0 1 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt2_acc3simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt2_acc3s 0 1 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt2_acc4hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt2_acc4h 0 1 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt2_acc4simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/nh0ss1e7_n1k_dt3_acc3hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt3_acc3h 0 8 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt3_acc3simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt3_acc3s 0 8 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt3_acc4hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt3_acc4h 0 8 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt3_acc4simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/nh0ss1e7_n1k_dt4_acc3hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt4_acc3h 0 50 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt4_acc3simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt4_acc3s 0 50 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt4_acc4hard ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt4_acc4h 0 50 5
./a.out d1_const_parallel/nh0ss1e7_n1k_dt4_acc4simple ../../results/PE1dcart_parallel_nh0ss1e7_n1k_dt4_acc4s 0 50 5
#############
## nh=10
# t/dt=10
./a.out d1_const_parallel/nh1ss1e7_n1k_dt1_acc3hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt1_acc3h 0 1 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt1_acc3simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt1_acc3s 0 1 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt1_acc4hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt1_acc4h 0 1 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt1_acc4simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/nh1ss1e7_n1k_dt2_acc3hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt2_acc3h 0 1 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt2_acc3simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt2_acc3s 0 1 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt2_acc4hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt2_acc4h 0 1 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt2_acc4simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/nh1ss1e7_n1k_dt3_acc3hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt3_acc3h 0 8 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt3_acc3simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt3_acc3s 0 8 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt3_acc4hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt3_acc4h 0 8 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt3_acc4simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/nh1ss1e7_n1k_dt4_acc3hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt4_acc3h 0 50 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt4_acc3simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt4_acc3s 0 50 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt4_acc4hard ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt4_acc4h 0 50 5
./a.out d1_const_parallel/nh1ss1e7_n1k_dt4_acc4simple ../../results/PE1dcart_parallel_nh1ss1e7_n1k_dt4_acc4s 0 50 5
#############
## nh=100
# t/dt=10
./a.out d1_const_parallel/nh2ss1e7_n1k_dt1_acc3hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt1_acc3h 0 1 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt1_acc3simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt1_acc3s 0 1 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt1_acc4hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt1_acc4h 0 1 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt1_acc4simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/nh2ss1e7_n1k_dt2_acc3hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt2_acc3h 0 1 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt2_acc3simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt2_acc3s 0 1 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt2_acc4hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt2_acc4h 0 1 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt2_acc4simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/nh2ss1e7_n1k_dt3_acc3hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt3_acc3h 0 8 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt3_acc3simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt3_acc3s 0 8 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt3_acc4hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt3_acc4h 0 8 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt3_acc4simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/nh2ss1e7_n1k_dt4_acc3hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt4_acc3h 0 50 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt4_acc3simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt4_acc3s 0 50 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt4_acc4hard ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt4_acc4h 0 50 5
./a.out d1_const_parallel/nh2ss1e7_n1k_dt4_acc4simple ../../results/PE1dcart_parallel_nh2ss1e7_n1k_dt4_acc4s 0 50 5
#############
## nh=1000
# t/dt=10
./a.out d1_const_parallel/nh3ss1e7_n1k_dt1_acc3hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt1_acc3h 0 1 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt1_acc3simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt1_acc3s 0 1 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt1_acc4hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt1_acc4h 0 1 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt1_acc4simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt1_acc4s 0 1 5
# t/dt=10^2
./a.out d1_const_parallel/nh3ss1e7_n1k_dt2_acc3hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt2_acc3h 0 1 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt2_acc3simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt2_acc3s 0 1 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt2_acc4hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt2_acc4h 0 1 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt2_acc4simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt2_acc4s 0 1 5
# t/dt=10^3
./a.out d1_const_parallel/nh3ss1e7_n1k_dt3_acc3hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt3_acc3h 0 8 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt3_acc3simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt3_acc3s 0 8 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt3_acc4hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt3_acc4h 0 8 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt3_acc4simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt3_acc4s 0 8 5
# t/dt=10^4
./a.out d1_const_parallel/nh3ss1e7_n1k_dt4_acc3hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt4_acc3h 0 50 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt4_acc3simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt4_acc3s 0 50 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt4_acc4hard ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt4_acc4h 0 50 5
./a.out d1_const_parallel/nh3ss1e7_n1k_dt4_acc4simple ../../results/PE1dcart_parallel_nh3ss1e7_n1k_dt4_acc4s 0 50 5
exit
|
/*
*
* Copyright © 2019 <NAME>.
*
* [This program is licensed under the "MIT License"]
* Please see the file LICENSE in the source
* distribution of this software for license terms.
*
*/
import * as faceRecognition from './faceRecognition';
import * as path from 'path';
function getFaceFilesPath(photoDetails) {
const baseDir = path.resolve(__dirname, '../album-faces');
const albumName = photoDetails.album.name;
const faces = photoDetails.faces;
const newFaceObjects = faces.map(face => {
const fileName = face.name;
const filePath = path.resolve(baseDir, albumName, fileName);
return {faceId: face.id, path: filePath};
});
return newFaceObjects;
}
/**
 * Implements the get face descriptions rpc method: computes a face
 * descriptor for every face file of the photo, then returns the photo
 * details augmented with a `faceDescriptors` array.
 */
async function describeFaces(photoDetails) {
  // Describe one face file; the first element of the recognition result
  // is used -- presumably the single/best detection (TODO confirm the
  // faceRecognition.describe contract).
  const describeOne = async (faceFile) => {
    const [detection] = await faceRecognition.describe(faceFile.path);
    return {...faceFile, descriptor: Array.from(detection.descriptor)};
  };
  // All faces are described concurrently, exactly as before (Promise.all).
  const faceDescriptors = await Promise.all(
    getFaceFilesPath(photoDetails).map(describeOne)
  );
  return {...photoDetails, faceDescriptors};
}
export {
describeFaces
} |
package com.emc.mongoose.base.metrics.type;
import com.emc.mongoose.base.metrics.snapshot.HistogramSnapshot;
import com.emc.mongoose.base.metrics.snapshot.TimingMetricSnapshot;
import com.emc.mongoose.base.metrics.snapshot.TimingMetricSnapshotImpl;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
/** @author <NAME>. on 10.10.18 */
/**
 * Long-valued meter tracking count, sum, min, max and mean of observed timing
 * samples, delegating distribution tracking to the supplied histogram meter.
 *
 * <p>Thread-safety: count/sum use {@link LongAdder}; min/max use
 * {@link AtomicLong#accumulateAndGet} so concurrent {@code update()} calls
 * cannot lose an extremum. The previous implementation kept min/max in
 * volatile longs with a check-then-set, which is a lost-update race.
 */
public class TimingMeterImpl implements LongMeter<TimingMetricSnapshot> {

  private final LongMeter<HistogramSnapshot> histogram;
  private final LongAdder count = new LongAdder();
  private final LongAdder sum = new LongAdder();
  // Lock-free running extrema (see class javadoc for why not volatile long).
  private final AtomicLong min = new AtomicLong(Long.MAX_VALUE);
  private final AtomicLong max = new AtomicLong(Long.MIN_VALUE);
  private final String metricName;

  /**
   * @param histogram meter that receives every sample for distribution stats
   * @param metricName name reported in produced snapshots
   */
  public TimingMeterImpl(final LongMeter<HistogramSnapshot> histogram, final String metricName) {
    this.histogram = histogram;
    this.metricName = metricName;
  }

  /** Record a single timing sample (units are the caller's choice). */
  @Override
  public void update(final long value) {
    histogram.update(value);
    count.increment();
    sum.add(value);
    min.accumulateAndGet(value, Math::min);
    max.accumulateAndGet(value, Math::max);
  }

  /** @return a snapshot of the current stats; all-zero when nothing recorded */
  @Override
  public TimingMetricSnapshotImpl snapshot() {
    // Read each accumulator once so sum/count/mean are mutually consistent;
    // the old code re-read count.sum()/sum.sum(), which could disagree if
    // an update() landed between the reads.
    final long n = count.sum();
    if (n == 0) {
      return new TimingMetricSnapshotImpl(0, 0, 0, 0, 0, histogram.snapshot(), metricName);
    }
    final long total = sum.sum();
    return new TimingMetricSnapshotImpl(
        total, n, min.get(), max.get(), ((double) total) / n, histogram.snapshot(), metricName);
  }
}
|
<reponame>chpublichp/masspred<filename>tools/region_filter-0.19/open_file.c
#include "main.h"
_FUNCTION_DECLARATION_BEGIN_
FILE* open_file(char* output_file_prefix, char* output_file_suffix)
_FUNCTION_DECLARATION_END_
{
  /* Open "<prefix>.<suffix>" for appending; on failure, report through the
   * project's ERROR() macro.  The buffer bound equals PATH_MAX, expressed
   * via sizeof so it tracks the declaration. */
  char path[PATH_MAX];
  FILE* fp;

  snprintf(path, sizeof(path), "%s.%s", output_file_prefix, output_file_suffix);
  fp = fopen(path, "a");
  if(fp == NULL)
    ERROR("Cannot create output file \'%s\'", path);
  return fp;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.