text
stringlengths 1
1.05M
|
|---|
def is_palindrome(input_string):
    """Return True when input_string reads the same forwards and backwards."""
    return input_string == input_string[::-1]
|
/********************************************************************************
** Form generated from reading UI file 'LoginDlg.ui'
**
** Created by: Qt User Interface Compiler version 5.6.0
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
#ifndef UI_LOGINDLG_H
#define UI_LOGINDLG_H
#include <QtCore/QVariant>
#include <QtWidgets/QAction>
#include <QtWidgets/QApplication>
#include <QtWidgets/QButtonGroup>
#include <QtWidgets/QDialog>
#include <QtWidgets/QFrame>
#include <QtWidgets/QHBoxLayout>
#include <QtWidgets/QHeaderView>
#include <QtWidgets/QLabel>
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QPushButton>
#include <QtWidgets/QSpacerItem>
#include <QtWidgets/QVBoxLayout>
QT_BEGIN_NAMESPACE
// Auto-generated (uic, Qt 5.6) scaffold for the login dialog: a user-name row,
// a password row (echo mode Password), a horizontal separator, and centered
// OK/Cancel buttons. Do not hand-edit — regenerate from LoginDlg.ui instead.
class Ui_LoginDlg
{
public:
    QVBoxLayout *verticalLayout;        // top-level layout of the dialog
    QHBoxLayout *horizontalLayout_2;    // row: user-name label + edit
    QLabel *label;                      // "user name" label
    QLineEdit *textUser;                // user-name input
    QHBoxLayout *horizontalLayout;      // row: password label + edit
    QLabel *label_2;                    // "password" label
    QLineEdit *textPasswd;              // password input (masked)
    QFrame *line;                       // separator above the button row
    QHBoxLayout *horizontalLayout_3;    // row: spacer + OK + Cancel + spacer
    QSpacerItem *horizontalSpacer;
    QPushButton *btnOk;
    QPushButton *btnCancel;
    QSpacerItem *horizontalSpacer_2;

    // Build the widget tree and layouts onto LoginDlg, then apply translations
    // and auto-connect slots named on_<object>_<signal>().
    void setupUi(QDialog *LoginDlg)
    {
        if (LoginDlg->objectName().isEmpty())
            LoginDlg->setObjectName(QStringLiteral("LoginDlg"));
        LoginDlg->resize(373, 263);
        verticalLayout = new QVBoxLayout(LoginDlg);
        verticalLayout->setSpacing(6);
        verticalLayout->setContentsMargins(11, 11, 11, 11);
        verticalLayout->setObjectName(QStringLiteral("verticalLayout"));
        // --- user-name row ---
        horizontalLayout_2 = new QHBoxLayout();
        horizontalLayout_2->setSpacing(6);
        horizontalLayout_2->setObjectName(QStringLiteral("horizontalLayout_2"));
        label = new QLabel(LoginDlg);
        label->setObjectName(QStringLiteral("label"));
        label->setMinimumSize(QSize(40, 0));
        horizontalLayout_2->addWidget(label);
        textUser = new QLineEdit(LoginDlg);
        textUser->setObjectName(QStringLiteral("textUser"));
        textUser->setMinimumSize(QSize(60, 0));
        textUser->setMaximumSize(QSize(16777215, 16777215));
        horizontalLayout_2->addWidget(textUser);
        verticalLayout->addLayout(horizontalLayout_2);
        // --- password row ---
        horizontalLayout = new QHBoxLayout();
        horizontalLayout->setSpacing(6);
        horizontalLayout->setObjectName(QStringLiteral("horizontalLayout"));
        label_2 = new QLabel(LoginDlg);
        label_2->setObjectName(QStringLiteral("label_2"));
        label_2->setMinimumSize(QSize(40, 0));
        horizontalLayout->addWidget(label_2);
        textPasswd = new QLineEdit(LoginDlg);
        textPasswd->setObjectName(QStringLiteral("textPasswd"));
        textPasswd->setMinimumSize(QSize(60, 0));
        textPasswd->setMaximumSize(QSize(16777215, 16777215));
        textPasswd->setEchoMode(QLineEdit::Password);  // mask typed characters
        horizontalLayout->addWidget(textPasswd);
        verticalLayout->addLayout(horizontalLayout);
        // --- separator ---
        line = new QFrame(LoginDlg);
        line->setObjectName(QStringLiteral("line"));
        line->setFrameShape(QFrame::HLine);
        line->setFrameShadow(QFrame::Sunken);
        verticalLayout->addWidget(line);
        // --- button row, centered by expanding spacers on both sides ---
        horizontalLayout_3 = new QHBoxLayout();
        horizontalLayout_3->setSpacing(6);
        horizontalLayout_3->setObjectName(QStringLiteral("horizontalLayout_3"));
        horizontalSpacer = new QSpacerItem(40, 20, QSizePolicy::Expanding, QSizePolicy::Minimum);
        horizontalLayout_3->addItem(horizontalSpacer);
        btnOk = new QPushButton(LoginDlg);
        btnOk->setObjectName(QStringLiteral("btnOk"));
        horizontalLayout_3->addWidget(btnOk);
        btnCancel = new QPushButton(LoginDlg);
        btnCancel->setObjectName(QStringLiteral("btnCancel"));
        horizontalLayout_3->addWidget(btnCancel);
        horizontalSpacer_2 = new QSpacerItem(40, 20, QSizePolicy::Expanding, QSizePolicy::Minimum);
        horizontalLayout_3->addItem(horizontalSpacer_2);
        verticalLayout->addLayout(horizontalLayout_3);
        retranslateUi(LoginDlg);
        QMetaObject::connectSlotsByName(LoginDlg);
    } // setupUi

    // Install (re-)translated texts; the literals are UTF-8 octal escapes for
    // Chinese captions (user name / password / confirm / cancel).
    void retranslateUi(QDialog *LoginDlg)
    {
        LoginDlg->setWindowTitle(QApplication::translate("LoginDlg", "LoginDlg", 0));
        label->setText(QApplication::translate("LoginDlg", "\347\224\250\346\210\267\345\220\215", 0));
        label_2->setText(QApplication::translate("LoginDlg", "\345\257\206\347\240\201", 0));
        btnOk->setText(QApplication::translate("LoginDlg", "\347\241\256\350\256\244", 0));
        btnCancel->setText(QApplication::translate("LoginDlg", "\345\217\226\346\266\210", 0));
    } // retranslateUi
};
namespace Ui {
class LoginDlg: public Ui_LoginDlg {};
} // namespace Ui
QT_END_NAMESPACE
#endif // UI_LOGINDLG_H
|
# Pretrain DeepCluster v2 (solo-learn) on dataset "$1" with a ResNet-18 encoder:
# SGD + LARS (with gradient clipping), warmup-cosine LR schedule, fp16 precision,
# 3x3000 prototypes, W&B logging and checkpointing.
# NOTE: comments cannot be interleaved below — the command is one backslash-continued line.
python3 ../../../main_pretrain.py \
--dataset $1 \
--encoder resnet18 \
--data_dir ./datasets \
--max_epochs 1000 \
--gpus 0 \
--precision 16 \
--optimizer sgd \
--lars \
--grad_clip_lars \
--eta_lars 0.02 \
--scheduler warmup_cosine \
--lr 0.6 \
--min_lr 0.0006 \
--warmup_start_lr 0.0 \
--warmup_epochs 11 \
--classifier_lr 0.1 \
--weight_decay 1e-6 \
--batch_size 256 \
--num_workers 4 \
--brightness 0.8 \
--contrast 0.8 \
--saturation 0.8 \
--hue 0.2 \
--gaussian_prob 0.0 0.0 \
--name deepclusterv2-$1 \
--project solo-learn \
--entity unitn-mhug \
--wandb \
--save_checkpoint \
--method deepclusterv2 \
--proj_hidden_dim 2048 \
--proj_output_dim 128 \
--num_prototypes 3000 3000 3000
|
#!/bin/sh
# Smoke-test the Alerta Pingdom webhook: send a sample URL-encoded JSON payload
# ("check up" notification) to the webhook endpoint.
# Usage: ./script [endpoint] [api-key]  (defaults: localhost:8080 / demo-key)
ENDPOINT=${1:-http://localhost:8080}
KEY=${2:-demo-key}
# The message parameter decodes to:
# {"check":"803318","checkname":"Alerta API","host":"api.alerta.io","action":"notify_of_close","incidentid":1262,"description":"up"}
curl -H "Authorization: Key ${KEY}" "${ENDPOINT}/webhooks/pingdom?message=%7B%22check%22%3A%20%22803318%22%2C%20%22checkname%22%3A%20%22Alerta%20API%22%2C%20%22host%22%3A%20%22api.alerta.io%22%2C%20%22action%22%3A%20%22notify_of_close%22%2C%20%22incidentid%22%3A%201262%2C%20%22description%22%3A%20%22up%22%7D"
|
#!/bin/bash
# Print the version under test plus toolchain info (uname, compiler version).
version=$(git describe --tags --always --dirty 2>/dev/null)
# Fall back to the makefile VERSION when this is not a git checkout.
# FIX: quote "$version" — an unquoted empty/whitespace expansion makes
# `[ -z ... ]` evaluate the wrong number of arguments.
if [ ! -e ".git" ] || [ -z "$version" ]
then
version=$(grep "^VERSION=" makefile_include.mk | sed "s/.*=//")
fi
echo "Testing version:" "$version"
#grep "VERSION=" makefile | perl -e "@a = split('=', <>); print @a[1];"`
# get uname
echo "uname="$(uname -a)
# get gcc name (honor a caller-supplied $CC, default to gcc)
if [ -z "${CC}" ]
then
CC="gcc"
fi
echo "${CC}="$(${CC} -dumpversion)
echo
# ref: $Format:%D$
# git commit: $Format:%H$
# commit time: $Format:%ai$
|
<reponame>oueya1479/OpenOLAT
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.lecture;
import java.util.Date;
import java.util.List;
import org.olat.core.id.CreateInfo;
import org.olat.core.id.Identity;
import org.olat.core.id.ModifiedInfo;
/**
*
* Initial date: 20 mars 2017<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Roll-call entry of one identity in one lecture block: attended/absent lecture
 * counts and lists, absence handling (authorization, reason, category, notice,
 * supervisor notification) and the appeal workflow (date, status, reasons).
 */
public interface LectureBlockRollCall extends LectureBlockRollCallRef, ModifiedInfo, CreateInfo {

    /** The identity (user) this roll call is recorded for. */
    public Identity getIdentity();

    /** The lecture block this roll call belongs to. */
    public LectureBlock getLectureBlock();

    /** Number of lectures marked absent. */
    public int getLecturesAbsentNumber();

    /** Number of lectures marked attended. */
    public int getLecturesAttendedNumber();

    /** Indexes of the attended lectures. */
    public List<Integer> getLecturesAttendedList();

    /** Indexes of the absent lectures. */
    public List<Integer> getLecturesAbsentList();

    /** Whether the absence is authorized; Boolean (nullable) — null presumably means "not decided yet" (TODO confirm). */
    public Boolean getAbsenceAuthorized();

    public void setAbsenceAuthorized(Boolean absenceAuthorized);

    /** Free-text reason given for the absence. */
    public String getAbsenceReason();

    public void setAbsenceReason(String absenceReason);

    /** Free-text comment on the roll call. */
    public String getComment();

    public void setComment(String comment);

    // --- appeal workflow ---

    /** Date the appeal was filed. */
    public Date getAppealDate();

    public void setAppealDate(Date date);

    public LectureBlockAppealStatus getAppealStatus();

    public void setAppealStatus(LectureBlockAppealStatus status);

    /** Reason recorded for the current appeal status. */
    public String getAppealStatusReason();

    public void setAppealStatusReason(String statusReason);

    /** Reason given by the appellant. */
    public String getAppealReason();

    public void setAppealReason(String reason);

    /** When the absence supervisor was notified (if at all). */
    public Date getAbsenceSupervisorNotificationDate();

    public void setAbsenceSupervisorNotificationDate(Date absenceSupervisorNotificationDate);

    /** Category assigned to the absence. */
    public AbsenceCategory getAbsenceCategory();

    public void setAbsenceCategory(AbsenceCategory category);

    /** The absence notice covering this roll call, if any. */
    public AbsenceNotice getAbsenceNotice();
}
|
// Cypress e2e suite: login/logout flows for a read-access HSL ID user.
// Relies on custom commands cy.getTestElement and cy.hslLoginReadAccess.
describe('Authentication tests - read access user', () => {
    it('Cannot see unauthorized elements when not logged in', () => {
        cy.visit('/');
        // Neither the user info box nor the line search may render anonymously.
        cy.getTestElement('authInfo').should('not.exist');
        cy.getTestElement('lineSearch').should('not.exist');
    });
    it('Can log in with HSL ID', () => {
        cy.hslLoginReadAccess();
        cy.getTestElement('authInfo').should('exist');
        // 'Selauskäyttäjä' = the read-only user's display name.
        cy.getTestElement('authInfo').contains('Selauskäyttäjä');
        cy.getTestElement('lineSearch').should('exist');
    });
    it('Can log out', () => {
        cy.hslLoginReadAccess();
        cy.getTestElement('logoutButton')
            .should('exist')
            .click();
        // After logout the authorized elements disappear again.
        cy.getTestElement('authInfo').should('not.exist');
        cy.getTestElement('lineSearch').should('not.exist');
    });
});
// Cypress e2e suite: same login/logout flows as above, but for a write-access
// user (cy.hslLoginWriteAccess; display name 'Pääkäyttäjä' = administrator).
describe('Authentication tests - write access user', () => {
    it('Cannot see unauthorized elements when not logged in', () => {
        cy.visit('/');
        cy.getTestElement('authInfo').should('not.exist');
        cy.getTestElement('lineSearch').should('not.exist');
    });
    it('Can log in with HSL ID', () => {
        cy.hslLoginWriteAccess();
        cy.getTestElement('authInfo').should('exist');
        cy.getTestElement('authInfo').contains('Pääkäyttäjä');
        cy.getTestElement('lineSearch').should('exist');
    });
    it('Can log out', () => {
        cy.hslLoginWriteAccess();
        cy.getTestElement('logoutButton')
            .should('exist')
            .click();
        cy.getTestElement('authInfo').should('not.exist');
        cy.getTestElement('lineSearch').should('not.exist');
    });
});
|
import React from 'react';
import {
StyleSheet,
View,
Text,
FlatList
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import CollectionOverView from './CollectionComponent';
const CategoriesComponent=({navigation})=>{
return(
<View>
<SafeAreaView>
<FlatList
data={['CollectionOverView']}
keyExtractor={data=>data}
renderItem={({item,index})=>{
switch (index){
case 0:
return (
<View style={styles.Collection}>
<CollectionOverView />
</View>
);
return 'Nothing is present';
}
}
}
/>
</SafeAreaView>
</View>
);
}
const styles=StyleSheet.create({
Collection:{
top:-164
}
})
export default CategoriesComponent;
|
/// Immutable value holder describing a restaurant.
/// All stored properties are `let`, so an instance cannot change after init.
class Restaurant {
    let name: String       // display name
    let address: String    // street address
    let cuisine: String    // cuisine type, e.g. "Italian"
    let numTables: Int     // seating capacity in tables

    /// Memberwise initializer; assigns every stored property.
    init(name: String, address: String, cuisine: String, numTables: Int) {
        self.name = name
        self.address = address
        self.cuisine = cuisine
        self.numTables = numTables
    }
}
|
<reponame>Delicode/astra<filename>src/plugins/orbbec_hand/hnd_segmentation.hpp
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Orbbec 3D
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#ifndef HND_SEGMENTATION_H
#define HND_SEGMENTATION_H
#include "hnd_scaling_coordinate_mapper.hpp"
#include "hnd_point.hpp"
// Classic min/max macros, guarded so prior definitions (e.g. from system
// headers) win. NOTE(review): these double-evaluate their arguments; prefer
// std::min/std::max in new code.
#ifndef MIN
#define MIN(a,b) (((a)<(b))?(a):(b))
#endif
#ifndef MAX
#define MAX(a,b) (((a)>(b))?(a):(b))
#endif

namespace astra { namespace hand {

    struct tracking_data;

    // Result of building the foreground test-pass matrix.
    enum class foreground_status
    {
        empty = 0,      // no foreground points found
        has_points = 1  // at least one candidate point available
    };

    // Declarations for the hand-segmentation pipeline: point-area estimation,
    // candidate-point validation tests, foreground search, edge distances and
    // circumference-based tests. Definitions live in the corresponding .cpp.
    namespace segmentation {
        // Sentinel returned when no valid point exists.
        // NOTE(review): a non-const namespace-scope `static` in a header gives
        // every translation unit its own mutable copy — consider
        // `const`/`inline` if it is never meant to be modified; confirm usage.
        static Point2i INVALID_POINT(-1, -1);

        // Surface area (world units) attributed to a single depth pixel.
        float get_point_area(tracking_matrices& matrices,
                             area_test_settings& settings,
                             const Point2i& point);

        // Same as get_point_area but reads a precomputed integral-area image.
        float get_point_area_integral(tracking_matrices& matrices,
                                      BitmapF& integralArea,
                                      area_test_settings& settings,
                                      const Point2i& point);

        // True when the target point's depth lies within the tracking range.
        bool test_point_in_range(tracking_matrices& matrices,
                                 const Point2i& targetPoint,
                                 test_behavior outputLog);

        // Validate a candidate by its surrounding surface area.
        bool test_point_area(tracking_matrices& matrices,
                             area_test_settings& settings,
                             const Point2i& targetPoint,
                             test_phase phase,
                             test_behavior outputLog);

        // Integral-image variant of test_point_area.
        bool test_point_area_integral(tracking_matrices& matrices,
                                      BitmapF& integralArea,
                                      area_test_settings& settings,
                                      const Point2i& targetPoint,
                                      test_phase phase,
                                      test_behavior outputLog);

        // Validate a candidate by the fraction of foreground on a circle
        // around it.
        bool test_foreground_radius_percentage(tracking_matrices& matrices,
                                               circumference_test_settings& settings,
                                               const Point2i& targetPoint,
                                               test_phase phase,
                                               test_behavior outputLog);

        // Build the test-pass matrix from the current foreground.
        foreground_status create_test_pass_from_foreground(tracking_data& data);

        // Scan for the next unsearched foreground pixel; advances
        // nextSearchStart and reports the found position.
        bool find_next_velocity_seed_pixel(BitmapMask& foregroundMatrix,
                                           BitmapMask& searchedMatrix,
                                           Point2i& foregroundPosition,
                                           Point2i& nextSearchStart);

        // Distance-to-edge transform over the segmentation, capped at
        // maxEdgeDistance.
        void calculate_edge_distance(BitmapMask& segmentationMatrix,
                                     BitmapF& areaSqrtMatrix,
                                     BitmapF& edgeDistanceMatrix,
                                     const float maxEdgeDistance);

        // Sum of segmented surface area inside a world-space neighborhood of
        // `center` (bandwidth in mm-like units; see mapper for scaling).
        float count_neighborhood_area(BitmapMask& matSegmentation,
                                      BitmapF& matDepth,
                                      BitmapF& matArea,
                                      const Point2i& center,
                                      const float bandwidth,
                                      const float bandwidthDepth,
                                      const scaling_coordinate_mapper& mapper);

        // Build (and cache in matrices) the integral image of per-pixel area.
        BitmapF& calculate_integral_area(tracking_matrices& matrices);

        // Integral-image variant of count_neighborhood_area.
        float count_neighborhood_area_integral(BitmapF& matDepth,
                                               BitmapF& matAreaIntegral,
                                               const Point2i& center,
                                               const float bandwidth,
                                               const scaling_coordinate_mapper& mapper);

        // Core tracking steps: full-frame track and seeded re-track.
        Point2i track_point_impl(tracking_data& data);
        Point2i track_point_from_seed(tracking_data& data);

        // Sample points on a circle of `radius` (world units) around center.
        void get_circumference_points(BitmapF& matDepth,
                                      const Point2i& center,
                                      const float& radius,
                                      const scaling_coordinate_mapper& mapper,
                                      std::vector<astra::Vector2i>& points);

        // Longest contiguous run of segmented circle points, as a percentage.
        float get_max_sequential_circumference_percentage(BitmapF& matDepth,
                                                          BitmapMask& matSegmentation,
                                                          const Point2i& center,
                                                          const float& radius,
                                                          const scaling_coordinate_mapper& mapper,
                                                          std::vector<astra::Vector2i>& points);

        // Percentage of segmentation edges that are "natural" (depth edges).
        float get_percent_natural_edges(BitmapF& matDepth,
                                        BitmapMask& matSegmentation,
                                        const Point2i& center,
                                        const float bandwidth,
                                        const scaling_coordinate_mapper& mapper);

        // Validate a candidate by its natural-edge percentage.
        bool test_natural_edges(tracking_matrices& matrices,
                                natural_edge_test_settings& settings,
                                const Point2i& targetPoint,
                                test_phase phase,
                                test_behavior outputLog);
    }
}}
#endif // HND_SEGMENTATION_H
|
# Resume training run "w" (local-enhancer generator, no instance maps) from
# epoch 200: batch 1, images loaded at 256 and cropped to 128, 8 loader threads.
python3 train.py --name w --checkpoints_dir ./checkpoints --batchSize 1 --loadSize 256 --fineSize 128 --label_nc 0 --dataroot datasets/w/ --nThreads 8 --no_instance --save_latest_freq 200 --save_epoch_freq 10 --continue_train --which_epoch 200 --netG local
|
#!/bin/bash
# Generate Phone<CC>FieldFormatter.java classes from the cleave-phone module
# names listed in codes.txt (lines matching cleave-phone.<cc>.js).
input="codes.txt"
regex="(cleave-phone)\.([a-z\-]+)\.js"
while IFS= read -r line
do
if [[ "$line" =~ $regex ]]
then
# Second capture group is the lower-case country code.
code="${BASH_REMATCH[2]}"
# FIX: replaced `cat file | sed | sed` with a single quoted sed invocation
# (no useless cat, one process, expressions safe against word splitting).
# ${code^^} upper-cases the code (requires bash 4+).
sed -e "s/#codeuc#/${code^^}/g" -e "s/#codelc#/${code}/g" PhoneFieldFormatterClassTemplate.txt > "src/main/java/org/vaadin/textfieldformatter/phone/Phone${code^^}FieldFormatter.java"
fi
done < "$input"
|
<filename>js_events/migrations/0002_auto_20190213_0944.py<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-02-13 09:44
from __future__ import unicode_literals
from django.db import migrations
import sortedm2m.fields
# Auto-generated Django migration (1.11.18): drops Event.related and replaces it
# with a sorted many-to-many Event.services pointing at js_services.Service.
class Migration(migrations.Migration):

    # Must run after the initial js_events schema.
    dependencies = [
        ('js_events', '0001_initial'),
    ]

    operations = [
        # Remove the old free-form relation field.
        migrations.RemoveField(
            model_name='event',
            name='related',
        ),
        # Add the ordered M2M; blank=True keeps the field optional in forms.
        migrations.AddField(
            model_name='event',
            name='services',
            field=sortedm2m.fields.SortedManyToManyField(blank=True, help_text=None, to='js_services.Service', verbose_name='services'),
        ),
    ]
|
#!/bin/bash -e
# Prepare config volumes/containers (helpers from ../util.sh) and run the
# goss test suite in this directory against the Cassandra image.
testScriptDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# FIX: quote the cd target and volume/image variables so paths or names
# containing spaces do not word-split.
cd "${testScriptDir}"
source ../util.sh
createDgossVolumes
# Add test file to user config map
createInitConfigContainer
createPreRunConfigMapFile
createCassandraBootstrapContainer
#createAndCheckCassandraContainer #comment to go quicker
# check using test specific `goss.yaml`
GOSS_SLEEP=0 dgoss run \
-v "${BOOTSTRAP_VOLUME}":/etc/cassandra \
-v "${EXTRA_LIB_VOLUME}":/extra-lib \
"${CASSANDRA_IMAGE}"
|
#!/bin/bash
# exit with nonzero exit code if anything fails
set -e
# Fail the build when any spec file still contains a focused test (".only"):
# grep exits 0 on a match, and the leading "!" inverts that into a failure.
! grep -r --include "*.spec.*" "\.only" src
|
package com.ruoyi.project.system.hisZytf.controller;
import java.util.List;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.ruoyi.framework.aspectj.lang.annotation.Log;
import com.ruoyi.framework.aspectj.lang.enums.BusinessType;
import com.ruoyi.project.system.hisZytf.domain.HisZytf;
import com.ruoyi.project.system.hisZytf.service.IHisZytfService;
import com.ruoyi.framework.web.controller.BaseController;
import com.ruoyi.framework.web.page.TableDataInfo;
import com.ruoyi.framework.web.domain.AjaxResult;
import com.ruoyi.common.utils.poi.ExcelUtil;
/**
 * CRUD + export controller for inpatient refund records (HisZytf).
 *
 * @author panda
 * @date 2019-05-30
 */
@Controller
@RequestMapping("/system/hisZytf")
public class HisZytfController extends BaseController
{
    // View-template prefix under the templates root.
    private String prefix = "system/hisZytf";

    @Autowired
    private IHisZytfService hisZytfService;

    /** Render the list page. */
    @RequiresPermissions("system:hisZytf:view")
    @GetMapping()
    public String hisZytf()
    {
        return prefix + "/hisZytf";
    }

    /**
     * Query the inpatient refund list (paged via startPage()).
     */
    @RequiresPermissions("system:hisZytf:list")
    @PostMapping("/list")
    @ResponseBody
    public TableDataInfo list(HisZytf hisZytf)
    {
        startPage();
        List<HisZytf> list = hisZytfService.selectHisZytfList(hisZytf);
        return getDataTable(list);
    }

    /**
     * Export the (filtered) refund list as an Excel file.
     */
    @RequiresPermissions("system:hisZytf:export")
    @PostMapping("/export")
    @ResponseBody
    public AjaxResult export(HisZytf hisZytf)
    {
        List<HisZytf> list = hisZytfService.selectHisZytfList(hisZytf);
        ExcelUtil<HisZytf> util = new ExcelUtil<HisZytf>(HisZytf.class);
        return util.exportExcel(list, "hisZytf");
    }

    /**
     * Render the "add" form.
     * NOTE(review): unlike the other endpoints this GET carries no
     * @RequiresPermissions — confirm whether that is intentional.
     */
    @GetMapping("/add")
    public String add()
    {
        return prefix + "/add";
    }

    /**
     * Persist a new refund record.
     */
    @RequiresPermissions("system:hisZytf:add")
    @Log(title = "住院退费数据", businessType = BusinessType.INSERT)
    @PostMapping("/add")
    @ResponseBody
    public AjaxResult addSave(HisZytf hisZytf)
    {
        return toAjax(hisZytfService.insertHisZytf(hisZytf));
    }

    /**
     * Render the "edit" form for one record.
     * NOTE(review): no @RequiresPermissions on this GET either — verify.
     */
    @GetMapping("/edit/{id}")
    public String edit(@PathVariable("id") Integer id, ModelMap mmap)
    {
        HisZytf hisZytf = hisZytfService.selectHisZytfById(id);
        mmap.put("hisZytf", hisZytf);
        return prefix + "/edit";
    }

    /**
     * Persist changes to an existing refund record.
     */
    @RequiresPermissions("system:hisZytf:edit")
    @Log(title = "住院退费数据", businessType = BusinessType.UPDATE)
    @PostMapping("/edit")
    @ResponseBody
    public AjaxResult editSave(HisZytf hisZytf)
    {
        return toAjax(hisZytfService.updateHisZytf(hisZytf));
    }

    /**
     * Delete refund records by a comma-separated id list.
     */
    @RequiresPermissions("system:hisZytf:remove")
    @Log(title = "住院退费数据", businessType = BusinessType.DELETE)
    @PostMapping( "/remove")
    @ResponseBody
    public AjaxResult remove(String ids)
    {
        return toAjax(hisZytfService.deleteHisZytfByIds(ids));
    }
}
|
//==========
// Debug helper: push every image on the page (class .gimg) to the server.
// NOTE(review): calls SendImageToServer with only the src — id/id_data arrive
// undefined; presumably acceptable for this debug path, verify.
function DendImageTest() {
    $('.gimg').each(
        function (indx, element) {
            var src = $(element).attr('src');
            console.log(src);
            SendImageToServer(src);
        });
}

// Upload one local image file via Cordova FileTransfer to the saveimage
// endpoint, attaching stored credentials and local record ids as POST params.
// Success/failure are handled by SendImageToServerSuccess/-Fail below.
function SendImageToServer(imagefile, id, id_data) {
    var ft = new FileTransfer();
    var options = new FileUploadOptions();
    options.fileKey = "vImage";
    // Use the basename of the local path as the uploaded file name.
    options.fileName = imagefile.substr(imagefile.lastIndexOf('/') + 1);
    // console.log(options.fileName);
    options.mimeType = "image/jpeg";
    var params = new Object();
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    params.email = email;
    params.hash = hash;
    params.id_data = id_data;
    params.id = id;
    options.params = params;
    options.chunkedMode = false;
    ft.upload(imagefile, serverName + '?act=saveimage&mode=upload', SendImageToServerSuccess, SendImageToServerFail, options);
}
/*
 *
 * +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 * +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 */
// Push every AUDIO row not yet synced (sync != 1) to the server.
function SynSendAudioToServer() {
    console.log('............................................SynSendImagesToServer....................................');
    db.getDataFromSQL("SELECT * FROM AUDIO WHERE sync!=1",
        function (arr) {
            if (arr.length > 0) {
                for (var i = 0; i < arr.length; i++) {
                    var obj = arr.item(i);
                    console.log(JSON.stringify(obj));
                    SendAudioToServer(obj.filename, obj.id, obj.id_data);
                }
            } else {
                // "No records to synchronize"
                console.log('Нет записей для синхронизации!');
                // alert("Нет записей для синхронизации!");
            }
        });
}

// Upload one audio file to the saveaudio endpoint (same FileTransfer setup as
// images); on server OK, marks the local AUDIO row as synced.
function SendAudioToServer(imagefile, id, id_data) {
    var ft = new FileTransfer();
    var options = new FileUploadOptions();
    options.fileKey = "vImage";
    options.fileName = imagefile.substr(imagefile.lastIndexOf('/') + 1);
    var params = new Object();
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    params.email = email;
    params.hash = hash;
    params.id_data = id_data;
    params.id = id;
    options.params = params;
    options.chunkedMode = false;
    ft.upload(imagefile, serverName + '?act=saveaudio&mode=upload',
        function (data) {
            console.log(JSON.stringify(data.response));
            var json = JSON.parse(data.response);
            console.log(JSON.stringify(json));
            if (json.code == 'OK') {
                // Server echoes the record id in id2; flip its sync flag.
                var UpdatedID = json.id2;
                db.updateData("AUDIO", "id=" + UpdatedID, ['sync'], [1], function (row) {})
                console.log('Успешно обновлено id=:' + UpdatedID);
            } else {
                console.log('ERROR 46');
            }
        },
        function () {},
        options);
}

// FileTransfer success callback for image uploads: on server OK, marks the
// matching IMAGES row (id2 from the response) as synced.
function SendImageToServerSuccess(data) {
    console.log(JSON.stringify(data.response));
    var json = JSON.parse(data.response);
    console.log(JSON.stringify(json));
    if (json.code == 'OK') {
        var UpdatedID = json.id2;
        db.updateData("IMAGES", "id=" + UpdatedID, ['sync'], [1], function (row) {})
        console.log('Успешно обновлено id=:' + UpdatedID);
    } else {
        console.log('ERROR 46');
    }
}

// FileTransfer error callback for image uploads: log only, no retry.
function SendImageToServerFail(data) {
    console.log('SendImageToServerFail...');
    console.log(JSON.stringify(data));
}
/*
 * Sync entry point.
 * Algorithm:
 *  - push pending images and audio
 *  - re-authenticate with the stored email/hash
 *  - on success, push pending data rows (SynSendDataToServer)
 */
function SynStart() {
    console.log('SynStart...');
    SynSendImagesToServer();
    SynSendAudioToServer();
    // "is_updated" is only logged; the early return remains commented out.
    if (window.localStorage.getItem("is_updated") == 1) {
        console.log('Обновлен...');
        // return false;
    }
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    console.log(email);
    console.log(hash);
    $.ajax({type: 'POST', data: 'hash=' + hash + '&email=' + email,
        url: serverName + '?act=auth',
        success: function (data) {
            console.log(data);
            try
            {
                var json = JSON.parse(data);
                if (json.code == 'ERROR') {
                    // Stored credentials rejected — wipe them so the user
                    // re-authenticates.
                    // alert2(json.message, {theme: 'red'});
                    window.localStorage.setItem("user_hash", "");
                    window.localStorage.setItem("user_email", "");
                }
                if (json.code == 'OK') {
                    SynSendDataToServer();
                }
            } catch (e) {
                // "JSON error" — server returned something unparsable.
                console.log('Ошибка JSON');
            }
        },
        error: function () {
            // $('.server-content').html('<p class="error">Ой.Сервер не отвечает... (@_@)</p>');
        }
    });
}
/*
 * Push pending DATA rows (flag=1, sync!=1) to the server in one POST,
 * then mark each id the server acknowledges as synced.
 */
function SynSendDataToServer() {
    console.log('SynSendDataToServer...');
    var Ngood = 0;
    // var timeStamp = Math.floor(Date.now());
    db.getDataFromSQL("SELECT * FROM DATA WHERE flag=1 AND sync!=1",
        function (arr) {
            var data = '';
            // alert("SELECT * FROM GOOD" + sql);
            if (arr.length > 0) {
                // Serialize rows as data[i][field]=value pairs.
                // NOTE(review): values are concatenated without
                // encodeURIComponent — '&'/'=' inside text would corrupt the
                // payload; confirm the server/content tolerates this.
                for (var i = 0; i < arr.length; i++) {
                    var obj = arr.item(i);
                    Ngood++;
                    data += '&data[' + i + '][id]=' + obj.id +
                        '&data[' + i + '][flag]=' + obj.flag +
                        '&data[' + i + '][text]=' + obj.text +
                        '&data[' + i + '][created_time]=' + obj.created_time +
                        '&data[' + i + '][emails]=' + obj.emails +
                        '&data[' + i + '][future_time]=' + obj.future_time;
                }
                console.log("Ngood=" + Ngood);
                var email = window.localStorage.getItem("user_email");
                var hash = window.localStorage.getItem("user_hash");
                $.ajax({type: 'POST',
                    data: 'hash=' + hash + '&email=' + email + data,
                    url: serverName + '?act=insert',
                    success: function (data) {
                        console.log("FROM SERVER^");
                        console.log(data);
                        try
                        {
                            var json = JSON.parse(data);
                            if (json.code == 'OK') {
                                // window.localStorage.setItem("is_updated", 1);
                                // json.arr is a JSON-encoded list of ids the
                                // server accepted; mark each as synced.
                                var arrUpdated = JSON.parse(json.arr);
                                // console.log(arrUpdated);
                                for (var i = 0; i < arrUpdated.length; i++) {
                                    //console.log(arrUpdated[i]);
                                    db.updateData("DATA", "id=" + arrUpdated[i], ['sync'], [1], function (row) {})
                                }
                                console.log('Успешно обновлено записей:' + arrUpdated.length);
                                // SyncGetData();
                                // alert('На сервере есть записи. Необходимо сначала их загрузить. Нажмите на кнопку "загрузить данные из сервера"');
                                // alert(-1);
                            } else {
                            }
                        } catch (e)
                        {
                            //alert2(json.message,{theme:'red'});
                            console.log('Ошибка JSON');
                        }
                        // setTimeout(function(){$('#sync-log').html('')}, 4000);
                    },
                    error: function () {
                        alert2('Ошибка синхронизации.Сервер забил болт...', {theme: 'red'});
                    }
                });
            } else {
                // "No records to synchronize"
                console.log('Нет записей для синхронизации!');
                //alert("Нет записей для синхронизации!");
            }
        });
}

// Push every IMAGES row not yet synced (sync != 1) to the server.
function SynSendImagesToServer() {
    console.log('............................................SynSendImagesToServer....................................');
    db.getDataFromSQL("SELECT * FROM IMAGES WHERE sync!=1",
        function (arr) {
            if (arr.length > 0) {
                for (var i = 0; i < arr.length; i++) {
                    var obj = arr.item(i);
                    console.log(JSON.stringify(obj));
                    SendImageToServer(obj.filename, obj.id, obj.id_data);
                }
            } else {
                console.log('Нет записей для синхронизации!');
                // alert("Нет записей для синхронизации!");
            }
        });
}
// Populate the #profile page before it is shown: render the stored email (or a
// "no sync" placeholder with an add-email link) and toggle the full-version
// promo block.
$(document).on("pagebeforeshow", "#profile", function (event) {
    //$('#sync-log').html('...');
    var email = window.localStorage.getItem("user_email");
    if (email == undefined || email == '') {
        // No account configured: hide promo, show the "no sync" alert and an
        // add-email action. The placeholder text is user-visible markup.
        $('.if_email_y_full_no').hide();
        $('#no-sync-alert').show();
        email = '<span class="profile-user-email-none" >нет синхронизации</span>';
        //user_email_edit
        $('.user_email_edit').html('<a class="myhref" onclick="SetUserEmail()">Добавить почту</a>');
    } else {
        $('#no-sync-alert').hide();
        $('.user_email_edit').html('<a class="myhref" onclick="SyncChangeEmail()">Изменить почту</a>');
        // Show the upgrade block only for non-full-version users.
        if (window.localStorage.getItem("full_version", 0) == '1') {
            $('.if_email_y_full_no').hide();
        } else {
            $('.if_email_y_full_no').show();
        }
    }
    // $('#sync-log').append(email);
    $('.user_email').html(email);
});

// Open the #user page with the email-entry section visible.
function SetUserEmail() {
    var t = '';  // NOTE(review): unused local, presumably leftover
    $('#user .user-set-email').show();
    $.mobile.changePage("#user", {transition: "slideup"})
}

// Tell the server to set flag=2 on one record (fire-and-forget; response only
// logged).
function SyncSetFlag2(idgood) {
    console.log('SyncSetFlag2 ' + idgood);
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    $.ajax({type: 'POST', data: 'hash=' + hash + '&email=' + email,
        url: serverName + '?act=setflag2&id=' + idgood,
        success: function (data) {
            console.log(data);
        },
        error: function () {}
    });
}
// Ask the server whether this account has the paid (full) version; on OK,
// persist the flag locally and refresh purchase/background state.
function SyncCheckFullVersion() {
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    if (email == undefined) {
        // Prompt the user to set an email first (user-facing Russian text).
        alert2('<span style="font-size:14px;">Для начала укажите email в разделе СИНХРОНИЗАЦИЯ</span>', {theme: 'red'});
        return;
    }
    $.ajax({type: 'POST', data: 'hash=' + hash + '&email=' + email,
        url: serverName + '?act=checkfullversion',
        success: function (data) {
            console.log(data);
            try
            {
                var json = JSON.parse(data);
                if (json.code == 'ERROR') {
                    alert2('Мне очень жаль. У вас обычная версия!', {theme: 'red'});
                }
                if (json.code == 'OK') {
                    alert2('Шеф,все ок - у вас платная версия!', {theme: 'green'});
                    window.localStorage.setItem("full_version", 1);
                    Buy_CheckFullVersion();
                    WorkInBackground();
                }
            } catch (e) {
                alert2('Ошибка:ERROR_SC_FV1 Сервер забил болт...', {theme: 'red'});
            }
        },
        error: function () {
            alert2('Ошибка:ERROR_SC_FV2 Сервер забил болт...', {theme: 'red'});
        }
    });
}

// Mark the account as full version on the server (result only logged).
function SyncSetFullVersion() {
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    $.ajax({type: 'POST', data: 'hash=' + hash + '&email=' + email,
        url: serverName + '?act=setfullversion',
        success: function (data) {
            console.log(data);
            try
            {
                var json = JSON.parse(data);
                if (json.code == 'ERROR') {
                    console.log('SyncSetFullVersion ERROR');
                }
                if (json.code == 'OK') {
                    console.log('SyncSetFullVersion OK');
                }
            } catch (e) {
                console.log('Ошибка JSON');
            }
        },
        error: function () {
            // $('.server-content').html('<p class="error">Ой.Сервер не отвечает... (@_@)</p>');
        }
    });
}
// Fetch all data from the server and store it on the phone; per the original
// note, a record with a clashing id is moved to the end locally.
// Spins the .SyncGetData button while the request is in flight.
function SyncGetData(callback) {
    console.log('SyncGetData');
    var email = window.localStorage.getItem("user_email");
    var hash = window.localStorage.getItem("user_hash");
    $('.SyncGetData').addClass('loading');
    $.ajax({type: 'POST', data: 'hash=' + hash + '&email=' + email,
        url: serverName + '/getdata',
        success: function (data) {
            $('.SyncGetData').removeClass('loading');
            // callback(data);
            //$('#btngetdatas').hide(500);
            // NOTE(review): the callback parameter is ignored; the response is
            // handed straight to SyncSaveServerData.
            SyncSaveServerData(data);
        },
        error: function () {
            $('.SyncGetData').removeClass('loading');
            $('.server-content').html('<p class="error">Ой.Сервер не отвечает... (@_@)</p>');
        }
    });
}
var rTitle = '';

/**
 * Compare two strings ignoring the punctuation characters /.,!?;()'" ,
 * all whitespace, and letter case.
 *
 * FIX: removed the redundant `(cond) ? true : false` and dead commented-out
 * normalization attempts; the two replace passes (punctuation+spaces, then
 * any remaining whitespace) are kept exactly as before, via one helper.
 *
 * @param {string} str1 first string
 * @param {string} str2 second string
 * @returns {boolean} true when the normalized strings are equal
 */
function CompStr(str1, str2)
{
    // Strip punctuation/spaces, then any remaining whitespace (tabs, newlines).
    var normalize = function (s) {
        return s.replace(/[/.,!?;()'" ]*/g, '').replace(/\s+/g, '');
    };
    return normalize(str1).toUpperCase() === normalize(str2).toUpperCase();
}
// Show only the auth section of the #user page and navigate to it.
function SyncAuthPageOpen() {
    $('#user section').hide();
    $('#user section.auth').show();
    $.mobile.changePage("#user", {transition: "slideup"});
}

// Log in with the email shown in #authemail and the entered password; on OK,
// store credentials in localStorage, go to #profile and pull server data.
function SyncAuth() {
    // NOTE(review): email is read with .text() (element content), not .val() —
    // #authemail is presumably a span filled elsewhere; verify.
    var email = $('#authemail').text();
    var pass = $('#auth_password').val();
    console.log(pass);
    console.log(email);
    if (email.length < 2) {
        alert2('Некорректный email... (@_@)', {theme: 'red'});
        return;
    }
    if (pass.length < 4) {
        alert2('Некорректный пароль... (@_@)', {theme: 'red'});
        return;
    }
    $('.SyncAuth').addClass('loading');
    $.ajax({
        type: 'POST',
        data: 'email=' + email + '&pass=' + pass + '',
        url: serverName + '?act=auth',
        success: function (data) {
            console.log(data);
            $('.SyncAuth').removeClass('loading');
            var json = JSON.parse(data);
            if (json.code == 'ERROR') {
                alert2(json.message, {theme: 'red'});
            }
            if (json.code == 'OK') {
                window.localStorage.setItem("user_email", email);
                window.localStorage.setItem("user_hash", json.hash);
                alert2('Ура! Вы авторизованы!', {theme: 'green'});
                $.mobile.navigate("#profile");
                // After a successful login, pull fresh data from the server.
                SyncUpdateDataFormServer();
            }
        },
        error: function () {
            $('.SyncAuth').removeClass('loading');
        }
    });
}
/**
 * Register the entered email with the server using a random throwaway
 * password. Handles: duplicate email (alert), previously-registered email
 * (offers the recovery/auth page), and success (stores credentials and
 * navigates to #profile).
 *
 * FIX: the source contained the corrupted token "<PASSWORD>.random()" — a
 * redaction artifact that is a syntax error; restored to Math.random().
 */
function SyncRegistration() {
    var email = $('#emailreg').val();
    if (!check_email(email)) {
        return;
    }
    var pass = Math.random();
    $('.SyncRegistration').addClass('loading');
    // return;
    // Diagnostic ping (?act=test) — result only logged.
    $.ajax({
        type: 'POST',
        crossDomain: true,
        // headers: { 'Access-Control-Allow-Origin': serverName },
        data: 'email=' + email + '&pass=' + pass + '',
        url: serverName + '?act=test',
        success: function (data) {
            console.log(data);
            log(JSON.stringify(data));
        },
        error: function (e) {
            console.log(e);
            log(e);
        }
    });
    // Actual registration request.
    $.ajax({
        type: 'POST',
        data: 'email=' + email + '&pass=' + pass + '',
        url: serverName + '?act=registration',
        success: function (data) {
            $('.SyncRegistration').removeClass('loading');
            console.log(data);
            var json = JSON.parse(data);
            if (json.code == 0) {
                alert('Такой email уже есть в системе. Авторизуйтесь или восстановите пароль!');
                $('#sync-reg-log').html('<p class="error"><b>' + json.message + '</p>');
            }
            // Email already registered: offer the recovery/auth page instead.
            if (json.code == 'EMAIL_EXIST') {
                $('#authemail').html(email);
                alert2('Ваша почта ранее была зарегистрирована. Для восстановления доступа нажмите на кнопку: <br>\n\
<button onclick="SyncAuthPageOpen()">ВПЕРЕД!</button>')
                //SyncAuthPageOpen()
            }
            if (json.code == 1) {
                window.localStorage.setItem("user_email", email);
                window.localStorage.setItem("user_hash", json.hash);
                alert2('Ура! Вы зарегистрированы !', {theme: 'green'});
                $.mobile.navigate("#profile");
            }
        },
        error: function (e) {
            console.log(e);
            $('.SyncRegistration').removeClass('loading');
            alert2('Сервер не доступен... попробуйте чуть позже! Уверен, чуваки там уже начали отладку)')
        }
    });
}
// Changing the account e-mail is not implemented yet; just tell the user.
function SyncChangeEmail() {
    var message = 'Функция не доступна :(';
    alert2(message, {theme: 'black'});
}
/*
 * Sends a password-reset request for the e-mail the user entered
 * (see SyncGetPass below).
 * @returns {undefined}
 */
// Request a password reset for the address typed into #email_getpass.
// Shows a 'loading' state on .SyncGetPass while the request is in flight.
function SyncGetPass() {
    var email = $('#email_getpass').val();
    if (email.length < 2) {
        alert('Некорректный email... (@_@)');
        return;
    }
    var spinner = $('.SyncGetPass');
    spinner.addClass('loading');
    $.ajax({
        type: 'POST',
        data: 'email=' + email,
        url: serverName + '/getpass',
        success: function () {
            alert('Новый пароль отправлен на вашу почту!');
            spinner.removeClass('loading');
        },
        error: function () {
            $('#sync-log').html('<p class="error">Ошибка.Сервер не отвечает... (@_@)</p>');
            spinner.removeClass('loading');
        }
    });
}
// Dispatcher for sync-related UI commands. Only 'reg_ok' is recognised and
// it is currently a no-op placeholder; every command returns undefined.
function SyncSection(cmd) {
    if (cmd === 'reg_ok') {
        // nothing to do yet
    }
}
//БЕРЕТ ВСЕ ДАННЫЕ ИЗ СЕРВРЕА, ВОЗВРАЩАЕТ В КОЛБАК ФУНКЦИЕЙ
// Fetch the full data set from the server using the stored credentials.
// Invokes the callback with the raw response body, or with 0 on failure.
function F1_Sync(func_call_back) {
    console.log('F1_Sync');
    var storage = window.localStorage;
    var payload = 'hash=' + storage.getItem("user_hash") + '&email=' + storage.getItem("user_email");
    $.ajax({
        type: 'POST',
        data: payload,
        url: serverName + '?act=getdata',
        success: function (data) {
            func_call_back(data);
        },
        error: function () {
            func_call_back(0);
        }
    });
}
//Берем существующие данные. Меняем id на id + 10000, запомнинаем.
// Merge server records into the local DATA table. `data` is a JSON string
// whose .data array holds server rows keyed by id2 (the client-side id).
// For each server row: identical local rows are left alone, differing local
// rows are overwritten (after saving a copy of the old local row), and
// missing rows are inserted. Afterwards the is_updated flag is reset and the
// sync loop is restarted via SynStart().
function F2_Sync(data, func_call_back) {
    console.log('F2_Sync');
    //console.log(data);
    var json = JSON.parse(data);
    // console.log(json);
    for (var i = 0; i < json.data.length; i++) {
        // id is the ordinal on the server; here we match by id2 (client id)
        var id = json.data[i].id2;
        console.log('--------------------------');
        console.log(id);
        db.getDataFromSQL("SELECT * FROM DATA WHERE id=" + id, function (arr, good) {
            // arr  - rows from the local DB
            // good - the server record bound for this iteration (passed as the
            //        extra argument below, avoiding the shared-`i` closure issue)
            if (arr.length == 1) {
                console.log("В базе есть запись с id=" + good.id2);
                var obj = arr.item(0);
                console.log('text s: ' + good.text);
                console.log('text m: ' + obj.text);
                // If equal there is nothing to do
                if (CompStr(good.text, obj.text)) {
                    console.log('равны');
                } else {
                    console.log('НЕ РАВНЫ ' + good.id2);
                    // Overwrite the local row with the server version
                    db.updateData('DATA', 'id=' + obj.id, ['text', 'flag', 'created_time', 'future_time'], [good.text, good.flag, good.created_time, good.future_time], function () {
                        console.log('обновлена запись c [' + obj.text + '] на [' + good.text + ']');
                    });
                    // Keep a copy of the old local row (gets a fresh id)
                    db.insertData2('DATA', 'flag,text,created_time,future_time', [obj.flag, obj.text, obj.created_time, obj.future_time],
                        function () {
                            console.log('добавлена запись =>' + obj.text);
                        });
                }
            } else {
                console.log(' НЕт такой записи в БД');
                db.insertData2('DATA', 'id,flag,text,created_time,future_time',
                    [good.id2, good.flag, good.text, good.created_time, good.future_time],
                    function () {
                        console.log('добавлена запись 2');
                    });
            }
        }, json.data[i]);
        // IDS+=','+json.goods[i].id;
    }
    window.localStorage.setItem("is_updated", 0);
    SynStart();
    /*
    db.getDataFromSQL("SELECT * FROM DATA",
    function (arr) {
        var data = '';
        // alert("SELECT * FROM GOOD" + sql);
        if (arr.length > 0) {
            for (var i = 0; i < arr.length; i++) {
                var obj = arr.item(i);
                db.updateData('DATA', 'id=' + obj.id,
                    ['id'],
                    [obj.id+100],
                    function () {});
            }
        } else {
            //alert("Нет записей для синхронизации!");
        }
    });
    */
}
// Kick off a full download-and-merge sync: fetch everything from the server
// (F1_Sync) and, when a payload arrives, merge it into the local DB
// (F2_Sync). Steps f3..f6 of the original plan (re-id and upload of new
// local records) are not implemented here. Always returns false so it can
// be used directly as an event handler.
function SyncUpdateDataFormServer() {
    console.log('SyncUpdateDataFormServer');
    F1_Sync(function (payload) {
        // F1_Sync reports failure as 0; only merge real payloads.
        if (payload != 0) {
            F2_Sync(payload, function () {
            });
        }
    });
    return false;
}
/*
 * Merge a server payload into the local DB (rubrics + goods).
 * @param {string} data JSON string with .rubrics and .goods arrays.
 * Fixes vs the original:
 *  - `data` was never declared (neither parameter nor local), so the first
 *    console.log threw a ReferenceError; it is now an explicit parameter.
 *  - the insert branches referenced `obj`, which only exists when a matching
 *    local row was found; they now use the server-side record.
 *  - `rTitle` leaked as an implicit global; the whole rubric record is passed
 *    through the query callback instead.
 *  - the "keep a copy" insert duplicated the *server* values instead of the
 *    old local row (now consistent with F2_Sync).
 */
function SyncUpdateDataFormServer2(data) {
    console.log('SyncUpdateDataFormServer2');
    console.log(data);
    try
    {
        var json = JSON.parse(data);
        console.log(json.rubrics);
        /*
         * Rubrics: insert the ones missing locally; when a local rubric id
         * matches a server rubric, rename the local one to the server title.
         */
        for (var i = 0; i < json.rubrics.length; i++) {
            db.getDataFromSQL("SELECT * FROM RUBRIC WHERE id=" + json.rubrics[i].id,
                function (arr, rubric) {
                    if (arr.length == 1) {
                        var obj = arr.item(0);
                        db.updateData('RUBRIC', 'id=' + obj.id, ['title'], [rubric.title], function () {});
                    } else {
                        db.insertData2('RUBRIC', 'id, id_parent, title,text,deleted', [rubric.id, 0, rubric.title, '', 0], function () {});
                    }
                }, json.rubrics[i]);
        }
        /*
         * Goods: when a server record collides by id with a local row, keep a
         * copy of the local row and overwrite the original with the server
         * values, so ids stay aligned with the server.
         */
        for (var i = 0; i < json.goods.length; i++) {
            db.getDataFromSQL("SELECT * FROM GOOD WHERE id=" + json.goods[i].id,
                function (arr, good) {
                    if (arr.length == 1) {
                        var obj = arr.item(0);
                        if (good.title != '' && good.title.length > 0) {
                            // copy of the old local row (no explicit id)
                            db.insertData2('GOOD',
                                'title,text,deleted,created_at,id_rubric',
                                [obj.title, obj.text, 0, obj.created_at, obj.id_rubric],
                                function () {
                                    console.log('добавлена запись');
                                });
                            // overwrite the colliding row with the server record
                            db.updateData('GOOD', 'id=' + obj.id,
                                ['title', 'text', 'deleted', 'created_at', 'id_rubric'],
                                [good.title, good.text, 0, good.created_at, good.rubric_id],
                                function () {});
                        }
                    } else {
                        db.insertData2('GOOD',
                            'id,title,text,deleted,created_at,id_rubric',
                            [good.id, good.title, good.text, 0, good.created_at, good.rubric_id],
                            function () {
                                console.log('добавлена запись');
                            });
                    }
                }, json.goods[i]);
        }
    } catch (e)
    {
        console.log('Ошибка');
        console.log(e);
    }
}
|
<gh_stars>0
/*
* node-android-automate v1.0.0
* https://github.com/pete-rai/node-android-automate
*
* Copyright 2021 <NAME>
* Released under the MIT license
* https://github.com/pete-rai/node-android-automate/blob/main/LICENSE
*
* Released with the karmaware tag
* https://pete-rai.github.io/karmaware
*
* Website : http://www.rai.org.uk
* GitHub : https://github.com/pete-rai
* LinkedIn : https://uk.linkedin.com/in/raipete
* NPM : https://www.npmjs.com/~peterai
*
*/
'use strict';
// --- dependencies
const fs = require('fs');
const path = require('path');
const _ = require('lodash');
const sharp = require('sharp');
const engine = require('./engine');
// --- screen class - methods for grabbing, manipulating, examining and reading screens
// One on-disk screenshot (.png) belonging to a device. Every mutating
// operation is queued on the shared `engine` promise chain, so calls compose
// fluently and can be awaited via then().
module.exports = class Screen {
    // --- constructor
    // path: directory holding capture files; name: capture name (no
    // extension); device: the owning device object, returned by device().
    constructor(path, name, device) {
        // NOTE: the `path` parameter shadows the `path` module in this scope.
        this._path = path.replace(/\/+$/, ''); // removes any trailing slashes
        this._name = name;
        this._file = this._fullpath(name);
        this._device = device;
        if (!fs.existsSync(this._file)) {
            fs.closeSync(fs.openSync(this._file, 'w')); // make sure the underlying screen capture file exists
        }
        sharp.cache(false); // turn off sharp caching
    }
    // --- full path to a screen capture file
    _fullpath(name) {
        return path.resolve(process.env.PWD, `${ this._path }/${ name }.png`);
    }
    // --- promise then handler for chaining
    then(callback) {
        engine.then(callback); // delegate to engine
    }
    // --- clones this screen object
    // Copies the capture file and returns a new Screen over the copy.
    clone(name) {
        engine.chain(async () => {
            engine.trace(`cloning '${ this._name }' to '${ name }'...`);
            fs.copyFileSync(this._file, this._fullpath(name));
            engine.trace('cloned');
        });
        return new Screen(this._path, name, this._device);
    }
    // --- extracts colors at the given points
    // Each input point is { x, y }; resolves to { colors, matched } where
    // colors[i] adds c: { r, g, b }. `matched` can only be true when the
    // caller included the expected `c` in each point, since it deep-equals
    // the input point against the found pixel.
    _colors(points) {
        engine.trace(`getting colors from '${ this._name }' at ${ points.length } point${ points.length === 1 ? '' : 's' }...`);
        let shot = sharp(this._file);
        let meta = null;
        return shot.metadata()
        .then(results => {
            meta = results;
            return shot.raw().toBuffer();
        })
        .then(data => {
            let colors = [];
            let matched = points.length > 0;
            for (let i = 0; i < points.length; i++) {
                let point = points[i];
                let delta = meta.channels * (meta.width * point.y + point.x);
                let slice = data.slice(delta, delta + meta.channels);
                let found = { x: point.x, y: point.y, c: { r: slice[0], g: slice[1], b: slice[2] } };
                matched &= _.isEqual(point, found);
                colors.push(found);
            }
            return { colors: colors, matched: matched ? true : false };
        })
        .catch(err => engine.error('failure getting colors'))
        .finally(() => engine.trace('got colors'));
    }
    colors(points) {
        return engine.chain(async () => await this._colors(points));
    }
    // --- whether the screen capture file contains the given image within it
    // Brute-force sub-image search on raw pixel buffers: candidate positions
    // are found via the inner image's top row, then the remaining rows are
    // verified; the scan stops after `max` matches. Resolves to an array of
    // { x, y, w, h } rectangles.
    // NOTE(review): the byte-wise compare presumes both images decode with
    // the same channel count — confirm for mixed alpha/no-alpha captures.
    _contains(other, max) {
        engine.trace(`checking if '${ this._name }' contains at most ${ max } instance${ max === 1 ? '' : 's'} of '${ other }'...`);
        let file_o = sharp(this._file); // _o = outer
        let file_i = sharp(this._fullpath(other)); // _i = inner
        let buff_o = null;
        let buff_i = null;
        let meta_o = null;
        let meta_i = null;
        let awaits = [];
        awaits.push(file_o.raw().toBuffer().then(buffer => buff_o = buffer));
        awaits.push(file_i.raw().toBuffer().then(buffer => buff_i = buffer));
        awaits.push(file_o.metadata().then(metadata => meta_o = metadata));
        awaits.push(file_i.metadata().then(metadata => meta_i = metadata));
        return Promise.all(awaits)
        .then(() => {
            let size_o = meta_o.width * meta_o.channels;
            let size_i = meta_i.width * meta_i.channels;
            let upper = buff_i.slice(0, size_i); // upper row of inner
            let found = -1;
            let finds = [];
            if (meta_i.width <= meta_o.width && meta_i.height <= meta_o.height) { // must be containable within
                do {
                    found = buff_o.indexOf(upper, found + 1); // upper row is present, so its another candidate
                    if (found != -1) {
                        let matches = true;
                        let oy = Math.floor(found / size_o);
                        let ox = Math.floor((found - size_o * oy) / meta_o.channels);
                        for (let y = 1; matches && y < meta_i.height; y++) { // start from one as upper row is already matched
                            let pos_i = y * size_i;
                            let pos_o = y * size_o + found;
                            let slice_i = buff_i.slice(pos_i, pos_i + size_i);
                            let slice_o = buff_o.slice(pos_o, pos_o + size_i);
                            matches &= slice_o.equals(slice_i); // does next row also match?
                        }
                        if (matches) {
                            finds.push({ x: ox, y: oy, w: meta_i.width, h: meta_i.height });
                            /* await sharp(outer) // debug test only code!
                                .extract({ left: finds[finds.length - 1].x,
                                           top: finds[finds.length - 1].y,
                                           width: finds[finds.length - 1].w,
                                           height: finds[finds.length - 1].h })
                                .toBuffer()
                                .then(buffer => sharp(buffer).toFile(`found_${ finds.length }.png`)); */
                        }
                    }
                }
                while (found != -1 && finds.length < max);
            }
            return finds;
        })
        .catch(err => engine.error('failure checking contained'))
        .finally(() => engine.trace('checked contained'));
    }
    contains(other, max = 1) {
        return engine.chain(async () => await this._contains(other, max));
    }
    // --- crops to the given section
    // section: { x, y, w, h } in pixels; overwrites the capture file in place.
    _crop(section) {
        engine.trace(`cropping '${ this._name }' at { x: ${ section.x }, y: ${ section.y }, w: ${ section.w }, h: ${ section.h } }...`);
        return sharp(this._file)
        .extract({ left: section.x, top: section.y, width: section.w, height: section.h })
        .toBuffer()
        .then(buffer => sharp(buffer).toFile(this._file)) // only way to save back to same filename in sharp
        .catch(err => engine.error('failure cropping'))
        .finally(() => engine.trace('cropped'));
    }
    crop(section) {
        // NOTE(review): unlike greyscale()/sharpen(), this chain step does
        // not await _crop — confirm operations still serialize as intended.
        engine.chain(async () => this._crop(section));
        return this;
    }
    // --- deletes the underlying screenshot file
    // Returns the owning device so the chain continues there.
    delete() {
        engine.chain(async () => {
            engine.trace(`deleting '${ this._name }'...`);
            fs.unlinkSync(this._file);
            engine.trace('deleted');
        });
        return this.device();
    }
    // --- switches the operation chain context back to the device
    device() {
        return this._device;
    }
    // --- returns the name of the underlying image file
    file() {
        return engine.chain(() => this._file);
    }
    // --- converts to greyscale
    _greyscale() {
        engine.trace(`greyscaling '${ this._name }'...`);
        return sharp(this._file)
        .greyscale()
        .toBuffer()
        .then(buffer => sharp(buffer).toFile(this._file)) // only way to save back to same filename in sharp
        .catch(err => engine.error('failure greyscaling'))
        .finally(() => engine.trace('greyscaled'));
    }
    greyscale() {
        engine.chain(async () => await this._greyscale());
        return this;
    }
    // --- reads text within the screen section
    // clean=true collapses all whitespace runs to single spaces and trims.
    _read(clean) {
        engine.trace(`${ clean ? 'clean ' : ''}reading '${ this._name }'...`);
        return engine.ocr(this._file)
        .then(text => clean ? text.replace(/\s+/g, ' ').trim() : text) // normalise whitespace
        .catch(err => engine.error('failure reading'))
        .finally(() => engine.trace('read'));
    }
    read(clean = true) {
        return engine.chain(async () => await this._read(clean));
    }
    // --- saves the screenshot to the given file
    save(file) {
        engine.chain(async () => {
            engine.trace(`saving '${ this._name }' to '${ file }'...`);
            fs.copyFileSync(this._file, file);
            engine.trace('saved');
        });
        return this;
    }
    // --- switches context to a new or existing other screen instance
    screen(name) {
        return new Screen(this._path, name, this._device)
    }
    // --- returns the underlying sharp class instance
    sharp() {
        return engine.chain(() => sharp(this._file));
    }
    // --- sharpens an image
    _sharpen() {
        engine.trace(`sharpening '${ this._name }'...`);
        return sharp(this._file)
        .sharpen()
        .toBuffer()
        .then(buffer => sharp(buffer).toFile(this._file)) // only way to save back to same filename in sharp
        .catch(err => engine.error('failure sharpening'))
        .finally(() => engine.trace('sharpened'));
    }
    sharpen() {
        engine.chain(async () => await this._sharpen());
        return this;
    }
    // --- takes a screenshot
    // NOTE(review): the `> file` redirection is passed to engine.adb — confirm
    // whether the path is interpreted host-side or device-side.
    _shoot() {
        engine.trace(`shooting '${ this._name }'...`);
        return engine.adb(`screencap -p > ${ this._file }`)
        .catch(err => engine.error('failure shooting'))
        .finally(() => engine.trace('shot'));
    }
    shoot() {
        engine.chain(async () => await this._shoot());
        return this;
    }
    // --- applies a black/white threshold (value clamped to 0..255)
    _threshold(value) {
        value = Math.min(255, Math.max(0, value));
        engine.trace(`thresholding '${ this._name }' with value ${ value }...`);
        return sharp(this._file)
        .threshold(value)
        .toBuffer()
        .then(buffer => sharp(buffer).toFile(this._file)) // only way to save back to same filename in sharp
        .catch(err => engine.error('failure thresholding'))
        .finally(() => engine.trace('thresholded'));
    }
    threshold(value = 128) {
        engine.chain(async () => await this._threshold(value));
        return this;
    }
    // --- outputs a trace message
    trace(text) {
        engine.chain(async () => engine.trace(text));
        return this;
    }
    // --- sets the trace mode
    tracing(mode) {
        engine.chain(async () => engine.tracing(mode));
        return this;
    }
    // --- waits for a given time
    _wait(millisecs) {
        engine.trace(`waiting for ${ millisecs }ms...`);
        return engine.wait(millisecs)
        .catch(err => engine.error('failure waiting'))
        .finally(() => engine.trace('waited'));
    }
    wait(millisecs) {
        engine.chain(async () => await this._wait(millisecs));
        return this;
    }
}
|
package main
import (
"github.com/gin-gonic/gin"
"net/http"
)
// main starts a gin HTTP server on :9000 whose root route replies with a
// JSON-encoded greeting. Run blocks until the server fails.
func main() {
	engine := gin.Default()
	engine.GET("/", func(ctx *gin.Context) {
		ctx.JSON(http.StatusOK, "Hello, Welcome Gin World!")
	})
	if err := engine.Run(":9000"); err != nil {
		panic(err)
	}
}
|
"use strict";
// TypeScript-emitted module-interop helpers (machine generated; do not edit
// by hand). They re-export CommonJS members with ES-module semantics.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: copy all own properties of a CommonJS module onto a fresh
// object and expose the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __importDefault: wrap a CommonJS module so `.default` always exists.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Output = void 0;
const _ = __importStar(require("lodash"));
const bn_js_1 = __importDefault(require("bn.js"));
const bn_1 = require("../crypto/bn");
const buffer_1 = require("buffer");
const buffer_2 = require("../util/buffer");
const js_1 = require("../util/js");
const bufferwriter_1 = require("../encoding/bufferwriter");
const script_1 = require("../script");
const preconditions_1 = __importDefault(require("../util/preconditions"));
const errors_1 = require("../errors");
// 2^53 - 1: the largest integer a JS double represents exactly
// (equal to Number.MAX_SAFE_INTEGER); used to bound satoshi amounts.
var MAX_SAFE_INTEGER = 0x1fffffffffffff;
// A transaction output: a satoshi amount plus a locking script. This file is
// transpiled TypeScript, so the instance methods are function-valued
// properties assigned inside the constructor.
class Output {
    constructor(args) {
        // Returns an error string describing why the cached satoshi value is
        // invalid, or false when it is valid.
        this.invalidSatoshis = function () {
            if (this._satoshis > MAX_SAFE_INTEGER) {
                return 'transaction txout satoshis greater than max safe integer';
            }
            if (this._satoshis !== this._satoshisBN.toNumber()) {
                return 'transaction txout satoshis has corrupted value';
            }
            if (this._satoshis < 0) {
                return 'transaction txout negative';
            }
            return false;
        };
        // Plain-object form: satoshis as a number, script as a hex string.
        this.toObject = function toObject() {
            const obj = {
                satoshis: this.satoshis,
                script: this._scriptBuffer.toString('hex')
            };
            return obj;
        };
        this.toJSON = this.toObject;
        // Store the raw buffer and try to parse it into a Script; a
        // BitcoreError during parsing leaves _script as null, any other
        // error propagates.
        this.setScriptFromBuffer = function (buffer) {
            this._scriptBuffer = buffer;
            try {
                this._script = script_1.Script.fromBuffer(this._scriptBuffer);
                this._script._isOutput = true;
            }
            catch (e) {
                if (e instanceof errors_1.BitcoreError) {
                    this._script = null;
                }
                else {
                    throw e;
                }
            }
        };
        // Accepts a Script instance, a script string, or a raw Buffer; keeps
        // _script and _scriptBuffer in sync. Throws TypeError otherwise.
        this.setScript = function (script) {
            if (script instanceof script_1.Script) {
                this._scriptBuffer = script.toBuffer();
                this._script = script;
                this._script._isOutput = true;
            }
            else if (_.isString(script)) {
                this._script = script_1.Script.fromString(script);
                this._scriptBuffer = this._script.toBuffer();
                this._script._isOutput = true;
            }
            else if (buffer_2.BufferUtil.isBuffer(script)) {
                this.setScriptFromBuffer(script);
            }
            else {
                throw new TypeError('Invalid argument type: script');
            }
            return this;
        };
        // Debug representation; falls back to the raw hex when the script
        // buffer could not be parsed.
        this.inspect = function () {
            var scriptStr;
            if (this.script) {
                scriptStr = this.script.inspect();
            }
            else {
                scriptStr = this._scriptBuffer.toString('hex');
            }
            return '<Output (' + this.satoshis + ' sats) ' + scriptStr + '>';
        };
        // Serialize in txout wire order: satoshis (uint64 LE), varint script
        // length, then the script bytes.
        this.toBufferWriter = function (writer) {
            if (!writer) {
                writer = new bufferwriter_1.BufferWriter();
            }
            writer.writeUInt64LEBN(this._satoshisBN);
            var script = this._scriptBuffer;
            writer.writeVarintNum(script.length);
            writer.write(script);
            return writer;
        };
        if (!(this instanceof Output)) {
            return new Output(args);
        }
        if (_.isObject(args)) {
            this.setSatoshis(args.satoshis);
            if (buffer_2.BufferUtil.isBuffer(args.script)) {
                this._scriptBuffer = args.script;
            }
            else {
                var script;
                if (_.isString(args.script) && js_1.JSUtil.isHexa(args.script)) {
                    // NOTE(review): `new Buffer(...)` is deprecated in modern
                    // Node — Buffer.from is the supported equivalent.
                    script = new buffer_1.Buffer(args.script, 'hex');
                }
                else {
                    script = args.script;
                }
                this.setScript(script);
            }
        }
        else {
            throw new TypeError('Unrecognized argument for Output');
        }
    }
    // Lazily parse _scriptBuffer into a Script on first access.
    get script() {
        if (this._script) {
            return this._script;
        }
        else {
            this.setScriptFromBuffer(this._scriptBuffer);
            return this._script;
        }
    }
    get satoshis() {
        return this._satoshis;
    }
    // Accepts a BN, a decimal string, or a natural number; keeps _satoshis
    // (number) and _satoshisBN (BN) in sync and validates the result.
    setSatoshis(num) {
        if (num instanceof bn_js_1.default) {
            this._satoshisBN = num;
            this._satoshis = num.toNumber();
        }
        else if (typeof num === 'string') {
            this._satoshis = parseInt(num, 10);
            this._satoshisBN = bn_1.BitcoreBN.fromNumber(this._satoshis);
        }
        else {
            preconditions_1.default.checkArgument(js_1.JSUtil.isNaturalNumber(num), 'Output satoshis is not a natural number');
            this._satoshisBN = bn_1.BitcoreBN.fromNumber(num);
            this._satoshis = num;
        }
        preconditions_1.default.checkState(js_1.JSUtil.isNaturalNumber(this._satoshis), 'Output satoshis is not a natural number');
    }
}
exports.Output = Output;
// Build an Output from a plain object (e.g. the result of toObject()); the
// constructor performs all validation. Not used with `new`, so an arrow is safe.
Output.fromObject = (data) => new Output(data);
// Deserialize an Output from a BufferReader positioned at a txout.
// FIX: the original evaluated readVarintNum() BEFORE readUInt64LEBN()
// (object-literal property order), i.e. the reverse of what toBufferWriter()
// in this file emits (writeUInt64LEBN, then writeVarintNum, then the script
// bytes). Reads now mirror the write order.
Output.fromBufferReader = function (br) {
    const satoshis = br.readUInt64LEBN();
    const size = br.readVarintNum();
    const obj = {
        satoshis: satoshis,
        script: size !== 0 ? br.read(size) : new buffer_1.Buffer([])
    };
    return new Output(obj);
};
//# sourceMappingURL=output.js.map
|
<gh_stars>0
/*
* Copyright (c) 2020 The Khronos Group Inc.
* Copyright (c) 2020 Valve Corporation
* Copyright (c) 2020 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Author: <NAME> <<EMAIL>>
*/
#include "cast_utils.h"
#include "layer_validation_tests.h"
// Test fixture for VK_KHR_portability_subset validation-layer tests.
class VkPortabilitySubsetTest : public VkLayerTest {
  public:
    // Initializes the layer test framework with the instance extension that
    // VK_KHR_portability_subset depends on
    // (VK_KHR_get_physical_device_properties2).
    void InitPortabilitySubsetFramework() {
        // VK_KHR_portability_subset extension dependencies
        instance_extensions_.emplace_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        InitFramework(m_errorMonitor, nullptr);
    }
};
// Verifies that creating a device on a portability-subset implementation
// WITHOUT enabling VK_KHR_portability_subset triggers
// VUID-VkDeviceCreateInfo-pProperties-04451.
TEST_F(VkPortabilitySubsetTest, ValidatePortabilityCreateDevice) {
    TEST_DESCRIPTION("Portability: CreateDevice called and VK_KHR_portability_subset not enabled");
    ASSERT_NO_FATAL_FAILURE(InitPortabilitySubsetFramework());
    bool portability_supported = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
    if (!portability_supported) {
        printf("%s Test requires VK_KHR_portability_subset, skipping\n", kSkipPrefix);
        return;
    }
    vk_testing::PhysicalDevice phys_device(gpu());
    // request all queues
    const std::vector<VkQueueFamilyProperties> queue_props = phys_device.queue_properties();
    vk_testing::QueueCreateInfoArray queue_info(phys_device.queue_properties());
    // Only request creation with queuefamilies that have at least one queue
    std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
    auto qci = queue_info.data();
    for (uint32_t j = 0; j < queue_info.size(); ++j) {
        if (qci[j].queueCount) {
            create_queue_infos.push_back(qci[j]);
        }
    }
    VkDeviceCreateInfo dev_info = {};
    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    dev_info.pNext = nullptr;
    // FIX: explicit cast — queueCreateInfoCount is uint32_t while size()
    // returns size_t; the original relied on an implicit narrowing conversion.
    dev_info.queueCreateInfoCount = static_cast<uint32_t>(create_queue_infos.size());
    dev_info.pQueueCreateInfos = create_queue_infos.data();
    dev_info.enabledLayerCount = 0;
    dev_info.ppEnabledLayerNames = NULL;
    dev_info.enabledExtensionCount = 0;
    dev_info.ppEnabledExtensionNames =
        nullptr; // VK_KHR_portability_subset not included in enabled extensions should trigger 04451
    m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-VkDeviceCreateInfo-pProperties-04451");
    VkDevice device;
    vk::CreateDevice(gpu(), &dev_info, nullptr, &device);
    m_errorMonitor->VerifyFound();
}
// Verifies that vkCreateEvent on a portability-subset device with the
// `events` feature disabled triggers VUID-vkCreateEvent-events-04468.
TEST_F(VkPortabilitySubsetTest, PortabilityCreateEvent) {
    TEST_DESCRIPTION("Portability: CreateEvent when not supported");
    ASSERT_NO_FATAL_FAILURE(InitPortabilitySubsetFramework());
    bool portability_supported = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
    if (!portability_supported) {
        printf("%s Test requires VK_KHR_portability_subset, skipping\n", kSkipPrefix);
        return;
    }
    m_device_extension_names.push_back(VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
    // Query the device's portability features, then force `events` off so the
    // device is created without event support.
    auto portability_feature = lvl_init_struct<VkPhysicalDevicePortabilitySubsetFeaturesKHR>();
    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&portability_feature);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);
    portability_feature.events = VK_FALSE; // Make sure events are disabled
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCreateEvent-events-04468");
    VkEventCreateInfo eci = {VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, nullptr, 0};
    VkEvent event;
    vk::CreateEvent(m_device->device(), &eci, nullptr, &event);
    m_errorMonitor->VerifyFound();
}
|
// RFC-5322-ish e-mail check: a quoted or dot-separated local part, followed
// by either a bracketed IPv4 literal or a dotted domain whose TLD has at
// least two letters. Input is stringified and lowercased before matching.
function isValidEmail(email) {
    const pattern = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
    const normalized = String(email).toLowerCase();
    return pattern.test(normalized);
}
|
<gh_stars>0
package file
import (
"fmt"
"sort"
"testing"
)
// source maps each data-source name ("mini", "pc", "diqu") to the set of
// section keys expected in that source's log output. Most keys map to
// themselves; entries differ where the serialized name diverges (e.g.
// company_base_info is "entbaseInfo" for mini but "base" for pc/diqu).
var source = map[string]map[string]string{
	"mini": {
		"shareholder":            "shareholder",
		"KeyPerson":              "KeyPerson",
		"company_base_info":      "entbaseInfo",
		"branch":                 "branch",
		"alter":                  "alter",
		"spotCheckInfo":          "spotCheckInfo",
		"licenceinfoDetail":      "licenceinfoDetail",
		"insLicenceinfo":         "insLicenceinfo",
		"punishmentdetail":       "punishmentdetail",
		"illInfo":                "illInfo",
		"liquidation":            "liquidation",
		"insinvinfo":             "insinvinfo",
		"insAlterstockinfo":      "insAlterstockinfo",
		"entBusExcep":            "entBusExcep",
		"stakqualitinfo":         "stakqualitinfo",
		"mortreginfo":            "mortreginfo",
		"assistInfo":             "assistInfo",
		"insProPledgeRegInfo":    "insProPledgeRegInfo",
		"report_base":            "report_base",
		"report_webinfo":         "report_webinfo",
		"report_investor":        "report_investor",
		"report_investment":      "report_investment",
		"report_change":          "report_change",
		"report_social_security": "report_social_security",
		"report_guarantee":       "report_guarantee",
		"report_alter_stockinfo": "report_alter_stockinfo",
	},
	"pc": {
		"shareholder":            "shareholder",
		"KeyPerson":              "KeyPerson",
		"company_base_info":      "base",
		"shareholderDetail":      "shareholderDetail",
		"branch":                 "branch",
		"alter":                  "alter",
		"spotCheckInfo":          "spotCheckInfo",
		"licenceinfoDetail":      "licenceinfoDetail",
		"insLicenceinfo":         "insLicenceinfo",
		"punishmentdetail":       "punishmentdetail",
		"illInfo":                "illInfo",
		"liquidation":            "liquidation",
		"insInvinfo":             "insInvinfo",
		"insAlterstockinfo":      "insAlterstockinfo",
		"entBusExcep":            "entBusExcep",
		"stakqualitinfo":         "stakqualitinfo",
		"mortreginfo":            "mortreginfo",
		"mortregpersoninfo":      "mortregpersoninfo",
		"mortCreditorRightInfo":  "mortCreditorRightInfo",
		"mortGuaranteeInfo":      "mortGuaranteeInfo",
		"getMortAltItemInfo":     "getMortAltItemInfo",
		"getMortRegCancelInfo":   "getMortRegCancelInfo",
		"assistInfo":             "assistInfo",
		"assistInfoDetail":       "assistInfoDetail",
		"insProPledgeRegInfo":    "insProPledgeRegInfo",
		"report_base":            "report_base",
		"report_webinfo":         "report_webinfo",
		"report_investor":        "report_investor",
		"report_investment":      "report_investment",
		"report_change":          "report_change",
		"report_social_security": "report_social_security",
		"report_guarantee":       "report_guarantee",
		"report_alter_stockinfo": "report_alter_stockinfo",
	},
	"diqu": {
		"shareholder":            "shareholder",
		"KeyPerson":              "KeyPerson",
		"company_base_info":      "base",
		"shareholderDetail":      "shareholderDetail",
		"branch":                 "branch",
		"alter":                  "alter",
		"spotCheckInfo":          "spotCheckInfo",
		"licenceinfoDetail":      "licenceinfoDetail",
		"insLicenceinfo":         "insLicenceinfo",
		"punishmentdetail":       "punishmentdetail",
		"illInfo":                "illInfo",
		"liquidation":            "liquidation",
		"insInvinfo":             "insInvinfo",
		"insAlterstockinfo":      "insAlterstockinfo",
		"entBusExcep":            "entBusExcep",
		"stakqualitinfo":         "stakqualitinfo",
		"mortreginfo":            "mortreginfo",
		"mortregpersoninfo":      "mortregpersoninfo",
		"mortCreditorRightInfo":  "mortCreditorRightInfo",
		"mortGuaranteeInfo":      "mortGuaranteeInfo",
		"getMortAltItemInfo":     "getMortAltItemInfo",
		"getMortRegCancelInfo":   "getMortRegCancelInfo",
		"assistInfo":             "assistInfo",
		"assistInfoDetail":       "assistInfoDetail",
		"insProPledgeRegInfo":    "insProPledgeRegInfo",
		"report_base":            "report_base",
		"report_webinfo":         "report_webinfo",
		"report_investor":        "report_investor",
		"report_investment":      "report_investment",
		"report_change":          "report_change",
		"report_social_security": "report_social_security",
		"report_guarantee":       "report_guarantee",
		"report_alter_stockinfo": "report_alter_stockinfo",
	},
}
// TestFileExist opens ./a.txt via the package-level Open and reads from it.
// FIXES vs the original:
//   - Read was called with a nil slice, so it always read 0 bytes and
//     exercised nothing; a real buffer is allocated now.
//   - the Close defer is registered immediately after a successful Open,
//     instead of after the read.
func TestFileExist(t *testing.T) {
	path := "./a.txt"
	f, err := Open(path)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer func() {
		if err := f.Close(); err != nil {
			fmt.Println(err)
		}
	}()
	b := make([]byte, 512)
	n, err := f.Read(b)
	if err != nil {
		fmt.Println(err, n)
	}
}
// TestFindNotExistKeys runs FindNotExistKeys over the "diqu" log with the
// full expected key set and prints the (sorted) missing keys.
func TestFindNotExistKeys(t *testing.T) {
	// Earlier runs used other logs; kept for reference.
	//path := "/home/learnGoroutine/file/src.log"
	//path := "/home/learnGoroutine/file/gongshang_pc.log"
	//dataSource := "pc"
	path := "/home/learnGoroutine/file/gongshang_diqu.log"
	dataSource := "diqu"
	keys := make([]string, 0, len(source[dataSource]))
	for k := range source[dataSource] {
		keys = append(keys, k)
	}
	output, err := FindNotExistKeys(path, dataSource, keys)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Sort so the printed result is stable (map iteration order is random).
	sort.Strings(output)
	fmt.Println(output)
}
// go test -bench=. -run=none -benchmem -benchtime=3s
// -bench=.表示运行所有的基准测试,-bench=BenchmarkFindNotExistKeys表示只允许该基准函数
// -run=none表示不运行TestFindNotExistKeys函数,-benchtime=3s表示设置基准测试运行的时间(默认为1s)
// -benchmem表示显示memory指标(也就是再PASS这一行多显示 B/op 和 allocs/op)
// 15表示3s内运行的次数(b.N的数值),220301926 ns/op表示执行一次循环耗时220301926纳秒
// B/op 表示执行一次循环分配的内存数(字节,Byte);allocs/op表示执行一次操作分配的内存次数
//
// goos表示操作系统,goarch表示平台的体系架构,pkg表示运行的文件所在的包
// BenchmarkFindNotExistKeys表示基准测试函数名
//
//goos: linux
//goarch: amd64
//pkg: GopherUtils/file
//BenchmarkFindNotExistKeys 15 220301926 ns/op 1485740 B/op 2629 allocs/op
//PASS
//ok GopherUtils/file 3.525s
// BenchmarkFindNotExistKeys measures FindNotExistKeys over the "pc" test log
// with the full expected key set (run with:
// go test -bench=. -run=none -benchmem -benchtime=3s).
func BenchmarkFindNotExistKeys(b *testing.B) {
	path := "/home/learnGoroutine/file/gongshang_pc_test.log"
	var keys []string
	dataSource := "pc"
	for k := range source[dataSource] {
		keys = append(keys, k)
	}
	// Reset the timer so the setup above (building the key list) is excluded
	// from the measured time.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, err := FindNotExistKeys(path, dataSource, keys)
		if err != nil {
			b.Error(err)
			return
		}
	}
}
|
<reponame>hiteshsahu/Hassium-WordPress-Theme
//Global Variables
// Root Angular module for the portfolio app (routing + animations).
var app = angular.module('HitMan',['ngRoute', 'ngAnimate']);
// Main controller: exposes portfolio data (skills, projects, open-source
// work, social links) read from the global `appData` object, plus list
// ordering and skill-filter state.
// NOTE(review): $rootScope is injected but unused here.
app.controller('AssassinControler', function($scope, $rootScope) {
    //Variables
    $scope.orderList = "name";
    $scope.skill_require = "";
    $scope.coreSkill = appData.CORE_SKILLS;
    $scope.projects = appData.PROJECTS;
    $scope.opeSourceProjects = appData.OPEN_SOURCE;
    $scope.socialMedia = appData.SOCIAL;
    $scope.projectCount = $scope.projects.length;
    // Returns the CSS class "glow" when the currently required skill appears
    // in the given skill list, otherwise an empty string.
    $scope.isSkillPresent= function(skillPresent){
        if($scope.skill_require !== "" && skillPresent.indexOf($scope.skill_require) !== -1)
        { return "glow"}
        else
        {return "";}
    }
});
// Route table: each client-side URL maps to a template under ./templates;
// the custom `animation` property tags routes for ngAnimate transitions.
// Unknown URLs fall back to an inline greeting template.
// NOTE(review): $locationProvider is injected but never used here.
app.config(function($routeProvider, $locationProvider){
    $routeProvider
    .when("/", {
        templateUrl : "./templates/visitors.html",
    })
    .when('/visitors',{
        templateUrl : "./templates/visitors.html",
        animation: 'first',
    })
    .when('/form',{
        templateUrl : "./templates/form.html",
        animation: 'second',
    })
    .when('/report',{
        templateUrl : "./templates/report.html",
        animation: 'first',
    })
    .when('/about',{
        templateUrl : "./templates/about.html",
        animation: 'first',
    })
    .otherwise({
        template: "<div class='box n'>Well... Hello There</div>",
        animation: 'welcome'
    });
});
|
<gh_stars>10-100
import { CircularProgress, createStyles, makeStyles } from "@material-ui/core";
import clsx from "clsx";
// Layout styles: `root` centers its content inside the parent via flexbox;
// `fullPage` stretches the container to the full viewport instead.
const useStyles = makeStyles(() =>
  createStyles({
    root: {
      height: "100%",
      width: "100%",
      display: "flex",
      alignItems: "center",
      justifyContent: "center",
    },
    fullPage: {
      height: "100vh",
      width: "100vw",
    },
  })
);
// Props: `fullPage` switches the spinner container from filling its parent
// element to filling the entire viewport.
type LoadingDivProps = {
  fullPage?: boolean;
};
// Centered Material-UI spinner, used as a generic loading placeholder.
const LoadingDiv = ({ fullPage }: LoadingDivProps): JSX.Element => {
  const classes = useStyles();
  return (
    <div className={clsx(classes.root, fullPage && classes.fullPage)}>
      <CircularProgress size={48} />
    </div>
  );
};
export default LoadingDiv;
|
#!/usr/bin/env bash
# Copyright 2016 Vimal Manohar
# 2016 Yiming Wang
# 2016 Johns Hopkins University (author: Daniel Povey)
# Apache 2.0
# This script demonstrates how to re-segment training data selecting only the
# "good" audio that matches the transcripts.
# The basic idea is to decode with an existing in-domain acoustic model, and a
# biased language model built from the reference, and then work out the
# segmentation from a ctm like file.
# For nnet3 and chain results after cleanup, see the scripts in
# local/nnet3/run_tdnn.sh and local/chain/run_tdnn_6z.sh
# GMM Results for speaker-independent (SI) and speaker adaptive training (SAT) systems on dev and test sets
# [will add these later].
set -e
set -o pipefail
set -u
# Stage control and configurable paths; all of these can be overridden on the
# command line thanks to utils/parse_options.sh sourced below.
stage=0
cleanup_stage=0
data=data/train_960
cleanup_affix=cleaned
srcdir=exp/tri6b
nj=100
decode_nj=16
decode_num_threads=4
. ./path.sh
. ./cmd.sh
. ./utils/parse_options.sh
# Derived directory names: cleaned data dir, cleanup working dir, and the
# directory for the model retrained on the cleaned data.
cleaned_data=${data}_${cleanup_affix}
dir=${srcdir}_${cleanup_affix}_work
cleaned_dir=${srcdir}_${cleanup_affix}
if [ $stage -le 1 ]; then
# This does the actual data cleanup.
steps/cleanup/clean_and_segment_data.sh --stage $cleanup_stage --nj $nj --cmd "$train_cmd" \
$data data/lang $srcdir $dir $cleaned_data
fi
if [ $stage -le 2 ]; then
# Re-align the cleaned data with the source model before retraining.
steps/align_fmllr.sh --nj $nj --cmd "$train_cmd" \
$cleaned_data data/lang $srcdir ${srcdir}_ali_${cleanup_affix}
fi
if [ $stage -le 3 ]; then
# Train a new SAT system on the cleaned data.
steps/train_sat.sh --cmd "$train_cmd" \
7000 150000 $cleaned_data data/lang ${srcdir}_ali_${cleanup_affix} ${cleaned_dir}
fi
if [ $stage -le 4 ]; then
# Test with the models trained on cleaned-up data.
utils/mkgraph.sh data/lang_test_tgsmall ${cleaned_dir} ${cleaned_dir}/graph_tgsmall
for dset in test_clean test_other dev_clean dev_other; do
(
steps/decode_fmllr.sh --nj $decode_nj --num-threads $decode_num_threads \
--cmd "$decode_cmd" \
${cleaned_dir}/graph_tgsmall data/${dset} ${cleaned_dir}/decode_${dset}_tgsmall
steps/lmrescore.sh --cmd "$decode_cmd" data/lang_test_{tgsmall,tgmed} \
data/${dset} ${cleaned_dir}/decode_${dset}_{tgsmall,tgmed}
steps/lmrescore_const_arpa.sh \
--cmd "$decode_cmd" data/lang_test_{tgsmall,tglarge} \
data/${dset} ${cleaned_dir}/decode_${dset}_{tgsmall,tglarge}
steps/lmrescore_const_arpa.sh \
--cmd "$decode_cmd" data/lang_test_{tgsmall,fglarge} \
data/${dset} ${cleaned_dir}/decode_${dset}_{tgsmall,fglarge}
) &
done
fi
# Wait for all backgrounded per-dataset decode jobs to finish.
wait;
exit 0;
|
<reponame>rovedit/Fort-Candle
#pragma once
#include "types/Light.hh"
#include "types/Material.hh"
#include "types/Particle.hh"
#include "types/Properties.hh"
#include "types/Structure.hh"
#include "types/Unit.hh"
#include "types/Nature.hh"
#include "types/Audio.hh"
|
<reponame>theBoyMo/RadioPlayer<filename>app/src/main/java/com/oandmdigital/radioplayer/common/LoggingFragment.java
package com.oandmdigital.radioplayer.common;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
/**
 * Fragment base class that logs every lifecycle callback, making it easy to
 * trace fragment lifecycle ordering in logcat. Subclasses get the logging for
 * free by extending this class; every override delegates to super and then
 * logs the callback name.
 */
public class LoggingFragment extends Fragment {
// Tag resolves to the concrete subclass's simple name, so log lines identify
// the actual fragment rather than this base class.
private final String LOG_TAG = this.getClass().getSimpleName();
@Override
public void onAttach(Context context) {
super.onAttach(context);
Log.d(LOG_TAG, "onAttach()");
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(LOG_TAG, "onCreate()");
}
// Returns super's view (null by default); subclasses that show UI override this.
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
Log.d(LOG_TAG, "onCreateView()");
return super.onCreateView(inflater, container, savedInstanceState);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
Log.d(LOG_TAG, "onViewCreated()");
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
Log.d(LOG_TAG, "onActivityCreated()");
}
@Override
public void onStart() {
super.onStart();
Log.d(LOG_TAG, "onStart()");
}
@Override
public void onResume() {
super.onResume();
Log.d(LOG_TAG, "onResume()");
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
Log.d(LOG_TAG, "onSaveInstanceState()");
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
Log.d(LOG_TAG, "onConfigurationChanged()");
}
@Override
public void onPause() {
super.onPause();
Log.d(LOG_TAG, "onPause()");
}
@Override
public void onStop() {
super.onStop();
Log.d(LOG_TAG, "onStop()");
}
@Override
public void onDestroyView() {
super.onDestroyView();
Log.d(LOG_TAG, "onDestroyView()");
}
@Override
public void onDestroy() {
super.onDestroy();
Log.d(LOG_TAG, "onDestroy()");
}
@Override
public void onDetach() {
super.onDetach();
Log.d(LOG_TAG, "onDetach()");
}
}
|
import java.util.Arrays;
import java.util.List;
/** Utility for verifying whether a user role grants a given permission. */
public class PermissionChecker {

    /**
     * Checks whether a user with the given role holds the required permission.
     *
     * @param userRole           the user's role (e.g. "admin", "manager"); may be null
     * @param requiredPermission the permission to look for
     * @return true if the role's permission set contains the permission
     */
    public boolean checkPermission(String userRole, String requiredPermission) {
        return getUserPermissionsByRole(userRole).contains(requiredPermission);
    }

    /**
     * Resolves the permission list for a role.
     * Unknown roles fall back to read-only access; a null role is treated the
     * same way (the previous if/else chain threw NullPointerException on null).
     */
    private List<String> getUserPermissionsByRole(String userRole) {
        // Replace with actual logic to retrieve user's permissions based on role
        if (userRole == null) {
            return Arrays.asList("MANAGEMENT_USERS_READ");
        }
        switch (userRole) {
            case "admin":
                return Arrays.asList("MANAGEMENT_USERS_READ", "MANAGEMENT_USERS_WRITE", "MANAGEMENT_USERS_DELETE");
            case "manager":
                return Arrays.asList("MANAGEMENT_USERS_READ", "MANAGEMENT_USERS_WRITE");
            default:
                return Arrays.asList("MANAGEMENT_USERS_READ");
        }
    }
}
|
<filename>trustconnector/src/main/java/libs/trustconnector/scdp/util/ByteArray.java
package libs.trustconnector.scdp.util;
import libs.trustconnector.scdp.util.tlv.bertlv.*;
import libs.trustconnector.scdp.util.tlv.*;
import libs.trustconnector.scdp.util.tlv.LV;
import libs.trustconnector.scdp.util.tlv.bertlv.BERLVBuilder;
/**
 * Mutable byte-array wrapper offering big-endian integer accessors,
 * hex/UCS2/ASCII string conversion, LV / BER-LV encoding, searching,
 * splitting and bitwise helpers.
 *
 * A null internal buffer is treated as "empty" throughout.
 */
public final class ByteArray
{
    private byte[] byteValue;
    // SHIFT_MASK[n-1]: bits of a byte that survive a left shift by n bits.
    static final int[] SHIFT_MASK;
    // SHIFT_MASK_RIGHT[n-1]: bits of a byte that survive a right shift by n bits.
    static final int[] SHIFT_MASK_RIGHT;
    private static final char[] charHex;

    /** Creates an empty array (no backing buffer). */
    public ByteArray() {
    }

    /** Creates a zero-filled array of the given length. */
    public ByteArray(final int initLen) {
        this.byteValue = new byte[initLen];
    }

    /** Creates an array holding a defensive copy of {@code byr}. */
    public ByteArray(final byte[] byr) {
        if (byr != null) {
            this.byteValue = byr.clone();
        }
    }

    /**
     * Decodes an LV (length-value) structure starting at {@code offset}.
     * With {@code bBERLV} set, BER multi-byte length prefixes 0x81..0x84
     * (1..4 following length bytes) are honoured; otherwise a single
     * length byte is assumed.
     *
     * @throws DataFormatException if the BER length prefix is not 0x00-0x84
     */
    public ByteArray(final byte[] byr, final int offset, final boolean bBERLV) {
        int length = byr[offset] & 0xFF;
        int vOff = 1;
        if (bBERLV) {
            if (length == 129) {            // 0x81: one length byte follows
                length = (byr[offset + 1] & 0xFF);
                vOff = 2;
            }
            else if (length == 130) {       // 0x82: two length bytes
                length = (byr[offset + 1] & 0xFF);
                length <<= 8;
                length |= (byr[offset + 2] & 0xFF);
                vOff = 3;
            }
            else if (length == 131) {       // 0x83: three length bytes
                length = (byr[offset + 1] & 0xFF);
                length <<= 8;
                length |= (byr[offset + 2] & 0xFF);
                length <<= 8;
                length |= (byr[offset + 3] & 0xFF);
                vOff = 4;
            }
            else {
                if (length != 132) {        // anything above 0x84 is unsupported
                    throw new DataFormatException("new ByteArray from Berlv failed:length overflow! max length is 0xFFFFFFFF");
                }
                length = (byr[offset + 1] & 0xFF);   // 0x84: four length bytes
                length <<= 8;
                length |= (byr[offset + 2] & 0xFF);
                length <<= 8;
                length |= (byr[offset + 3] & 0xFF);
                length <<= 8;
                length |= (byr[offset + 4] & 0xFF);
                vOff = 5;
            }
        }
        System.arraycopy(byr, offset + vOff, this.byteValue = new byte[length], 0, length);
    }

    /** Creates an array copying {@code length} bytes of {@code byr} starting at {@code offset}. */
    public ByteArray(final byte[] byr, final int offset, final int length) {
        this.byteValue = new byte[length];
        if (byr != null) {
            System.arraycopy(byr, offset, this.byteValue, 0, length);
        }
    }

    /** Creates an array from a hex string (whitespace is ignored). */
    public ByteArray(final String hex) {
        this.byteValue = convert(hex, StringFormat.HEX);
    }

    /** Creates an array from a string in the given format (HEX, UCS2 or ASCII). */
    public ByteArray(final String str, final StringFormat format) {
        this.byteValue = convert(str, format);
    }

    /** Returns the number of bytes held; 0 for an empty array. */
    public int length() {
        if (this.byteValue == null) {
            return 0;
        }
        return this.byteValue.length;
    }

    /** Stores the low 8 bits of {@code v} at {@code index}. */
    public void setByte(final int index, final int v) {
        this.byteValue[index] = (byte)v;
    }

    /** Stores {@code v} as a 2-byte big-endian integer at {@code index}. */
    public void setInt2(final int index, final int v) {
        this.byteValue[index] = (byte)(v >> 8);
        this.byteValue[index + 1] = (byte)(v & 0xFF);
    }

    /** Stores {@code v} as a 3-byte big-endian integer at {@code index}. */
    public void setInt3(final int index, final int v) {
        this.byteValue[index] = (byte)(v >> 16 & 0xFF);
        this.byteValue[index + 1] = (byte)(v >> 8 & 0xFF);
        this.byteValue[index + 2] = (byte)(v & 0xFF);
    }

    /** Alias for {@link #setInt4(int, int)}. */
    public void setInt(final int index, final int v) {
        this.setInt4(index, v);
    }

    /** Stores {@code v} as a 4-byte big-endian integer at {@code index}. */
    public void setInt4(final int index, final int v) {
        this.byteValue[index + 0] = (byte)(v >> 24 & 0xFF);
        this.byteValue[index + 1] = (byte)(v >> 16 & 0xFF);
        this.byteValue[index + 2] = (byte)(v >> 8 & 0xFF);
        this.byteValue[index + 3] = (byte)(v & 0xFF);
    }

    /** Returns the (signed) byte at {@code index}. */
    public byte getByte(final int index) {
        return this.byteValue[index];
    }

    /** Returns the byte at {@code index} as an unsigned value (0-255). */
    public int getUnsignedByte(final int index) {
        return this.byteValue[index] & 0xFF;
    }

    /** Reads a 2-byte big-endian integer starting at {@code index}. */
    public int getInt2(final int index) {
        int v = this.byteValue[index] & 0xFF;
        v <<= 8;
        v |= (this.byteValue[index + 1] & 0xFF);
        return v;
    }

    /** Reads a 3-byte big-endian integer starting at {@code index}. */
    public int getInt3(final int index) {
        int v = this.byteValue[index] & 0xFF;
        v <<= 8;
        v |= (this.byteValue[index + 1] & 0xFF);
        v <<= 8;
        v |= (this.byteValue[index + 2] & 0xFF);
        return v;
    }

    /** Alias for {@link #getInt4(int)}. */
    public int getInt(final int index) {
        return this.getInt4(index);
    }

    /** Reads a 4-byte big-endian integer starting at {@code index}. */
    public int getInt4(final int index) {
        int v = this.byteValue[index] & 0xFF;
        v <<= 8;
        v |= (this.byteValue[index + 1] & 0xFF);
        v <<= 8;
        v |= (this.byteValue[index + 2] & 0xFF);
        v <<= 8;
        v |= (this.byteValue[index + 3] & 0xFF);
        return v;
    }

    /** Reads a {@code len}-byte big-endian integer starting at {@code index}. */
    public int getInt(final int index, final int len) {
        return Util.bytesToInt(this.byteValue, index, len);
    }

    /** Returns a defensive copy of the whole content, or null when empty. */
    public byte[] toBytes() {
        if (this.byteValue == null) {
            return null;
        }
        return this.byteValue.clone();
    }

    /** Returns a copy of {@code length} bytes starting at {@code index}. */
    public byte[] toBytes(final int index, final int length) {
        final byte[] v = new byte[length];
        System.arraycopy(this.byteValue, index, v, 0, length);
        return v;
    }

    /** Copies {@code length} bytes starting at {@code index} into {@code out} at {@code offset}. */
    public void getBytes(final int index, final byte[] out, final int offset, final int length) {
        System.arraycopy(this.byteValue, index, out, offset, length);
    }

    /** Overwrites this array at {@code index} with all of {@code content}. */
    public void setBytes(final int index, final byte[] content) {
        System.arraycopy(content, 0, this.byteValue, index, content.length);
    }

    /** Overwrites this array at {@code index} with a slice of {@code content}. */
    public void setBytes(final int index, final byte[] content, final int offset, final int length) {
        System.arraycopy(content, offset, this.byteValue, index, length);
    }

    /** In-place bitwise AND with {@code byr}, over the shorter of the two lengths. */
    public void and(final ByteArray byr) {
        if (byr == null || this.byteValue == null || byr.byteValue == null) {
            return;
        }
        final int len = Math.min(this.byteValue.length, byr.byteValue.length);
        for (int i = 0; i < len; ++i) {
            this.byteValue[i] &= byr.byteValue[i];
        }
    }

    /** In-place bitwise OR with a slice of {@code byr}, over the shorter length. */
    public void or(final byte[] byr, final int offset, final int length) {
        if (this.byteValue == null || byr == null) {
            return;
        }
        final int len = Math.min(this.byteValue.length, length);
        for (int i = 0; i < len; ++i) {
            // FIX: previously indexed byr[i], silently ignoring the offset parameter.
            this.byteValue[i] |= byr[offset + i];
        }
    }

    /** In-place bitwise OR with {@code byr}; a null argument is a no-op. */
    public void or(final ByteArray byr) {
        // FIX: the old code dereferenced byr before its null check and threw NPE.
        if (byr == null || byr.byteValue == null) {
            return;
        }
        this.or(byr.byteValue, 0, byr.byteValue.length);
    }

    /** In-place bitwise XOR with a slice of {@code byr}, over the shorter length. */
    public void xor(final byte[] byr, final int offset, final int length) {
        if (this.byteValue == null || byr == null) {
            return;
        }
        final int len = Math.min(this.byteValue.length, length);
        for (int i = 0; i < len; ++i) {
            // FIX: previously indexed byr[i], silently ignoring the offset parameter.
            this.byteValue[i] ^= byr[offset + i];
        }
    }

    /** In-place bitwise XOR with {@code byr}; a null argument is a no-op. */
    public void xor(final ByteArray byr) {
        // FIX: the old code dereferenced byr before its null check and threw NPE.
        if (byr == null || byr.byteValue == null) {
            return;
        }
        this.xor(byr.byteValue, 0, byr.byteValue.length);
    }

    /** In-place bitwise complement of every byte. */
    public void not() {
        if (this.byteValue == null) {
            return;
        }
        for (int i = 0; i < this.byteValue.length; ++i) {
            this.byteValue[i] ^= -1;
        }
    }

    /** Returns a copy of the first {@code length} bytes. */
    public byte[] left(final int length) {
        final byte[] v = new byte[length];
        System.arraycopy(this.byteValue, 0, v, 0, length);
        return v;
    }

    /** Returns a copy of the last {@code length} bytes. */
    public byte[] right(final int length) {
        final byte[] v = new byte[length];
        System.arraycopy(this.byteValue, this.byteValue.length - length, v, 0, length);
        return v;
    }

    /** Returns true if this array begins with the bytes of {@code start}. */
    public boolean isStartWith(final byte[] start) {
        return this.byteValue != null && start != null && this.byteValue.length >= start.length && compare(this.byteValue, 0, start, 0, start.length) == 0;
    }

    /** Returns true if this array ends with the bytes of {@code end}. */
    public boolean isEndWith(final byte[] end) {
        return this.byteValue != null && end != null && this.byteValue.length >= end.length && compare(this.byteValue, this.byteValue.length - end.length, end, 0, end.length) == 0;
    }

    /** Returns the index of the first occurrence of {@code target}, or -1. */
    public int find(final ByteArray target) {
        return this.find(target, 0);
    }

    /** Returns the index of the first occurrence of {@code target} at or after {@code startIndex}, or -1. */
    public int find(final ByteArray target, final int startIndex) {
        if (target == null) {
            return -1;
        }
        return this.find(target.byteValue, 0, target.length(), startIndex);
    }

    /** Returns the index of the first occurrence of {@code target} at or after {@code startIndex}, or -1. */
    public int find(final byte[] target, final int startIndex) {
        if (target == null) {
            return -1;
        }
        return this.find(target, 0, target.length, startIndex);
    }

    /**
     * Searches for the byte sequence {@code target[tarOff .. tarOff+tarLen)}
     * at or after {@code startIndex} and returns its index, or -1.
     */
    public int find(final byte[] target, final int tarOff, final int tarLen, final int startIndex) {
        if (this.byteValue == null || target == null) {
            return -1;
        }
        final int srcLen = this.byteValue.length;
        if (srcLen < tarLen) {
            return -1;
        }
        // FIX: the old code ignored tarOff/tarLen (always compared the full
        // target from offset 0) and used "offset + len < srcLen", which missed
        // a match ending exactly at the end of the buffer.
        for (int offset = Math.max(startIndex, 0); offset + tarLen <= srcLen; ++offset) {
            if (compare(this.byteValue, offset, target, tarOff, tarLen) == 0) {
                return offset;
            }
        }
        return -1;
    }

    /** Appends the content of {@code append}; a null argument is a no-op. */
    public void append(final ByteArray append) {
        // FIX: guard against NPE on a null argument.
        if (append != null) {
            this.append(append.byteValue);
        }
    }

    /** Appends a single byte, growing the buffer by one. */
    public void append(final byte a) {
        if (this.byteValue == null) {
            (this.byteValue = new byte[1])[0] = a;
            return;
        }
        final int aLen = this.byteValue.length;
        final byte[] newV = new byte[aLen + 1];
        System.arraycopy(this.byteValue, 0, newV, 0, aLen);
        newV[aLen] = a;
        this.byteValue = newV;
    }

    /** Appends {@code v} encoded as a {@code byteLen}-byte big-endian integer. */
    public void append(final int v, final int byteLen) {
        this.append(Util.intToBytes(v, byteLen));
    }

    /** Appends all bytes of {@code c}; a null argument is a no-op. */
    public void append(final byte[] c) {
        if (c == null) {
            return;
        }
        if (this.byteValue == null) {
            this.byteValue = c.clone();
            return;
        }
        final int aLen = c.length;
        final int bLen = this.byteValue.length;
        final byte[] newV = new byte[aLen + bLen];
        System.arraycopy(this.byteValue, 0, newV, 0, bLen);
        System.arraycopy(c, 0, newV, bLen, aLen);
        this.byteValue = newV;
    }

    /** Appends {@code length} bytes of {@code c} starting at {@code offset}. */
    public void append(final byte[] c, final int offset, final int length) {
        if (c == null) {
            return;
        }
        final int bLen = (this.byteValue != null) ? this.byteValue.length : 0;
        final byte[] newV = new byte[length + bLen];
        if (this.byteValue != null) {
            System.arraycopy(this.byteValue, 0, newV, 0, bLen);
        }
        System.arraycopy(c, offset, newV, bLen, length);
        this.byteValue = newV;
    }

    /** Appends bytes decoded from a hex string. */
    public void append(final String hex) {
        this.append(convert(hex));
    }

    /** Inserts the content of {@code value} at {@code insPos}. */
    public void insert(final ByteArray value, final int insPos) {
        this.insert(value.byteValue, insPos);
    }

    /** Inserts {@code value} at {@code insPos}; positions past the end append. */
    public void insert(final byte[] value, int insPos) {
        if (this.byteValue == null) {
            this.byteValue = value.clone();
            return;
        }
        final int srcLen = this.byteValue.length;
        if (insPos > srcLen) {
            insPos = srcLen;            // clamp: insert position beyond the end appends
        }
        final int tarLen = value.length;
        final byte[] newV = new byte[srcLen + tarLen];
        if (insPos > 0) {
            System.arraycopy(this.byteValue, 0, newV, 0, insPos);
        }
        System.arraycopy(value, 0, newV, insPos, tarLen);
        if (srcLen - insPos > 0) {
            System.arraycopy(this.byteValue, insPos, newV, insPos + tarLen, srcLen - insPos);
        }
        this.byteValue = newV;
    }

    /**
     * Removes {@code length} bytes starting at {@code startPos}.
     *
     * @return true on success, false if the requested range is invalid
     */
    public boolean remove(final int startPos, final int length) {
        if (startPos < 0 || length < 0) {
            return false;
        }
        if (length == 0) {
            return true;
        }
        if (this.byteValue == null) {
            return false;
        }
        final int orgLen = this.byteValue.length;
        if (startPos + length > orgLen) {
            return false;
        }
        final byte[] newV = new byte[orgLen - length];
        System.arraycopy(this.byteValue, 0, newV, 0, startPos);
        if (startPos + length < orgLen) {
            System.arraycopy(this.byteValue, startPos + length, newV, startPos, orgLen - startPos - length);
        }
        this.byteValue = newV;
        return true;
    }

    /**
     * Removes the last {@code count} bytes; removing all bytes empties the array.
     *
     * @return true on success, false if {@code count} exceeds the length
     */
    public boolean remove(final int count) {
        if (count == 0) {
            return true;
        }
        if (this.byteValue == null) {
            return false;
        }
        final int orgLen = this.byteValue.length;
        if (count > orgLen) {
            return false;
        }
        if (orgLen == count) {
            this.byteValue = null;
            return true;
        }
        final byte[] newV = new byte[orgLen - count];
        System.arraycopy(this.byteValue, 0, newV, 0, orgLen - count);
        this.byteValue = newV;
        return true;
    }

    /** Lexicographically compares {@code length} bytes of this array (from {@code index}) with a slice of {@code dst}. */
    public int compare(final int index, final byte[] dst, final int dstOff, final int length) {
        final byte[] t = this.toBytes(index, length);
        return compare(t, 0, dst, dstOff, length);
    }

    /** Drops the backing buffer, making the array empty. */
    public void reinit() {
        this.byteValue = null;
    }

    /** Zeroes the content in place (e.g. to wipe key material); empty arrays are a no-op. */
    public void clearContent() {
        // FIX: guard against NPE when the array is empty.
        if (this.byteValue == null) {
            return;
        }
        for (int i = 0; i < this.byteValue.length; ++i) {
            this.byteValue[i] = 0;
        }
    }

    /** Splits the content into chunks of at most {@code len} bytes; null when empty. */
    public byte[][] split(final int len) {
        if (this.byteValue == null) {
            return null;
        }
        final int lenV = this.byteValue.length;
        int f = lenV / len;
        if (lenV % len != 0) {
            ++f;                        // partial trailing chunk
        }
        final byte[][] a = new byte[f][];
        int leftLen = lenV;
        int offset = 0;
        for (int i = 0; i < f; ++i) {
            final int aLen = Math.min(leftLen, len);
            a[i] = new byte[aLen];
            System.arraycopy(this.byteValue, offset, a[i], 0, aLen);
            leftLen -= aLen;
            offset += aLen;
        }
        return a;
    }

    /**
     * Splits the content into a first chunk of at most {@code firstItemLen}
     * bytes followed by chunks of at most {@code len} bytes; null when empty.
     */
    public byte[][] split(final int firstItemLen, final int len) {
        if (this.byteValue == null) {
            return null;
        }
        int lenV = this.byteValue.length;
        final int itemLen = Math.min(firstItemLen, lenV);
        final byte[] firstItem = new byte[itemLen];
        System.arraycopy(this.byteValue, 0, firstItem, 0, itemLen);
        lenV -= itemLen;
        int f = 0;
        if (lenV != 0) {
            f = lenV / len;
            if (lenV % len != 0) {
                ++f;
            }
        }
        final byte[][] a = new byte[++f][];
        a[0] = firstItem;
        int leftLen = lenV;
        int offset = itemLen;
        for (int i = 1; i < f; ++i) {
            final int aLen = Math.min(leftLen, len);
            a[i] = new byte[aLen];
            System.arraycopy(this.byteValue, offset, a[i], 0, aLen);
            leftLen -= aLen;
            offset += aLen;
        }
        return a;
    }

    /**
     * Shifts the whole array left by 1..7 bits, growing it by one byte so no
     * bits are lost (the overflow bits land in the new leading byte).
     *
     * @return false when the array is empty or bitCount is out of range
     */
    public boolean shiftLeft(final int bitCount) {
        if (this.byteValue == null) {
            return false;
        }
        if (bitCount < 1 || bitCount > 7) {
            return false;
        }
        final int vLen = this.byteValue.length;
        final int expByteLen = vLen + 1;
        final int shiftBitN = 8 - bitCount;
        final byte[] t = new byte[expByteLen];
        t[0] = (byte)(this.byteValue[0] >> shiftBitN & ~ByteArray.SHIFT_MASK[bitCount - 1]);
        for (int i = 1; i < expByteLen - 1; ++i) {
            t[i] = (byte)((this.byteValue[i - 1] << bitCount & ByteArray.SHIFT_MASK[bitCount - 1]) | (this.byteValue[i] >> shiftBitN & ~ByteArray.SHIFT_MASK[bitCount - 1]));
        }
        t[expByteLen - 1] = (byte)(this.byteValue[expByteLen - 1 - 1] << bitCount & ByteArray.SHIFT_MASK[bitCount - 1]);
        this.byteValue = t;
        return true;
    }

    /**
     * Shifts the whole array right by 1..7 bits, growing it by one byte so no
     * bits are lost (the overflow bits land in the new trailing byte).
     *
     * @return false when the array is empty or bitCount is out of range
     */
    public boolean shiftRight(final int bitCount) {
        if (this.byteValue == null) {
            return false;
        }
        if (bitCount < 1 || bitCount > 7) {
            return false;
        }
        final int vLen = this.byteValue.length;
        final int expByteLen = vLen + 1;
        final int shiftBitN = 8 - bitCount;
        final byte[] t = new byte[expByteLen];
        t[0] = (byte)(this.byteValue[0] >> bitCount & ByteArray.SHIFT_MASK_RIGHT[bitCount - 1]);
        for (int i = 1; i < expByteLen - 1; ++i) {
            t[i] = (byte)((this.byteValue[i] >> bitCount & ByteArray.SHIFT_MASK_RIGHT[bitCount - 1]) | (this.byteValue[i - 1] << shiftBitN & ~ByteArray.SHIFT_MASK_RIGHT[bitCount - 1]));
        }
        t[expByteLen - 1] = (byte)(this.byteValue[expByteLen - 1 - 1] << shiftBitN & ~ByteArray.SHIFT_MASK_RIGHT[bitCount - 1]);
        this.byteValue = t;
        return true;
    }

    /**
     * Content equality against another ByteArray or a raw byte[].
     * NOTE(review): hashCode() is not overridden to match — do not use this
     * class as a hash key.
     */
    @Override
    public boolean equals(final Object obj) {
        byte[] dst = null;
        if (obj instanceof ByteArray) {
            dst = ((ByteArray)obj).byteValue;
        }
        else if (obj instanceof byte[]) {
            dst = (byte[])obj;
        }
        if (dst == null) {
            return false;
        }
        if (this.byteValue == null) {
            // FIX: the old code threw NPE when this array was empty.
            return dst.length == 0;
        }
        return dst.length == this.byteValue.length && compare(this.byteValue, 0, dst, 0, this.byteValue.length) == 0;
    }

    /** Returns the content as an upper-case hex string. */
    @Override
    public String toString() {
        return this.toString(StringFormat.HEX);
    }

    /** Returns the content formatted per {@code format}; "" when empty. */
    public String toString(final StringFormat format) {
        if (this.byteValue == null) {
            return "";
        }
        return convert(this.byteValue, 0, this.byteValue.length, format);
    }

    /** Encodes as BER-LV when {@code bBERlv}, plain single-byte LV otherwise. */
    public byte[] toLV(final boolean bBERlv) {
        if (bBERlv) {
            return this.toBERLV();
        }
        return this.toLV();
    }

    /**
     * Encodes as a single-byte-length LV structure.
     *
     * @throws DataFormatException if the content exceeds 255 bytes
     */
    public byte[] toLV() {
        if (this.byteValue == null) {
            return new byte[] { 0 };
        }
        final int len = this.byteValue.length;
        if (len > 255) {
            throw new DataFormatException("toLV failed:length overflow! max length is 255");
        }
        final byte[] a2 = new byte[len + 1];
        a2[0] = (byte)len;
        System.arraycopy(this.byteValue, 0, a2, 1, len);
        return a2;
    }

    /** Encodes as a BER-LV structure (multi-byte length when needed). */
    public byte[] toBERLV() {
        if (this.byteValue == null) {
            return new byte[] { 0 };
        }
        final LV lv = BERLVBuilder.buildLV(this.byteValue, 0, this.byteValue.length);
        return lv.toBytes();
    }

    /**
     * Validates that {@code hex} is well-formed for the given format:
     * HEX allows hex digits plus whitespace, ASCII allows chars 0x00-0xFF,
     * UCS2 accepts anything.
     */
    public static boolean checkStrFormat(final String hex, final StringFormat format) {
        final int length = hex.length();
        if (format == StringFormat.HEX) {
            for (int i = 0; i < length; ++i) {
                final char c = hex.charAt(i);
                // FIX: the old range test (c < 'a' || c > 'F') rejected every
                // upper-case hex digit; check 'A'..'F' and 'a'..'f' properly.
                if ((c < '0' || c > '9') && (c < 'A' || c > 'F') && (c < 'a' || c > 'f') && c != ' ' && c != '\t' && c != '\n' && c != '\r') {
                    return false;
                }
            }
            return true;
        }
        if (format == StringFormat.UCS2) {
            return true;
        }
        if (format == StringFormat.ASCII) {
            for (int i = 0; i < length; ++i) {
                // char is unsigned, so only the upper bound needs checking.
                if (hex.charAt(i) > '\u00ff') {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    /** Decodes a hex string (whitespace ignored) into bytes. */
    public static byte[] convert(final String hex) {
        return convert(hex, StringFormat.HEX);
    }

    /**
     * Decodes a string into bytes according to {@code format}:
     * HEX pairs, UCS2 (2 bytes/char, big-endian) or ASCII (1 byte/char).
     *
     * @throws DataFormatException on odd-length hex or non-Latin-1 ASCII chars
     */
    public static byte[] convert(String hex, final StringFormat format) {
        int length = hex.length();
        byte[] v = null;
        int validLen = 0;
        if (format == StringFormat.HEX) {
            hex = Util.stringTrimAll(hex);
            length = hex.length();
            if (length % 2 != 0) {
                DataFormatException.throwIt("hex string length error:" + hex);
            }
            v = new byte[(length + 1) / 2];
            for (int i = 0; i < length; i += 2) {
                final byte a = Util.hexCharToByte(hex.charAt(i));
                final byte b = Util.hexCharToByte(hex.charAt(i + 1));
                v[validLen++] = (byte)(a << 4 | b);
            }
        }
        else if (format == StringFormat.UCS2) {
            v = new byte[(length + 1) * 2];
            for (int i = 0; i < length; ++i) {
                final char a2 = hex.charAt(i);
                v[validLen++] = (byte)(a2 >> 8 & 0xFF);
                v[validLen++] = (byte)(a2 & '\u00ff');
            }
        }
        else if (format == StringFormat.ASCII) {
            v = new byte[length];
            for (int i = 0; i < length; ++i) {
                final char a2 = hex.charAt(i);
                if (a2 > '\u00ff') {
                    throw new DataFormatException("convert from acsii to byte array failed with invalid char=" + a2 + "at index=" + i);
                }
                v[validLen++] = (byte)a2;
            }
        }
        final byte[] value = new byte[validLen];
        System.arraycopy(v, 0, value, 0, validLen);
        return value;
    }

    /** Encodes a byte slice as an upper-case hex string. */
    public static String convert(final byte[] data, final int offset, final int length) {
        return convert(data, offset, length, StringFormat.HEX);
    }

    /** Maps a nibble value 0-15 to its upper-case hex char; '\uffff' when out of range. */
    public static char intToChar(final int v) {
        if (v > 15 || v < 0) {
            return '\uffff';
        }
        return ByteArray.charHex[v];
    }

    /**
     * Encodes a byte slice into a string per {@code format}: HEX, UCS2
     * (big-endian pairs), ASCII, or 7-bit packed ASCII (ASCII_7_BIT).
     */
    public static String convert(final byte[] data, final int offset, final int length, final StringFormat format) {
        final StringBuilder builder = new StringBuilder();
        if (format == StringFormat.HEX) {
            for (int end = offset + length, i = offset; i < end; ++i) {
                builder.append(ByteArray.charHex[data[i] >> 4 & 0xF]);
                builder.append(ByteArray.charHex[data[i] & 0xF]);
            }
        }
        else if (format == StringFormat.UCS2) {
            for (int end = offset + length, i = offset; i < end; i += 2) {
                final char a = (char)((data[i] << 8 & 0xFF00) | (data[i + 1] & 0xFF));
                builder.append(a);
            }
        }
        else if (format == StringFormat.ASCII) {
            for (int end = offset + length, i = offset; i < end; ++i) {
                final char a = (char)data[i];
                if (a < '\0' || a > '\u00ff') {
                    throw new DataFormatException("convert from acsii to byte array failed with invalid char=" + a + "at index=" + i);
                }
                builder.append(a);
            }
        }
        else if (format == StringFormat.ASCII_7_BIT) {
            // Unpack GSM 7-bit data to 8-bit first, then read as ASCII.
            final byte[] d = Util.bit7ToBit8(data, offset, length);
            for (int end2 = offset + length, j = offset; j < end2; ++j) {
                final char a2 = (char)d[j];
                if (a2 < '\0' || a2 > '\u00ff') {
                    throw new DataFormatException("convert from acsii to byte array failed with invalid char=" + a2 + "at index=" + j);
                }
                builder.append(a2);
            }
        }
        return builder.toString();
    }

    /** Encodes a whole byte array per {@code format}; "" for null input. */
    public static String convert(final byte[] data, final StringFormat format) {
        if (data == null) {
            return "";
        }
        return convert(data, 0, data.length, format);
    }

    /** Encodes a whole byte array as an upper-case hex string; "" for null input. */
    public static String convert(final byte[] data) {
        if (data == null) {
            return "";
        }
        return convert(data, 0, data.length, StringFormat.HEX);
    }

    /** Content equality of two byte arrays; two nulls are equal. */
    public static boolean compare(final byte[] src, final byte[] dst) {
        if (src == null || dst == null) {
            return src == dst;
        }
        if (dst.length != src.length) {
            return false;
        }
        for (int i = 0; i < src.length; ++i) {
            if (src[i] != dst[i]) {
                return false;
            }
        }
        return true;
    }

    /**
     * Compares two byte slices using SIGNED byte ordering.
     *
     * @return 0 when equal, 1 when src sorts after dst, -1 otherwise
     */
    public static int compare(final byte[] src, final int offset, final byte[] dst, final int doffset, final int length) {
        for (int i = 0; i < length; ++i) {
            if (src[offset + i] > dst[doffset + i]) {
                return 1;
            }
            if (src[offset + i] < dst[doffset + i]) {
                return -1;
            }
        }
        return 0;
    }

    /** Returns true if every byte in the slice equals {@code value}. */
    public static boolean isAllNum(final byte[] src, final int off, final int length, final byte value) {
        for (int i = 0; i < length; ++i) {
            if (src[off + i] != value) {
                return false;
            }
        }
        return true;
    }

    /** Fills the slice with an incrementing sequence starting at {@code initValue}. */
    public static void initContentWithInc(final byte[] src, final int off, final int length, byte initValue) {
        for (int i = 0; i < length; ++i) {
            src[off + i] = initValue;
            ++initValue;
        }
    }

    /** dst = src1 XOR src2, element-wise over {@code length} bytes. */
    public static void xor(final byte[] src1, final int src1Off, final byte[] src2, final int src2Off, final byte[] dst, final int dstOff, final int length) {
        for (int i = 0; i < length; ++i) {
            dst[dstOff + i] = (byte)(src1[src1Off + i] ^ src2[src2Off + i]);
        }
    }

    /** dst = src1 AND src2, element-wise over {@code length} bytes. */
    public static void and(final byte[] src1, final int src1Off, final byte[] src2, final int src2Off, final byte[] dst, final int dstOff, final int length) {
        for (int i = 0; i < length; ++i) {
            // FIX: was src2[dstOff + i] — wrong array offset when offsets differ.
            dst[dstOff + i] = (byte)(src1[src1Off + i] & src2[src2Off + i]);
        }
    }

    /** dst = src1 OR src2, element-wise over {@code length} bytes. */
    public static void or(final byte[] src1, final int src1Off, final byte[] src2, final int src2Off, final byte[] dst, final int dstOff, final int length) {
        for (int i = 0; i < length; ++i) {
            // FIX: was src2[dstOff + i] — wrong array offset when offsets differ.
            dst[dstOff + i] = (byte)(src1[src1Off + i] | src2[src2Off + i]);
        }
    }

    /** dst = NOT src, element-wise over {@code length} bytes. */
    public static void not(final byte[] src, final int src1Off, final byte[] dst, final int dstOff, final int length) {
        for (int i = 0; i < length; ++i) {
            dst[dstOff + i] = (byte)~src[src1Off + i];
        }
    }

    /** Returns a new array holding the bitwise complement of {@code src}. */
    public static byte[] not(final byte[] src) {
        final int length = src.length;
        final byte[] dst = new byte[length];
        for (int i = 0; i < length; ++i) {
            dst[i] = (byte)~src[i];
        }
        return dst;
    }

    static {
        SHIFT_MASK = new int[] { 254, 252, 248, 240, 224, 192, 128 };
        SHIFT_MASK_RIGHT = new int[] { 127, 63, 31, 15, 7, 3, 1 };
        charHex = new char[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
    }
}
|
<reponame>JLLeitschuh/Symfony-2-Eclipse-Plugin
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.core.model;
import org.eclipse.dltk.internal.core.ModelElement;
import org.eclipse.dltk.internal.core.SourceType;
import com.dubture.symfony.index.model.Route;
@SuppressWarnings("restriction")
public class Action extends SourceType {

    public Action(ModelElement parent, String name) {
        super(parent, name);
    }

    /** Controller actions are always assumed to render a template. */
    public boolean hasTemplate() {
        return true;
    }

    /** Returns the (currently hard-coded placeholder) route for this action. */
    public Route getRoute() {
        return new Route("home", "/foo", "AcmeDemoBundle:Demo:index");
    }
}
|
<reponame>yuri-zubov/paycall-sms
module PayCallSms
  # Parses incoming SMS push notifications (HTTP parameters) from the PayCall
  # gateway into a normalized OpenStruct message object.
  class IncomingMessageParser
    attr_reader :logger

    # Create a new parser with the given +options+.
    # Supported options:
    #   :time_zone - zone used to interpret the gateway's receivedTime string
    #                (falls back to Time.zone, then 'Jerusalem')
    def initialize(options={})
      @options = options
      @logger = Logging.logger[self.class]
    end

    # params will look something like the following:
    # msgId - uniq id of the message
    # sender - the phone that have sent the message
    # recipient - the virtual phone number that received the message (at gateway operator)
    # segments - number of segments
    # content - text of the message
    def from_http_push_params(params)
      %w(msgId sender recipient content).each do |p|
        raise ArgumentError.new("Missing http parameter #{p}. Parameters were: #{params.inspect}") if params[p].blank?
      end
      # FIX: the previous log line interpolated params['IncomingXML'], a key
      # this gateway never sends, so it always printed nil. Log the real input.
      logger.debug "Parsing http push params: #{params.inspect}"
      parse_reply_values_hash(
        phone: params['sender'],
        reply_to_phone: params['recipient'],
        text: params['content'],
        message_id: params['msgId'],
        received_at: params['receivedTime'],
      )
    end

    # This method receives sms reply +values+ Hash and tries to type cast its values.
    # Raises ArgumentError when the values hash is missing attributes or when
    # received_at fails to be type casted (the doc used to claim
    # Smsim::GatewayError, but ArgumentError is what is actually raised).
    #
    # Returns an OpenStruct with the following attributes:
    # * +phone+ - the phone that sent the sms (from which sms reply was received)
    # * +text+ - contents of the message that were received
    # * +reply_to_phone+ - the phone to sms which reply was sent (gateway phone number)
    # * +received_at+ - when the sms was received (as reported by gateway server),
    #                   or Time.now when the gateway supplied no timestamp string
    # * +message_id+ - uniq message id
    def parse_reply_values_hash(values)
      logger.debug "Parsing reply_values_hash: #{values.inspect}"
      [:message_id, :phone, :text, :reply_to_phone].each do |key|
        raise ArgumentError.new("Missing sms reply values key #{key}. Values were: #{values.inspect}") if values[key].blank?
      end
      values[:phone] = PhoneNumberUtils.ensure_country_code(values[:phone])
      values[:reply_to_phone] = PhoneNumberUtils.ensure_country_code(values[:reply_to_phone])
      if values[:received_at].is_a?(String)
        begin
          Time.use_zone(@options[:time_zone] || Time.zone || 'Jerusalem') do
            values[:received_at] = DateTime.strptime(values[:received_at], '%Y-%m-%d %H:%M:%S')
            values[:received_at] = Time.zone.parse(values[:received_at].strftime('%Y-%m-%d %H:%M:%S')) # convert to ActiveSupport::TimeWithZone
          end
        rescue Exception => e
          raise ArgumentError.new("received_at could not be converted to date. received_at was: #{values[:received_at]}")
        end
      else
        values[:received_at] = Time.now
      end
      OpenStruct.new(values)
    end
  end
end
|
//
// Created by ooooo on 2020/1/3.
//
#ifndef CPP_0637_SOLUTION1_H
#define CPP_0637_SOLUTION1_H
#include "TreeNode.h"
#include <queue>
#include <vector>
// LeetCode 637: for every level of a binary tree, report the mean node value.
class Solution {
public:
    // Breadth-first traversal: process the queue one full level at a time,
    // accumulating the level sum, then push that level's average.
    vector<double> averageOfLevels(TreeNode *root) {
        vector<double> averages;
        if (root == nullptr) return averages;

        queue<TreeNode *> frontier;
        frontier.push(root);
        while (!frontier.empty()) {
            const int levelWidth = frontier.size();
            double levelSum = 0;   // double: avoids overflow of the per-level sum
            for (int i = 0; i < levelWidth; ++i) {
                TreeNode *current = frontier.front();
                frontier.pop();
                levelSum += current->val;
                if (current->left != nullptr) frontier.push(current->left);
                if (current->right != nullptr) frontier.push(current->right);
            }
            averages.push_back(levelSum / levelWidth);
        }
        return averages;
    }
};
#endif //CPP_0637_SOLUTION1_H
|
# Run the unit tests (files matching *test.py under swords/) inside the
# container started by docker_cmd.sh; -v verbose, -f stop on first failure.
./docker_cmd.sh \
python -m unittest discover -v -f -p "*test.py" -s swords/ -t .
|
#
# Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# @test
# @bug 4215035
# @summary standard extensions path is hard-coded in default system policy file
#
# @build Ext_AllPolicy
# @run shell Ext_AllPolicy.sh
#
# For testing extenions, the classes live in jar files, and therefore
# we shouldn't be able to find the raw class file.
#
# set a few environment variables so that the shell-script can run stand-alone
# in the source directory
# Default TESTSRC/TESTCLASSES to the current directory for stand-alone runs.
if [ "${TESTSRC}" = "" ] ; then
   TESTSRC="."
fi
if [ "${TESTCLASSES}" = "" ] ; then
   TESTCLASSES="."
fi
# TESTJAVA (the JDK under test) has no sensible default; fail without it.
if [ "${TESTJAVA}" = "" ] ; then
   echo "TESTJAVA not set. Test cannot execute."
   echo "FAILED!!!"
   exit 1
fi
# The compiling JDK may differ from the JDK under test.
if [ "${COMPILEJAVA}" = "" ]; then
   COMPILEJAVA="${TESTJAVA}"
fi
# set platform-dependent variables
OS=`uname -s`
case "$OS" in
  SunOS | Linux | Darwin )
    NULL=/dev/null
    PS=":"
    FS="/"
    ;;
  CYGWIN* )
    NULL=/dev/null
    PS=";"
    FS="/"
    ;;
  Windows_95 | Windows_98 | Windows_NT )
    NULL=NUL
    PS=";"
    FS="\\"
    ;;
  * )
    echo "Unrecognized system!"
    exit 1;
    ;;
esac
# the test code
cd ${TESTCLASSES}
# Extensions must live in jar files: package the class, delete the raw .class,
# then run with a security manager and ext.dirs pointing at the jar.
${COMPILEJAVA}${FS}bin${FS}jar ${TESTTOOLVMOPTS} -cvf Ext_AllPolicy.jar Ext_AllPolicy.class
rm Ext_AllPolicy.class
${TESTJAVA}${FS}bin${FS}java ${TESTVMOPTS} \
    -Djava.security.manager -Djava.ext.dirs="${TESTCLASSES}" Ext_AllPolicy
exit $?
|
<filename>_data/blogposts.js
const fetch = require("node-fetch");
// Fetch every blogpost from the CMS GraphQL endpoint, paging through the
// results, and return them as an array of simplified post objects.
async function getAllBlogposts() {
  // max number of records to fetch per query
  const recordsPerQuery = 100;
  // number of records to skip (start at 0)
  let recordsToSkip = 0;
  let makeNewQuery = true;
  let blogposts = [];
  // make queries until makeNewQuery is set to false
  while (makeNewQuery) {
    try {
      // initiate fetch
      const data = await fetch("https://rgsite-cms.herokuapp.com/graphql", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Accept: "application/json",
        },
        body: JSON.stringify({
          // BUG FIX: the original query ignored recordsPerQuery and
          // recordsToSkip, so every iteration fetched the same full article
          // list and, with >= 100 articles in the CMS, the loop never
          // terminated. Strapi's `limit`/`start` arguments implement the
          // pagination this loop expects.
          query: `{
            articles(limit: ${recordsPerQuery}, start: ${recordsToSkip}){
              id,
              created_at,
              title,
              slug,
              content,
              image{
                id,
                name,
                alternativeText,
                width,
                height,
                url
              },
              category{
                id,
                name
              }
            }
          }`,
        }),
      });
      // store the JSON response when promise resolves
      const response = await data.json();
      // handle CMS errors
      if (response.errors) {
        let errors = response.errors;
        errors.map((error) => {
          console.log(error.message);
        });
        throw new Error("Houston... We have a CMS problem");
      }
      // update blogpost array with the data from the JSON response
      blogposts = blogposts.concat(response.data.articles);
      // prepare for next query
      recordsToSkip += recordsPerQuery;
      // stop querying if we are getting back less than the records we fetch per query
      if (response.data.articles.length < recordsPerQuery) {
        makeNewQuery = false;
      }
    } catch (error) {
      throw new Error(error);
    }
  }
  // format blogposts objects
  const blogpostsFormatted = blogposts.map((item) => {
    return {
      id: item.id,
      title: item.title,
      slug: item.slug,
      image: item.image,
      body: item.content,
      date: item.created_at,
      category: item.category,
    };
  });
  // return formatted blogposts
  return blogpostsFormatted;
}
// export for 11ty
module.exports = getAllBlogposts;
|
<reponame>hoangmirs/go-scraper
package test_helpers
import (
"github.com/hoangmirs/go-scraper/conf"
"github.com/hoangmirs/go-scraper/database"
"github.com/beego/beego/v2/core/logs"
"github.com/gocraft/work"
)
// GetWorkerClient returns a gocraft/work client bound to the configured
// worker namespace and the shared Redis pool.
func GetWorkerClient() *work.Client {
	return work.NewClient(conf.GetString("workerNamespace"), database.GetRedisPool())
}
// DeleteRedisJobs deletes the scraper job queue key from Redis so tests can
// start from a clean state. Failures are logged, not returned (best effort).
func DeleteRedisJobs() {
	// BUG FIX: the connection obtained from the pool was never closed, so it
	// leaked instead of returning to the pool. redigo requires Close() on
	// every connection taken via Get().
	conn := database.GetRedisPool().Get()
	defer conn.Close()
	_, err := conn.Do("DEL", redisKeyJobs(conf.GetString("workerNamespace"), conf.GetString("scraperJobName")))
	if err != nil {
		logs.Error("Error when deleting redis jobs: %v", err)
	}
}
// redisKeyJobs builds the Redis key that holds the queued jobs for the given
// job name within the given namespace.
func redisKeyJobs(namespace, jobName string) string {
	prefix := redisKeyJobsPrefix(namespace)
	return prefix + jobName
}
// redisKeyJobsPrefix returns the common prefix ("<namespace>:jobs:") under
// which all job keys for the namespace are stored.
func redisKeyJobsPrefix(namespace string) string {
	const suffix = "jobs:"
	return redisNamespacePrefix(namespace) + suffix
}
// redisNamespacePrefix normalizes a namespace so that it ends with a single
// trailing colon. An empty namespace is returned unchanged.
func redisNamespacePrefix(namespace string) string {
	n := len(namespace)
	if n == 0 || namespace[n-1] == ':' {
		return namespace
	}
	return namespace + ":"
}
|
# Install the latest Node.js (via `n`) when node is not on PATH.
# BUG FIX: the original ran `which node -v`; `which` has no -v option and was
# additionally asked to look up a literal "-v" program. `command -v` is the
# portable way to test for a command's presence.
if ! command -v node >/dev/null 2>&1
then
  sudo n latest
fi
# Install the `spoof` CLI globally when missing.
if ! command -v spoof >/dev/null 2>&1
then
  sudo npm install spoof -g
fi
|
/*
* Backoff.java
*
* Created on November 19, 2006, 5:43 PM
*
* From "Multiprocessor Synchronization and Concurrent Data Structures",
* by <NAME> and <NAME>.
* Copyright 2006 Elsevier Inc. All rights reserved.
*/
package tamp.ch07.Spin.spin;
import java.util.Random;
/**
 * Adaptive exponential backoff class. Encapsulates back-off code
 * common to many locking classes.
 *
 * Each call to {@link #backoff()} sleeps a random duration below the current
 * limit, then doubles the limit up to {@code maxDelay}.
 *
 * @author <NAME>
 */
public class Backoff {
    final int minDelay, maxDelay;
    int limit; // wait between limit and 2*limit
    final Random random; // add randomness to wait
    /**
     * Prepare to pause for random duration.
     *
     * @param min smallest back-off
     * @param max largest back-off
     */
    public Backoff(int min, int max) {
        if (max < min) {
            throw new IllegalArgumentException("max must be greater than min");
        }
        minDelay = min;
        // BUG FIX: previously "maxDelay = min", which froze the back-off
        // window at the minimum and made the doubling in backoff() a no-op.
        maxDelay = max;
        limit = minDelay;
        random = new Random();
    }
    /**
     * Backoff for random duration.
     *
     * @throws java.lang.InterruptedException if interrupted while sleeping
     */
    public void backoff() throws InterruptedException {
        int delay = random.nextInt(limit); // uniform in [0, limit)
        if (limit < maxDelay) { // double limit if less than max
            limit = 2 * limit;
        }
        Thread.sleep(delay);
    }
}
|
#!/bin/bash
# Dump a database from the production docker-compose postgres service into a
# local SQL file.
#
# Usage: export-production-database.sh <database name> <database dump file>
if [[ $# -lt 2 || $(echo "$@" | grep " -h") ]]; then
    echo "Usage: export-production-database.sh <database name> <database dump file>"
    exit
fi
database=$1
databaseDumpFile=$2
# Throwaway working directory, bind-mounted into the container as /data.
dir=$(mktemp -p . -d data-dump-XXXX);
docker-compose -f docker/Compose.prod.yml run -v "$PWD/$dir":/data -w /data --rm alice_db \
    bash -c "PGPASSWORD=password pg_dump -h db -U postgres -w $database > data.sql"
# ROBUSTNESS: quote expansions so paths with spaces don't word-split.
mv "$dir/data.sql" "$databaseDumpFile"
rm -rf "$dir"
|
/**
 * Build the inner HTML of a pagination row.
 *
 * @param {number} totalResources total number of items being paged
 * @param {number} currentPage    1-based index of the page being displayed
 * @param {number} [resourcesPerPage=10] page size (generalized from the
 *        previously hard-coded value of 10; default keeps old behavior)
 * @returns {string} a <td> containing first/prev links, up to five page links
 *          on either side of the current page (with ellipses), and next/last
 *          links.
 */
function generatePaginationHTML(totalResources, currentPage, resourcesPerPage = 10) {
  const totalPages = Math.ceil(totalResources / resourcesPerPage);
  let paginationHTML = '<td colspan="5" align="center">';
  if (currentPage > 1) {
    paginationHTML += '<a href="?page=1"><<</a>'; // Link to first page
    paginationHTML += '<a href="?page=' + (currentPage - 1) + '"><</a>'; // Link to previous page
  }
  // Window of up to 5 pages on each side of the current page.
  let startPage = Math.max(1, currentPage - 5);
  let endPage = Math.min(totalPages, currentPage + 5);
  if (startPage > 1) {
    paginationHTML += '<a href="?page=1">1</a>'; // Link to first page
    if (startPage > 2) {
      paginationHTML += '...'; // Ellipsis
    }
  }
  for (let i = startPage; i <= endPage; i++) {
    if (i === currentPage) {
      paginationHTML += '[' + i + ']'; // Current page as plain text
    } else {
      paginationHTML += '<a href="?page=' + i + '">' + i + '</a>'; // Link to page i
    }
  }
  if (endPage < totalPages) {
    if (endPage < totalPages - 1) {
      paginationHTML += '...'; // Ellipsis
    }
    paginationHTML += '<a href="?page=' + totalPages + '">' + totalPages + '</a>'; // Link to last page
  }
  if (currentPage < totalPages) {
    paginationHTML += '<a href="?page=' + (currentPage + 1) + '">></a>'; // Link to next page
    paginationHTML += '<a href="?page=' + totalPages + '">>></a>'; // Link to last page
  }
  paginationHTML += '</td>';
  return paginationHTML;
}
// Example usage
// NOTE(review): the original example comments claimed outputs that the
// function cannot produce (e.g. (20, 3) has only 2 pages, so page 3 is never
// rendered as current); the calls below just demonstrate usage.
console.log(generatePaginationHTML(20, 3));
console.log(generatePaginationHTML(100, 8));
|
package com.asadmshah.moviegur.models;
import com.asadmshah.moviegur.models.tmdb.MovieResponse;
import com.asadmshah.moviegur.utils.TestResourceLoader;
import com.bluelinelabs.logansquare.LoganSquare;
import org.junit.Test;
import java.io.InputStream;
import static org.junit.Assert.assertEquals;
/** Unit tests for converting a TMDB JSON payload into the {@code Movie} model. */
public class MovieUnitTest {
    @Test
    public void convertJsonModelToMovieModel() throws Exception {
        // Fixture: canned TMDB movie response bundled with the test resources.
        InputStream inputStream = TestResourceLoader.load(this, "tmdb_movie.json");
        Movie movie = Movie.create(LoganSquare.parse(inputStream, MovieResponse.class));
        // Spot-check fields and collection sizes parsed from the fixture.
        assertEquals(939841200000L, movie.releaseDate());
        assertEquals(28, movie.credits().castMembers().size());
        assertEquals(17, movie.credits().crewMembers().size());
        assertEquals(1, movie.genres().size());
        assertEquals(25, movie.images().backdrops().size());
        assertEquals(55, movie.images().posters().size());
        assertEquals(2, movie.reviews().size());
    }
}
|
import h5py
import os
import unittest
import xml.etree.ElementTree as et
from afqmctools.utils.qmcpack_utils import write_xml_input
class TestXMLWrite(unittest.TestCase):
    """Tests for afqmctools.utils.qmcpack_utils.write_xml_input."""
    def test_write(self):
        """write_xml_input should embed the estimator options and pick up
        NMO/NAEA from the wavefunction dims stored in the HDF5 file."""
        # Nested options mirroring the QMCPACK AFQMC input structure.
        options = {
            "execute": {
                "nWalkers": 10,
                "blocks": 1000,
                "timestep": 0.01,
                "Estimator": {
                    "back_propagation": {
                        "ortho": 1,
                        "naverages": 4,
                        "obs": {
                            "OneRDM": {
                                "filename": "this.h5"
                            },
                            "TwoRDM": {
                                "filename": "that.h5"
                            }
                        },
                        "block_size": 2,
                        "nsteps": 200
                    }
                }
            }
        }
        # Minimal HDF5 input; dims holds [NMO, NAEA, ...] -- the first two
        # values are asserted on below.
        with h5py.File("afqmc.h5", 'w') as fh5:
            fh5['Wavefunction/NOMSD/dims'] = [37, 3, 4, 1]
        write_xml_input("afqmc.xml", "afqmc.h5", "afqmc.h5",
                        id_name="afqmc", options=options, rng_seed=7)
        # Parse the generated XML back and verify selected parameters.
        xml_file = et.parse("afqmc.xml")
        nmo = xml_file.find("./AFQMCInfo/parameter[@name='NMO']").text
        self.assertEqual(nmo,"37")
        nalpha = xml_file.find("./AFQMCInfo/parameter[@name='NAEA']").text
        self.assertEqual(nalpha,"3")
        obs = xml_file.find("./execute/Estimator[@name='back_propagation']")
        fname = obs.find("./TwoRDM/parameter[@name='filename']").text
        self.assertEqual(fname, "that.h5")
    def tearDown(self):
        """Remove the files the test created; missing files are ignored."""
        cwd = os.getcwd()
        files = ['afqmc.h5', 'afqmc.xml']
        for f in files:
            try:
                os.remove(cwd+'/'+f)
            except OSError:
                pass
if __name__ == '__main__':
    unittest.main()
|
#!/bin/bash
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Tests if a Docker image built from Dockerfile.devel basically functions.
#
# It does this by loading up a half plus two toy model in the Docker image
# and querying it, validating the response.
#
# The image passed to this test must be already available locally.
#
# Ex: $ bazel test :unittest_dockerfile_devel_mkl \
# --test_arg=tensorflow/serving:latest-devel-mkl \
# --test_output=streamed --verbose_failures
declare -r PROJDIR=$(pwd)/tensorflow_serving
# Shared helpers that implement test_docker_image.
source ${PROJDIR}/tools/docker/tests/docker_test_lib.sh || exit 1
# Values to fill in for test
# ------------------------------------------------------------------------------
declare -r USE_NVIDIA_RUNTIME=false
declare -r IS_MKL_IMAGE=true
declare -r IS_DEVEL_IMAGE=true
# Toy half-plus-two model served by the image, plus the request/response pair
# used to validate it.
declare -r MODELDIR="${PROJDIR}/servables/tensorflow/testdata"
declare -r MODELNAME="saved_model_half_plus_two_mkl"
declare -r REQUEST='{"instances": [1.0,2.0,5.0]}'
declare -r RESPONSE='{"predictions":[2.5,3.0,4.5]}'
# ------------------------------------------------------------------------------
# Grab the last argument as the image, so we can override the test arg in
# the BUILD file
test_docker_image ${@: -1}
|
#!/usr/bin/env bash
# Thin wrapper: forward all arguments to the PSMStatistics .NET tool shipped
# inside the active conda environment.
dotnet $CONDA_PREFIX/lib/dotnet/tools/PSMStatistics/ProteomIQon.PSMStatistics.dll "$@"
|
import {
SERVICE_LIST_REQUEST,
SERVICE_LIST_SUCCESS,
SERVICE_LIST_FAIL,
SERVICE_DETAILS_REQUEST,
SERVICE_DETAILS_SUCCESS,
SERVICE_DETAILS_FAIL ,
SERVICE_SAVE_REQUEST,
SERVICE_SAVE_SUCCESS,
SERVICE_SAVE_FAIL,
SERVICE_DELETE_REQUEST,
SERVICE_DELETE_SUCCESS,
SERVICE_DELETE_FAIL,
SERVICE_CATEGORY_LIST_REQUEST,
SERVICE_CATEGORY_LIST_SUCCESS,
SERVICE_CATEGORY_LIST_FAIL,
SERVICE_UPDATE_REQUEST,
SERVICE_UPDATE_SUCCESS,
SERVICE_UPDATE_FAIL
} from "../constants/serviceConstants";
import Axios from 'axios';
// Thunk: fetch the full service list and dispatch request/success/fail.
const listServices = () => async (dispatch) => {
    try {
        dispatch({ type: SERVICE_LIST_REQUEST });
        const response = await Axios.get("/api/service");
        dispatch({ type: SERVICE_LIST_SUCCESS, payload: response.data });
    } catch (error) {
        dispatch({ type: SERVICE_LIST_FAIL, payload: error.message });
    }
};
// Thunk: update an existing service; the signed-in user's token authorizes
// the PUT request.
export const updateService = (service) => async (dispatch, getState) => {
  try {
    dispatch({ type: SERVICE_UPDATE_REQUEST, payload: service });
    const {userSignin: {userInfo}} = getState();
    const { data } = await Axios.put(`/api/service/${service._id}`, service, {
      headers: {
        // BUG FIX: was "Bearer" + token (no space), which produced an
        // invalid Authorization header like "Bearerabc123".
        'Authorization': "Bearer " + userInfo.token
      },
    });
    dispatch({ type: SERVICE_UPDATE_SUCCESS, payload: data });
  } catch (error) {
    dispatch({
      type: SERVICE_UPDATE_FAIL,
      payload:
        error.response && error.response.data.message
          ? error.response.data.message
          : error.message,
    });
  }
};
// Thunk: fetch one service by id and dispatch request/success/fail.
const detailsService = (serviceId) => async (dispatch) => {
    try {
        dispatch({ type: SERVICE_DETAILS_REQUEST, payload: serviceId });
        const response = await Axios.get(`/api/service/${serviceId}`);
        dispatch({ type: SERVICE_DETAILS_SUCCESS, payload: response.data });
    } catch (error) {
        dispatch({ type: SERVICE_DETAILS_FAIL, payload: error.message });
    }
};
// Thunk: create (no _id) or update (_id present) a service. The signed-in
// user's token authorizes both requests.
const saveService = (service) => async (dispatch, getState) =>{
  try{
    dispatch({type: SERVICE_SAVE_REQUEST, payload: service});
    const {userSignin: {userInfo}} = getState();
    if(!service._id){
      const {data} = await Axios.post("/api/service", service, {
        headers:{
          // BUG FIX: was "Bearer" + token (no space) -> invalid header.
          'Authorization':"Bearer " + userInfo.token
        }});
      dispatch({type: SERVICE_SAVE_SUCCESS, payload: data});
    }else{
      const {data} = await Axios.put("/api/service/" + service._id, service,
        {headers:{
          // BUG FIX: was "Bearer" + token (no space) -> invalid header.
          'Authorization':"Bearer " + userInfo.token
        }});
      dispatch({type: SERVICE_SAVE_SUCCESS, payload: data});
    }
  }catch(error){
    dispatch({ type: SERVICE_SAVE_FAIL, payload: error.message})
  }
}
// Thunk: delete a service by id using the signed-in user's token.
const deleteService = (serviceId) => async (dispatch, getState) =>{
  try{
    const {userSignin: {userInfo}} = getState();
    dispatch({type: SERVICE_DELETE_REQUEST, payload: serviceId});
    const {data} = await Axios.delete("/api/service/" + serviceId,{
      headers:{
        // BUG FIX: was "Bearer" + token (no space) -> invalid header.
        'Authorization':"Bearer " + userInfo.token
      }
    });
    dispatch({type: SERVICE_DELETE_SUCCESS, payload: data, success: true});
  }catch(error){
    dispatch({ type: SERVICE_DELETE_FAIL, payload: error.message})
  }
}
// Thunk: fetch the list of service categories.
const listServiceCategories = () => async (dispatch) => {
    dispatch({ type: SERVICE_CATEGORY_LIST_REQUEST, loading: true });
    try {
        const result = await Axios.get('/api/service/categories');
        dispatch({ type: SERVICE_CATEGORY_LIST_SUCCESS, payload: result.data });
    } catch (error) {
        // Prefer the server-provided message when one exists.
        const message =
            error.response && error.response.data.message
                ? error.response.data.message
                : error.message;
        dispatch({ type: SERVICE_CATEGORY_LIST_FAIL, payload: message });
    }
};
export {listServices, detailsService, saveService, deleteService, listServiceCategories}
|
#!/bin/bash
#
# Script for container to install rocksdb for production use and not for testing
# as docker build would slow down testing
# Install build prerequisites, then purge the apt cache to keep the image small.
apt-get -y update \
&& apt-get install -y --no-install-recommends wget make build-essential checkinstall libgflags-dev libsnappy-dev zlib1g-dev libbz2-dev liblz4-dev git \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Build and install the pinned rocksdb release as a shared library under /usr,
# then remove the build tree.
mkdir /build \
&& cd /build \
&& git clone https://github.com/facebook/rocksdb.git \
&& cd rocksdb \
&& git checkout v5.14.2 \
&& INSTALL_PATH=/usr make install-shared \
&& rm -rf /build
# Install faust with its rocksdb extra so it links against the library above.
pip install -U faust[rocksdb]
|
#!/bin/bash
# Derive a per-instance /24 garden network pool from a /16 diego cell subnet,
# keyed by the CAASP component index, and patch it into env2conf.yml.
if test -z "${SCF_INSTANCE_ID}" ; then
  # This is not running on CAASP; this is not needed
  exit 0
fi
if test -z "${KUBE_COMPONENT_INDEX}" ; then
  printf "Your CAASP is broken; no index was specified\n" >&2
  exit 1
fi
# Stripping ".0.0/16" must change the string, otherwise the pool isn't a /16.
if test "${DIEGO_CELL_SUBNET}" == "${DIEGO_CELL_SUBNET%.0.0/16}" ; then
  printf "Your diego cell subnet pool must be a /16\n" >&2
  exit 1
fi
target_prefix="${DIEGO_CELL_SUBNET%.0.0/16}"
# The index becomes the third octet, so only 0..254 fit.
if test "${KUBE_COMPONENT_INDEX}" -lt 0 -o "${KUBE_COMPONENT_INDEX}" -gt 254 ; then
  printf "Instance index %s is not supported\n" "${KUBE_COMPONENT_INDEX}" >&2
  exit 1
fi
cell_subnet="${target_prefix}.${KUBE_COMPONENT_INDEX}.0/24"
perl -p -i -e "s@^properties.garden.network_pool:.*@properties.garden.network_pool: ${cell_subnet}@" /opt/fissile/env2conf.yml
|
#!/usr/bin/env bats
# End-to-end tests for the GCP provider of the Secrets Store CSI driver.
load helpers
# Locations and polling parameters shared by all tests below.
BATS_TESTS_DIR=test/bats/tests/gcp
WAIT_TIME=60
SLEEP_TIME=1
NAMESPACE=default
PROVIDER_NAMESPACE=kube-system
PROVIDER_YAML=https://raw.githubusercontent.com/GoogleCloudPlatform/secrets-store-csi-driver-provider-gcp/main/deploy/provider-gcp-plugin.yaml
BASE64_FLAGS="-w 0"
# Secret under test; all three values can be overridden from the environment.
export RESOURCE_NAME=${RESOURCE_NAME:-"projects/735463103342/secrets/test-secret-a/versions/latest"}
export FILE_NAME=${FILE_NAME:-"secret"}
export SECRET_VALUE=${SECRET_VALUE:-"aHVudGVyMg=="}
@test "install gcp provider" {
  run kubectl apply -f $PROVIDER_YAML --namespace $PROVIDER_NAMESPACE
  assert_success
  # Wait for the provider pod before asserting on it.
  kubectl wait --for=condition=Ready --timeout=120s pod -l app=csi-secrets-store-provider-gcp --namespace $PROVIDER_NAMESPACE
  GCP_PROVIDER_POD=$(kubectl get pod --namespace $PROVIDER_NAMESPACE -l app=csi-secrets-store-provider-gcp -o jsonpath="{.items[0].metadata.name}")
  run kubectl get pod/$GCP_PROVIDER_POD --namespace $PROVIDER_NAMESPACE
  assert_success
}
@test "secretproviderclasses crd is established" {
  kubectl wait --for condition=established --timeout=60s crd/secretproviderclasses.secrets-store.csi.x-k8s.io
  run kubectl get crd/secretproviderclasses.secrets-store.csi.x-k8s.io
  assert_success
}
@test "Test rbac roles and role bindings exist" {
  run kubectl get clusterrole/secretproviderclasses-role
  assert_success
  run kubectl get clusterrole/secretproviderrotation-role
  assert_success
  run kubectl get clusterrole/secretprovidersyncing-role
  assert_success
  run kubectl get clusterrolebinding/secretproviderclasses-rolebinding
  assert_success
  run kubectl get clusterrolebinding/secretproviderrotation-rolebinding
  assert_success
  run kubectl get clusterrolebinding/secretprovidersyncing-rolebinding
  assert_success
}
@test "deploy gcp secretproviderclass crd" {
  # envsubst substitutes the exported RESOURCE_NAME/FILE_NAME values.
  envsubst < $BATS_TESTS_DIR/gcp_v1alpha1_secretproviderclass.yaml | kubectl apply -f -
  cmd="kubectl get secretproviderclasses.secrets-store.csi.x-k8s.io/gcp -o yaml | grep gcp"
  wait_for_process $WAIT_TIME $SLEEP_TIME "$cmd"
}
@test "CSI inline volume test with pod portability" {
  envsubst < $BATS_TESTS_DIR/pod-secrets-store-inline-volume-crd.yaml | kubectl apply -f -
  kubectl wait --for=condition=Ready --timeout=60s pod/secrets-store-inline-crd
  run kubectl get pod/secrets-store-inline-crd
  assert_success
}
@test "CSI inline volume test with pod portability - read gcp kv secret from pod" {
  result=$(kubectl exec secrets-store-inline-crd -- cat /mnt/secrets-store/$FILE_NAME)
  # Strip carriage returns so the comparison also works on Windows nodes.
  [[ "${result//$'\r'}" == "${SECRET_VALUE}" ]]
}
@test "CSI inline volume test with pod portability - unmount succeeds" {
  # https://github.com/kubernetes/kubernetes/pull/96702
  # kubectl wait --for=delete does not work on already deleted pods.
  # Instead we will start the wait before initiating the delete.
  kubectl wait --for=delete --timeout=${WAIT_TIME}s pod/secrets-store-inline-crd &
  WAIT_PID=$!
  sleep 1
  run kubectl delete pod secrets-store-inline-crd
  # On Linux a failure to unmount the tmpfs will block the pod from being
  # deleted.
  run wait $WAIT_PID
  assert_success
  # Sleep to allow time for logs to propagate.
  sleep 10
  # save debug information to archive in case of failure
  archive_info
  # On Windows, the failed unmount calls from: https://github.com/kubernetes-sigs/secrets-store-csi-driver/pull/545
  # do not prevent the pod from being deleted. Search through the driver logs
  # for the error.
  run bash -c "kubectl logs -l app=secrets-store-csi-driver --tail -1 -c secrets-store -n kube-system | grep '^E.*failed to clean and unmount target path.*$'"
  assert_failure
}
teardown_file() {
  # Best-effort archival of provider and driver state for debugging.
  archive_provider "app=csi-secrets-store-provider-gcp" || true
  archive_info || true
}
|
#!/bin/bash
# Copyright 2018 Johns Hopkins University (Jesus Villalba)
# Apache 2.0.
#
# Job launcher (e.g. run.pl/queue.pl) and AHC clustering threshold.
cmd=run.pl
thr_ahc=0.9
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;
set -e
if [ $# -ne 6 ]; then
  echo "Usage: $0 <ndx> <enroll-file> <ref-vector-file> <enr-vector-file> <test-vector-file> <output-scores>"
  exit 1;
fi
# Positional arguments.
ndx_file=$1
enroll_file=$2
ref_vector_file=$3
enr_vector_file=$4
test_vector_file=$5
output_file=$6
output_dir=$(dirname $output_file)
mkdir -p $output_dir/log
name=$(basename $output_file)
echo "$0 score $ndx_file"
# Run the video face backend scorer under $cmd, logging to $output_dir/log.
$cmd $output_dir/log/${name}.log \
    python steps_be/eval-face-vid-be-v4.py \
    --ref-v-file scp:$ref_vector_file \
    --enr-v-file scp:$enr_vector_file \
    --test-v-file scp:$test_vector_file \
    --ndx-file $ndx_file \
    --enroll-file $enroll_file \
    --score-file $output_file \
    --thr-ahc $thr_ahc
# Remove the demo project's build cache so the next run starts clean.
rm -rf ./demo/project/.cache
|
#!/bin/bash
# Print the PHP installation prefix (and, on Linux, also the list of built
# SAPIs) as reported by php-config.
TEMP=$(php-config --prefix)
echo "$TEMP"
if [ "$(uname -s)" = 'Linux' ]; then
    # Reuse the prefix already queried instead of invoking php-config again.
    PHP_PREFIX=$TEMP
    PHP_SAPIS=$(php-config --php-sapis)
    echo "$PHP_PREFIX"
    echo "$PHP_SAPIS"
fi
# When building a debug image, swap in the debug cmdline plus local network
# and updater configuration.
if [ "$DEBUG" = "1" ]; then
    # cmdline.txt
    install -m 644 $PIP_BOOT_DIR/cmdline-debug.txt "${ROOTFS_DIR}/boot/bank_a/cmdline.txt"
    # Network config
    sudo install -m 600 -o root -g root files/wpa_supplicant.conf "${ROOTFS_DIR}/etc/wpa_supplicant/wpa_supplicant.conf"
    # Updater config
    sudo install -m 644 -o root -g root files/updater.json "${ROOTFS_DIR}/opt/pip/etc/pip-updater/config.json"
fi
|
#!/bin/bash
# Clean every sub-project and remove all generated build artifacts.
set -e
. ./config.sh
# Run `make clean` in each project listed by config.sh.
for PROJECT in $PROJECTS; do
  (cd $PROJECT && $MAKE clean)
done
rm -rf sysroot
rm -rf isodir
rm -rf myos.iso
#pragma once
#include <PlayFabServerSdk/PlayFabError.h>
#include <PlayFabServerSdk/PlayFabMatchmakerDataModels.h>
#include <AzCore/EBus/EBus.h>
namespace PlayFabServerSdk
{
    // EBus interface for the PlayFab Matchmaker server API. One handler per
    // address; results/errors are delivered via the per-call callbacks.
    // (Generated code: keep the wrapper signatures in sync with the SDK.)
    class PlayFabServer_MatchmakerRequests
        : public AZ::EBusTraits
    {
    public:
        static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Single;
        static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::Single;
        // Number of API calls still awaiting a response.
        virtual int GetPendingCalls() = 0;
        // ------------ Generated API call wrappers
        virtual int AuthUser(MatchmakerModels::AuthUserRequest& request, ProcessApiCallback<MatchmakerModels::AuthUserResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr) = 0;
        virtual int PlayerJoined(MatchmakerModels::PlayerJoinedRequest& request, ProcessApiCallback<MatchmakerModels::PlayerJoinedResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr) = 0;
        virtual int PlayerLeft(MatchmakerModels::PlayerLeftRequest& request, ProcessApiCallback<MatchmakerModels::PlayerLeftResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr) = 0;
        virtual int StartGame(MatchmakerModels::StartGameRequest& request, ProcessApiCallback<MatchmakerModels::StartGameResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr) = 0;
        virtual int UserInfo(MatchmakerModels::UserInfoRequest& request, ProcessApiCallback<MatchmakerModels::UserInfoResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr) = 0;
    };
    using PlayFabServer_MatchmakerRequestBus = AZ::EBus<PlayFabServer_MatchmakerRequests>;
// #THIRD_KIND_PLAYFAB_BEHAVIOR_CONTEXT: dbowen (2017/08/11)
// Simple request bus API - this version is exposed to Behavior context (Lua) and does not allow callbacks to be set.
// This is because behavior context has no way to set the callbacks (it uses a notification bus instead), additionally
// it does not support optional arguments in the bus interface so without this we would need to supply "nil" values
// for each callback function in Lua.
    // Callback-free variant of the Matchmaker request bus, exposed to the
    // Behavior context (Lua); responses arrive on a notification bus instead.
    class PlayFabServer_MatchmakerSimpleRequests
        : public AZ::EBusTraits
    {
    public:
        static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Single;
        static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::Single;
        // Number of API calls still awaiting a response.
        virtual int GetPendingCalls() = 0;
        // ------------ Generated API call wrappers
        virtual int AuthUser(MatchmakerModels::AuthUserRequest& request) = 0;
        virtual int PlayerJoined(MatchmakerModels::PlayerJoinedRequest& request) = 0;
        virtual int PlayerLeft(MatchmakerModels::PlayerLeftRequest& request) = 0;
        virtual int StartGame(MatchmakerModels::StartGameRequest& request) = 0;
        virtual int UserInfo(MatchmakerModels::UserInfoRequest& request) = 0;
    };
    using PlayFabServer_MatchmakerSimpleRequestBus = AZ::EBus<PlayFabServer_MatchmakerSimpleRequests>;
// #THIRD_KIND_END
} // namespace PlayFabServerSdk
|
class BankAccount:
    """A simple bank account with an identifier, a holder name and a balance."""

    def __init__(self, account_number, account_holder, balance):
        self.account_number = account_number
        self.account_holder = account_holder
        self.balance = balance

    def deposit(self, amount):
        """Add ``amount`` to the balance."""
        self.balance = self.balance + amount

    def withdraw(self, amount):
        """Subtract ``amount`` from the balance if funds suffice, else warn."""
        if amount > self.balance:
            print("Insufficient funds")
        else:
            self.balance = self.balance - amount

    def get_balance(self):
        """Return the current balance."""
        return self.balance
def main():
    """Interactive console menu for creating and operating on bank accounts.

    Accounts live only in memory (dict keyed by account number) for the
    lifetime of the loop; option 5 exits.
    """
    accounts = {}
    while True:
        print("\n1. Create Account")
        print("2. Deposit Funds")
        print("3. Withdraw Funds")
        print("4. Check Balance")
        print("5. Exit")
        choice = input("Enter your choice: ")
        if choice == "1":
            account_number = input("Enter account number: ")
            account_holder = input("Enter account holder's name: ")
            # NOTE(review): float() raises ValueError on non-numeric input;
            # there is no validation here -- confirm that is acceptable.
            initial_balance = float(input("Enter initial balance: "))
            accounts[account_number] = BankAccount(account_number, account_holder, initial_balance)
            print("Account created successfully")
        elif choice == "2":
            account_number = input("Enter account number: ")
            if account_number in accounts:
                amount = float(input("Enter amount to deposit: "))
                accounts[account_number].deposit(amount)
                print("Funds deposited successfully")
            else:
                print("Account not found")
        elif choice == "3":
            account_number = input("Enter account number: ")
            if account_number in accounts:
                amount = float(input("Enter amount to withdraw: "))
                # withdraw() itself reports insufficient funds.
                accounts[account_number].withdraw(amount)
            else:
                print("Account not found")
        elif choice == "4":
            account_number = input("Enter account number: ")
            if account_number in accounts:
                print("Current balance:", accounts[account_number].get_balance())
            else:
                print("Account not found")
        elif choice == "5":
            print("Exiting...")
            break
        else:
            print("Invalid choice")
if __name__ == "__main__":
    main()
|
// Convert a binary string (e.g. '1101') to its decimal value.
// Digits are read right-to-left, each contributing bit * 2^position.
const binToDec = (binary) => {
  let result = 0;
  for (let pos = 0; pos < binary.length; pos++) {
    const bit = binary[binary.length - 1 - pos];
    result += bit * (2 ** pos);
  }
  return result;
};
console.log(binToDec('1101')); // 13
|
// Count how many times each string appears in `arr`.
// Returns a plain object mapping string -> occurrence count.
const occurences = (arr) => {
  const counts = {};
  for (const str of arr) {
    counts[str] = counts[str] ? counts[str] + 1 : 1;
  }
  return counts;
};
|
package com.google.developers.event;
/**
* Created by frren on 2015-09-10.
*/
/**
 * Column-name constants for the spreadsheet that stores register-form
 * responses.
 */
public interface RegisterFormResponseSpreadsheet extends SpreadsheetFacts {
    String QR_CODE_COLUMN = "QR code";
    String CHECK_IN_COLUMN = "Check-in";
    String CLIENT_IP_COLUMN = "Client IP";
}
|
# Activate the virtualenv and launch the Saleor backend dev server with its
# configuration provided through environment variables.
cd /home/ubuntu/codes/backend/saleor
source /home/ubuntu/codes/backend/venv/bin/activate
export ALLOWED_HOSTS=3.6.136.178
export ALLOWED_CLIENT_HOSTS=3.6.136.178
# NOTE(review): DEBUG=True and a trivial SECRET_KEY are unsafe outside local
# development -- confirm this script never targets a production host.
export DEBUG=True
export SECRET_KEY=123456
export INTERNAL_IPS=127.0.0.1,3.6.136.178,
export API_URI=http://3.6.136.178:8000/graphql/
export APP_MOUNT_URI=/dashboard/
export DEFAULT_COUNTRY=IN
export DEFAULT_CURRENCY=INR
python manage.py runserver 0.0.0.0:8000
|
# -----------------------------------
# Script that builds Dart app and pushes it to gh-pages.
#
# Set following variables:
# -----------------------------------
build_folder='example'
github_repo='git@github.com:marcojakob/dart-event-bus.git'
# -----------------------------------
# Build.
# -----------------------------------
pub install
pub build ${build_folder}
# -----------------------------------
# Configure git in build subfolder
# -----------------------------------
# The build output becomes a fresh, standalone repository.
cd build/${build_folder}
git init
git add .
# -----------------------------------
# Deploy to github pages.
# -----------------------------------
# Force-push the single build commit onto the gh-pages branch.
git commit -m 'deploy commit from drone'
git push -f ${github_repo} master:gh-pages
|
<reponame>apdavison/hbp-validation-framework
import axios from "axios";
import { baseUrl, querySizeLimit } from "./globals";
// Turn {field: [v1, v2], ...} into a "field=v1&field=v2&..." query string.
// An empty filter dict yields an empty string.
const buildQuery = (filterDict) => {
    const parts = [];
    for (const key in filterDict) {
        for (const value of filterDict[key]) {
            parts.push(`${key}=${value}`);
        }
    }
    return parts.join("&");
};
class DataStore {
    // Client-side cache plus API wrapper. `baseUrl` points at the validation
    // service; `auth` must expose a bearer `token`.
    constructor(baseUrl, auth) {
        this.baseUrl = baseUrl;
        this.auth = auth;
        this.models = {}; // model id/alias -> model object
        this.tests = {}; // test id/alias -> test object
        this.results = {};
        this.projects = [];
        this.vocab = null;
        // executed query string -> list of cached ids, per entity type
        this.queries = {
            models: {},
            tests: {},
        };
    }
    // Build the axios request config: bearer-token auth header plus an
    // optional cancel token (`source` is an axios CancelToken source).
    getRequestConfig(source = null) {
        let config = {
            headers: {
                Authorization: "Bearer " + this.auth.token,
            },
        };
        if (source) {
            config.cancelToken = source.token;
        }
        return config;
    }
    // GET `url` with auth (and optional cancellation) applied.
    get(url, source = null) {
        return axios.get(url, this.getRequestConfig(source));
    }
    // POST a JSON `payload` to `url` with auth applied.
    post(url, payload, source = null) {
        let config = this.getRequestConfig(source);
        config.headers["Content-type"] = "application/json";
        return axios.post(url, payload, config);
    }
    // PUT a JSON `payload` to `url` with auth applied.
    put(url, payload, source = null) {
        let config = this.getRequestConfig(source);
        config.headers["Content-type"] = "application/json";
        return axios.put(url, payload, config);
    }
    // Fetch models matching `filters` ({field: [values]}). Both the models
    // and the query -> id-list mapping are cached, so a repeated query is
    // answered locally without hitting the server.
    async queryModels(filters, source = null) {
        const query = buildQuery(filters);
        if (this.queries.models[query]) {
            console.log("Using saved query");
            const idList = this.queries.models[query];
            return idList.map((id) => {
                return this.models[id];
            });
        } else {
            console.log("No saved query, requesting models from server");
            const url =
                this.baseUrl +
                "/models/?" +
                encodeURI(query) +
                "&size=" +
                querySizeLimit +
                "&summary=true";
            return this.get(url, source).then((res) => {
                this.queries.models[query] = [];
                res.data.forEach((model) => {
                    // todo: check if model is already cached with results/versions, don't overwrite if so
                    model.loadedResults = false;
                    model.loadedVersions = false;
                    model.instances = [];
                    model.results = [];
                    this.models[model.id] = model;
                    this.queries.models[query].push(model.id);
                });
                return res.data;
            });
        }
    }
    // Return one model by id or alias, from the cache when its versions have
    // already been loaded, otherwise from the server (and cache it).
    async getModel(identifier, source = null) {
        if (this.models[identifier] && this.models[identifier].loadedVersions) {
            return this.models[identifier];
        } else {
            const url = this.baseUrl + "/models/" + identifier;
            return this.get(url, source).then((res) => {
                const model = res.data;
                // Guard against the server answering for a different entity.
                if (model.id !== identifier && model.alias !== identifier) {
                    throw new Error(
                        `Error, retrieved id ${model.id} doesn't match requested identifier ${identifier}`
                    );
                }
                model.loadedVersions = true;
                model.loadedResults = false;
                model.results = [];
                this.models[identifier] = model;
                return this.models[identifier];
            });
        }
    }
    // Resolve to true when no model exists with the given alias (a failed
    // lookup means the alias is free).
    // NOTE(review): this calls the module-level `datastore` singleton rather
    // than `this` -- presumably the same instance; confirm. Also, when the
    // request is cancelled the promise resolves to undefined, not a boolean.
    modelAliasIsUnique(alias, source = null) {
        return datastore
            .getModel(encodeURI(alias), source)
            .then((res) => {
                return false;
            })
            .catch((err) => {
                if (axios.isCancel(err)) {
                    console.log("Error: ", err.message);
                } else {
                    return true;
                }
            });
    }
    // Look up a model's instances by version string, always hitting the server.
    getModelInstanceFromVersion(modelID, version, source = null) {
        // we don't use saved values here, as this function is used for a uniqueness check
        const url =
            this.baseUrl +
            "/models/" +
            modelID +
            "/instances/?version=" +
            version;
        return this.get(url, source);
    }
    // Fetch a single model instance directly by its instance id.
    getModelInstanceFromID(modelInstanceID, source = null) {
        const url =
            this.baseUrl +
            "/models/query/instances/" +
            encodeURI(modelInstanceID);
        return this.get(url, source);
    }
    // Create a model on the server and insert the response into the cache,
    // marked as fully loaded (a fresh model has no results or versions yet).
    createModel(modelData, source = null) {
        const url = this.baseUrl + "/models/";
        return this.post(url, modelData, source).then((res) => {
            const model = res.data;
            model.loadedResults = true;
            model.loadedVersions = true;
            if (model.instances === null) {
                model.instances = [];
            }
            model.results = [];
            this.models[model.id] = model;
            return model;
        });
    }
updateModel(modelData, source = null) {
const url = this.baseUrl + "/models/" + modelData.id;
return this.put(url, modelData, source).then((res) => {
const model = res.data;
model.loadedVersions = true;
model.loadedResults = false;
if (model.instances === null) {
model.instances = [];
}
this.models[model.id] = model;
return model;
});
}
createModelInstance(modelID, modelInstanceData, source = null) {
const url = this.baseUrl + "/models/" + modelID + "/instances/";
return this.post(url, modelInstanceData, source).then((res) => {
const modelInstance = res.data;
this.models[modelID].instances.push(modelInstance);
return modelInstance;
});
}
  updateModelInstance(modelID, modelInstanceData, source = null) {
    // PUT an updated model instance, then replace the matching entry
    // (matched by id) in the cached model's instance list.
    const url =
      this.baseUrl +
      "/models/" +
      modelID +
      "/instances/" +
      modelInstanceData.id;
    return this.put(url, modelInstanceData, source).then((res) => {
      const modelInstance = res.data;
      this.models[modelID].instances.forEach((inst, i) => {
        if (inst.id === modelInstance.id) {
          this.models[modelID].instances[i] = modelInstance;
        }
      });
      return modelInstance;
    });
  }
  async getTest(identifier, source = null) {
    // Return the cached test if its versions have already been loaded;
    // otherwise fetch it from the server and cache it under `identifier`.
    // `identifier` may be either the test's id or its alias.
    if (this.tests[identifier] && this.tests[identifier].loadedVersions) {
      return this.tests[identifier];
    } else {
      const url = this.baseUrl + "/tests/" + identifier;
      return this.get(url, source).then((res) => {
        const test = res.data;
        // Guard against the server returning a different entity than requested.
        if (test.id !== identifier && test.alias !== identifier) {
          throw new Error(
            `Error, retrieved id ${test.id} doesn't match requested identifier ${identifier}`
          );
        }
        // Full fetch includes versions but not results.
        test.loadedVersions = true;
        test.loadedResults = false;
        test.results = [];
        this.tests[identifier] = test;
        return this.tests[identifier];
      });
    }
  }
  async queryTests(filters, source = null) {
    // Return tests matching `filters`, using a per-query cache of test ids.
    const query = buildQuery(filters);
    if (this.queries.tests[query]) {
      console.log("Using saved query");
      const idList = this.queries.tests[query];
      return idList.map((id) => {
        return this.tests[id];
      });
    } else {
      console.log("No saved query, requesting tests from server");
      const url =
        this.baseUrl +
        "/tests/?" +
        encodeURI(query) +
        "&size=" +
        querySizeLimit +
        "&summary=true";
      return this.get(url, source).then((res) => {
        this.queries.tests[query] = [];
        res.data.forEach((test) => {
          // Summary objects: versions/results not yet loaded.
          // NOTE(review): unlike queryModels, there is no guard against
          // overwriting a fully-loaded cached test — confirm intended.
          test.loadedVersions = false;
          test.instances = [];
          test.loadedResults = false;
          test.results = [];
          this.tests[test.id] = test;
          this.queries.tests[query].push(test.id);
        });
        return res.data;
      });
    }
  }
  testAliasIsUnique(alias, source = null) {
    // Resolve to false when a test with this alias exists, true when the
    // lookup fails (alias is free).
    // NOTE(review): uses the module-level `datastore` singleton rather than
    // `this`; equivalent at runtime but inconsistent with sibling methods.
    return datastore
      .getTest(encodeURI(alias), source)
      .then((res) => {
        return false;
      })
      .catch((err) => {
        if (axios.isCancel(err)) {
          // Cancelled request: resolves to undefined (neither true nor
          // false) — presumably callers treat that as "unknown". TODO confirm.
          console.log("Error: ", err.message);
        } else {
          return true;
        }
      });
  }
getTestInstanceFromVersion(testID, version, source = null) {
const url =
this.baseUrl +
"/tests/" +
testID +
"/instances/?version=" +
version;
return this.get(url, source);
}
getTestInstanceFromID(testInstanceID, source = null) {
const url =
this.baseUrl +
"/tests/query/instances/" +
encodeURI(testInstanceID);
return this.get(url, source);
}
createTest(testData, source = null) {
const url = this.baseUrl + "/tests/";
return this.post(url, testData, source).then((res) => {
const test = res.data;
test.loadedResults = true;
test.loadedVersions = true;
if (test.instances === null) {
test.instances = [];
}
test.results = [];
this.tests[test.id] = test;
return test;
});
}
updateTest(testData, source = null) {
const url = this.baseUrl + "/tests/" + testData.id;
return this.put(url, testData, source).then((res) => {
const test = res.data;
test.loadedVersions = true;
test.loadedResults = false;
if (test.instances === null) {
test.instances = [];
}
this.tests[test.id] = test;
return test;
});
}
createTestInstance(testID, testInstanceData, source = null) {
const url = this.baseUrl + "/tests/" + testID + "/instances/";
return this.post(url, testInstanceData, source).then((res) => {
const testInstance = res.data;
this.tests[testID].instances.push(testInstance);
return testInstance;
});
}
  updateTestInstance(testID, testInstanceData, source = null) {
    // PUT an updated test instance, then replace the matching entry
    // (matched by id) in the cached test's instance list.
    const url =
      this.baseUrl +
      "/tests/" +
      testID +
      "/instances/" +
      testInstanceData.id;
    return this.put(url, testInstanceData, source).then((res) => {
      const testInstance = res.data;
      this.tests[testID].instances.forEach((inst, i) => {
        if (inst.id === testInstance.id) {
          this.tests[testID].instances[i] = testInstance;
        }
      });
      return testInstance;
    });
  }
  async getProjects(source = null) {
    // Return the cached list of editable project ids, fetching it from the
    // server on first use.
    if (this.projects.length > 0) {
      return this.projects;
    } else {
      const url = this.baseUrl + "/projects";
      return this.get(url, source)
        .then((res) => {
          let editableProjects = [];
          res.data.forEach((proj) => {
            // Keep only projects the current user may update.
            if (proj.permissions.UPDATE) {
              editableProjects.push(proj.project_id);
            }
          });
          editableProjects.sort();
          this.projects = editableProjects;
          return this.projects;
        })
        .catch((err) => {
          // NOTE(review): on failure this logs and resolves to undefined;
          // the next call retries because this.projects stays empty.
          console.log("Error: ", err.message);
        });
    }
  }
  async getResultsByModel(modelId, source = null) {
    // Return all results for a model, from cache when already loaded.
    // Assumes this.models[modelId] is already cached — presumably callers
    // fetch the model first; TODO confirm.
    if (this.models[modelId].loadedResults) {
      return this.models[modelId].results.map((resultId) => {
        return this.results[resultId];
      });
    } else {
      const url =
        this.baseUrl +
        "/results-summary/?model_id=" +
        modelId +
        "&size=" +
        querySizeLimit;
      return this.get(url, source).then((res) => {
        // Cache each result by id and remember their ids on the model.
        const resultIds = [];
        res.data.forEach((result) => {
          resultIds.push(result.id);
          this.results[result.id] = result;
        });
        this.models[modelId].results = resultIds;
        this.models[modelId].loadedResults = true;
        return res.data;
      });
    }
  }
getResultsByModelInstances(modelInstanceIds, source = null) {
const url =
this.baseUrl +
"/results-extended/?model_instance_id=" +
modelInstanceIds.join("&model_instance_id=") +
"&size=" +
querySizeLimit;
return this.get(url, source).then((res) => {
res.data.forEach((result) => {
this.results[result.id] = result;
});
return res.data;
});
}
  async getResultsByTest(testId, source = null) {
    // Return all results for a test, from cache when already loaded.
    // Assumes this.tests[testId] is already cached — presumably callers
    // fetch the test first; TODO confirm.
    if (this.tests[testId].loadedResults) {
      return this.tests[testId].results.map((resultId) => {
        return this.results[resultId];
      });
    } else {
      const url =
        this.baseUrl +
        "/results-summary/?test_id=" +
        testId +
        "&size=" +
        querySizeLimit;
      return this.get(url, source).then((res) => {
        // Cache each result by id and remember their ids on the test.
        const resultIds = [];
        res.data.forEach((result) => {
          resultIds.push(result.id);
          this.results[result.id] = result;
        });
        this.tests[testId].results = resultIds;
        this.tests[testId].loadedResults = true;
        return res.data;
      });
    }
  }
getResultsByTestInstance(testInstanceIds, source = null) {
const url =
this.baseUrl +
"/results-extended/?test_instance_id=" +
testInstanceIds.join("&test_instance_id=") +
"&size=" +
querySizeLimit;
return this.get(url, source).then((res) => {
res.data.forEach((result) => {
this.results[result.id] = result;
});
return res.data;
});
}
async getResult(resultID, source = null) {
if (this.results[resultID]) {
return this.results[resultID];
} else {
const url = this.baseUrl + "/results-extended/" + resultID;
return this.get(url, source).then((res) => {
this.results[resultID] = res.data;
return this.results[resultID];
});
}
}
async getValidFilterValues(source = null) {
if (this.vocab === null) {
return this.get(`${this.baseUrl}/vocab/`, source).then((res) => {
this.vocab = { ...res.data };
return this.vocab;
});
} else {
return this.vocab;
}
}
}
export const datastore = new DataStore(baseUrl, null);
|
<reponame>fmmajd/gopostgres<gh_stars>0
package gopostgres
import (
"github.com/jackc/pgx/v4"
"testing"
)
func TestInitDB(t *testing.T) {
	// Smoke test: after InitDB, the package-level DB handle must hold a
	// live connection. Parameters point at a local test database.
	InitDB("test", "user", "pass", "localhost", nil, pgx.LogLevelDebug)
	if DB.connection == nil {
		t.Error("database could not be initialized correctly")
	}
}
|
#!/bin/bash
# Reproduce a discrepancy between MonPoly and Verimon on the same
# signature/formula/log for the MAX-aggregation formula below.
echo "================================================================================"
echo "MonPoly's violations of (x1 <- MAX x1; x2 (ONCE[1,1] (P0(y1,x1,x2) )))"
echo "================================================================================"
monpoly -sig bug.sig -formula bug.mfotl -log bug.log -no_rw -nonewlastts -nofilteremptytp -nofilterrel
echo "================================================================================"
echo "Verimon's violations of (x1 <- MAX x1; x2 (ONCE[1,1] (P0(y1,x1,x2) )))"
echo "================================================================================"
verimon -sig bug.sig -formula bug.mfotl -log bug.log -no_rw -nonewlastts -nofilteremptytp -nofilterrel
|
import React, {Component} from 'react';
import {StyleSheet, View, Text, ActivityIndicator} from 'react-native';
import Deck from './Deck';
export default class App extends Component {
state = {
decks: [],
cards: [],
isLoading: true,
};
componentDidMount() {
// get decks from API, set state
fetch(https://myapi.com/decks)
.then(res => res.json())
.then(res => this.setState({ decks: res }));
// get cards from API, set state
fetch(https://myapi.com/cards)
.then(res => res.json())
.then(res => this.setState({ cards: res, isLoading: false }));
}
render() {
const { decks, isLoading } = this.state;
const decksWithCards = decks.map(deck => {
const cards = this.state.cards.filter(
card => card.deckId === deck.id
);
const addCardsToDecks = { ...deck, cards: cards };
return addCardsToDecks;
});
if (isLoading) {
return (
<View style={styles.container}>
<ActivityIndicator size="large" color="#330066" animating />
</View>
);
}
return (
<View style={styles.container}>
{decksWithCards.map(deck => {
return <Deck key={deck.id} {...deck} />;
})}
</View>
);
}
}
// Shared layout styles: fill the screen and centre children on white.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#fff',
    alignItems: 'center',
    justifyContent: 'center'
  }
});
|
import click
from observatory.api.server.openapi_renderer import OpenApiRenderer
@click.group()
def cli():
    """The Observatory API command line tool.
    COMMAND: the commands to run include:\n
    - generate-openapi-spec: generate an OpenAPI specification for the Observatory API.\n
    """
    # Group entry point only; subcommands do the work. (Docstring fixed:
    # click derives the command name "generate-openapi-spec" from the
    # function name, not "generate-open-api".)
    pass
@cli.command()
@click.argument("template-file", type=click.Path(exists=True, file_okay=True, dir_okay=False))
@click.argument("output-file", type=click.Path(exists=False, file_okay=True, dir_okay=False))
@click.option("--cloud-endpoints", is_flag=True, default=False, help="Generate OpenAPI config for Cloud Endpoints.")
@click.option(
    "--api-client", is_flag=True, default=False, help="Generate OpenAPI config for OpenAPI client generation."
)
def generate_openapi_spec(template_file, output_file, cloud_endpoints, api_client):
    """Generate an OpenAPI specification for the Observatory API.\n
    TEMPLATE_FILE: the template file used to render the specification.
    OUTPUT_FILE: the file the rendered specification is written to.
    """
    # Render file (target flavour chosen by the flags).
    renderer = OpenApiRenderer(template_file, cloud_endpoints=cloud_endpoints, api_client=api_client)
    render = renderer.render()
    # Save file
    with open(output_file, mode="w") as f:
        f.write(render)
# Script entry point: dispatch to the click command group.
if __name__ == "__main__":
    cli()
|
#!/bin/bash
function docker_tag_exists() {
    # Return success iff tag $2 exists for Docker Hub repository $1.
    # BUG FIX: expansions are quoted — with an empty/odd curl result, the
    # unquoted `test $EXISTS = true` was a `test` syntax error.
    EXISTS=$(curl -s "https://hub.docker.com/v2/repositories/$1/tags/?page_size=10000" | jq -r "[.results | .[] | .name == \"$2\"] | any")
    test "$EXISTS" = true
}
# Build/tag/push the image only when the 1.10.0 tag is not already on
# Docker Hub; otherwise skip to keep the pipeline idempotent.
if docker_tag_exists svenruppert/maven-3.3.9-openjdk 1.10.0; then
  echo skip building, image already existing - svenruppert/maven-3.3.9-openjdk 1.10.0
else
  echo start building the images
  docker build -t svenruppert/maven-3.3.9-openjdk .
  docker tag svenruppert/maven-3.3.9-openjdk:latest svenruppert/maven-3.3.9-openjdk:1.10.0
  docker push svenruppert/maven-3.3.9-openjdk:1.10.0
fi
|
<reponame>snowcrystall/gitaly_emg
package fstype
// unknownFS is presumably the fallback reported when the filesystem cannot
// be identified — confirm against the platform-specific detectFileSystem
// implementations.
const unknownFS = "unknown"
// FileSystem will return the type of filesystem being used at the passed path
func FileSystem(path string) string {
	return detectFileSystem(path)
}
|
# Create a GitHub label named $NAME on $OWNER/$REPO via the REST v3 API.
# Requires GITHUB_TOKEN in the environment; -s silences progress output.
curl \
  -s \
  -X POST \
  -H "authorization: Bearer $GITHUB_TOKEN" \
  -H "Accept: application/vnd.github.v3+json" \
  https://api.github.com/repos/$OWNER/$REPO/labels \
  -d "{\"name\":\"$NAME\"}"
|
<reponame>stungkit/cadence-web<gh_stars>100-1000
// polyfills for web browser to node
// Jest environment setup: install browser polyfills and global mocks so
// browser-targeted Vue code can run under Node.
import 'babel-polyfill';
import AbortController from 'node-abort-controller';
import atob from 'atob';
import { injectMomentDurationFormat, jsonTryParse } from '~helpers';
import { enableFetchMocks } from 'jest-fetch-mock';
import { mockUserAgent } from 'jest-useragent-mock';
import Vue from 'vue';
import Vuex from 'vuex';
// Browser globals not provided by the Node test runtime.
global.atob = atob;
global.JSON.tryParse = jsonTryParse;
injectMomentDurationFormat();
if (!window.AbortController) {
  window.AbortController = AbortController;
}
// Route fetch through jest-fetch-mock and pin a deterministic user agent.
enableFetchMocks();
window.fetch = global.fetch;
mockUserAgent('chrome');
Vue.use(Vuex);
|
# Wraps an object so that only the interface of one of its ancestors is
# visible ("upcasting" in the static-typing sense).
module Upcastable
  class UpcastedObject
    # Define an instance method +m+ that forwards straight to the wrapped
    # object.
    def self.define_delegate_method(m)
      define_method(m) do |*args, &block|
        @object.send(m, *args, &block)
      end
    end

    # object::   the instance being wrapped
    # ancestor:: the class/module whose interface should be exposed
    # base::     the class the object is currently viewed as
    def initialize(object, ancestor, base = object.class)
      unless base <= ancestor
        raise ArgumentError, "#{ancestor} is not an ancestor of #{base}"
      end
      @object = object
      @ancestor = ancestor
    end

    # Delegate every Object-level method except the ones this class must
    # control itself.
    (instance_methods - [:public_send, :clone]).each do |m|
      define_delegate_method(m)
    end

    # Cloning the wrapper clones the wrapped object too.
    def initialize_clone(other)
      @object = @object.clone
    end

    # Allow a call only when the ancestor defines the method, publicly or
    # privately. BUG FIX: the original guard used `&&`, requiring the method
    # to be defined both publicly AND privately — which rejected every
    # ordinary public method.
    def send(m, *args, &block)
      unless @ancestor.method_defined?(m) || @ancestor.private_method_defined?(m)
        raise NoMethodError, "`#{m}' is not defined in #{@ancestor}"
      end
      @object.send(m, *args, &block)
    end

    # Mirror the ancestor's interface for respond_to? queries.
    def respond_to?(m, private = false)
      if private
        @ancestor.private_method_defined?(m) || @ancestor.method_defined?(m)
      else
        @ancestor.method_defined?(m)
      end
    end

    # Re-view the same wrapped object as a (further) ancestor.
    def upcast_to(ancestor)
      return self if ancestor == @ancestor
      UpcastedObject.new(@object, ancestor, @ancestor)
    end

    # The ancestor this wrapper is currently viewed as.
    def upcasting
      @ancestor
    end

    def upcasted?
      true
    end

    # Unwrap and return the original object.
    def downcast
      @object
    end

    # Fallback dispatch for methods not covered by the delegates above.
    def method_missing(m, *args, &block)
      unless @ancestor.method_defined?(m)
        raise NoMethodError, "`#{m}' is not defined in #{@ancestor}"
      end
      @object.send(m, *args, &block)
    end
  end
end
|
#!/bin/bash
# Package the SpriteBuilder template project $1 into Generated/$1.zip,
# stripping user-specific Xcode state and injecting the default .gitignore.
# BUG FIX: variable expansions are quoted so project names containing
# spaces (or an empty $1) don't break the tests/rm/zip invocations.
PROJECTNAME=$1
# Change to the script's working directory no matter from where the script was called (except if there are symlinks used)
# Solution from: http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"
cd "../Support/$PROJECTNAME.spritebuilder/"
# Generate template project
echo "Generating: $PROJECTNAME"
# only remove files if they exist
if [ -d "$PROJECTNAME.xcodeproj/xcuserdata/" ]; then
  rm -rf "$PROJECTNAME.xcodeproj/xcuserdata/"
fi
if [ -d "$PROJECTNAME.xcodeproj/project.xcworkspace/xcuserdata/" ]; then
  rm -rf "$PROJECTNAME.xcodeproj/project.xcworkspace/xcuserdata/"
fi
if [ -f "../../Generated/$PROJECTNAME.zip" ]; then
  rm "../../Generated/$PROJECTNAME.zip"
fi
zip -q -r "../../Generated/$PROJECTNAME.zip" .* -x "../*" "*.git*" "*/tests/*" "*.DS_Store"
# Adds default project .gitignore file to archive
cp ../default_projects.gitignore ./.gitignore
zip -q "../../Generated/$PROJECTNAME.zip" .gitignore
rm .gitignore
echo ""
|
#!/bin/bash
# Pre-commit hook: run the JS test suite iff the commit touches JS-family
# files. BUG FIX: the shebang was /bin/sh, but the script relies on
# bash-only features (`read -a`, arrays, `[[ ]]`, `echo -e`).
red="\033[0;31m"
yellow="\033[1;33m"
green="\033[1;32m"
reset="\033[0m"
# Collect the staged file names into an array.
read -a changed_files <<< $(git diff --cached --name-only --raw)
# check if there're any JS related files in commit
runTests=false
for file in "${changed_files[@]}"
do
  extension="${file##*.}"
  case $extension in js | coffee | jsx | ts | es6) runTests=true; break;;
  esac
done
# Nothing JS-related staged: allow the commit through.
[[ $runTests == false ]] && exit 0;
# now if your "package.json" isn't in the root directory
# just "cd" there
# eg.
# cd folder/with/my-package
printf "${yellow}Starting Unit Tests for JS Files:${reset}\n"
# now lets run tests
# eg.
# karma start --single-run --browsers PhantomJS --reporters dots
# or
# eslint .
# or
# mocha --colors --bail
# but we'll do more general
npm test
# now if tests failed let's abort commit by "exit 1"
# if not, congratulations, commit is now in Git
testResults=$?
if [ $testResults -eq 1 ]
then
  echo -e "${red}\n Tests FAILED\n\n commit ABORTED${reset}"
  exit 1
else
  echo -e "${green}\nOK\n${reset}"
fi
exit 0
|
<reponame>aminmeyghani/javascript-for-beginners
// Prototype-based "inheritance" demo: a base task prototype, a more
// specific task prototype linked to it, and a concrete task instance.
var taskBaseP = {
  pName: 'taskBaseP',
  getName() {
    // Reads this.name, which is only assigned on concrete instances (t1).
    return this.name;
  }
};
var taskP = Object.create(taskBaseP);
Object.assign(taskP, {
  // task related stuff. More specific.
  pName: 'taskP',
  setDone() {
    this.isDone = true;
  },
  reset() {
    this.isDone = false;
  },
  getIsDone() {
    return this.isDone;
  }
});
// Concrete task; prototype chain is t1 -> taskP -> taskBaseP.
var t1 = Object.create(taskP);
t1.name = 'clean';
t1.isDone = false;
console.log(t1.getIsDone()); // false
t1.setDone();
console.log(t1.getIsDone()); // true
// NOTE(review): __proto__ is a legacy accessor; Object.getPrototypeOf(t1)
// is the standard way to read the prototype — left unchanged here.
var pProtoLink = t1.__proto__;
console.log(pProtoLink);
|
package io.opensphere.core.util.swing.wizard.model;
import java.awt.Component;
/**
 * The model for the panels displayed for each wizard step.
 *
 * <p>Declared {@code @FunctionalInterface}, so an implementation may be
 * supplied as a lambda mapping a step title to its panel.</p>
 */
@FunctionalInterface
public interface WizardPanelModel
{
    /**
     * Get the wizard panel for a given step.
     *
     * @param stepTitle The title for the step.
     * @return The component.
     */
    Component getWizardPanel(String stepTitle);
}
|
package router
import (
"github.com/drone/drone/server/handler"
"github.com/drone/drone/server/middleware"
"github.com/zenazn/goji/web"
)
// New builds the goji mux with all API routes, grouping sub-muxes by the
// middleware each route family requires.
func New() *web.Mux {
	mux := web.New()
	// Public endpoints: login metadata, badges, and incoming hooks.
	mux.Get("/api/logins", handler.GetLoginList)
	mux.Get("/api/auth/:host", handler.GetLogin)
	mux.Post("/api/auth/:host", handler.GetLogin)
	mux.Get("/api/badge/:host/:owner/:name/status.svg", handler.GetBadge)
	mux.Get("/api/badge/:host/:owner/:name/cc.xml", handler.GetCC)
	mux.Get("/api/hook/:host/:token", handler.PostHook)
	mux.Put("/api/hook/:host/:token", handler.PostHook)
	mux.Post("/api/hook/:host/:token", handler.PostHook)
	// these routes are here for backward compatibility
	// to help people troubleshoot why their upgrade isn't
	// working correctly. remove at some point
	mux.Get("/api/hook/:host", handler.PostHook)
	mux.Put("/api/hook/:host", handler.PostHook)
	mux.Post("/api/hook/:host", handler.PostHook)
	////
	// Websocket streams (no extra middleware).
	streams := web.New()
	streams.Get("/api/stream/stdout/:id", handler.WsConsole)
	streams.Get("/api/stream/user", handler.WsUser)
	mux.Handle("/api/stream/*", streams)
	// Repository routes.
	// NOTE(review): both RequireRepoRead and RequireRepoAdmin are applied to
	// every repo route, including plain GETs — confirm read-only routes
	// should really require admin.
	repos := web.New()
	repos.Use(middleware.SetRepo)
	repos.Use(middleware.RequireRepoRead)
	repos.Use(middleware.RequireRepoAdmin)
	repos.Get("/api/repos/:host/:owner/:name/branches/:branch/commits/:commit/console", handler.GetOutput)
	repos.Get("/api/repos/:host/:owner/:name/branches/:branch/commits/:commit", handler.GetCommit)
	repos.Post("/api/repos/:host/:owner/:name/branches/:branch/commits/:commit", handler.PostCommit)
	repos.Get("/api/repos/:host/:owner/:name/commits", handler.GetCommitList)
	repos.Get("/api/repos/:host/:owner/:name", handler.GetRepo)
	repos.Put("/api/repos/:host/:owner/:name", handler.PutRepo)
	repos.Post("/api/repos/:host/:owner/:name", handler.PostRepo)
	repos.Delete("/api/repos/:host/:owner/:name", handler.DelRepo)
	mux.Handle("/api/repos/:host/:owner/:name*", repos)
	// User administration (admin only).
	users := web.New()
	users.Use(middleware.RequireUserAdmin)
	users.Get("/api/users/:host/:login", handler.GetUser)
	users.Post("/api/users/:host/:login", handler.PostUser)
	users.Delete("/api/users/:host/:login", handler.DelUser)
	users.Get("/api/users", handler.GetUserList)
	mux.Handle("/api/users*", users)
	// Routes for the currently authenticated user.
	user := web.New()
	user.Use(middleware.RequireUser)
	user.Get("/api/user/feed", handler.GetUserFeed)
	user.Get("/api/user/repos", handler.GetUserRepos)
	user.Get("/api/user", handler.GetUserCurrent)
	user.Put("/api/user", handler.PutUser)
	mux.Handle("/api/user*", user)
	// Build-queue/worker administration (admin only).
	work := web.New()
	work.Use(middleware.RequireUserAdmin)
	work.Get("/api/work/started", handler.GetWorkStarted)
	work.Get("/api/work/pending", handler.GetWorkPending)
	work.Get("/api/work/assignments", handler.GetWorkAssigned)
	work.Get("/api/workers", handler.GetWorkers)
	work.Post("/api/workers", handler.PostWorker)
	work.Delete("/api/workers", handler.DelWorker)
	mux.Handle("/api/work*", work)
	return mux
}
|
<reponame>danieldiamond/gitlab-analytics<gh_stars>1-10
#!/usr/bin/env python3
import logging
import sys
from os import environ as env
from typing import Dict, List
from fire import Fire
from snowflake.sqlalchemy import URL
from sqlalchemy import create_engine
# Set logging defaults
logging.basicConfig(stream=sys.stdout, level=20)
class SnowflakeManager:
    """Manage Snowflake databases: create/refresh zero-copy clones of the
    analytics and raw databases, and clone individual tables."""

    def __init__(self, config_vars: Dict):
        """Build a SQLAlchemy engine from SNOWFLAKE_* settings and derive the
        clone database names from SNOWFLAKE_DATABASE."""
        self.engine = create_engine(
            URL(
                user=config_vars["SNOWFLAKE_USER"],
                password=config_vars["SNOWFLAKE_PASSWORD"],
                account=config_vars["SNOWFLAKE_ACCOUNT"],
                role=config_vars["SNOWFLAKE_SYSADMIN_ROLE"],
                warehouse=config_vars["SNOWFLAKE_LOAD_WAREHOUSE"],
            )
        )
        # Snowflake database name should be in CAPS
        # see https://gitlab.com/meltano/analytics/issues/491
        self.analytics_database = "{}_ANALYTICS".format(
            config_vars["SNOWFLAKE_DATABASE"].upper()
        )
        self.raw_database = "{}_RAW".format(config_vars["SNOWFLAKE_DATABASE"].upper())

    def _execute_and_log(self, query: str) -> None:
        """Run one query on a fresh connection, logging query and result.

        The connection (if it was opened) and the engine are always cleaned
        up, even when the query fails.
        """
        connection = None
        try:
            logging.info("Executing Query: {}".format(query))
            connection = self.engine.connect()
            [result] = connection.execute(query).fetchone()
            logging.info("Query Result: {}".format(result))
        finally:
            if connection is not None:
                connection.close()
            self.engine.dispose()

    def generate_db_queries(
        self, database_name: str, cloned_database: str
    ) -> List[str]:
        """
        Generate the queries to clone and provide permissions for databases.

        database_name: name of the database to (re)create.
        cloned_database: a "clone <source>" clause, or "" for an empty DB.
        """
        # Queries for database cloning and permissions
        check_db_exists_query = """use database "{0}";"""
        create_query = """create or replace database "{0}" {1};"""
        grant_query = """grant ownership on database "{0}" to TRANSFORMER;"""
        usage_roles = ["LOADER", "TRANSFORMER", "ENGINEER"]
        usage_grant_query_with_params = (
            """grant create schema, usage on database "{0}" to {1}"""
        )
        usage_grant_queries = [
            usage_grant_query_with_params.format(database_name, role)
            for role in usage_roles
        ]
        # The order of the queries matters!
        queries = [
            check_db_exists_query.format(database_name),
            create_query.format(database_name, cloned_database),
            grant_query.format(database_name),
        ] + usage_grant_queries
        return queries

    def manage_clones(
        self, database: str, empty: bool = False, force: bool = False
    ) -> None:
        """
        Manage zero copy clones in Snowflake.

        database: "analytics" or "raw".
        empty: create an empty database instead of a clone.
        force: skip the existence check and always create-or-replace.
        """
        databases = {"analytics": self.analytics_database, "raw": self.raw_database}
        create_db = databases[database]
        clone_db = f"clone {database}" if not empty else ""
        queries = self.generate_db_queries(create_db, clone_db)
        # if force is false, check if the database exists
        if force:
            logging.info("Forcing a create or replace...")
            db_exists = False
        else:
            connection = None
            try:
                logging.info("Checking if DB exists...")
                connection = self.engine.connect()
                connection.execute(queries[0])
                logging.info("DBs exist...")
                db_exists = True
            # BUG FIX: was a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit).
            except Exception:
                logging.info("DB does not exist...")
                db_exists = False
            finally:
                # BUG FIX: guard `connection` — if connect() itself raised,
                # the old `finally: connection.close()` raised NameError.
                if connection is not None:
                    connection.close()
                self.engine.dispose()
        # If the DB doesn't exist or --force is true, create or replace the db
        if not db_exists:
            logging.info("Creating or replacing DBs")
            for query in queries[1:]:
                self._execute_and_log(query)

    def delete_clones(self):
        """
        Delete both clone databases (analytics and raw) if they exist.
        """
        for db in [self.analytics_database, self.raw_database]:
            self._execute_and_log('DROP DATABASE IF EXISTS "{}";'.format(db))

    def create_table_clone(
        self,
        source_schema: str,
        source_table: str,
        target_table: str,
        target_schema: str = None,
        timestamp: str = None,
    ):
        """
        Create a zero copy clone of a table (optionally at a given timestamp)

        source_schema: schema of table to be cloned
        source_table: name of table to cloned
        target_table: name of clone table
        target_schema: schema of clone table (defaults to source_schema)
        timestamp: timestamp indicating time of clone in format yyyy-mm-dd hh:mi:ss
        """
        timestamp_format = """yyyy-mm-dd hh:mi:ss"""
        if not target_schema:
            target_schema = source_schema
        database = env["SNOWFLAKE_TRANSFORM_DATABASE"]
        queries = [f"""USE "{database}"; """]
        # Tries to create the schema its about to write to
        # If it does exists, {schema} already exists, statement succeeded.
        # is returned.
        schema_check = f"""CREATE SCHEMA IF NOT EXISTS "{database}".{target_schema};"""
        queries.append(schema_check)
        clone_sql = f"""create table if not exists {target_schema}.{target_table} clone "{database}".{source_schema}.{source_table}"""
        if timestamp and timestamp_format:
            clone_sql += f""" at (timestamp => to_timestamp_tz('{timestamp}', '{timestamp_format}'))"""
        clone_sql += " COPY GRANTS;"
        # Drop any stale target before cloning into it.
        queries.append(f"drop table if exists {target_schema}.{target_table};")
        queries.append(clone_sql)
        # All queries must run on ONE connection: `USE <database>` is
        # session-scoped and the later statements rely on it.
        connection = self.engine.connect()
        try:
            for q in queries:
                logging.info("Executing Query: {}".format(q))
                [result] = connection.execute(q).fetchone()
                logging.info("Query Result: {}".format(result))
        finally:
            connection.close()
            self.engine.dispose()
        return self
# Script entry point: expose SnowflakeManager's methods as a Fire CLI,
# configured from the current environment variables.
if __name__ == "__main__":
    snowflake_manager = SnowflakeManager(env.copy())
    Fire(snowflake_manager)
|
/**
 * Created by hfutlj on 2018/5/9.
 * Mid-level handler in the chain of responsibility: the dean of studies,
 * who can approve leave requests of up to 7 days.
 * Longer requests are forwarded to the next handler (the principal).
 */
public class Dean extends AbstractManager {
    @Override
    public void handle(int i) {
        // Approve up to 7 days ourselves; otherwise pass up the chain.
        // (The printed message says: "I am the dean, I approve your i-day leave".)
        if (i <= 7) {
            System.out.println("我是教导主任,我批准你" + i + "天假");
        } else {
            sendNext(i);
        }
    }
}
|
<reponame>systembugtj/android-lockpattern<gh_stars>0
/*
* Copyright 2012 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package haibison.android.lockpattern.utils;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import haibison.android.underdogs.NonNull;
import haibison.android.underdogs.Nullable;
/**
* The <strong>simple-and-weak</strong> encryption utilities.
*
* @author <NAME>
*/
public class SimpleWeakEncryption {
    /** Cipher transformation: AES in CBC mode with PKCS#5 padding. */
    private static final String TRANSFORMATION = "AES/CBC/PKCS5Padding";
    // NOTE(review): PBEWithMD5AndDES is a weak KDF by modern standards; the
    // class is explicitly named "weak", so this appears intentional.
    private static final String SECRET_KEY_FACTORY_ALGORITHM = "PBEWithMD5AndDES";
    private static final String SECRET_KEY_SPEC_ALGORITHM = "AES";
    private static final int KEY_LEN = 256;
    private static final int IV_LEN = 16;
    private static final int ITERATION_COUNT = 512;
    /** Separates the base-36 IV from the base-36 ciphertext in the output. */
    private static final char SEPARATOR = '@';
    public static final String UTF8 = "UTF-8";
    public static final String SHA256 = "SHA-256";
    /**
     * This is singleton class.
     */
    private SimpleWeakEncryption() {
    }
    /**
     * Encrypts {@code data} by {@code key}.
     *
     * @param password the secret key.
     * @param salt the salt, can be {@code null}. But it is highly recommended that you should provide it.
     * @param data the data.
     * @return the encrypted data, formatted as {@code base36(iv)} + {@link #SEPARATOR} + {@code base36(ciphertext)}.
     * @throws RuntimeException which wraps the original exception related to cipher process.
     */
    @NonNull
    public static String encrypt(@NonNull final char[] password, @Nullable byte[] salt, @NonNull final String data) {
        byte[] bytes;
        try {
            bytes = data.getBytes(UTF8);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
        Cipher cipher = null;
        try {
            cipher = Cipher.getInstance(TRANSFORMATION);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        } catch (NoSuchPaddingException e) {
            throw new RuntimeException(e);
        }
        /**
         * cipher.getIV() doesn't work the same for different API levels. So we're using this technique.
         */
        // NOTE(review): SecureRandom.getSeed() may block while gathering
        // entropy; `new SecureRandom().nextBytes(iv)` is the usual choice —
        // confirm before changing.
        final byte[] iv = SecureRandom.getSeed(IV_LEN);
        try {
            cipher.init(Cipher.ENCRYPT_MODE, genKey(password, salt), new IvParameterSpec(iv));
        } catch (InvalidKeyException e) {
            throw new RuntimeException(e);
        } catch (InvalidAlgorithmParameterException e) {
            throw new RuntimeException(e);
        }
        try {
            bytes = cipher.doFinal(bytes);
            return String.format("%s%s%s", Base36.toBase36(iv), SEPARATOR, Base36.toBase36(bytes));
        } catch (IllegalBlockSizeException e) {
            throw new RuntimeException(e);
        } catch (BadPaddingException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Decrypts an encrypted string ({@code data}) by {@code key}.
     *
     * @param password the password.
     * @param salt the salt, can be {@code null}. But it is highly recommended that you should provide it.
     * @param data the data, as produced by {@link #encrypt(char[], byte[], String)}.
     * @return the decrypted string. (Doc fix: this method never returns
     *         {@code null} — an invalid password typically surfaces as a
     *         {@link RuntimeException} wrapping a {@link BadPaddingException}.)
     * @throws RuntimeException which wraps the original exception related to cipher process.
     */
    @NonNull
    public static String decrypt(@NonNull final char[] password, @Nullable byte[] salt, @NonNull final String data) {
        Cipher cipher = null;
        try {
            cipher = Cipher.getInstance(TRANSFORMATION);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        } catch (NoSuchPaddingException e) {
            throw new RuntimeException(e);
        }
        // The IV precedes the SEPARATOR; the ciphertext follows it.
        final int iSeparator = data.indexOf(SEPARATOR);
        try {
            cipher.init(Cipher.DECRYPT_MODE, genKey(password, salt),
                    new IvParameterSpec(Base36.toBytes(data.substring(0, iSeparator))));
        } catch (InvalidKeyException e) {
            throw new RuntimeException(e);
        } catch (InvalidAlgorithmParameterException e) {
            throw new RuntimeException(e);
        }
        try {
            return new String(cipher.doFinal(Base36.toBytes(data.substring(iSeparator + 1))), UTF8);
        } catch (IllegalBlockSizeException e) {
            throw new RuntimeException(e);
        } catch (BadPaddingException e) {
            throw new RuntimeException(e);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Generates secret key.
     *
     * @param password the password.
     * @param salt the salt, can be {@code null}. But it is highly recommended that you should provide it.
     * @return the secret key.
     * @throws RuntimeException which wraps the original exception related to cipher process.
     */
    @NonNull
    private static Key genKey(@NonNull char[] password, @Nullable byte[] salt) {
        SecretKeyFactory factory;
        try {
            factory = SecretKeyFactory.getInstance(SECRET_KEY_FACTORY_ALGORITHM);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        // When no salt is supplied, derive one from the password itself.
        if (salt != null && salt.length > 0)
            salt = sha256(salt);
        else
            salt = sha256(new String(password));
        KeySpec spec = new PBEKeySpec(password, salt, ITERATION_COUNT, KEY_LEN);
        SecretKey tmp = null;
        try {
            tmp = factory.generateSecret(spec);
        } catch (InvalidKeySpecException e) {
            throw new RuntimeException(e);
        }
        return new SecretKeySpec(sha256(tmp.getEncoded()), SECRET_KEY_SPEC_ALGORITHM);
    }
    /**
     * Calculates SHA-256 of a string.
     *
     * @param s the string.
     * @return the SHA-256 of given string.
     * @throws RuntimeException which wraps {@link UnsupportedEncodingException} in case the system does not support {@link #UTF8}.
     */
    @NonNull
    public static byte[] sha256(@NonNull String s) {
        try {
            return sha256(s.getBytes(UTF8));
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Calculates SHA-256 of a byte array.
     *
     * @param bytes the byte array.
     * @return the SHA-256 of given data.
     * @throws RuntimeException which wraps {@link NoSuchAlgorithmException} in case the system does not support calculating message digest of
     *         {@link #SHA256}.
     */
    @NonNull
    public static byte[] sha256(@NonNull byte[] bytes) {
        try {
            MessageDigest md = MessageDigest.getInstance(SHA256);
            md.update(bytes);
            return md.digest();
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Base-36 utilities.
     *
     * @author <NAME>
     */
    public static class Base36 {
        /**
         * This is singleton class.
         */
        private Base36() {
        }
        /**
         * Converts a byte array to base-36.
         *
         * @param bytes the byte array.
         * @return the base-36 string representing the data given.
         */
        @NonNull
        public static String toBase36(@NonNull byte[] bytes) {
            return new BigInteger(bytes).toString(Character.MAX_RADIX);
        }
        /**
         * Converts a base-36 string to its byte array.
         *
         * @param base36 the base-36 string.
         * @return the original data.
         */
        @NonNull
        public static byte[] toBytes(@NonNull String base36) {
            return new BigInteger(base36, Character.MAX_RADIX).toByteArray();
        }
    }
}
|
#!/bin/sh
# Build the Jenkins living-documentation image from the sibling ../docker context.
docker build -t rmpestano/jenkins-living-documentation ../docker
|
#!/usr/bin/env bash
# Launches a fleet of Infinispan test servers (standalone, clustered, SSL,
# failover and cross-site variants) and waits for the cluster to form.
# With --ci the servers are left running in the background; otherwise the
# script blocks and tears everything down on exit.
set -e
if [[ $1 = "--ci" ]]; then
  echo "Launch script finished"
else
  # Kill the whole process group (all background servers) on interrupt/exit.
  trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM EXIT
fi
SERVER_VERSION="10.1.3.Final"
SERVER_HOME=server/infinispan-server-$SERVER_VERSION
# CLI invocation used to query the clustered server on port 11322.
CLUSTER_SIZE_MAIN="$SERVER_HOME/bin/cli.sh -c localhost:11322 -f batch "
ZIP_ROOT="http://downloads.jboss.org/infinispan"
CONF_DIR_TO_COPY_FROM="spec/configs/"
IS_SSL_PROCESSED=0
SERVER_DIR="infinispan-server"
# Polls the CLI until the main cluster reports 3 members.
function waitForClusters()
{
  cat > batch<<EOF
describe
disconnect
EOF
  MEMBERS_MAIN=''
  while [ "$MEMBERS_MAIN" != '3' ];
  do
    # Extract cluster_size from `describe` output; strip trailing char and leading spaces.
    MEMBERS_MAIN=$($CLUSTER_SIZE_MAIN | grep cluster_size | cut -d':' -f2 | sed 's/.$//' | sed -e 's/^[[:space:]]*//')
    echo "Waiting for clusters to form (main: $MEMBERS_MAIN)"
    sleep 20
  done
}
# Downloads/unpacks the distribution once, copies it to a shared temp dir
# (SERVER_TMP, exported), and prepares a per-node server directory with the
# requested configuration (plus keystores when isSsl=true).
function prepareServerDir()
{
  local isCi=$1
  local confPath=$2
  local isSsl=$3
  local dirName=${4}
  if [ ! -f server/infinispan-server-$SERVER_VERSION.zip ]; then
    cd server
    wget $ZIP_ROOT/$SERVER_VERSION/infinispan-server-$SERVER_VERSION.zip
    unzip infinispan-server-$SERVER_VERSION.zip
    cd ..
  fi
  if [[ -z "${SERVER_TMP}" ]]; then
    SERVER_TMP=server/${SERVER_DIR}
    mkdir ${SERVER_TMP} 2>/dev/null
    echo "Created temporary directory: $SERVER_TMP"
    cp -r ${SERVER_HOME}/* $SERVER_TMP
    echo "Server copied to temporary directory."
    $SERVER_TMP/bin/user-tool.sh -u admin -p 'mypassword'
    echo "Admin user added."
  fi
  cp -r ${SERVER_HOME}/server ${SERVER_TMP}/${dirName}
  cp "${CONF_DIR_TO_COPY_FROM}/${confPath}" ${SERVER_TMP}/${dirName}/conf
  echo "Infinispan configuration file ${confPath} copied to server ${dirName}."
  # Certificates are generated only once per run.
  if [[ ${isSsl} = "true" && ${IS_SSL_PROCESSED} = 0 ]]; then
      ./make-ssl.sh
      echo "Generate TLS/SSL certificates"
      IS_SSL_PROCESSED=1
  fi
  if [[ ${isSsl} = "true" ]]; then
    cp out/ssl/ca/ca.jks $SERVER_TMP/${dirName}/conf
    cp out/ssl/server/server.jks $SERVER_TMP/${dirName}/conf
    cp out/ssl/sni-trust1/trust1.jks $SERVER_TMP/${dirName}/conf
    cp out/ssl/sni-trust2/trust2.jks $SERVER_TMP/${dirName}/conf
    cp out/ssl/sni-untrust/untrust.jks $SERVER_TMP/${dirName}/conf
    echo "Security key and trust stores copied to temporary server."
  fi
  export SERVER_TMP=${SERVER_TMP}
}
# Prepares a node directory and starts the server on the given port.
# In --ci mode the process is detached via nohup.
function startServer()
{
  local isCi=$1
  local confPath=$2
  local isSsl=$3
  local port=${4}
  local nodeName=${5}
  local jvmParam=${6}
  prepareServerDir "${isCi}" ${confPath} ${isSsl} ${nodeName}
  if [[ ! -z ${port} ]]; then
    portStr="-p ${port}"
  fi
  if [[ ${isCi} = "--ci" ]]; then
    nohup $SERVER_TMP/bin/server.sh -c ${confPath} -s ${SERVER_TMP}/${nodeName} ${portStr:-""} --node-name=${nodeName} ${jvmParam:-} &
  else
    ${SERVER_TMP}/bin/server.sh -c ${confPath} -s ${SERVER_TMP}/${nodeName} ${portStr:-} --node-name=${nodeName} ${jvmParam:-} &
  fi
}
#deleting the testable server directory
rm -drf server/${SERVER_DIR}
export JAVA_OPTS="-Xms512m -Xmx1024m -XX:MetaspaceSize=128M -XX:MaxMetaspaceSize=512m"
startServer "$1" infinispan.xml false 11222 "server-local"
startServer "$1" infinispan-clustered.xml false 11322 "server-one"
startServer "$1" infinispan-clustered.xml false 11332 "server-two"
startServer "$1" infinispan-clustered.xml false 11342 "server-three"
startServer "$1" infinispan-ssl.xml true 11622 "server-ssl"
startServer "$1" infinispan-ssl1.xml true 11632 "server-ssl1"
startServer "$1" infinispan-ssl2.xml true 11642 "server-ssl2"
#Preparing server dirs for failover tests (3 servers)
prepareServerDir "$1" infinispan-clustered.xml false "server-failover-one"
prepareServerDir "$1" infinispan-clustered.xml false "server-failover-two"
prepareServerDir "$1" infinispan-clustered.xml false "server-failover-three"
#Preparing server dirs for xsite tests (2 servers)
prepareServerDir "$1" infinispan-xsite-EARTH.xml false "server-earth"
prepareServerDir "$1" infinispan-xsite-MOON.xml false "server-moon"
waitForClusters
echo "Infinispan test server started."
if [[ $1 = "--ci" ]]; then
  echo "Launch script finished"
else
  # Wait until script stopped
  while :
  do
    sleep 5
  done
fi
|
#!/bin/bash
# Run the emotyx image detached on port 5000, bind-mounting the host's
# docker_fetch directory (host-specific absolute path) into /tmp/image.
docker run -d -p 5000:5000 -v /home/vysakh/Accubits/INTEL/Accelerated-Face-Reidentification-and-Emotion-Recognition/docker_fetch:/tmp/image emotyx
|
<reponame>github-clonner/chef-patissier<filename>packages/recipe-web/gulpfile.js
// Gulp build configuration driven by the @dameblanche/core "chef" task runner.
const chef = require('@dameblanche/core');
const config = {
  // Source and output directories for all tasks.
  'root': {
    'src': './src',
    'dest': './public'
  },
  // No browsersync overrides; defaults are used.
  'browsersync': {
  },
  // Grouped by what can run in parallel
  'tasks': [
    ['clean'],
    ['eslint', 'stylelint'],
    ['images', 'svgsprite', 'static'],
    // webpack (and the rev/sizeReport steps below) only run in production builds.
    ['templates', 'css', [ 'webpack', { ifProduction: true }]],
    [['rev', { ifProduction: true }]],
    [['sizeReport', { ifProduction: true }]]
  ]
};
module.exports = chef(config);
|
<reponame>slaufer/Prebid.js<filename>test/spec/modules/pubgeniusBidAdapter_spec.js
// Unit tests for the pubGENIUS Prebid.js bid adapter.
import { expect } from 'chai';
import { spec } from 'modules/pubgeniusBidAdapter.js';
import { deepClone, parseQueryStringParameters } from 'src/utils.js';
import { config } from 'src/config.js';
import { server } from 'test/mocks/xhr.js';
// Adapter members under test, pulled off the spec object.
const {
  code,
  supportedMediaTypes,
  isBidRequestValid,
  buildRequests,
  interpretResponse,
  getUserSyncs,
  onTimeout,
} = spec;
describe('pubGENIUS adapter', () => {
  describe('code', () => {
    it('should be pubgenius', () => {
      expect(code).to.equal('pubgenius');
    });
  });
  describe('supportedMediaTypes', () => {
    it('should contain only banner', () => {
      expect(supportedMediaTypes).to.deep.equal(['banner']);
    });
  });
  // Validation rules: a numeric or numeric-string adUnitId plus valid banner sizes.
  describe('isBidRequestValid', () => {
    let bid = null;
    beforeEach(() => {
      bid = {
        mediaTypes: {
          banner: {
            sizes: [[300, 600], [300, 250]],
          },
        },
        params: {
          adUnitId: 1112,
        },
      };
    });
    it('should return true with numeric adUnitId ', () => {
      expect(isBidRequestValid(bid)).to.be.true;
    });
    it('should return true with string adUnitId ', () => {
      bid.params.adUnitId = '1112';
      expect(isBidRequestValid(bid)).to.be.true;
    });
    it('should return false without adUnitId', () => {
      delete bid.params.adUnitId;
      expect(isBidRequestValid(bid)).to.be.false;
    });
    it('should return false with adUnitId of invalid type', () => {
      bid.params.adUnitId = [1112];
      expect(isBidRequestValid(bid)).to.be.false;
    });
    it('should return false with empty sizes', () => {
      bid.mediaTypes.banner.sizes = [];
      expect(isBidRequestValid(bid)).to.be.false;
    });
    it('should return false with invalid size', () => {
      bid.mediaTypes.banner.sizes = [[300, 600, 250]];
      expect(isBidRequestValid(bid)).to.be.false;
    });
  });
  // Request building: maps Prebid bid requests onto an OpenRTB POST payload.
  describe('buildRequests', () => {
    // Snapshot global config so it can be restored after this suite.
    const origBidderTimeout = config.getConfig('bidderTimeout');
    const origPageUrl = config.getConfig('pageUrl');
    const origCoppa = config.getConfig('coppa');
    after(() => {
      config.setConfig({
        bidderTimeout: origBidderTimeout,
        pageUrl: origPageUrl,
        coppa: origCoppa,
      });
    });
    let bidRequest = null;
    let bidderRequest = null;
    let expectedRequest = null;
    beforeEach(() => {
      bidRequest = {
        adUnitCode: 'test-div',
        auctionId: 'fake-auction-id',
        bidId: 'fakebidid',
        bidder: 'pubgenius',
        bidderRequestId: 'fakebidderrequestid',
        bidRequestsCount: 1,
        bidderRequestsCount: 1,
        bidderWinsCount: 0,
        mediaTypes: {
          banner: {
            sizes: [[300, 600], [300, 250]],
          },
        },
        params: {
          adUnitId: 1112,
        },
        transactionId: 'fake-transaction-id',
      };
      bidderRequest = {
        auctionId: 'fake-auction-id',
        bidderCode: 'pubgenius',
        bidderRequestId: 'fakebidderrequestid',
        refererInfo: {},
      };
      // Baseline OpenRTB request each test mutates before asserting.
      expectedRequest = {
        method: 'POST',
        url: 'https://ortb.adpearl.io/prebid/auction',
        data: {
          id: 'fake-auction-id',
          imp: [
            {
              id: 'fakebidid',
              banner: {
                format: [{ w: 300, h: 600 }, { w: 300, h: 250 }],
                topframe: 0,
              },
              tagid: '1112',
            },
          ],
          tmax: 1200,
          ext: {
            pbadapter: {
              version: '1.0.0',
            },
          },
        },
      };
      config.setConfig({
        bidderTimeout: 1200,
        pageUrl: undefined,
        coppa: undefined,
      });
    });
    it('should build basic requests correctly', () => {
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should build requests with multiple ad units', () => {
      const bidRequest1 = deepClone(bidRequest);
      bidRequest1.adUnitCode = 'test-div-1';
      bidRequest1.bidId = 'fakebidid1';
      bidRequest1.mediaTypes.banner.sizes = [[728, 90]];
      bidRequest1.params.adUnitId = '1111';
      expectedRequest.data.imp.push({
        id: 'fakebidid1',
        banner: {
          format: [{ w: 728, h: 90 }],
          topframe: 0,
        },
        tagid: '1111',
      });
      expect(buildRequests([bidRequest, bidRequest1], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take bid floor in bidder params', () => {
      bidRequest.params.bidFloor = 0.5;
      expectedRequest.data.imp[0].bidfloor = 0.5;
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take position in bidder params', () => {
      bidRequest.params.position = 3;
      expectedRequest.data.imp[0].banner.pos = 3;
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take pageUrl in config over referer in refererInfo', () => {
      config.setConfig({ pageUrl: 'http://pageurl.org' });
      bidderRequest.refererInfo.referer = 'http://referer.org';
      expectedRequest.data.site = { page: 'http://pageurl.org' };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should use canonical URL over referer in refererInfo', () => {
      bidderRequest.refererInfo.canonicalUrl = 'http://pageurl.org';
      bidderRequest.refererInfo.referer = 'http://referer.org';
      expectedRequest.data.site = { page: 'http://pageurl.org' };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take gdprConsent when GDPR does not apply', () => {
      bidderRequest.gdprConsent = {
        gdprApplies: false,
        consentString: 'fakeconsent',
      };
      expectedRequest.data.regs = {
        ext: { gdpr: 0 },
      };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take gdprConsent when GDPR applies', () => {
      bidderRequest.gdprConsent = {
        gdprApplies: true,
        consentString: 'fakeconsent',
      };
      expectedRequest.data.regs = {
        ext: { gdpr: 1 },
      };
      expectedRequest.data.user = {
        ext: { consent: 'fakeconsent' },
      };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take uspConsent', () => {
      bidderRequest.uspConsent = '1---';
      expectedRequest.data.regs = {
        ext: { us_privacy: '1---' },
      };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take schain', () => {
      const schain = {
        ver: '1.0',
        complete: 1,
        nodes: [
          {
            asi: 'indirectseller.com',
            sid: '0001',
            hp: 1
          }
        ]
      };
      bidRequest.schain = deepClone(schain);
      expectedRequest.data.source = {
        ext: { schain: deepClone(schain) },
      };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take coppa', () => {
      config.setConfig({ coppa: true });
      expectedRequest.data.regs = { coppa: 1 };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should take user IDs', () => {
      const eid = {
        source: 'adserver.org',
        uids: [
          {
            id: 'fake-user-id',
            atype: 1,
            ext: { rtiPartner: 'TDID' },
          },
        ],
      };
      bidRequest.userIdAsEids = [deepClone(eid)];
      expectedRequest.data.user = {
        ext: {
          eids: [deepClone(eid)],
        },
      };
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should not take unsupported user IDs', () => {
      bidRequest.userIdAsEids = [
        {
          source: 'pubcid.org',
          uids: [
            {
              id: 'fake-user-id',
              atype: 1,
            },
          ],
        },
      ];
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
    it('should not take empty user IDs', () => {
      bidRequest.userIdAsEids = [];
      expect(buildRequests([bidRequest], bidderRequest)).to.deep.equal(expectedRequest);
    });
  });
  // Response parsing: maps OpenRTB seatbid entries onto Prebid bid responses.
  describe('interpretResponse', () => {
    let serverResponse = null;
    let expectedBidResponse = null;
    beforeEach(() => {
      serverResponse = {
        body: {
          seatbid: [
            {
              seat: 'pubgenius',
              bid: [
                {
                  impid: 'fakebidid',
                  price: 0.3,
                  w: 300,
                  h: 250,
                  adm: 'fake_creative',
                  exp: 60,
                  crid: 'fakecreativeid',
                },
              ],
            },
          ],
        },
      };
      expectedBidResponse = {
        requestId: 'fakebidid',
        cpm: 0.3,
        currency: 'USD',
        width: 300,
        height: 250,
        ad: 'fake_creative',
        ttl: 60,
        creativeId: 'fakecreativeid',
        netRevenue: true,
      };
    });
    it('should interpret response correctly', () => {
      expect(interpretResponse(serverResponse)).to.deep.equal([expectedBidResponse]);
    });
    it('should interpret response with adomain', () => {
      serverResponse.body.seatbid[0].bid[0].adomain = ['fakeaddomain'];
      expectedBidResponse.meta = {
        advertiserDomains: ['fakeaddomain'],
      };
      expect(interpretResponse(serverResponse)).to.deep.equal([expectedBidResponse]);
    });
    it('should interpret no bids', () => {
      expect(interpretResponse({ body: {} })).to.deep.equal([]);
    });
  });
  // User syncs: iframe pixel URL with GDPR/USP parameters appended.
  describe('getUserSyncs', () => {
    let syncOptions = null;
    let expectedSync = null;
    beforeEach(() => {
      syncOptions = {
        iframeEnabled: true,
        pixelEnabled: true,
      };
      expectedSync = {
        type: 'iframe',
        url: 'https://ortb.adpearl.io/usersync/pixels.html?',
      };
    });
    it('should return iframe pixels', () => {
      expect(getUserSyncs(syncOptions)).to.deep.equal([expectedSync]);
    });
    it('should return empty when iframe is not enabled', () => {
      syncOptions.iframeEnabled = false;
      expect(getUserSyncs(syncOptions)).to.deep.equal([]);
    });
    it('should return sync when GDPR applies', () => {
      const gdprConsent = {
        gdprApplies: true,
        consentString: 'fake-gdpr-consent',
      };
      expectedSync.url = expectedSync.url + parseQueryStringParameters({
        gdpr: 1,
        consent: 'fake-gdpr-consent',
      });
      expect(getUserSyncs(syncOptions, [], gdprConsent)).to.deep.equal([expectedSync]);
    });
    it('should return sync when GDPR does not apply', () => {
      const gdprConsent = {
        gdprApplies: false,
      };
      expectedSync.url = expectedSync.url + parseQueryStringParameters({ gdpr: 0 });
      expect(getUserSyncs(syncOptions, [], gdprConsent)).to.deep.equal([expectedSync]);
    });
    it('should return sync with US privacy', () => {
      expectedSync.url = expectedSync.url + parseQueryStringParameters({ us_privacy: '1---' });
      expect(getUserSyncs(syncOptions, [], undefined, '1---')).to.deep.equal([expectedSync]);
    });
  });
  // Timeout reporting: posts timeout data to the events endpoint (mocked XHR).
  describe('onTimeout', () => {
    it('should send timeout data', () => {
      const timeoutData = {
        bidder: 'pubgenius',
        bidId: 'fakebidid',
        params: {
          adUnitId: 1234,
        },
        adUnitCode: 'fake-ad-unit-code',
        timeout: 3000,
        auctionId: 'fake-auction-id',
      };
      onTimeout(timeoutData);
      expect(server.requests[0].method).to.equal('POST');
      expect(server.requests[0].url).to.equal('https://ortb.adpearl.io/prebid/events?type=timeout');
      expect(JSON.parse(server.requests[0].requestBody)).to.deep.equal(timeoutData);
    });
  });
});
|
#!/usr/bin/env node
// CLI that starts a telehash seed node and prints its seed info as JSON.
var tele = require("./index.js");
var fs = require("fs");
var path = require("path-extra");
var argv = require("optimist")
  .default("port", 42424)
  .default("bridge", true)
  .boolean("v").describe("v", "verbose")
  .boolean("nolan").describe("nolan", "disable lan usage")
  .argv;
// Wire verbose/info logging through console.
if(argv.v) tele.debug(console.log);
tele.info(function(){console.log.apply(console,arguments)});
// 42420 is reserved and may not be chosen as the listening port.
if(argv.port == 42420)
{
  console.log("that port is reserved");
  process.exit(1);
}
// localize our id file
argv.id = (argv.id) ? path.resolve(argv.id) : path.join(path.homedir(),".seed.json");
if(argv.seeds) argv.seeds = path.resolve(argv.seeds);
tele.init(argv, function(err, seed){
  if(!seed) return console.log("something went wrong :(",err) || process.exit(1);
  // Publish this node's connection info keyed by its hashname.
  var info = {paths:seed.paths, parts:seed.parts, keys:seed.keys};
  var seeds = {};
  seeds[seed.hashname] = info;
  console.log(JSON.stringify(seeds,null,2));
  console.log("connected to "+Object.keys(seed.lines).length+" mesh seed peers");
});
|
from flask import Flask, request, session

# Fixed: the original passed the undefined name `name`; Flask expects the
# module's import name (__name__). `request` was also used without import.
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret key here'


@app.route('/protected')
def protected():
    """Return the logged-in username, or a not-logged-in message."""
    if 'username' in session:
        return 'You are logged in as ' + session['username']
    return 'You are not logged in'


@app.route('/login', methods=['POST'])
def login():
    """Store the POSTed username in the session and confirm login."""
    session['username'] = request.form['username']
    return 'You are logged in as ' + session['username']


@app.route('/logout')
def logout():
    """Remove the username from the session."""
    session.pop('username', None)
    return 'You are logged out'


# Fixed: the original compared the undefined `name` to 'main'; the standard
# entry-point guard is __name__ == '__main__'.
if __name__ == '__main__':
    app.run(debug=True)
|
package note_test
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"testing"
"time"
"github.com/elgopher/noteo/date"
"github.com/elgopher/noteo/note"
"github.com/elgopher/noteo/tag"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestNew verifies that note.New returns a non-nil Note even for a missing path.
func TestNew(t *testing.T) {
    t.Run("should return note for missing file", func(t *testing.T) {
        assert.NotNil(t, note.New("missing"))
    })
}
// TestNewWithModified verifies construction with an explicit modified time.
func TestNewWithModified(t *testing.T) {
    t.Run("should return note for missing file", func(t *testing.T) {
        assert.NotNil(t, note.NewWithModified("missing", time.Now()))
    })
}
// TestNote_Path verifies that the constructor argument is echoed by Path().
func TestNote_Path(t *testing.T) {
    t.Run("should return path", func(t *testing.T) {
        n := note.New("path")
        // expect
        assert.Equal(t, "path", n.Path())
    })
}
// TestNote_Modified covers Modified() for missing files, real files, and an
// explicitly injected modification time.
func TestNote_Modified(t *testing.T) {
    t.Run("should return error for missing file", func(t *testing.T) {
        n := note.New("missing")
        // when
        _, err := n.Modified()
        // then
        assert.Error(t, err)
    })
    t.Run("should not error for existing file", func(t *testing.T) {
        file, err := ioutil.TempFile("", "noteo-test")
        require.NoError(t, err)
        n := note.New(file.Name())
        // when
        _, err = n.Modified()
        // then
        assert.NoError(t, err)
    })
    t.Run("should return passed modified time", func(t *testing.T) {
        givenTime, err := time.Parse(time.RFC3339, "2006-01-02T15:04:05Z")
        require.NoError(t, err)
        n := note.NewWithModified("path", givenTime)
        // when
        modified, err := n.Modified()
        // then
        require.NoError(t, err)
        assert.Equal(t, givenTime, modified)
    })
}
// TestNote_Created covers Created() with and without a Created front-matter tag.
func TestNote_Created(t *testing.T) {
    t.Run("should return zero value Time (1 Jan 1970) for note without Created tag", func(t *testing.T) {
        filename := writeTempFile(t, "body")
        n := note.New(filename)
        // when
        created, err := n.Created()
        // then
        require.NoError(t, err)
        assert.Equal(t, time.Time{}, created)
    })
    t.Run("should return time from Created tag", func(t *testing.T) {
        filename := writeTempFile(t, "---\nCreated: 2006-01-02\n---\nbody")
        n := note.New(filename)
        // when
        created, err := n.Created()
        // then
        require.NoError(t, err)
        expectedTime, err := date.Parse("2006-01-02")
        require.NoError(t, err)
        assert.Equal(t, expectedTime, created)
    })
}
// TestNote_Tags is a table-driven test of tag parsing from YAML front matter:
// empty, space-separated, comma-separated, list syntax, and whitespace trimming.
func TestNote_Tags(t *testing.T) {
    t.Run("should return tags", func(t *testing.T) {
        tests := map[string]struct {
            content      string
            expectedTags []string
        }{
            "no tags": {
                content: "",
            },
            "empty tags": {
                content: "---\nTags: \n---",
            },
            "one tag": {
                content:      "---\nTags: tag\n---",
                expectedTags: []string{"tag"},
            },
            "space separated": {
                content:      "---\nTags: tag1 tag2\n---",
                expectedTags: []string{"tag1", "tag2"},
            },
            "comma separated": {
                content:      "---\nTags: tag1,tag2\n---",
                expectedTags: []string{"tag1", "tag2"},
            },
            "list": {
                content:      "---\nTags: [tag1, tag2]\n---",
                expectedTags: []string{"tag1", "tag2"},
            },
            "tag with space in the beginning": {
                content:      "---\nTags: [\" tag\"]\n---",
                expectedTags: []string{"tag"},
            },
            "tag with space on the end": {
                content:      "---\nTags: [\"tag \"]\n---",
                expectedTags: []string{"tag"},
            },
        }
        for name, test := range tests {
            t.Run(name, func(t *testing.T) {
                filename := writeTempFile(t, test.content)
                n := note.New(filename)
                // expect
                assertTags(t, n, test.expectedTags...)
            })
        }
    })
}
// TestNote_SetTag covers adding a tag, relative-date expansion, and updating
// an existing tag's value.
func TestNote_SetTag(t *testing.T) {
    t.Run("should add tag for file without front matter", func(t *testing.T) {
        filename := writeTempFile(t, "text")
        n := note.New(filename)
        newTag, err := tag.New("tag")
        require.NoError(t, err)
        // when
        err = n.SetTag(newTag)
        // then
        require.NoError(t, err)
        assertTags(t, n, "tag")
    })
    t.Run("should set tag with relative date", func(t *testing.T) {
        // Freeze "now" so the relative date expands deterministically.
        date.SetNow(func() time.Time {
            return time.Date(2020, 9, 10, 16, 30, 11, 0, time.FixedZone("CEST", 60*60*2))
        })
        defer date.SetNow(time.Now)
        filename := writeTempFile(t, "")
        n := note.New(filename)
        // when
        err := n.SetTag(newTag(t, "deadline:now"))
        // then
        require.NoError(t, err)
        assertTags(t, n, "deadline:2020-09-10T16:30:11+02:00")
    })
    t.Run("should update existing tag", func(t *testing.T) {
        filename := writeTempFile(t, "---\nTags: tag:1\n---\nbody")
        n := note.New(filename)
        newTag, err := tag.New("tag:2")
        require.NoError(t, err)
        // when
        err = n.SetTag(newTag)
        // then
        require.NoError(t, err)
        assertTags(t, n, "tag:2")
    })
}
// TestNote_RemoveTag covers removing one of several tags, the last tag, and a
// tag that is not present (a no-op).
func TestNote_RemoveTag(t *testing.T) {
    t.Run("should remove tag", func(t *testing.T) {
        filename := writeTempFile(t, "---\nTags: tag another\n---\ntext")
        n := note.New(filename)
        tagToRemove := newTag(t, "tag")
        // when
        err := n.RemoveTag(tagToRemove)
        // then
        require.NoError(t, err)
        assertTags(t, n, "another")
    })
    t.Run("should remove last remaining tag", func(t *testing.T) {
        filename := writeTempFile(t, "---\nTags: tag\n---\ntext")
        n := note.New(filename)
        tagToRemove := newTag(t, "tag")
        // when
        err := n.RemoveTag(tagToRemove)
        // then
        require.NoError(t, err)
        assertNoTags(t, n)
    })
    t.Run("removing missing tag does nothing", func(t *testing.T) {
        filename := writeTempFile(t, "content")
        n := note.New(filename)
        missingTag := newTag(t, "missing")
        // when
        err := n.RemoveTag(missingTag)
        // then
        require.NoError(t, err)
        assertNoTags(t, n)
    })
}
// TestNote_Save covers persisting changes: creating front matter, updating an
// existing one, and skipping the write when nothing changed.
func TestNote_Save(t *testing.T) {
    t.Run("should add yaml front matter for file without it", func(t *testing.T) {
        filename := writeTempFile(t, "text")
        n := note.New(filename)
        require.NoError(t, n.SetTag(newTag(t, "tag")))
        // when
        saved, err := n.Save()
        // then
        require.NoError(t, err)
        assert.True(t, saved)
        // and
        assertFileEquals(t, filename, "---\nTags: tag\n---\ntext")
    })
    t.Run("should update front matter", func(t *testing.T) {
        filename := writeTempFile(t, "---\nTags: foo\n---\n\ntext")
        n := note.New(filename)
        require.NoError(t, n.SetTag(newTag(t, "tag")))
        // when
        saved, err := n.Save()
        // then
        require.NoError(t, err)
        assert.True(t, saved)
        // and
        assertFileEquals(t, filename, "---\nTags: foo tag\n---\n\ntext")
    })
    t.Run("should not save file if nothing changed", func(t *testing.T) {
        filename := writeTempFile(t, "text")
        n := note.New(filename)
        // when
        saved, err := n.Save()
        // then
        assert.False(t, saved)
        assert.NoError(t, err)
    })
}
// TestNote_Body verifies that Body() strips the front matter (when present)
// and handles empty bodies.
func TestNote_Body(t *testing.T) {
    t.Run("should return body when note does not have a front matter", func(t *testing.T) {
        filename := writeTempFile(t, "body")
        n := note.New(filename)
        // when
        actual, err := n.Body()
        // then
        require.NoError(t, err)
        assert.Equal(t, "body", actual)
    })
    t.Run("should return body when note has a front matter", func(t *testing.T) {
        filename := writeTempFile(t, "---\nTags: tag\n---\nbody")
        n := note.New(filename)
        // when
        actual, err := n.Body()
        // then
        require.NoError(t, err)
        assert.Equal(t, "body", actual)
    })
    t.Run("should return empty body", func(t *testing.T) {
        filename := writeTempFile(t, "---\nTags: tag\n---\n")
        n := note.New(filename)
        // when
        actual, err := n.Body()
        // then
        require.NoError(t, err)
        assert.Empty(t, actual)
    })
}
// TestNote_UpdateLink verifies markdown link rewriting: matching by bare name,
// absolute path, and relative path — for both the `from` argument and the link
// target inside the note — plus directory renames and non-matching links.
func TestNote_UpdateLink(t *testing.T) {
    t.Run("should not change the body if link is missing", func(t *testing.T) {
        filename := writeTempFile(t, "body")
        n := note.New(filename)
        // when
        err := n.UpdateLink("from", "to")
        require.NoError(t, err)
        // then
        body, err := n.Body()
        require.NoError(t, err)
        assert.Equal(t, "body", body)
    })
    // Each entry produces a different spelling of the same "from.md" path
    // (bare, absolute, relative) given the temp file's location.
    tests := []func(string) string{
        func(filename string) string {
            return "from.md"
        },
        func(filename string) string {
            return filepath.Join(filepath.Dir(filename), "from.md")
        },
        func(filename string) string {
            return filepath.Join("..", filepath.Base(filepath.Dir(filename)), "from.md")
        },
    }
    t.Run("should update markdown link when from parameter is", func(t *testing.T) {
        for _, from := range tests {
            filename := writeTempFile(t, "[link](from.md)")
            n := note.New(filename)
            t.Run(from(filename), func(t *testing.T) {
                // when
                err := n.UpdateLink(from(filename), "to.md")
                require.NoError(t, err)
                // then
                body, err := n.Body()
                require.NoError(t, err)
                assert.Equal(t, "[link](to.md)", body)
            })
        }
    })
    t.Run("should update markdown link when link path is", func(t *testing.T) {
        for _, linkPath := range tests {
            filename := writeTempFileWithFunction(t, func(filename string) string {
                return fmt.Sprintf("[link](%s)", linkPath(filename))
            })
            n := note.New(filename)
            t.Run(linkPath(filename), func(t *testing.T) {
                // when
                err := n.UpdateLink("from.md", "to.md")
                require.NoError(t, err)
                // then
                body, err := n.Body()
                require.NoError(t, err)
                assert.Equal(t, "[link](to.md)", body)
            })
        }
    })
    t.Run("should update markdown link when directory is renamed", func(t *testing.T) {
        filename := writeTempFile(t, "[link](source/file.md)")
        n := note.New(filename)
        // when
        err := n.UpdateLink("source", "target")
        require.NoError(t, err)
        // then
        body, err := n.Body()
        require.NoError(t, err)
        assert.Equal(t, "[link](target/file.md)", body)
    })
    t.Run("should not update markdown link", func(t *testing.T) {
        filename := writeTempFile(t, "[link](other.md)")
        n := note.New(filename)
        // when
        err := n.UpdateLink("from.md", "to.md")
        require.NoError(t, err)
        // then
        body, err := n.Body()
        require.NoError(t, err)
        assert.Equal(t, "[link](other.md)", body)
    })
}
// writeTempFile creates a temporary file containing content and returns its path.
// The handle returned by TempFile is now closed explicitly — the original
// leaked one open file descriptor per call.
func writeTempFile(t *testing.T, content string) string {
    file, err := ioutil.TempFile("", "noteo-test")
    require.NoError(t, err)
    require.NoError(t, file.Close())
    require.NoError(t, ioutil.WriteFile(file.Name(), []byte(content), os.ModePerm))
    return file.Name()
}
// writeTempFileWithFunction creates a temporary file whose content is computed
// from its own path (needed for self-referential link fixtures). The TempFile
// handle is now closed explicitly — the original leaked a descriptor per call.
func writeTempFileWithFunction(t *testing.T, content func(filename string) string) string {
    file, err := ioutil.TempFile("", "noteo-test")
    require.NoError(t, err)
    require.NoError(t, file.Close())
    require.NoError(t, ioutil.WriteFile(file.Name(), []byte(content(file.Name())), os.ModePerm))
    return file.Name()
}
// assertFileEquals fails the test unless the file's content equals expectedContent.
func assertFileEquals(t *testing.T, filename, expectedContent string) {
    content, err := ioutil.ReadFile(filename)
    require.NoError(t, err)
    assert.Equal(t, expectedContent, string(content))
}
// assertNoTags fails the test unless the note has no tags at all.
func assertNoTags(t *testing.T, n *note.Note) {
    assertTags(t, n)
}
// assertTags fails the test unless the note's tags equal expectedTags, in order.
func assertTags(t *testing.T, n *note.Note, expectedTags ...string) {
    tags, err := n.Tags()
    require.NoError(t, err)
    require.Equal(t, len(expectedTags), len(tags), "different tags len")
    for i, expectedTag := range expectedTags {
        assert.Equal(t, newTag(t, expectedTag), tags[i])
    }
}
// newTag builds a tag.Tag from name, failing the test on a parse error.
func newTag(t *testing.T, name string) tag.Tag {
    result, err := tag.New(name)
    require.NoError(t, err)
    return result
}
|
/*
 * Check if assets are loaded
 * --------------------------
 *
 * assetLoaded
 *
 * @param assets [HTML element]
 * @param return [promise]
 *
 * assetsLoaded
 *
 * @param assets [array] of [HTML elements]
 * @param done callback [function] when finished loading
 */
// Returns a promise that resolves with `asset` once a proxy copy of it has
// finished loading, or rejects on a load error / unsupported tag.
const assetLoaded = ( asset ) => {
    const type = asset.tagName;
    return new Promise( ( resolve, reject ) => {
        let proxy = null;
        if( type === 'IMG' )
            proxy = new Image();
        // Fixed: `Iframe` and `Video` are not browser globals — the original
        // `new Iframe()` / `new Video()` threw a ReferenceError. Create real
        // elements instead.
        if( type === 'IFRAME' )
            proxy = document.createElement( 'iframe' );
        if( type === 'VIDEO' )
            proxy = document.createElement( 'video' );
        // Fixed: the original dereferenced a null proxy for any other tag.
        if( proxy === null ) {
            reject( 'assetLoaded: unsupported asset type ' + type );
            return;
        }
        const res = () => {
            resolve( asset );
        };
        const err = ( message ) => {
            reject( message );
        };
        proxy.onload = res;
        proxy.onerror = err;
        // Fixed: media elements never fire `load`; resolve once the first
        // frame's data is available.
        if( type === 'VIDEO' )
            proxy.onloadeddata = res;
        // Assign src after the handlers so no event can be missed.
        proxy.src = asset.src;
        // Images already in cache may be complete immediately.
        if( proxy.complete )
            res();
    } );
};
const assetsLoaded = ( assets = [], done = () => {} ) => {
if( assets.length == 0 ) {
done( false );
return;
}
Promise.all( assets.map( assetLoaded ) )
.then( data => {
done( data );
} )
.catch( err => {
console.log( err );
done( false );
} );
};
export { assetLoaded, assetsLoaded };
|
<gh_stars>0
package io.github.achacha.decimated.deadcode;
import java.time.Instant;
/**
 * Data available when trigger is executed.
 * <p>
 * Read-only view of a single trigger invocation: where it happened, when,
 * and the stack trace leading to it.
 */
public interface TriggerAccessPoint {
    /**
     * Location of the execution
     * @return String with full package, class and line number
     */
    String getLocation();
    /**
     * Time when this trigger executed
     * @return {@link Instant}
     */
    Instant getAccessTime();
    /**
     * Throwable which contains the stack trace that includes the dead code location
     * @return {@link Throwable}
     */
    Throwable getThrowable();
}
|
#!/usr/bin/python
from sys import argv
import hashlib
import ntpath
files = argv
def sha256_checksum(filename, block_size=65536):
    """Return the hex SHA-256 digest of *filename*, read in chunks.

    Streaming in ``block_size`` pieces keeps memory flat for large files.
    """
    digest = hashlib.sha256()
    with open(filename, 'rb') as stream:
        while True:
            chunk = stream.read(block_size)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def parse_hash_file(hash_file):
    """Parse an OpenSSL-style digest file into ``{filename: digest}``.

    Each line looks like ``SHA256(<filename>)= <hex digest>``; ``key[7:-1]``
    strips the ``SHA256(`` prefix and closing ``)``.

    Fixes over the original: the file handle is closed (context manager),
    the line is split only on the first ``=``, and the digest is stripped of
    surrounding whitespace instead of blindly chopping the last character
    (which ate a hex digit when the final line had no trailing newline).
    """
    with open(hash_file) as handle:
        entries = (line.split('=', 1) for line in handle)
        return {key[7:-1]: value.strip() for key, value in entries}
def path_leaf(path):
    """Return the final path component, tolerating a trailing separator."""
    head, tail = ntpath.split(path)
    if tail:
        return tail
    # A trailing separator leaves an empty tail; fall back to the head's base name.
    return ntpath.basename(head)
def check_hashes(hashes, files):
    """Validate each file in *files* against the expected digests in *hashes*.

    *hashes* maps base filenames to expected digests; a file fails when its
    base name is absent from *hashes* or its computed digest does not match.

    Returns a human-readable summary string.

    Fix over the original: the full path is passed to check_hash_for_file —
    the original passed only the base name, so hashing failed for any file
    outside the current working directory.
    """
    invalid = []
    for file_path in files:
        real_name = path_leaf(file_path)
        if real_name not in hashes:
            invalid.append(real_name)
            continue
        if not check_hash_for_file(hashes[real_name], file_path):
            invalid.append(real_name)
    if not invalid:
        return "All certificates are valid"
    return "Certificates invalid :%s" % (', '.join(invalid))
def check_hash_for_file(hash, file):
    """Return True when the computed SHA-256 of *file* occurs within *hash*.

    Substring containment (rather than equality) tolerates the leading space
    and trailing newline that parse_hash_file leaves around the digest.
    """
    return sha256_checksum(file) in hash
# Entry point: argv[1] is the digest file, remaining arguments are the files to verify.
hashes = parse_hash_file(files[1])
print (check_hashes(hashes, files[2:]))
|
<gh_stars>1-10
"""
This module provides a SUO-KIF ontology files parser.
Possible usage:
>>> from kifparser import KIFParser
>>> p = KIFParser()
>>> largest = None
>>> # io should be a file-like object open for reading
>>> for obj in p.parse(io):
>>> if largest is None:
>>> largest = obj
>>> continue
>>> if obj.end - obj.start > largest.end - largest.start:
>>> largest = obj
>>> conc = largest.eval_conc()
>>> print(conc)
In the example above `obj' is a constituent object extracted by the
parser, and `conc' is the concept evaluated from this constituent.
"""
from typing import TextIO, Generator
import tqdm
from aiire import Agenda
from .grammar import KIFGrammar
class KIFAgenda(Agenda):
    """KIFAgenda is AIIRE Agenda for KIF parser with KIF grammar."""

    def __init__(self):
        """Create KIF agenda, pre-wired with a fresh KIFGrammar."""
        super().__init__(KIFGrammar())
class KIFParser(object):
    """KIFParser is the main parser class for SUO-KIF."""

    def parse(
        self, io: TextIO
    ) -> Generator[KIFGrammar.Constituent, None, None]:
        """
        Parse an IO (file-like object).

        Draws a progress bar while parsing.

        @param io: a file-like object to parse
        @yield: all possible constituents parser could recognize
        """
        agenda = KIFAgenda()
        # `pos` is a running character offset across the whole stream; each
        # character becomes a one-character Atom fed to the agenda.
        pos = 0
        for line in tqdm.tqdm(io):
            for char in line:
                yield from agenda.put(KIFGrammar.Atom(pos, pos + 1, char))
                pos += 1
|
#!/bin/bash
# Run the DNB index build pipeline from the script's own directory:
# fetch titles, split, convert, build the index, then push JSON to Elasticsearch.
WORKDIR=$(dirname "$0")
# Fixed: quote the path (spaces-safe) and abort if the cd fails — the original
# would have run every pipeline step in the wrong directory.
cd "$WORKDIR" || exit 1
./getDnbTitel.sh
./split.sh
./convert.sh
./createDnbIndex.sh
./json2es.sh
|
import numpy as np
from sklearn.linear_model import LinearRegression
# NOTE(review): this snippet assumes `X` (feature matrix) and `y` (targets)
# are already defined in scope — they are not defined here; confirm the
# surrounding context supplies them before running.
# Create an instance of the linear regression model
regression_model = LinearRegression()
# Fit the model to the given data set
regression_model.fit(X, y)
# Make predictions using the model
predictions = regression_model.predict(X)
|
#!/usr/bin/env bash
# Shell test helpers for comparing a source tree against a deployed destination.
# Strict mode: abort on errors, unset variables, and pipeline failures.
set -e
set -u
set -o pipefail
# -------------------------------------------------------------------------------------------------
# PUBLIC FUNCTIONS
# -------------------------------------------------------------------------------------------------
###
### Check if source and destination file exist
### (exits 1 on the first missing file)
###
### @param rel_path filename
### @param abs_path source directory
### @param abs_path destination directory
###
check_src_dst_file_exist() {
	local f="${1}"
	local src_dir="${2}"
	local dst_dir="${3}"
	if [ ! -f "${src_dir}/${f}" ]; then
		printf "[TEST] [FAIL] Source file does not exist: %s\\r\\n" "${src_dir}/${f}"
		exit 1
	fi
	if [ ! -f "${dst_dir}/${f}" ]; then
		printf "[TEST] [FAIL] Destination file does not exist: %s\\r\\n" "${dst_dir}/${f}"
		exit 1
	fi
	printf "[TEST] [OK] Source and Destination files exist\\r\\n"
}
###
### Check if destination file is a file
### (fails and exits 1 when it is a directory or a symlink)
###
### @param rel_path filename
### @param abs_path destination directory
###
check_dst_file_is_file() {
	local file_rel="${1}"
	local dest="${2}"
	if [ -d "${dest}/${file_rel}" ]; then
		printf "[TEST] [FAIL] Destination file is a directory: %s\\r\\n" "${dest}/${file_rel}"
		exit 1
	elif [ -L "${dest}/${file_rel}" ]; then
		printf "[TEST] [FAIL] Destination file is a symlink: %s\\r\\n" "${dest}/${file_rel}"
		exit 1
	else
		printf "[TEST] [OK] Destination file is a regular file\\r\\n"
	fi
}
###
### Check if destination file is a symlink
### (fails and exits 1 when it is a directory or not a link)
###
### @param rel_path filename
### @param abs_path destination directory
###
check_dst_file_is_link() {
	local file_rel="${1}"
	local dest="${2}"
	if [ -d "${dest}/${file_rel}" ]; then
		printf "[TEST] [FAIL] Destination file is a directory: %s\\r\\n" "${dest}/${file_rel}"
		exit 1
	fi
	if [ ! -L "${dest}/${file_rel}" ]; then
		printf "[TEST] [FAIL] Destination file is not a symlink: %s\\r\\n" "${dest}/${file_rel}"
		exit 1
	fi
	printf "[TEST] [OK] Destination file is a symlink\\r\\n"
}
###
### Check if source and destination file equal
### NOTE(review): relies on a `run` helper that is not defined in this file —
### presumably sourced from elsewhere; confirm before using standalone.
###
### @param rel_path filename
### @param abs_path source directory
### @param abs_path destination directory
###
check_src_dst_file_equal() {
	local f="${1}"
	local src_dir="${2}"
	local dst_dir="${3}"
	if ! run "cmp -s '${src_dir}/${f}' '${dst_dir}/${f}'"; then
		printf "[TEST] [FAIL] Source (%s) and dest (%s) files differ\\r\\n" "${src_dir}/${f}" "${dst_dir}/${f}"
		exit 1
	else
		printf "[TEST] [OK] Source and dest files are equal\\r\\n"
	fi
}
|
#!/bin/sh
# Starts the flask-ioc-lookup app under gunicorn (as user/group ioc),
# with worker settings overridable via environment variables.
set -e
set -x
. /opt/flask-ioc-lookup/env/bin/activate
project_dir=${project_dir-"/opt/flask-ioc-lookup/"}
# gunicorn settings
workers=${workers-1}
worker_class=${worker_class-sync}
worker_threads=${worker_threads-1}
worker_timeout=${worker_timeout-30}
# Need to tell Gunicorn to trust the X-Forwarded-* headers
forwarded_allow_ips=${forwarded_allow_ips-'*'}
# set PYTHONPATH if it is not already set using Docker environment
export PYTHONPATH=${PYTHONPATH-${project_dir}}
# nice to have in docker run output, to check what
# version of something is actually running.
/opt/flask-ioc-lookup/env/bin/pip freeze
echo ""
echo "$0: Starting ioc_lookup"
# exec replaces this shell so signals reach the daemon directly.
exec start-stop-daemon --start -c ioc:ioc --exec \
     /opt/flask-ioc-lookup/env/bin/gunicorn \
     --user=ioc --group=ioc -- \
     --bind 0.0.0.0:5000 \
     --workers ${workers} --worker-class ${worker_class} \
     --threads ${worker_threads} --timeout ${worker_timeout} \
     --forwarded-allow-ips="${forwarded_allow_ips}" \
     ioc_lookup.app:app
|
<gh_stars>1-10
const Action = require('../JobAction').AbstractJobAction
const Parliaments = require('@parameter').Parliament.Number
const TfIdfClassification = require('@model').TfIdfClassification
/**
 * Job action that regroups raw TF-IDF classification results by parliament.
 * For each parliament number it keeps only results belonging to that
 * parliament that carry a `raw` payload, wrapping each in a
 * TfIdfClassification built from the result's bill and raw data.
 */
class ClassificationResultAdapterAction extends Action {
  constructor (params) {
    super()
    this.params = params
  }

  /**
   * @param {Array} results - raw classification results to adapt
   * @returns {Promise<Array>} one { params, data } entry per parliament
   */
  async perform (results) {
    return Parliaments.map((parliament) => {
      const classifications = results
        .filter((entry) => entry.parliament === parliament && entry.raw)
        .map((entry) => new TfIdfClassification(entry.bill, entry.raw))
      return {
        params: { parliament: parliament },
        data: classifications
      }
    })
  }
}

module.exports.ClassificationResultAdapterAction = ClassificationResultAdapterAction
|
'use strict';
//dependencies
var path = require('path');
var nock = require('nock');
var faker = require('faker');
var expect = require('chai').expect;
var Transport = require(path.join(__dirname, '..', '..'));
var multi =
require(path.join(__dirname, 'fixtures',
'send_multi_multiple_destination_response.json'));
//TODO test for alternative flow
// Tests for Transport.sendMultiSMS: URL constant, the unauthorized error
// path, and the happy path, with the Infobip HTTP API mocked via nock.
describe('Send Multiple SMS', function () {

  it('should have /sms/1/text/multi as send multi sms url', function (done) {
    var transport = new Transport();
    expect(transport.sendMultiUrl).to.equal('/sms/1/text/multi');
    done();
  });

  it('should return unauthorized error when invalid credentials provided',
    function (done) {
      var transport = new Transport({
        username: faker.internet.userName(),
        // FIX: restore the faker call — the file previously contained the
        // literal redaction placeholder `<PASSWORD>()`, a syntax error.
        password: faker.internet.password()
      });

      var sms = {
        messages: [{
          from: 'InfoSMS',
          to: [
            '41793026727',
            '41793026731'
          ],
          text: 'May the Force be with you!'
        }, {
          from: '41793026700',
          to: '41793026785',
          text: 'A long time ago, in a galaxy far, far away.'
        }]
      };

      nock(transport.baseUrl)
        .post(transport.sendMultiUrl)
        .reply(function ( /*uri, requestBody*/ ) {

          //assert headers
          expect(this.req.headers.accept).to.equal('application/json');
          expect(this.req.headers['content-type']).to.equal(
            'application/json');
          expect(this.req.headers.host).to.equal('api.infobip.com');
          expect(this.req.headers.authorization).to.not.be.null;

          //fake invalid credentials
          return [401, {
            requestError: {
              serviceException: {
                messageId: 'UNAUTHORIZED',
                text: 'Invalid login details'
              }
            }
          }];

        });

      //send SMS(s)
      transport.sendMultiSMS(sms, function (error, response) {
        // 401 must surface as an error with no response object.
        expect(response).to.be.undefined;
        expect(error).to.exist;
        expect(error.code).to.equal(401);
        expect(error.name).to.equal('UNAUTHORIZED');
        expect(error.message).to.equal('Invalid login details');
        done();
      });

    });

  it('should send multiple sms', function (done) {
    var transport = new Transport({
      username: faker.internet.userName(),
      // FIX: restore the faker call (was the redaction placeholder `<PASSWORD>()`).
      password: faker.internet.password()
    });

    var sms = {
      messages: [{
        from: 'InfoSMS',
        to: [
          '41793026727',
          '41793026731'
        ],
        text: 'May the Force be with you!'
      }, {
        from: '41793026700',
        to: '41793026785',
        text: 'A long time ago, in a galaxy far, far away.'
      }]
    };

    nock(transport.baseUrl)
      .post(transport.sendMultiUrl)
      .reply(function (uri, requestBody) {

        //assert headers
        expect(this.req.headers.accept).to.equal('application/json');
        expect(this.req.headers['content-type']).to.equal(
          'application/json');
        expect(this.req.headers.host).to.equal('api.infobip.com');
        expect(this.req.headers.authorization).to.not.be.null;

        //assert request body
        expect(requestBody).to.exist;
        expect(requestBody).to.eql(sms);

        // Canned success payload loaded from the fixtures file.
        return [200, multi];

      });

    //send SMS(s)
    transport.sendMultiSMS(sms, function (error, response) {
      expect(error).to.be.null;
      expect(response).to.exist;
      expect(response.messages).to.exist;
      expect(response.messages.length).to.be.equal(3);
      done();
    });

  });

});
|
#!/usr/bin/env bash
# Copyright (c) 2020 gyselroth GmbH
# Licensed under the MIT License - https://opensource.org/licenses/MIT
# Where valgrind writes its full report for the test run.
VALGRIND_LOG="test/tmp/mem-leak.log"

# Valgrind invocation prefix used by the test harness.
# shellcheck disable=SC2034
VALGRIND="valgrind -v --leak-check=full\
 --log-file=${VALGRIND_LOG}"

# Pattern matched against the log to detect a non-zero ERROR SUMMARY line.
# FIX: the previous pattern ("[1-9] errors from [1-9] contexts") only matched
# single-digit counts, so runs with 10+ errors or contexts went undetected.
# shellcheck disable=SC2034
VALGRIND_ERR_PATTERN="ERROR SUMMARY: [1-9][0-9]* errors from [1-9][0-9]* contexts"
|
#include <stdio.h>
// Linear search function
/*
 * Scan A[0..n-1] left to right for key.
 * Returns the index of the first occurrence, or -1 if key is absent.
 */
int linear_search(int n, int A[], int key)
{
    for (int idx = 0; idx < n; ++idx)
        if (A[idx] == key)
            return idx;

    /* Exhausted the array without a match. */
    return -1;
}
/*
 * Driver: reads n, then n integers, then the key from stdin, and reports
 * the 1-based position of the key (or "Element not found") on stdout.
 * Assumes n <= 20 (size of A) — input is not validated.
 */
int main()
{
    int n, key, A[20], i, pos;
    // Read the array size (must not exceed 20; unchecked)
    scanf("%d", &n);
    // Read the array elements
    for (i = 0; i < n; i++)
        scanf("%d", &A[i]);
    // Read the search element
    scanf("%d", &key);
    // Call the linear search function (returns 0-based index or -1)
    pos = linear_search(n, A, key);
    if (pos == -1)
        printf("Element not found\n");
    else
        // +1 converts the 0-based index to the 1-based position printed
        printf("Element found at position %d\n", pos+1);
    return 0;
}
|
from django import forms
class ProfileForm(forms.Form):
    """Simple profile form with a display name and an uploaded picture."""

    # Free-text name, capped at 100 characters.
    name = forms.CharField(max_length=100)
    # Uploaded image file.
    picture = forms.ImageField()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.