text
stringlengths 1
1.05M
|
|---|
// Simple jQuery slideshow: shows one child of .slideshow at a time and
// rotates through them with a cross-fade every 3 seconds.
$(function(){
// Hide every slide except the first (:gt(0) matches children after index 0).
$('.slideshow > :gt(0)').hide();
// Every 3s: fade out the current first slide, fade in its successor, then
// move the faded slide to the end of the container so the rotation loops.
setInterval(function(){$('.slideshow > :first-child').fadeOut().next().fadeIn().end().appendTo('.slideshow');}, 3000);
});
|
<gh_stars>1-10
import _ from 'lodash';
/**
 * Small string helpers shared across the app.
 */
const stringUtils = {
  /**
   * Upper-cases the first character of `str`, leaving the rest untouched.
   *
   * Bug fix: the previous guard (`str?.length === 0`) only caught the empty
   * string — `null`/`undefined` slipped through to `str[0].toUpperCase()`
   * and threw a TypeError. Nullish/empty inputs are now returned unchanged.
   *
   * @param {string|null|undefined} str
   * @returns {string|null|undefined} the transformed string, or the input
   *   unchanged when it is nullish or empty
   */
  upperCaseFirstLetter(str) {
    if (!str || str.length === 0) {
      return str;
    }
    // slice() replaces the deprecated substr().
    return `${str[0].toUpperCase()}${str.slice(1)}`;
  },

  /**
   * Converts `str` to camelCase via lodash; nullish/empty inputs are
   * returned unchanged.
   *
   * @param {string|null|undefined} str
   * @returns {string|null|undefined}
   */
  strToCamelCase(str) {
    if (!str || str.length === 0) {
      return str;
    }
    return _.camelCase(str);
  },
};

export default stringUtils;
|
<filename>examples/sendMessageEmbed.js
const { Client, Intents, RichEmbed } = require('esmerald.js');

const client = new Client({
  token: 'TOKEN',
  intents: [Intents.GUILDS],
});

// Fetch the user, then post an embed showing their avatar.
client.users.fetch('USER_ID')
  .then((user) => {
    const embed = new RichEmbed()
      .setColor(0x00ff00)
      .setTitle('Avatar')
      .setDescription(`Avatar from **${user.username}**`)
      .setImage(user.avatarURL({ dynamic: true }));
    return client.createMessage('CHANNEL_ID', { embeds: [embed] });
  })
  // Bug fix: the chain had no rejection handler, so a failed fetch or send
  // surfaced as an unhandled promise rejection. Log it instead.
  .catch((err) => {
    console.error('Failed to send avatar embed:', err);
  });

client.connect();
|
def bedroom_lights_status(BB_DO1, BB_DO2):
    """Describe the combined state of the two bedroom lights.

    Args:
        BB_DO1: True when bedroom light 1 is on.
        BB_DO2: True when bedroom light 2 is on.

    Returns:
        A human-readable status string.
    """
    # Bug fix: the original tested `BB_DO1 or BB_DO2` before the both-on
    # case, making the "Both bedroom lights are on" branch unreachable.
    # Check the most specific condition first.
    if BB_DO1 and BB_DO2:
        return "Both bedroom lights are on"
    if BB_DO1 or BB_DO2:
        return "At least one bedroom light is on"
    return "Both bedroom lights are off"


# Test the function with the initial values
BB_DO1 = False
BB_DO2 = False
print(bedroom_lights_status(BB_DO1, BB_DO2))  # Output: "Both bedroom lights are off"
|
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.views.tree.actions;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.gef.commands.CommandStack;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.widgets.Display;
import com.archimatetool.editor.tools.GenerateViewCommand;
import com.archimatetool.model.IAdapter;
import com.archimatetool.model.IArchimateElement;
import com.archimatetool.model.IArchimateModel;
import com.archimatetool.model.IArchimateModelObject;
/**
* Generate View Action
*
* @author <NAME>
*/
public class GenerateViewAction extends ViewerAction {

    public GenerateViewAction(ISelectionProvider selectionProvider) {
        super(selectionProvider);
        setText(Messages.GenerateViewAction_0);
    }

    @Override
    public void run() {
        List<IArchimateElement> elements = getValidSelectedObjects(getSelection());
        if(elements.isEmpty()) {
            return;
        }

        GenerateViewCommand command = new GenerateViewCommand(elements);
        if(!command.openDialog(Display.getCurrent().getActiveShell())) {
            return;
        }

        IAdapter adapter = (IAdapter)elements.get(0);
        CommandStack commandStack = (CommandStack)adapter.getAdapter(CommandStack.class);
        commandStack.execute(command);
    }

    @Override
    public void update() {
        boolean hasValidSelection = !getValidSelectedObjects(getSelection()).isEmpty();
        setEnabled(hasValidSelection);
    }

    /**
     * Collects the unique IArchimateElement instances in the selection.
     * Returns an empty list when the selection spans more than one model.
     */
    private List<IArchimateElement> getValidSelectedObjects(IStructuredSelection selection) {
        List<IArchimateElement> elements = new ArrayList<IArchimateElement>();

        if(!isSameModel(selection)) {
            return elements;
        }

        for(Object object : selection.toArray()) {
            // Only Elements, and no duplicates
            if(object instanceof IArchimateElement && !elements.contains(object)) {
                elements.add((IArchimateElement)object);
            }
        }

        return elements;
    }

    /**
     * As this action is for the models tree, it's possible a user could select objects
     * from different models. We don't want this.
     */
    private boolean isSameModel(IStructuredSelection selection) {
        IArchimateModel firstModel = null;

        for(Object object : selection.toArray()) {
            if(!(object instanceof IArchimateModelObject)) {
                continue;
            }
            IArchimateModel nextModel = ((IArchimateModelObject)object).getArchimateModel();
            if(firstModel != null && firstModel != nextModel) {
                return false;
            }
            firstModel = nextModel;
        }

        return true;
    }
}
|
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Train the L2TL model on the SVHN target task, warm-starting from an MNIST
# pretrain checkpoint, then evaluate the final checkpoint.
model_name=l2tl_svhn
steps=1200
python3 train_l2tl.py \
  --train_batch_size=8 \
  --learning_rate=0.005 \
  --rl_learning_rate=0.01 \
  --target_num_classes=5 \
  --train_steps=$steps \
  --source_train_batch_multiplier=2 \
  --loss_weight_scale=100. \
  --num_choices=100 \
  --first_pretrain_steps=0 \
  --target_val_batch_multiplier=4 \
  --target_train_batch_multiplier=1 \
  --model_dir=trained_models/${model_name} \
  --warm_start_ckpt_path=trained_models/mnist_pretrain/model.ckpt-2000
# Evaluate the checkpoint written at the last training step.
python3 evaluate.py \
  --ckpt_path=trained_models/${model_name}/model.ckpt-$steps \
  --target_dataset=svhn_cropped_small \
  --src_num_classes=5 \
  --cls_dense_name=final_target_dense
|
#curl -L https://github.com/rockie-yang/data/raw/master/sfpd/sfpd.csv.gz -o data/sfpd.csv.gz
# Run the Zeppelin+Highcharts image detached and auto-removed; Zeppelin UI on
# host port 8080, Spark UI (container 4040) on 8040, with local data/notebook
# directories mounted into the container.
docker run -d --rm -p 8080:8080 -p 8040:4040 -v `pwd`/data:/data -v `pwd`/notebook:/usr/zeppelin/notebook knockdata/zeppelin-highcharts
|
#ifndef CLIENT_TO_SERVER_REQUEST_HPP_
#define CLIENT_TO_SERVER_REQUEST_HPP_
//============================================================================
// Name :
// Author : Avi
// Revision : $Revision: #32 $
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description :
//============================================================================
#include <boost/core/noncopyable.hpp>
#include "ClientToServerCmd.hpp"
// Base class for client to server requesting.
// This class is used in the IPC messaging from client to server.
class ClientToServerRequest : private boost::noncopyable {
public:
   ClientToServerRequest() = default;
   ~ClientToServerRequest() = default;

   // Stores the command to ship to the server; also stamps it with the
   // calling user's authentication details.
   void set_cmd(const Cmd_ptr& cmd) { cmd_ = cmd; cmd_->setup_user_authentification(); }
   Cmd_ptr get_cmd() const { return cmd_;}

   /// This is called in the server only, to handle the request.
   STC_Cmd_ptr handleRequest(AbstractServer*) const;

   std::ostream& print(std::ostream& os) const;

   // Classification helpers: delegate to the stored command; false when no
   // command has been set.
   bool getRequest() const { return (cmd_.get()) ? cmd_->get_cmd() : false; }
   bool terminateRequest() const { return (cmd_.get()) ? cmd_->terminate_cmd() : false; }
   bool groupRequest() const { return (cmd_.get()) ? cmd_->group_cmd() : false; }

   void cleanup() { if (cmd_.get()) cmd_->cleanup();} // reclaim memory *AFTER* command has run

   /// Used by boost test, to verify persistence
   bool operator==(const ClientToServerRequest& rhs) const;

private:
   Cmd_ptr cmd_;

   // Serialized (via cereal) when the request crosses the IPC boundary.
   friend class cereal::access;
   template<class Archive>
   void serialize(Archive & ar)
   {
      ar(CEREAL_NVP(cmd_));
   }
};
std::ostream& operator<<(std::ostream& os, const ClientToServerRequest& d);
#endif
|
#!/bin/bash
# Run the test suite of every sub-project in one sbt invocation.
# Exits with status 1 if any project's tests fail.
./sbt \
  "project accumulo" test \
  "project accumulo-spark" test \
  "project cassandra" test \
  "project cassandra-spark" test \
  "project gdal" test \
  "project gdal-spark" test \
  "project geotools" test \
  "project hbase" test \
  "project hbase-spark" test \
  "project layer" test \
  "project proj4" test \
  "project raster" test \
  "project s3" test \
  "project s3-spark" test \
  "project shapefile" test \
  "project spark" test \
  "project spark-pipeline" test \
  "project store" test \
  "project util" test \
  "project vector" test \
  "project vectortile" test || { exit 1; }
|
#!/bin/bash
#SBATCH -J astar # job name
#SBATCH -o Pipeline.o%j # output and error file name (%j expands to jobID)
#SBATCH -n 1 # total number of mpi tasks requested
#SBATCH -N 1 # total number of mpi tasks requested
#SBATCH -p largemem # queue (partition) -- normal, development, etc.
#SBATCH -t 00:30:00 # run time (hh:mm:ss) - 1.5 hours
#SBATCH --mail-type=begin # email me when the job starts
#SBATCH --mail-type=end # email me when the job finishes
#SBATCH -A TG-ASC130023
# Use GCC instead of the cluster's default Intel toolchain.
module swap intel gcc/4.9.1
# Absolute directory of this script, so the relative cd below works from anywhere.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
SEQ="../../seqs/Balibase/Ref1/5_medium_high_id/5ptp.fasta"
CMD="../../bin/msa_pastar"
THREADS="-t 32"
HASH="-y FZORDER"
HASH_SHIFT="-s 12"
OPT="$THREADS $HASH $HASH_SHIFT"
cd $DIR"/../../seqs/Balibase"
# Trace wait4 syscalls and record time/memory usage; append everything
# (stdout and stderr) to the per-sequence log file.
strace -ve wait4 /usr/bin/time -v $CMD $OPT $SEQ >> $SEQ.output 2>&1
|
<filename>spring/Web/SpringMvc0001/src/main/java/com/curso/spring/springmvc0001/web/controladores/PrimerControlador.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.curso.spring.springmvc0001.web.controladores;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.Controller;
/**
*
* @author <NAME>
*/
public class PrimerControlador implements Controller {

    /**
     * Handles every request routed to this controller by rendering the
     * "primera" view. No model attributes are added.
     */
    @Override
    public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception {
        final ModelAndView vista = new ModelAndView("primera");
        return vista;
    }
}
|
<reponame>amitse/fluentui-react-native
// Theming helper backed by the mock native theming module, so tests and
// environments without the native module share one implementation.
import { createMockThemingModuleHelper } from '../MockThemingModule';
export const ThemingModuleHelper = createMockThemingModuleHelper();
|
<filename>jsclient/src/components/ParameterList.js
import React from 'react';
import PropTypes from 'prop-types';
import Parameter from './Parameter';
import './ParameterList.css';
class ParameterList extends React.Component {
constructor(props) {
super(props);
this.onSaveToFlash = this.onSaveToFlash.bind(this);
this.onRefresh = this.onRefresh.bind(this);
this.state = {alertMessage: "", error: false};
}
onSaveToFlash() {
this.setState({alertMessage: "Saving...", error: false});
this.props.onPersist().then(
() => { this.setState({alertMessage: "Saved"}); },
(e) => { this.setState({error: true, alertMessage: e.message}); },
);
}
onRefresh() {
this.setState({alertMessage: "Reading parmeters...", error: false});
this.props.onRefresh().then(
() => { this.setState({alertMessage: "Refresh complete"}); },
(e) => { this.setState({error: true, alertMessage: e.message}); },
);
}
render() {
let params = [];
this.props.parameterList.forEach((p) => {
params.push(
<Parameter
key={p.id}
id={p.id}
name={p.name}
description={p.description}
type={p.type}
dirty={this.props.parameterDirtyMap[p.id]}
value={this.props.parameters[p.id]}
onSave={this.props.onSave}
onChange={this.props.onChange}
/>,
);
});
return <div className="parameter-list">
<button className="refreshButton" onClick={this.onRefresh}>Refresh from Device</button>
<button className="saveButton" onClick={this.onSaveToFlash}>Save to Flash</button>
<div className="parameterAlert"><span className={this.state.error ? "error" : ""}>{this.state.alertMessage}</span></div>
{params}
</div>;
}
}
// Runtime prop validation for the component above.
ParameterList.propTypes = {
  parameterList: PropTypes.arrayOf(PropTypes.exact({
    id: PropTypes.number.isRequired,
    name: PropTypes.string.isRequired,
    description: PropTypes.string,
    type: PropTypes.oneOf(["float", "int", "bool"]),
  })),
  // Map of parameter id -> current value.
  parameters: PropTypes.object,
  // Map of parameter id -> true when the value has unsaved edits.
  parameterDirtyMap: PropTypes.object,
  onRefresh: PropTypes.func,
  onSave: PropTypes.func,
  onChange: PropTypes.func,
  onPersist: PropTypes.func,
};
export default ParameterList;
|
import glob
import os
import os.path as osp

# NOTE(review): never read afterwards — kept for compatibility with callers
# that may import this module.
missingLists = []

# For every main* directory, list the scenes whose direct-environment map
# count does not match the environment map count, writing them to
# <dir>.txt and echoing them to stdout.
src_dirs = [d for d in glob.glob('main*') if osp.isdir(d)]
for src_dir in src_dirs:
    scene_dirs = sorted(glob.glob(osp.join(src_dir, 'scene*')))
    with open(src_dir + '.txt', 'w') as out_file:
        for scene_dir in scene_dirs:
            env_maps = glob.glob(osp.join(scene_dir, 'imenv_*.hdr'))
            direct_maps = glob.glob(osp.join(scene_dir, 'imenvDirect_*.hdr'))
            if len(env_maps) != len(direct_maps):
                out_file.write('%s\n' % scene_dir)
                print(scene_dir)
|
/*
 * Returns the n-th Fibonacci number (fib(0) = 0, fib(1) = 1) using naive
 * double recursion — exponential time, fine for small n.
 */
int fibonacci(int n)
{
    if (n <= 1)
        return n;
    /* Bug fix: the second recursive call was fibonacci(n + 2), which never
     * reaches the base case (infinite recursion) and is not the Fibonacci
     * recurrence. The correct term is fibonacci(n - 2). */
    return fibonacci(n - 1) + fibonacci(n - 2);
}
|
def generate_create_table_query(columns: list, table_name: str = "table_name") -> str:
    """Build a CREATE TABLE statement from column descriptions.

    Args:
        columns: List of dicts, each with 'name' and 'data_type' keys plus
            optional flags 'primary_key', 'unique', and 'nullable'
            (``nullable is False`` emits NOT NULL).
        table_name: Name of the table to create. Defaults to the previously
            hard-coded "table_name" so existing callers are unaffected.

    Returns:
        The CREATE TABLE statement as a string.
    """
    query = f"CREATE TABLE {table_name} (\n"
    for column in columns:
        query += f" {column['name']} {column['data_type']}"
        if column.get('primary_key'):
            query += " PRIMARY KEY"
        if column.get('unique'):
            query += " UNIQUE"
        if column.get('nullable') is False:
            query += " NOT NULL"
        query += ",\n"
    # Drop the trailing comma before closing the statement.
    query = query.rstrip(",\n") + "\n);"
    return query
|
<gh_stars>1-10
/*
*
*/
package net.community.chest.ui.helpers.input;
import javax.swing.InputVerifier;
import javax.swing.JComponent;
import net.community.chest.awt.attributes.AttrUtils;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Jan 12, 2009 2:17:06 PM
*/
public class TextInputVerifier extends InputVerifier {
    public TextInputVerifier ()
    {
        super();
    }

    /**
     * @return {@code true} when the supplied text is non-null and non-empty
     */
    public boolean verifyText (final String text)
    {
        if (text == null)
            return false;
        return text.length() > 0;
    }

    /*
     * @see javax.swing.InputVerifier#verify(javax.swing.JComponent)
     */
    @Override
    public boolean verify (JComponent input)
    {
        // if cannot retrieve the text then cannot parse it
        if (!AttrUtils.isTextableComponent(input))
            return false;

        try
        {
            final String componentText=AttrUtils.getComponentText(input);
            return verifyText(componentText);
        }
        catch(RuntimeException e)
        {
            return false;
        }
    }

    /** Shared stateless instance. */
    public static final TextInputVerifier TEXT=new TextInputVerifier();
}
|
#!/bin/bash
# This file is used by
# https://github.com/flutter/tests/tree/master/registry/flutter_packages.test
# to run Dart static analysis and tests in this repository as a presubmit
# for the flutter/flutter repository.
# Changes to this file (and any tests in this repository) are only honored
# after the commit hash in the "flutter_packages.test" mentioned above has been
# updated.
# Remember to also update the Windows version (tests_script.bat) when
# changing this file.
# Abort on the first failing command.
set -e
# Resolve the example app's dependencies first, then analyze and test the
# package from its root.
cd example
flutter packages get
cd ../
flutter analyze
flutter test
|
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import QuoteRightSvg from '@rsuite/icon-font/lib/legacy/QuoteRight';
const QuoteRight = createSvgIcon({
as: QuoteRightSvg,
ariaLabel: 'quote right',
category: 'legacy',
displayName: 'QuoteRight'
});
export default QuoteRight;
|
#!/bin/bash
#!/bin/sh
# Clears the screen and prints the ASCII-art banner.
# NOTE(review): the art below is mojibake — box-drawing characters read with
# the wrong encoding. Re-saving the file as UTF-8 from the original source
# would restore it; the bytes are kept unchanged here.
banner() {
clear
printf "************************************************************************************************ \n"
printf "โโโโโโโโโโโโโโโโโโโโโโโ โโโ โโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโ \n"
printf "โโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโ\n"
printf "โโโโโโโโโโโโโโ โโโโโโโโโโโ โโโโโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโ โโโ โโโโโโโโโโโ\n"
printf "โโโโโโโโโโโโโโ โโโโโโโโโโโโ โโโโโโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโ โโโ โโโโโโโโโโ \n"
printf "โโโโโโโโโโโโโโโโโโโ โโโ โโโโโโโ โโโโโโโโโโโ โโโ โโโโโโโโโโโโโโโโ โโโ โโโโโโโโโโโโ \n"
printf "โโโโโโโโโโโโโโโโโโโ โโโ โโโโโ โโโโโโโโโโโ โโโ โโโโโโโโโโโโโโโโ โโโ โโโโโโโ โโโ \n"
printf "**************************************************************************************************\n"
}
# Interactive main menu: prints the options, reads the user's choice, runs
# the matching action, then redraws the banner and menu again.
menu() {
    printf "\e[1;92m[\e[0m\e[1;77m01\e[0m\e[1;92m]\e[0m\e[1;93m TO INSTALL PI-HOLE (FOR YOUR OWN DNS)\e[0m\n"
    printf "\e[1;92m[\e[0m\e[1;77m02\e[0m\e[1;92m]\e[0m\e[1;93m TO INSTALL PI-VPN (FOR YOUR VPN)\e[0m\n"
    # Bug fix: user-visible typo "MANGING" corrected to "MANAGING".
    printf "\e[1;92m[\e[0m\e[1;77m03\e[0m\e[1;92m]\e[0m\e[1;93m FOR MANAGING THE DNS\e[0m\n"
    printf "\e[1;92m[\e[0m\e[1;77m04\e[0m\e[1;92m]\e[0m\e[1;93m FOR ADDING THE CLIENT FOR VPN\e[0m\n"
    printf "\n"
    printf "\e[1;93m[\e[0m\e[1;77m99\e[0m\e[1;93m]\e[0m\e[1;77m Exit\e[0m\n"
    printf "\n"
    read -p $'\e[1;92m[*] Choose an option:\e[0m\e[1;77m ' option
    if [[ $option == 1 || $option == 01 ]]; then
        curl -sSL https://install.pi-hole.net | bash
        clear
        banner
        menu
    elif [[ $option == 2 || $option == 02 ]]; then
        curl -L https://install.pivpn.io | bash
        clear
        banner
        menu
    elif [[ $option == 3 || $option == 03 ]]; then
        bash yourblacklist.sh
        clear
        banner
        menu
    elif [[ $option == 4 || $option == 04 ]]; then
        bash Addclient.sh
        clear
        banner
        menu
    elif [[ $option == 99 ]]; then
        # Bug fix: a user-requested exit is not an error — return success.
        exit 0
    else
        printf "\e[5;93m[\e[1;77m!\e[0m\e[1;93m] Invalid option!\e[0m"
        sleep 0.5
        clear
        # Consistency fix: redraw the banner like every other branch does.
        banner
        menu
    fi
}
# Entry point: draw the banner once, then hand control to the menu loop.
banner
menu
|
#! /bin/bash
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Load the per-user workshop settings (zone, SQL address/password, external
# IP, scripts bucket).
source ~/cpo200/config
# Substitute the placeholder tokens in guestbook.yaml, in place, with the
# values loaded above.
sed -i \
  -e s/'<your-default-zone>'/$CPO200_ZONE/ \
  -e s/'<guestbook-sql-ip-address>'/$CPO200_SQL_ADDRESS/ \
  -e s/'<guestbook-sql-password>'/$CPO200_SQL_PW/ \
  -e s/'<guestbook-external-ip-address>'/$CPO200_GB_DM_IP/ \
  -e s/'<startup-scripts-bucket>'/$CPO200_SCRIPTS_BUCKET/ \
  guestbook.yaml
|
package com.company.warlock.core.generator.action.config;
import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
/**
 * Default configuration for the code generator.
 *
 * @author fengshuonan
 * @date 2017-10-28 8:27 PM
 */
public class GunsGeneratorConfig extends AbstractGeneratorConfig {

    protected void globalConfig() {
        // Absolute path of your own project, down to the java source root.
        globalConfig.setOutputDir("D:\\ycw\\Warlock\\src\\main\\java");
        globalConfig.setFileOverride(true);   // overwrite files that already exist
        globalConfig.setEnableCache(false);   // add second-level cache config to the XML mappers
        globalConfig.setBaseResultMap(true);  // generate BaseResultMap
        globalConfig.setBaseColumnList(true); // generate baseColumnList
        globalConfig.setOpen(false);          // open the output directory when finished
        globalConfig.setAuthor("ycw");
    }

    protected void dataSourceConfig() {
        dataSourceConfig.setDbType(DbType.ORACLE);
        dataSourceConfig.setDriverName("oracle.jdbc.OracleDriver");
        dataSourceConfig.setUsername("fmc");
        dataSourceConfig.setPassword("<PASSWORD>");
        dataSourceConfig.setUrl("jdbc:oracle:thin:@127.0.0.1:1521:orcl");
    }

    protected void strategyConfig() {
        strategyConfig.setTablePrefix(new String[]{""}); // change this to your table prefix
        strategyConfig.setNaming(NamingStrategy.underline_to_camel); // table-name to entity naming strategy
        strategyConfig.setInclude("WX_ACCOUNT"); // tables to generate code for
    }

    protected void packageConfig() {
        packageConfig.setParent(null);
        packageConfig.setEntity("com.company.warlock.modular.wx.entity");
        packageConfig.setMapper("com.company.warlock.modular.wx.mapper");
        packageConfig.setXml("com.company.warlock.modular.wx.mapper.mapping");
    }

    protected void contextConfig() {
        contextConfig.setProPackage("com.company.warlock");
        contextConfig.setCoreBasePackage("com.company.warlock.core");
        contextConfig.setBizChName("่ดฆๅท็ฎก็");
        contextConfig.setBizEnName("wxAccount");
        contextConfig.setModuleName("wx"); // parent module name
        contextConfig.setProjectPath("D:\\ycw\\Warlock"); // absolute path of your own project
        contextConfig.setEntityName("wxAccount");
        sqlConfig.setParentMenuName("็ณป็ป็ฎก็"); // name of an existing menu, used as the parent node
        /*
         * mybatis-plus generator switches
         */
        contextConfig.setEntitySwitch(true);
        contextConfig.setDaoSwitch(true);
        contextConfig.setServiceSwitch(true);
        /*
         * guns generator switches
         */
        contextConfig.setControllerSwitch(true);
        contextConfig.setIndexPageSwitch(true);
        contextConfig.setAddPageSwitch(true);
        contextConfig.setEditPageSwitch(true);
        contextConfig.setJsSwitch(true);
        contextConfig.setInfoJsSwitch(true);
        contextConfig.setSqlSwitch(true);
    }

    @Override
    protected void config() {
        globalConfig();
        dataSourceConfig();
        strategyConfig();
        packageConfig();
        contextConfig();
    }
}
|
<filename>lesson_5/hw_6.py<gh_stars>0
import re
from transliterate import translit
def normalize_str(string):
    """Transliterate a Russian string to Latin characters and replace every
    non-alphanumeric character with an underscore.

    Returns '' when the input is not a string.
    """
    # isinstance is the idiomatic type check (and accepts str subclasses);
    # the original compared type(string).__name__ against 'str'.
    if not isinstance(string, str):
        return ''
    translate_str = translit(string, 'ru', reversed=True)
    return re.sub('[^0-9A-Za-z]', '_', translate_str)


if __name__ == '__main__':
    print(normalize_str("!โ%:,.;()+=-\//\ ะะธัะพะฝ ะดะท 6 "))
|
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
source "$(dirname "$0")"/common_kubernetes.sh

export FLINK_JOB=org.apache.flink.examples.java.wordcount.WordCount
export FLINK_IMAGE_NAME=test_kubernetes_embedded_job
export OUTPUT_VOLUME=${TEST_DATA_DIR}/out
export OUTPUT_FILE=kubernetes_wc_out
export FLINK_JOB_PARALLELISM=1
export FLINK_JOB_ARGUMENTS='"--output", "/cache/kubernetes_wc_out"'

# Non-zero until the result hash check at the bottom succeeds.
SUCCEEDED=1

# Tear down every Kubernetes resource this test created; dump logs first
# when the test did not succeed.
function cleanup {
    if [ $SUCCEEDED != 0 ]; then
        debug_and_show_logs
    fi
    kubectl delete job flink-job-cluster
    kubectl delete service flink-job-cluster
    kubectl delete deployment flink-task-manager
    stop_kubernetes
}
# Bug fix: cleanup was defined but never registered, so cluster resources
# leaked on every run. Run it whenever the script exits.
trap cleanup EXIT

start_kubernetes

mkdir -p $OUTPUT_VOLUME

# Bake the WordCount example jar into the test image.
cd "$DOCKER_MODULE_DIR"
build_image_with_jar ${FLINK_DIR}/examples/batch/WordCount.jar ${FLINK_IMAGE_NAME}
cd "$END_TO_END_DIR"

# Create the job-cluster service, job, and task-manager deployment from the
# templates, substituting the exported variables above.
kubectl create -f ${KUBERNETES_MODULE_DIR}/job-cluster-service.yaml
envsubst '${FLINK_IMAGE_NAME} ${FLINK_JOB} ${FLINK_JOB_PARALLELISM} ${FLINK_JOB_ARGUMENTS}' < ${CONTAINER_SCRIPTS}/job-cluster-job.yaml.template | kubectl create -f -
envsubst '${FLINK_IMAGE_NAME} ${FLINK_JOB_PARALLELISM}' < ${CONTAINER_SCRIPTS}/task-manager-deployment.yaml.template | kubectl create -f -

# Wait for the job to finish, copy the output out of the task manager pod,
# and verify it against the expected hash.
kubectl wait --for=condition=complete job/flink-job-cluster --timeout=1h
kubectl cp `kubectl get pods | awk '/task-manager/ {print $1}'`:/cache/${OUTPUT_FILE} ${OUTPUT_VOLUME}/${OUTPUT_FILE}
check_result_hash "WordCount" ${OUTPUT_VOLUME}/${OUTPUT_FILE} "${RESULT_HASH}"
SUCCEEDED=$?
|
#!/bin/bash
# Build/push/release tooling for the osm CLI binary and its Docker image.
set -o errexit
set -o nounset
set -o pipefail
GOPATH=$(go env GOPATH)
SRC=$GOPATH/src
BIN=$GOPATH/bin
ROOT=$GOPATH
REPO_ROOT=$GOPATH/src/github.com/appscode/osm
source "$REPO_ROOT/hack/libbuild/common/lib.sh"
source "$REPO_ROOT/hack/libbuild/common/public_image.sh"
# dev by default; "prod" unlocks the release path below.
APPSCODE_ENV=${APPSCODE_ENV:-dev}
IMG=osm
DIST=$GOPATH/src/github.com/appscode/osm/dist
mkdir -p $DIST
# Re-export TAG/TAG_STRATEGY from a previous build, if present.
if [ -f "$DIST/.tag" ]; then
	export $(cat $DIST/.tag | xargs)
fi
# Remove the artifacts copied into hack/docker by build_docker.
clean() {
	pushd $GOPATH/src/github.com/appscode/osm/hack/docker
	rm osm Dockerfile
	popd
}
# Compile the osm binaries and record the build tag in $DIST/.tag.
build_binary() {
	pushd $GOPATH/src/github.com/appscode/osm
	./hack/builddeps.sh
	./hack/make.py build
	detect_tag $DIST/.tag
	popd
}
# Build the alpine-based Docker image around the linux/amd64 binary.
build_docker() {
	pushd $GOPATH/src/github.com/appscode/osm/hack/docker
	cp $DIST/osm/osm-alpine-amd64 osm
	chmod 755 osm
	cat >Dockerfile <<EOL
FROM alpine
RUN set -x \
  && apk add --update --no-cache ca-certificates
COPY osm /usr/bin/osm
USER nobody:nobody
ENTRYPOINT ["osm"]
EOL
	local cmd="docker build -t appscode/$IMG:$TAG ."
	echo $cmd; $cmd
	rm osm Dockerfile
	popd
}
build() {
	build_binary
	build_docker
}
# Push a canary image — refuses to run in prod or from a git tag.
docker_push() {
	if [ "$APPSCODE_ENV" = "prod" ]; then
		echo "Nothing to do in prod env. Are you trying to 'release' binaries to prod?"
		exit 0
	fi
	if [ "$TAG_STRATEGY" = "git_tag" ]; then
		echo "Are you trying to 'release' binaries to prod?"
		exit 1
	fi
	hub_canary
}
# Release the image — only allowed in prod AND from a git tag.
docker_release() {
	if [ "$APPSCODE_ENV" != "prod" ]; then
		echo "'release' only works in PROD env."
		exit 1
	fi
	if [ "$TAG_STRATEGY" != "git_tag" ]; then
		echo "'apply_tag' to release binaries and/or docker images."
		exit 1
	fi
	hub_up
}
# Dispatch to the subcommand given on the command line (from lib.sh).
source_repo $@
|
def filter_numbers(nums):
    """Return the values in nums that are less than or equal to 50,
    preserving their original order."""
    # Comprehension replaces the manual append loop.
    return [num for num in nums if num <= 50]


# Bug fix: the original called filter_numbers(numbers) with `numbers`
# undefined, which raised a NameError. Define sample input first.
numbers = [10, 25, 51, 100, 50]
result = filter_numbers(numbers)
print(result)
|
<reponame>kuro46/CommandUtility<gh_stars>1-10
package dev.shirokuro.commandutility;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Objects;
final class ReflectionUtils {
private ReflectionUtils() {
}
/**
* Returns human-readable method information
*
* @return string representation of method
*/
public static String methodInfo(final Method method) {
Objects.requireNonNull(method, "method");
return "'" + method.getName() + "' in '" + method.getDeclaringClass().getName() + "'";
}
/**
* Checks equality of method parameters.
*
* @return {@code true} if {@code method}'s parameter is equal to {@code classes}.
* Otherwise {@code false}
*/
public static boolean equalsMethodParams(final Method method, final Class<?>... classes) {
Objects.requireNonNull(method, "method");
Objects.requireNonNull(classes, "classes");
return Arrays.equals(method.getParameterTypes(), classes);
}
/**
* Asserts {@code method} is {@code public}.
*
* @throws IllegalArgumentException when {@code method} is not {@code public}
*/
public static void assertPublic(final Method method) {
Objects.requireNonNull(method, "method");
if (!Modifier.isPublic(method.getModifiers())) {
throw new IllegalArgumentException("Method: " + methodInfo(method) + " is not public!");
}
}
}
|
# Render a page with headless Chrome and print the resulting HTML source.
from selenium import webdriver
url = "https://www.example.com"
# Run Chrome without a visible window.
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--headless')
driver = webdriver.Chrome(options=chrome_options)
driver.get(url)
# page_source is the current DOM as held by the browser.
htmlCode = driver.page_source
print(htmlCode)
|
# Import catalog data from the remote shop into temporary tables, once per
# supported locale. (eng-US is currently disabled.)
bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt lit-LT
bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt eng-GB
bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt rus-RU
bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt lav-LV
bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt pol-PL
bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt swe-SE
#bin/console catalog:import_data_to_tmp https://katalogas.unishop.lt eng-US
|
#!/usr/bin/env bash
# Build raft-java-core and the example, then unpack and start a local
# three-node raft cluster (ports 8051-8053) plus a client directory.

cd ../raft-java-core && mvn clean install -DskipTests
cd -
mvn clean package

EXAMPLE_TAR=raft-java-example-1.9.0-deploy.tar.gz
ROOT_DIR=./env
# Full cluster membership string shared by every server instance.
CLUSTER="127.0.0.1:8051:1,127.0.0.1:8052:2,127.0.0.1:8053:3"
mkdir -p $ROOT_DIR
cd $ROOT_DIR

# The three server stanzas were identical except for the instance number;
# server i listens on port 805i with node id i.
for i in 1 2 3; do
    mkdir example$i
    cd example$i
    cp -f ../../target/$EXAMPLE_TAR .
    tar -zxvf $EXAMPLE_TAR
    chmod +x ./bin/*.sh
    nohup ./bin/run_server.sh ./data "$CLUSTER" "127.0.0.1:805$i:$i" >> nohup$i.out &
    cd -
done

# Unpack the client alongside the servers (not started automatically).
mkdir client
cd client
cp -f ../../target/$EXAMPLE_TAR .
tar -zxvf $EXAMPLE_TAR
chmod +x ./bin/*.sh
cd -
|
<gh_stars>0
package com.ua.nure.TestHelper.repository;
import com.ua.nure.TestHelper.domain.User;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import java.util.List;
/**
 * Spring Data repository for {@link User} entities, including native
 * queries joining the users and groups tables.
 */
public interface UserRepository extends JpaRepository<User, Long> {

    /** Finds the user with exactly this email/password pair. */
    User findByEmailAndPassword(String email, String password);

    /** Finds the user registered under this email. */
    User findByEmail(String email);

    /** All students whose group row carries the given invite link. */
    @Query(value = "SELECT * FROM users,groups WHERE users.id_user = groups.id_student AND groups.link =?1", nativeQuery = true)
    List<User> findAllStudentsByLink(String Link);

    /** All users stored with position = 0 (students). */
    @Query(value = "SELECT * FROM users WHERE users.position = 0", nativeQuery = true)
    List<User> getAllByPosition_Student();

    /** Students in an enabled group reachable through the given teacher's link. */
    @Query(value = "SELECt id_user, firstName, lastName,activity, email, password, position FROM users, groups WHERE groups.enabled = 1 AND groups.link in (select link FROM links WHERE id_teacher = ?1 and link = ?2) AND groups.id_student = users.id_user;", nativeQuery = true)
    List<User> getAllActivatedStudents(String idTeacher, String link);

    User getByIdUser(String idUser);
}
|
# Read the generator configuration written by the configure step.
CONFIGURATION=$(<./generator_config.txt)

# config_value KEY — print the value of the "KEY=..." line in $CONFIGURATION.
# Extracted because the same sed expression was duplicated eight times.
config_value() {
    echo "$CONFIGURATION" | sed -n "s/$1=\(.*\)/\1/p"
}

# to_namespace PATH — turn a folder path into a dotted namespace segment:
# slashes become dots, leading/trailing dots are trimmed.
to_namespace() {
    echo "$1" | sed -e 's/[/]/./g' -e 's/^[.]*//g' -e 's/[.]*$//g'
}

LAST_SCRIPT_VERSION=$(config_value SCRIPT_VERSION)
[ "$LAST_SCRIPT_VERSION" != "$SCRIPT_VERSION" ] && echo "Script version mismatch. Please reconfigure the generator." && exit 0;

API_PROJECT_DIRECTORY=$(config_value API_PROJECT_DIRECTORY)
API_PROJECT_FOLDER=$(config_value API_PROJECT_FOLDER)
CONTROLLER_FOLDER=$(config_value CONTROLLER_FOLDER)
API_PROJECT_NAMESPACE=$(config_value API_PROJECT_NAMESPACE)
SERVICE_PROJECT_FOLDER=$(config_value SERVICE_PROJECT_FOLDER)
SERVICE_PROJECT_DIRECTORY=$(config_value SERVICE_PROJECT_DIRECTORY)
SERVICE_FOLDER=$(config_value SERVICE_FOLDER)
SERVICE_PROJECT_NAMESPACE=$(config_value SERVICE_PROJECT_NAMESPACE)

CONTROLLER_NAMESPACE="$API_PROJECT_NAMESPACE.$(to_namespace "$CONTROLLER_FOLDER")"
SERVICE_NAMESPACE="$SERVICE_PROJECT_NAMESPACE.$(to_namespace "$SERVICE_FOLDER")"

# Resolve every configured path to an absolute one.
# NOTE(review): $SOLUTION_FOLDER is never assigned in this script — it is
# presumably exported by the caller. Confirm before relying on it.
ROOT_FOLDER=$(readlink -m .)
SOLUTION_FOLDER=$(readlink -m $SOLUTION_FOLDER)
cd $SOLUTION_FOLDER
API_PROJECT_DIRECTORY=$(readlink -m $API_PROJECT_DIRECTORY)
API_PROJECT_FOLDER=$(readlink -m $API_PROJECT_FOLDER)
cd $API_PROJECT_FOLDER
CONTROLLER_FOLDER=$(readlink -m $CONTROLLER_FOLDER)
cd $SOLUTION_FOLDER
SERVICE_PROJECT_DIRECTORY=$(readlink -m $SERVICE_PROJECT_DIRECTORY)
SERVICE_PROJECT_FOLDER=$(readlink -m $SERVICE_PROJECT_FOLDER)
cd $SERVICE_PROJECT_FOLDER
SERVICE_FOLDER=$(readlink -m $SERVICE_FOLDER)
cd $ROOT_FOLDER
|
๏ปฟ#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma ide diagnostic ignored "OCUnusedGlobalDeclarationInspection"
#pragma clang diagnostic ignored "-Wuninitialized"
#pragma ide diagnostic ignored "readability-non-const-parameter"
/*
* mov.c (c) 2018-20 <NAME>
*/
/* Demo functions for inspecting the compiler's generated move/load/store
 * instructions. Several functions (mov_demo4, mov_demo5, str_demo1,
 * stb_demo2) deliberately write through uninitialized pointers — undefined
 * behavior — which is why -Wuninitialized is suppressed at the top of this
 * file. They exist to show the emitted instructions; do not "fix" them. */

/* Identity: argument moved straight to the return register. */
long mov_demo1(long i) {
    return i;
}

/* Immediate move into a local, then return. */
int mov_demo2() {
    int i = 3;
    return i;
}

/* Load through a pointer. */
long mov_demo3(long *i) {
    return *i;
}

/* UB demo (intentional): store through an uninitialized pointer. */
long *mov_demo4() {
    long *i;
    *i = 3;
    return i;
}

/* UB demo (intentional): copy through an uninitialized destination pointer. */
long *mov_demo5(long *i) {
    long *j;
    *j = *i;
    return j;
}

struct foo {
    long i;
    long j;
};

/* Field load at a constant struct offset. */
long mov_demo6(struct foo *bar) {
    return bar->j;
}

/* Indexed load with a 64-bit index. */
long mov_demo7(long *i, long j) {
    return i[j];
}

/* Load at a constant element offset. */
long mov_demo8(long *i) {
    return i[3];
}

/* Indexed load with a signed 32-bit index. */
long mov_demo9(long *i, int j) {
    return i[j];
}

/* Indexed load with an unsigned 32-bit index. */
long mov_demo10(long *i, unsigned int j) {
    return i[j];
}

/* 32-bit element load with a 32-bit index. */
int mov_demo11(int *i, int j) {
    return i[j];
}

/* Struct copy out of an array of structs, then a field read. */
long mov_demo12(struct foo *bar, long j) {
    struct foo baz = bar[j];
    return baz.i;
}

/* Byte load with a signed 32-bit index. */
long mov_demo13(char *i, int j) {
    return i[j];
}

/* Byte load with an unsigned 32-bit index. */
long mov_demo14(char *i, unsigned int j) {
    return i[j];
}

/* Simple reduction loop over the first max elements. */
long iterate(long *i, long max) {
    long sum = 0;
    for (int j = 0; j < max; j++) {
        sum += i[j];
    }
    return sum;
}

/* Add with a left-shifted operand. */
long shift(long i, long j) {
    return i + (j << 12);
}

/* Add with a right-shifted operand (arithmetic shift for signed j). */
long othershift(long i, long j) {
    return i + (j >> 12);
}

/* Word-sized load through a pointer. */
long ldr_demo(long *i) {
    return *i;
}

/* Half-word load through a pointer. */
short ldrh_demo(short *i) {
    return *i;
}

/* Byte load through a pointer. */
char ldrb_demo(char *i) {
    return *i;
}

/* UB demo (intentional): store through an uninitialized pointer. */
long *str_demo1(long i) {
    long *j;
    *j = i;
    return j;
}

/* UB demo (intentional): byte store through an uninitialized pointer. */
char *stb_demo2(char i) {
    char *j;
    *j = i;
    return j;
}

int main() {
    return 0;
}
#pragma clang diagnostic pop
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bd;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import modelo.Acceso;
import modelo.Empresa;
import modelo.SubEmpresa;
/**
*
* @author JuanVilla
*/
public class AccesoDAO extends GestorBD {
private Connection conexion;
public AccesoDAO() {
super();
}
public AccesoDAO(Connection conexion) {
this.conexion = conexion;
}
public String guardarAcceso(Acceso acceso) throws SQLException{
FacesContext contextoJSF = FacesContext.getCurrentInstance();
int actualizado=0;
try {
bd.conectar(getUsuario(), getClave(), getServidor(), getPuerto(), getBasedatos());
sql = "insert into accesos" +
"(nom_acceso, ip_local, ip_publica, user_acceso, contraseรฑa, cod_subempresa, cod_empresa)" +
"values ("
+ "'" + acceso.getNom_acceso() +"',"
+ "'" + acceso.getIp_local() +"',"
+ "'" + acceso.getIp_externa() +"',"
+ "'" + acceso.getUser_acceso() +"',"
+ "'" + acceso.getContraseรฑa() +"',"
+ "'" + acceso.getEmpresa().getNit_empresa()+"',"
+ "'" + acceso.getSub_empresa().getNit_subempresa()+"')";
actualizado = bd.actualizar(sql);
} catch (SQLException E) {
contextoJSF.addMessage(null, new FacesMessage(FacesMessage.SEVERITY_WARN, "", E.getMessage()));
} finally {
bd.desconectar();
}
return Integer.toString(actualizado);
}
public ArrayList<Acceso> listarAcceso() throws SQLException {
Acceso acceso;
ArrayList<Acceso> listarAcceso = new ArrayList<>();
ResultSet rs;
Consulta consulta = null;
consulta = new Consulta(conexion);
try {
//Sq1 Carga la lista completa de accesos sin filtrar
sql
= " SELECT "
+ " e.nom_empresa, s.nom_subempresa,"
+ " a.cod_acceso, a.nom_acceso, a.ip_local, a.ip_publica, a.contraseรฑa, a.user_acceso "
+ " from accesos a inner join empresas e on (nit_empresa=cod_empresa) "
+ " inner join subempresa s on (nit_subempresa=cod_subempresa) ";
rs = consulta.ejecutar(sql);
while (rs.next()) {
acceso = new Acceso ();
acceso.setNom_acceso(rs.getString("nom_acceso"));
acceso.setIp_local(rs.getString("ip_local"));
acceso.setIp_externa(rs.getString("ip_publica"));
acceso.setUser_acceso(rs.getString("user_acceso"));
acceso.setContraseรฑa(rs.getString("contraseรฑa"));
acceso.getEmpresa().setNom_empresa(rs.getString("nom_empresa"));
acceso.getSub_empresa().setNom_empresa(rs.getString("nom_subempresa"));;
listarAcceso.add(acceso);
}
return listarAcceso;
} catch (SQLException ex) {
throw ex;
} finally {
consulta.desconectar();
}
}
public ArrayList<Acceso> listarAcceso() throws SQLException {
Acceso acceso;
ArrayList<Acceso> listarAcceso = new ArrayList<>();
ResultSet rs;
Consulta consulta = null;
consulta = new Consulta(conexion);
}
|
<gh_stars>1-10
import { StringFormatter } from './StringFormatter';
import { ValueTooHugeError } from './ValueTooHugeError';
import { ValueInvalidError } from './ValueInvalidError';
describe('StringFormatter.numberWithDelimiter', () => {
const formatter = new StringFormatter();
it('์ธ์๋ฆฌ๋ง๋ค ๊ตฌ๋ถ์๊ฐ ์ถ๊ฐ๋ ์ซ์ ํ์์ ๋ฌธ์์ด', () => {
expect(formatter.numberWithDelimiter(1000)).toBe('1,000');
});
it('ํฐ ์ซ์๋ ์ธ์๋ฆฌ๋ง๋ค ๊ตฌ๋ถ์๊ฐ ์ถ๊ฐ๋ ์ซ์ ํ์์ ๋ฌธ์์ด', () => {
expect(formatter.numberWithDelimiter(12345678901234)).toBe(
'12,345,678,901,234',
);
});
it('์์์ ์๋ ๊ฐ์ ๊ตฌ๋ถ์๊ฐ ์ถ๊ฐ๋์ง ์์ต๋๋ค.', () => {
expect(formatter.numberWithDelimiter(1000.1234)).toBe('1,000.1234');
});
it('์ซ์ํ์์ ๋ฌธ์์ด์ ์
๋ ฅํ๋ฉด ์ธ์๋ฆฌ๋ง๋ค ๊ตฌ๋ถ์๊ฐ ์ถ๊ฐ๋ ์ซ์ ํ์์ ๋ฌธ์์ด', () => {
expect(formatter.numberWithDelimiter('1000')).toBe('1,000');
});
it('์ซ์ํ์์ ๋ฌธ์์ด์ ์
๋ ฅํ๋ฉด ์์์ ์๋ ๊ฐ์ ๊ตฌ๋ถ์๊ฐ ์ถ๊ฐ๋์ง ์์ต๋๋ค.', () => {
expect(formatter.numberWithDelimiter('1000.1234')).toBe('1,000.1234');
});
it('๋น ๋ถ์์ด์ ๋น ๋ฌธ์์ด์ ๋ฐํํฉ๋๋ค.', () => {
expect(formatter.numberWithDelimiter('')).toBe('');
});
});
describe('StringFormatter.fileSize', () => {
const formatter = new StringFormatter();
it('๊ฐ์ด ๊ธฐ์ค๊ฐ (1024) ๋ณด๋ค ์์ ๊ฒฝ์ฐ', () => {
expect(formatter.fileSize(1000)).toBe('1000 Bytes');
});
it('๊ฐ์ด ์ซ์ ํ์์ ๋ฌธ์์ด์ธ ๊ฒฝ์ฐ', () => {
expect(formatter.fileSize('1000')).toBe('1000 Bytes');
});
it('๊ฐ์ด ์ซ์ ํ์์ ๋ฌธ์์ด์ด ์๋ ๊ฒฝ์ฐ', () => {
expect(() => formatter.fileSize('hello world')).toThrow(
ValueInvalidError,
);
});
it('๊ฐ์ด ์์์ ์ ํฌํจํ๊ณ ๊ธฐ์ค๊ฐ (1024) ๋ณด๋ค ์์ ๊ฒฝ์ฐ', () => {
expect(formatter.fileSize(900.21)).toBe('900 Bytes');
});
it('๊ฐ์ด ์์์ ์ ํฌํจํ๊ณ ๊ธฐ์ค๊ฐ (1024) ๋ณด๋ค ์๊ณ ํฌ๋งทํฐ๊ฐ ์ง์ ๋ ๊ฒฝ์ฐ', () => {
expect(formatter.fileSize(1002.21, formatter.numberWithDelimiter)).toBe(
'1,002 Bytes',
);
});
it('KB - 1 ์ฐ์ฐ ๊ฒฐ๊ณผ๊ฐ ์์์ ์ ํฌํจํ์ง ์์ ', () => {
expect(
formatter.fileSize(2 * 1024, formatter.numberWithDelimiter),
).toBe('2 KB');
});
it('KB - 2 ์ฐ์ฐ ๊ฒฐ๊ณผ๊ฐ ์์์ ์ ํฌํจ ', () => {
expect(
formatter.fileSize(2.05 * 1024, formatter.numberWithDelimiter),
).toBe('2.05 KB');
});
it('MB - 1 ์ฐ์ฐ ๊ฒฐ๊ณผ๊ฐ ์์์ ์ ํฌํจํ์ง ์์', () => {
expect(
formatter.fileSize(2 * 1024 * 1024, formatter.numberWithDelimiter),
).toBe('2 MB');
});
it('MB - 2 ์ฐ์ฐ ๊ฒฐ๊ณผ๊ฐ ์์์ ์ ํฌํจ', () => {
expect(
formatter.fileSize(
2.012 * 1024 * 1024,
formatter.numberWithDelimiter,
),
).toBe('2.01 MB');
});
it('PB - 1 ์ฐ์ฐ ๊ฒฐ๊ณผ๊ฐ ์์์ ์ ํฌํจํ์ง ์์', () => {
expect(
formatter.fileSize(
2 * 1024 * 1024 * 1024 * 1024 * 1024,
formatter.numberWithDelimiter,
),
).toBe('2 PB');
});
it('PB - 2 ์ฐ์ฐ ๊ฒฐ๊ณผ๊ฐ ์์ซ์ ์ ํฌํจํจ', () => {
expect(
formatter.fileSize(
2.159 * 1024 * 1024 * 1024 * 1024 * 1024,
formatter.numberWithDelimiter,
),
).toBe('2.16 PB');
});
it('PB - 2 ๋งค์ฐ ํฐ ๊ฐ', () => {
expect(
formatter.fileSize(
2 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
formatter.numberWithDelimiter,
),
).toBe('2,048 PB');
});
it('Error ๋งค์ฐ ํฐ ๊ฐ', () => {
expect(() =>
formatter.fileSize(
Math.pow(2 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024, 1024),
formatter.numberWithDelimiter,
),
).toThrow(ValueTooHugeError);
});
it('์ฌ์ฉ์ ์ ์ ๋จ์ ์ฌ์ฉ 1', () => {
expect(
formatter.fileSize(
2 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
formatter.numberWithDelimiter,
['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
),
).toBe('2 EB');
});
it('์ฌ์ฉ์ ์ ์ ๋จ์ ์ฌ์ฉ 2', () => {
expect(
formatter.fileSize(
2.45 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
formatter.numberWithDelimiter,
['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
),
).toBe('2.45 EB');
});
});
|
<reponame>tosin2013/microshift<gh_stars>100-1000
// +build !ignore_autogenerated_openshift
// Code generated by conversion-gen. DO NOT EDIT.
package docker10
import (
unsafe "unsafe"
docker10 "github.com/openshift/api/image/docker10"
image "github.com/openshift/openshift-apiserver/pkg/image/apis/image"
conversion "k8s.io/apimachinery/pkg/conversion"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// init wires the generated conversion registration into the package's scheme
// builder so the conversions are installed when the scheme is built.
func init() {
	localSchemeBuilder.Register(RegisterConversions)
}
// RegisterConversions adds conversion functions to the given scheme.
// Public to allow building arbitrary schemes.
//
// NOTE(review): this file is generated by conversion-gen (DO NOT EDIT);
// regenerate it rather than hand-editing.
func RegisterConversions(s *runtime.Scheme) error {
	if err := s.AddGeneratedConversionFunc((*docker10.DockerConfig)(nil), (*image.DockerConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {
		return Convert_docker10_DockerConfig_To_image_DockerConfig(a.(*docker10.DockerConfig), b.(*image.DockerConfig), scope)
	}); err != nil {
		return err
	}
	if err := s.AddGeneratedConversionFunc((*image.DockerConfig)(nil), (*docker10.DockerConfig)(nil), func(a, b interface{}, scope conversion.Scope) error {
		return Convert_image_DockerConfig_To_docker10_DockerConfig(a.(*image.DockerConfig), b.(*docker10.DockerConfig), scope)
	}); err != nil {
		return err
	}
	if err := s.AddGeneratedConversionFunc((*docker10.DockerImage)(nil), (*image.DockerImage)(nil), func(a, b interface{}, scope conversion.Scope) error {
		return Convert_docker10_DockerImage_To_image_DockerImage(a.(*docker10.DockerImage), b.(*image.DockerImage), scope)
	}); err != nil {
		return err
	}
	if err := s.AddGeneratedConversionFunc((*image.DockerImage)(nil), (*docker10.DockerImage)(nil), func(a, b interface{}, scope conversion.Scope) error {
		return Convert_image_DockerImage_To_docker10_DockerImage(a.(*image.DockerImage), b.(*docker10.DockerImage), scope)
	}); err != nil {
		return err
	}
	return nil
}
// autoConvert_docker10_DockerConfig_To_image_DockerConfig copies every field
// of the external docker10 config into the internal image config.
// NOTE(review): generated code -- the unsafe.Pointer casts reinterpret
// slices/maps in place rather than copying; they rely on the two field types
// having identical memory layout. Do not hand-edit; regenerate instead.
func autoConvert_docker10_DockerConfig_To_image_DockerConfig(in *docker10.DockerConfig, out *image.DockerConfig, s conversion.Scope) error {
	out.Hostname = in.Hostname
	out.Domainname = in.Domainname
	out.User = in.User
	out.Memory = in.Memory
	out.MemorySwap = in.MemorySwap
	out.CPUShares = in.CPUShares
	out.CPUSet = in.CPUSet
	out.AttachStdin = in.AttachStdin
	out.AttachStdout = in.AttachStdout
	out.AttachStderr = in.AttachStderr
	out.PortSpecs = *(*[]string)(unsafe.Pointer(&in.PortSpecs))
	out.ExposedPorts = *(*map[string]struct{})(unsafe.Pointer(&in.ExposedPorts))
	out.Tty = in.Tty
	out.OpenStdin = in.OpenStdin
	out.StdinOnce = in.StdinOnce
	out.Env = *(*[]string)(unsafe.Pointer(&in.Env))
	out.Cmd = *(*[]string)(unsafe.Pointer(&in.Cmd))
	out.DNS = *(*[]string)(unsafe.Pointer(&in.DNS))
	out.Image = in.Image
	out.Volumes = *(*map[string]struct{})(unsafe.Pointer(&in.Volumes))
	out.VolumesFrom = in.VolumesFrom
	out.WorkingDir = in.WorkingDir
	out.Entrypoint = *(*[]string)(unsafe.Pointer(&in.Entrypoint))
	out.NetworkDisabled = in.NetworkDisabled
	out.SecurityOpts = *(*[]string)(unsafe.Pointer(&in.SecurityOpts))
	out.OnBuild = *(*[]string)(unsafe.Pointer(&in.OnBuild))
	out.Labels = *(*map[string]string)(unsafe.Pointer(&in.Labels))
	return nil
}

// Convert_docker10_DockerConfig_To_image_DockerConfig is an autogenerated conversion function.
func Convert_docker10_DockerConfig_To_image_DockerConfig(in *docker10.DockerConfig, out *image.DockerConfig, s conversion.Scope) error {
	return autoConvert_docker10_DockerConfig_To_image_DockerConfig(in, out, s)
}
// autoConvert_image_DockerConfig_To_docker10_DockerConfig is the exact
// mirror of the docker10->image conversion above (generated; do not
// hand-edit). See that function for the unsafe.Pointer caveat.
func autoConvert_image_DockerConfig_To_docker10_DockerConfig(in *image.DockerConfig, out *docker10.DockerConfig, s conversion.Scope) error {
	out.Hostname = in.Hostname
	out.Domainname = in.Domainname
	out.User = in.User
	out.Memory = in.Memory
	out.MemorySwap = in.MemorySwap
	out.CPUShares = in.CPUShares
	out.CPUSet = in.CPUSet
	out.AttachStdin = in.AttachStdin
	out.AttachStdout = in.AttachStdout
	out.AttachStderr = in.AttachStderr
	out.PortSpecs = *(*[]string)(unsafe.Pointer(&in.PortSpecs))
	out.ExposedPorts = *(*map[string]struct{})(unsafe.Pointer(&in.ExposedPorts))
	out.Tty = in.Tty
	out.OpenStdin = in.OpenStdin
	out.StdinOnce = in.StdinOnce
	out.Env = *(*[]string)(unsafe.Pointer(&in.Env))
	out.Cmd = *(*[]string)(unsafe.Pointer(&in.Cmd))
	out.DNS = *(*[]string)(unsafe.Pointer(&in.DNS))
	out.Image = in.Image
	out.Volumes = *(*map[string]struct{})(unsafe.Pointer(&in.Volumes))
	out.VolumesFrom = in.VolumesFrom
	out.WorkingDir = in.WorkingDir
	out.Entrypoint = *(*[]string)(unsafe.Pointer(&in.Entrypoint))
	out.NetworkDisabled = in.NetworkDisabled
	out.SecurityOpts = *(*[]string)(unsafe.Pointer(&in.SecurityOpts))
	out.OnBuild = *(*[]string)(unsafe.Pointer(&in.OnBuild))
	out.Labels = *(*map[string]string)(unsafe.Pointer(&in.Labels))
	return nil
}

// Convert_image_DockerConfig_To_docker10_DockerConfig is an autogenerated conversion function.
func Convert_image_DockerConfig_To_docker10_DockerConfig(in *image.DockerConfig, out *docker10.DockerConfig, s conversion.Scope) error {
	return autoConvert_image_DockerConfig_To_docker10_DockerConfig(in, out, s)
}
// autoConvert_docker10_DockerImage_To_image_DockerImage converts the outer
// image metadata and delegates the embedded ContainerConfig to the config
// conversion above. Config (a pointer) is reinterpreted via unsafe.Pointer
// rather than deep-converted. Generated code; do not hand-edit.
func autoConvert_docker10_DockerImage_To_image_DockerImage(in *docker10.DockerImage, out *image.DockerImage, s conversion.Scope) error {
	out.ID = in.ID
	out.Parent = in.Parent
	out.Comment = in.Comment
	out.Created = in.Created
	out.Container = in.Container
	if err := Convert_docker10_DockerConfig_To_image_DockerConfig(&in.ContainerConfig, &out.ContainerConfig, s); err != nil {
		return err
	}
	out.DockerVersion = in.DockerVersion
	out.Author = in.Author
	out.Config = (*image.DockerConfig)(unsafe.Pointer(in.Config))
	out.Architecture = in.Architecture
	out.Size = in.Size
	return nil
}

// Convert_docker10_DockerImage_To_image_DockerImage is an autogenerated conversion function.
func Convert_docker10_DockerImage_To_image_DockerImage(in *docker10.DockerImage, out *image.DockerImage, s conversion.Scope) error {
	return autoConvert_docker10_DockerImage_To_image_DockerImage(in, out, s)
}
// autoConvert_image_DockerImage_To_docker10_DockerImage is the exact mirror
// of the docker10->image conversion above. Generated code; do not hand-edit.
func autoConvert_image_DockerImage_To_docker10_DockerImage(in *image.DockerImage, out *docker10.DockerImage, s conversion.Scope) error {
	out.ID = in.ID
	out.Parent = in.Parent
	out.Comment = in.Comment
	out.Created = in.Created
	out.Container = in.Container
	if err := Convert_image_DockerConfig_To_docker10_DockerConfig(&in.ContainerConfig, &out.ContainerConfig, s); err != nil {
		return err
	}
	out.DockerVersion = in.DockerVersion
	out.Author = in.Author
	out.Config = (*docker10.DockerConfig)(unsafe.Pointer(in.Config))
	out.Architecture = in.Architecture
	out.Size = in.Size
	return nil
}

// Convert_image_DockerImage_To_docker10_DockerImage is an autogenerated conversion function.
func Convert_image_DockerImage_To_docker10_DockerImage(in *image.DockerImage, out *docker10.DockerImage, s conversion.Scope) error {
	return autoConvert_image_DockerImage_To_docker10_DockerImage(in, out, s)
}
|
<filename>node_modules/redux-persist-expire/node_modules/redux-persist/lib/stateReconciler/autoMergeLevel1.js
'use strict';
exports.__esModule = true;
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
exports.default = autoMergeLevel1;
/**
 * State reconciler that rehydrates one level deep: each top-level key of the
 * persisted (inbound) state overwrites the corresponding key of the freshly
 * reduced state, except keys the reducer itself already modified and the
 * internal `_persist` key.
 *
 * NOTE(review): vendored node_modules file (transpiled) -- avoid hand-editing.
 *
 * @param {*} inboundState  state read back from storage (may be undefined)
 * @param {*} originalState state before the reducer ran
 * @param {*} reducedState  state after the reducer ran
 * @param {{debug: boolean}} _ref config object; `debug` enables dev logging
 * @returns {*} the merged state
 */
function autoMergeLevel1(inboundState, originalState, reducedState, _ref) {
  var debug = _ref.debug;
  var newState = _extends({}, reducedState);
  // only rehydrate if inboundState exists and is an object
  if (inboundState && (typeof inboundState === 'undefined' ? 'undefined' : _typeof(inboundState)) === 'object') {
    Object.keys(inboundState).forEach(function (key) {
      // ignore _persist data
      if (key === '_persist') return;
      // if reducer modifies substate, skip auto rehydration
      if (originalState[key] !== reducedState[key]) {
        if (process.env.NODE_ENV !== 'production' && debug) console.log('redux-persist/stateReconciler: sub state for key `%s` modified, skipping.', key);
        return;
      }
      // otherwise hard set the new value
      newState[key] = inboundState[key];
    });
  }
  if (process.env.NODE_ENV !== 'production' && debug && inboundState && (typeof inboundState === 'undefined' ? 'undefined' : _typeof(inboundState)) === 'object') console.log('redux-persist/stateReconciler: rehydrated keys \'' + Object.keys(inboundState).join(', ') + '\'');
  return newState;
}
/*
autoMergeLevel1:
- merges 1 level of substate
- skips substate if already modified
*/
|
// Public package surface: re-export each module's default export under a
// stable name. Uses standard ES re-export syntax instead of the original
// stage-1 `export X from '...'` proposal form, which only parses with a
// Babel plugin; the two are equivalent where the proposal is supported.
export { default as Client } from './class/client'
export { default as Response } from './class/response'
export { default as Conversation } from './class/conversation'
export { default as Entity } from './class/entity'
export { default as RecastError } from './class/error'
|
<filename>src/core/cloak_normal_item.h<gh_stars>1-10
#ifndef INCLUDED_CORE_CLOAK_NORMAL_ITEM_H
#define INCLUDED_CORE_CLOAK_NORMAL_ITEM_H
#include "normal_item.h"
#include "core/property_loader.h"
#include "platform/export.h"
// Cloak item: a NormalItem specialization that adds no state of its own; it
// exists so the cloak can be constructed and serialized under its own type key.
class CloakNormalItem : public NormalItem
{
public:
    CloakNormalItem( int32_t id );
protected:
    // Protected default constructor -- presumably reserved for
    // deserialization (boost::serialization is a friend below); confirm.
    CloakNormalItem();
private:
public:
    friend class ::boost::serialization::access;
    // Boost.Serialization hook; defined out-of-line below.
    template<class Archive>
    void serialize( Archive& ar, const unsigned int version );
};

// Serializes only the NormalItem base; this class has no fields of its own.
template<class Archive>
void CloakNormalItem::serialize( Archive& ar, const unsigned int version )
{
    ar& boost::serialization::base_object<NormalItem>( *this );
}

// Registers the type with the serialization system under "cloak_normal_item".
REAPING2_CLASS_EXPORT_KEY2( CloakNormalItem, CloakNormalItem, "cloak_normal_item" );
#endif//INCLUDED_CORE_CLOAK_NORMAL_ITEM_H
//command: "classgenerator.exe" -g "normal_item" -c "cloak_normal_item"
|
#!/bin/bash -e
# the original chamfer map created in marching_cubes_fetus.pl
# Date: 21 Aug 2019
# Author: Jennings Zhang <jenni_zh@protonmail.com>
# &run( 'mincdefrag', $wm_mask, "${tmpdir}/wm_mask_defragged.mnc", 1, 6 );
# &run( 'mincchamfer', '-quiet', '-max_dist', '10.0',
# "${tmpdir}/wm_mask_defragged.mnc", "${tmpdir}/chamfer_outside.mnc" );
# &run( 'minccalc', '-quiet', '-clobber', '-expression', '1-A[0]',
# "${tmpdir}/wm_mask_defragged.mnc", "${tmpdir}/wm_mask_negated.mnc" );
# &run( 'mincchamfer', '-quiet', '-max_dist', '5.0',
# "${tmpdir}/wm_mask_negated.mnc", "${tmpdir}/chamfer_inside.mnc" );
# unlink( "${tmpdir}/wm_mask_negated.mnc" );
# &run( 'minccalc', '-quiet', '-clob', '-expression', "10.0-A[0]+A[1]",
# "${tmpdir}/chamfer_inside.mnc", "${tmpdir}/chamfer_outside.mnc",
# "${tmpdir}/wm_chamfer.mnc" );
# unlink( "${tmpdir}/chamfer_outside.mnc" );
# unlink( "${tmpdir}/chamfer_inside.mnc" );
# Prints usage to stdout. The heredoc body is emitted verbatim, so its
# contents must stay unindented.
function show_help () {
cat << EOF
usage: $0 wm.mnc chamfer.mnc

Creates a bidirectional radial distance map for the volume (wm.mnc) using mincchamfer.

options:
  -h   show this help message and exit
  -v   verbose output
  -k   keep temporary files
  -c   boundary value (default: 10)
  -i   [1-6] treats the input as painted labels instead of a binary mask.
       The chamfer is generated around the outer surface of the layer
       as specified by the given isovalue.
       1 = CSF
       2 = gray matter (cortical plate)
       3 = white matter (subplate zone)
       4 = intermediate zone
       5 = subventricular zone
       6 = ventricle
EOF
}
# Allow -h/--help as the first argument before getopts runs.
# BUGFIX: the original substring match (*"-h"*) also fired for any argument
# merely containing "-h" -- e.g. an input file named "foo-h.mnc" would print
# the help and exit instead of running.
[[ "$1" == "-h" || "$1" == "--help" ]] && show_help && exit 0

quiet="-quiet"   # passed through to the minc tools; emptied by -v
keep=0           # -k: keep the temporary working directory
label=1          # -i: isovalue when the input is a painted label volume
iso=10           # -c: boundary value written at the mask surface

while getopts ":hvki:c:" opt; do
    case $opt in
        h ) show_help && exit 0 ;;
        v ) quiet="" ;;
        k ) keep=1 ;;
        i ) label=$OPTARG ;;
        c ) iso=$OPTARG ;;
        \? ) # BUGFIX: plain `echo` does not interpret "\n", so the original
             # printed a literal backslash-n; emit two lines to stderr instead.
             echo "Invalid option: -$OPTARG" >&2
             echo "Run $0 -h for help." >&2
             exit 1 ;;
    esac
done
shift $((OPTIND-1))

# Positional arguments: input mask/label volume and output chamfer map.
wm_mask=$1
output_chamfer=$2
if [ -z "$wm_mask" ] || [ -z "$output_chamfer" ]; then
    echo "Missing filenames, run $0 -h for help."
    exit 1
fi

# Scratch directory for intermediates; removed at the end unless -k was given.
tmpdir=$(mktemp -d -t chamfer-$(date +%Hh%M,%S)-XXXXXXXXX)
wm_mask_defragged=$tmpdir/wm_mask_defragged.mnc
negative_mask=$tmpdir/wm_mask_negated.mnc
outer_chamfer=$tmpdir/chamfer_outer.mnc
inner_chamfer=$tmpdir/chamfer_inner.mnc

# create a binary mask from painted labels
bin_mask=$tmpdir/wm_mask.mnc
if [ "$label" -gt "1" ]; then
    # Threshold half a unit below the requested label so the label and
    # everything above it become 1 (e.g. -i 3 -> voxels > 2.5).
    label="$((label-1)).5" # label = label - 0.5
    [ "$quiet" = "" ] && set -x
    minccalc $quiet -byte -clob -expression "A[0]>$label" $wm_mask $bin_mask
    { set +x; } 2> /dev/null
else
    # number range must be compatible with mincchamer
    mincreshape $quiet -image_range 0 255 $wm_mask $bin_mask
fi
wm_mask=$bin_mask

# Defragment background (0) then foreground (1) -- presumably removes small
# disconnected components before the distance transform; confirm against
# mincdefrag's documentation.
if [ "$quiet" = "" ]; then
    set -x # print commands before running them
    mincdefrag $wm_mask $wm_mask_defragged 0 6
    mincdefrag $wm_mask_defragged $wm_mask_defragged 1 6
else
    mincdefrag $wm_mask $wm_mask_defragged 0 6 > /dev/null
    mincdefrag $wm_mask_defragged $wm_mask_defragged 1 6 > /dev/null
fi

# Outward distance from the mask, inward distance from the negated mask,
# combined into one map centered on the boundary value $iso.
mincchamfer $quiet -max_dist 10.0 $wm_mask_defragged $outer_chamfer
minccalc $quiet -clob -expression "A[0]<0.5" $wm_mask_defragged $negative_mask
mincchamfer $quiet -max_dist 5.0 $negative_mask $inner_chamfer
minccalc $quiet -clob -expression "$iso-A[0]+A[1]" \
    $inner_chamfer $outer_chamfer $output_chamfer
# output volume will be of type image: unsigned byte 0 to 255
# seems incorrect, I think it should be float, but it works
# mincreshape $quiet -clobber -signed -float $output_chamfer $float_chamfer
{ set +x; } 2> /dev/null

# Clean up (or report) the scratch directory.
if [ "$keep" = "0" ]; then
    rm -r $tmpdir
    [ "$quiet" = "" ] && echo "Removed $tmpdir" || true
else
    echo "-k flag specified, intermediate files are in $tmpdir"
    echo "Run rm -r /tmp/chamfer-* to clean up tempoary tiles."
fi
|
#!/bin/bash
# This is the script that's executed by travis, you can run it yourself to run
# the exact same suite
#
# When running it locally the most important thing to set is the CHANNEL env
# var, otherwise it will run tests against every version of rust that it knows
# about (nightly, beta, stable, 1.13.0):
#
# $ CHANNEL=stable ./ci/travis.sh
set -e
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Entry point. With CHANNEL set, builds/tests just that toolchain (1.13.0 is
# build-only). Without CHANNEL, walks every known channel in turn; on nightly,
# CLIPPY=y runs clippy instead of the test suite.
main() {
    if [[ -n "$CHANNEL" ]] ; then
        if [[ "$CHANNEL" == 1.13.0 ]]; then
            banner "Building $CHANNEL"
            build_only
        else
            banner "Building/testing $CHANNEL"
            build_and_test
            banner "Testing Core $CHANNEL"
            build_core_test
        fi
    else
        CHANNEL=nightly
        matching_banner "Test $CHANNEL"
        if [[ "${CLIPPY}" = y ]] ; then
            run_clippy
        else
            build_and_test
        fi

        CHANNEL=beta
        matching_banner "Test $CHANNEL"
        build_and_test

        # stable additionally exercises the no_std core build
        CHANNEL=stable
        matching_banner "Test $CHANNEL"
        build_and_test
        build_core_test

        CHANNEL=1.13.0
        matching_banner "Test $CHANNEL"
        build_only
    fi
}
build_and_test() {
# interleave building and testing in hope that it saves time
# also vary the local time zone to (hopefully) catch tz-dependent bugs
# also avoid doc-testing multiple times---it takes a lot and rarely helps
cargo clean
if [ "${WASMBIND}" != "y" ]; then
build_and_test_nonwasm
else
build_and_test_wasm
fi
if [[ "$CHANNEL" == stable ]]; then
if [[ -n "$TRAVIS" ]] ; then
check_readme
fi
fi
}
build_and_test_nonwasm() {
channel build -v
TZ=ACST-9:30 channel test -v --lib
channel build -v --features rustc-serialize
TZ=EST4 channel test -v --features rustc-serialize --lib
channel build -v --features serde
TZ=UTC0 channel test -v --features serde --lib
channel build -v --features serde,rustc-serialize
TZ=Asia/Katmandu channel test -v --features serde,rustc-serialize
# without default "clock" feature
channel build -v --no-default-features --features std
TZ=ACST-9:30 channel test -v --no-default-features --lib
channel build -v --no-default-features --features std,rustc-serialize
TZ=EST4 channel test -v --no-default-features --features rustc-serialize --lib
channel build -v --no-default-features --features std,serde
TZ=UTC0 channel test -v --no-default-features --features serde --lib
channel build -v --no-default-features --features std,serde,rustc-serialize
TZ=Asia/Katmandu channel test -v --no-default-features --features std,serde,rustc-serialize --lib
channel build -v --no-default-features --features 'serde'
TZ=UTC0 channel test -v --no-default-features --features 'serde' --lib
channel build -v --no-default-features --features 'alloc serde'
TZ=UTC0 channel test -v --no-default-features --features 'alloc serde' --lib
}
build_and_test_wasm() {
touch tests/wasm.rs # ensure rebuild happens so TZ / NOW take effect
TZ=ACST-9:30 NOW=$(date +%s) wasm-pack test --node -- --features wasmbind
touch tests/wasm.rs
TZ=EST4 NOW=$(date +%s) wasm-pack test --node -- --features wasmbind
touch tests/wasm.rs
TZ=UTC0 NOW=$(date +%s) wasm-pack test --node -- --features wasmbind
touch tests/wasm.rs
TZ=Asia/Katmandu NOW=$(date +%s) wasm-pack test --node -- --features wasmbind
}
build_only() {
# Rust 1.13 doesn't support custom derive, so, to avoid doctests which
# validate that, we just build there.
cargo clean
channel build -v
channel build -v --features rustc-serialize
channel build -v --features serde
channel build -v --no-default-features --features std
}
build_core_test() {
channel_run rustup target add thumbv6m-none-eabi --toolchain "$CHANNEL"
(
cd ci/core-test
channel build -v --target thumbv6m-none-eabi
)
}
run_clippy() {
# cached installation will not work on a later nightly
if [ -n "${TRAVIS}" ] && ! cargo install clippy --debug --force; then
echo "COULD NOT COMPILE CLIPPY, IGNORING CLIPPY TESTS"
exit
fi
cargo clippy --features 'serde rustc-serialize' -- -Dclippy
}
check_readme() {
make readme
(set -x; git diff --exit-code -- README.md) ; echo $?
}
# script helpers
# Prints the arguments framed between two separator rules.
banner() {
    local rule="======================================================================"
    echo "$rule"
    echo "$*"
    echo "$rule"
}
# Prints the arguments followed by a caret underline of the same width.
underline() {
    local text="$*"
    echo "$text"
    echo "${text//?/^}"
}
matching_banner() {
if channel_matches || ! is_ci ; then
banner "$*"
echo_versions
fi
}
echo_versions() {
channel_run rustc --version
channel_run cargo --version
node --version
}
channel() {
channel_run cargo "$@"
}
channel_run() {
if channel_matches ; then
local the_cmd="$ $*"
underline "$the_cmd"
"$@"
elif ! is_ci ; then
local cmd="$1"
shift
if [[ $cmd == cargo || $cmd == rustc ]] ; then
underline "$ $cmd +${CHANNEL} $*"
"$cmd" "+${CHANNEL}" "$@"
else
underline "$ $cmd $*"
"$cmd" "$@"
fi
fi
}
# Succeeds (0) only on CI and only when the CI-provided rust channel
# (Travis or AppVeyor env var) matches $CHANNEL; used to skip the
# non-selected channels when running on CI.
channel_matches() {
    if is_ci ; then
        if [[ "${TRAVIS_RUST_VERSION}" = "${CHANNEL}"
            || "${APPVEYOR_RUST_CHANNEL}" = "${CHANNEL}" ]] ; then
            return 0
        fi
    fi
    return 1
}
# Succeeds (0) when running under Travis or AppVeyor.
is_ci() {
    [[ -n "$TRAVIS" || -n "$APPVEYOR" ]]
}
main
|
<filename>packages/ethers-ethereum/src/index.ts
import type { Contract} from "ethers"
import { ethers } from "ethers"
import type { TransactionResponse } from "@ethersproject/abstract-provider"
import type * as EthereumProvider from "@rarible/ethereum-provider"
import { signTypedData } from "@rarible/ethereum-provider"
import type { Address, Binary, BigNumber, Word } from "@rarible/types"
import { toAddress, toBigNumber, toBinary, toWord } from "@rarible/types"
import type { MessageTypes, TypedMessage } from "@rarible/ethereum-provider/src/domain"
import type { TypedDataSigner } from "@ethersproject/abstract-signer"
import { encodeParameters } from "./abi-coder"
/**
 * Ethereum adapter backed by an ethers Web3Provider (e.g. an injected
 * wallet). `from` optionally pins the sending account; otherwise the
 * provider's first account is used.
 */
export class EthersWeb3ProviderEthereum implements EthereumProvider.Ethereum {
	constructor(readonly web3Provider: ethers.providers.Web3Provider, readonly from?: string) {
		// bound because `send` is passed as a callback to signTypedData below
		this.send = this.send.bind(this)
	}

	/** Wraps an on-chain contract; the address is mandatory here. */
	createContract(abi: any, address?: string): EthereumProvider.EthereumContract {
		if (!address) {
			throw new Error("No Contract address provided, it's required for EthersEthereum")
		}
		return new EthersContract(new ethers.Contract(address, abi, this.web3Provider.getSigner()))
	}

	/** Raw JSON-RPC passthrough. */
	send(method: string, params: any): Promise<any> {
		return this.web3Provider.send(method, params)
	}

	personalSign(message: string): Promise<string> {
		return this.web3Provider.getSigner().signMessage(message)
	}

	/** Typed-data signing routed through the raw RPC `send`. */
	async signTypedData<T extends MessageTypes>(data: TypedMessage<T>): Promise<string> {
		const signer = await this.getFrom()
		return signTypedData(this.send, signer, data)
	}

	/** Explicit `from` if configured, otherwise the provider's first account. */
	async getFrom(): Promise<string> {
		if (!this.from) {
			const [first] = await this.web3Provider.listAccounts()
			return first
		}
		return this.from
	}

	encodeParameter(type: any, parameter: any): string {
		return encodeParameters([type], [parameter])
	}

	async getBalance(address: Address): Promise<BigNumber> {
		const balance = await this.web3Provider.getBalance(address)
		return toBigNumber(balance.toString())
	}

	async getChainId(): Promise<number> {
		const { chainId } = await this.web3Provider.getNetwork()
		return chainId
	}
}
/**
 * Ethereum adapter backed directly by an ethers Signer that also supports
 * EIP-712 typed-data signing.
 */
export class EthersEthereum implements EthereumProvider.Ethereum {
	constructor(readonly signer: TypedDataSigner & ethers.Signer) {}

	/** Wraps an on-chain contract; the address is mandatory here. */
	createContract(abi: any, address?: string): EthereumProvider.EthereumContract {
		if (!address) {
			throw new Error("No Contract address provided, it's required for EthersEthereum")
		}
		return new EthersContract(new ethers.Contract(address, abi, this.signer))
	}

	personalSign(message: string): Promise<string> {
		return this.signer.signMessage(message)
	}

	async signTypedData<T extends MessageTypes>(data: TypedMessage<T>): Promise<string> {
		// ethers derives the EIP712Domain type itself, so it is stripped here
		// eslint-disable-next-line @typescript-eslint/no-unused-vars
		const { EIP712Domain, ...types } = data.types
		return this.signer._signTypedData(data.domain, types, data.message)
	}

	getFrom(): Promise<string> {
		return this.signer.getAddress()
	}

	encodeParameter(type: any, parameter: any): string {
		return encodeParameters([type], [parameter])
	}

	/** Requires the signer to be connected to a provider. */
	async getBalance(address: Address): Promise<BigNumber> {
		if (!this.signer.provider) {
			throw new Error("EthersEthereum: signer provider does not exist")
		}
		const balance = await this.signer.provider.getBalance(address)
		return toBigNumber(balance.toString())
	}

	async getChainId(): Promise<number> {
		return this.signer.getChainId()
	}
}
/** Thin wrapper exposing an ethers Contract through the provider-agnostic interface. */
export class EthersContract implements EthereumProvider.EthereumContract {
	constructor(private readonly contract: Contract) {
	}

	/** Creates a deferred call to the named contract function. */
	functionCall(name: string, ...args: any): EthereumProvider.EthereumFunctionCall {
		return new EthersFunctionCall(this.contract, name, args)
	}
}
/** A deferred call to one contract function, runnable as a read (call) or a tx (send). */
export class EthersFunctionCall implements EthereumProvider.EthereumFunctionCall {
	constructor(
		private readonly contract: Contract,
		private readonly name: string,
		private readonly args: any[],
	) {}

	async getCallInfo(): Promise<EthereumProvider.EthereumFunctionCallInfo> {
		return {
			method: this.name,
			args: this.args,
			contract: this.contract.address,
			from: undefined,
		}
	}

	// FIXME(review): populateTransaction returns a Promise; casting it to
	// `any` and reading `.data` synchronously yields undefined, not the
	// encoded calldata. The getter cannot await -- this likely needs to
	// become async in the EthereumFunctionCall interface; confirm upstream.
	get data(): string {
		return (this.contract.populateTransaction[this.name](...this.args) as any).data
	}

	async estimateGas(options?: EthereumProvider.EthereumSendOptions) {
		const func = this.contract.estimateGas[this.name].bind(null, ...this.args)
		const value = await func(options)
		return value.toNumber()
	}

	/** Read-only invocation (eth_call); no state change. */
	call(options?: EthereumProvider.EthereumSendOptions): Promise<any> {
		const func = this.contract[this.name].bind(null, ...this.args)
		if (options) {
			return func(options)
		} else {
			return func()
		}
	}

	/** Sends the call as a transaction and wraps the response. */
	async send(options?: EthereumProvider.EthereumSendOptions): Promise<EthereumProvider.EthereumTransaction> {
		const func = this.contract[this.name].bind(null, ...this.args)
		if (options) {
			return new EthersTransaction(await func(options))
		} else {
			return new EthersTransaction(await func())
		}
	}
}
/** Adapts an ethers TransactionResponse to the provider-agnostic transaction type. */
export class EthersTransaction implements EthereumProvider.EthereumTransaction {
	constructor(private readonly tx: TransactionResponse) {}

	get hash(): Word {
		return toWord(this.tx.hash)
	}

	/** Waits until mined; receipt `status` is normalized to boolean (1 === success). */
	async wait(): Promise<EthereumProvider.EthereumTransactionReceipt> {
		const receipt = await this.tx.wait()
		return {
			...receipt,
			status: receipt.status === 1,
			events: (receipt as any).events,
		}
	}

	/** Undefined for contract-creation transactions. */
	get to(): Address | undefined {
		return this.tx.to ? toAddress(this.tx.to) : undefined
	}

	get from(): Address {
		return toAddress(this.tx.from)
	}

	get data(): Binary {
		return toBinary(this.tx.data)
	}

	get nonce(): number {
		return this.tx.nonce
	}
}
|
<gh_stars>100-1000
import { ITokenizerHandle, tokenize } from "protobufjs";
import * as vscode from "vscode";
/** A (line, col) document coordinate; lines here are 1-based (see tokenizer). */
class position {
    constructor(public line: number, public col: number) {}

    /** Returns an independent copy of `pos`. */
    static from(pos: position): position {
        const copy = new position(0, 0);
        return Object.assign(copy, pos);
    }
}
/** A lexical token paired with its document position. */
class token {
    constructor(public tok: string, public pos: position) {}
}
/**
 * Position-tracking wrapper around protobufjs's tokenize().
 *
 * protobufjs reports 1-based line numbers while vscode's TextDocument is
 * 0-based, so the document is always indexed with `line - 1`; the positions
 * handed out keep the 1-based line (callers compensate with `line + 1`).
 *
 * Fixes applied to the original:
 * - peek() indexed the document with the 1-based row (off by one vs next())
 *   and searched from the start of the current token, so it could re-find
 *   the token next() had just consumed;
 * - expect() threw when the token DID match the regexp (inverted check,
 *   contradicting peek_expect()).
 */
class tokenizer {
    private _pos: position = new position(0, 0);
    private _token_width: number = 0;
    private _handler: ITokenizerHandle;

    constructor(public doc: vscode.TextDocument) {
        this._handler = tokenize(doc.getText(), false);
    }

    /** Looks at the next token without consuming it. */
    public peek(): token | null {
        const tok = this._handler.peek();
        if (tok === null) {
            return null;
        }
        const row = this._handler.line;
        // Search after the token next() last consumed, or from the start of
        // the line if the upcoming token is on a new line.
        let searchFrom = this._pos.col + this._token_width;
        if (row !== this._pos.line) {
            searchFrom = 0;
        }
        const col = this.doc.lineAt(row - 1).text.indexOf(tok, searchFrom);
        return new token(tok, new position(row, col));
    }

    /** Consumes and returns the next token, updating the tracked position. */
    public next(): token | null {
        const tok = this._handler.next();
        if (tok === null) {
            return null;
        }
        const lineno = this._handler.line;
        if (lineno !== this._pos.line) {
            this._pos = new position(lineno, 0);
            this._token_width = 0;
        }
        // vscode line numbers start at 0; protobufjs tokenize lines start at 1.
        this._pos.col = this.doc.lineAt(lineno - 1).text.indexOf(tok, this._pos.col + this._token_width);
        this._token_width = tok.length;
        return new token(tok, position.from(this._pos));
    }

    /** Returns the next token if it matches `regexp`, else null; does not consume. */
    public peek_expect(regexp: RegExp): token | null {
        const t = this.peek();
        if (t === null) {
            return null;
        }
        if (regexp.test(t.tok)) {
            return t;
        }
        return null;
    }

    /** Consumes the next token; throws if it does not match `regexp`. */
    public expect(regexp: RegExp): token | null {
        const t = this.next();
        if (t === null) {
            return null;
        }
        // BUGFIX: was `if (regexp.test(...)) throw` -- inverted.
        if (!regexp.test(t.tok)) {
            throw new Error("unexpected token");
        }
        return t;
    }

    /** A copy of the current (last-consumed) token position. */
    public get position(): position {
        return position.from(this._pos);
    }
}
/**
 * A syntactic region opened by "(" or "{".
 * `name` is the keyword that introduced it ("" for anonymous braces/parens),
 * `sym` the opening symbol, `pos` its position, and `end` the closing
 * symbol's position (null while the scope is still open).
 */
class scope {
    constructor(
        public name: "message" | "enum" | "service" | "rpc" | "returns" | "rpcbody" | "",
        public sym: "(" | "{",
        public pos: position,
        public end: position = null
    ) {}
}
// check is pos inside range(begin,end)
function isContains(pos: position, begin: position, end: position) {
// scope ่ตทๆญข้ฝๅจๅไธ่ก๏ผๅ
ๆ ๅจ่ตทๆญข่ๅดๅ
return (
(begin.line === end.line && begin.line === pos.line && pos.col >= begin.col && pos.col <= end.col) ||
// scope ไธๅจๅไธ่ก๏ผๅ
ๆ ๅจ่ตทๅง่ก็่ตทๅงtokenๅ
(begin.line !== end.line && begin.line === pos.line && pos.col >= begin.col) ||
// scope ไธๅจๅไธ่ก๏ผๅ
ๆ ๅจ็ปๆ่ก็็ปๆtokenๅ
(begin.line !== end.line && end.line === pos.line && pos.col <= end.col) ||
// scope ไธๅจๅไธ่ก๏ผๅ
ๆ ๅจ่ตทๅง่กๅ็ปๆ่กไธญ้ด
(begin.line !== end.line && pos.line < end.line && pos.line > begin.line)
);
}
// ๅ
จๅฑ่ฟๅ null
// ๅ
ถไปๆ
ๅต่ฟๅ scope
export function SyntacticGuessScope(document: vscode.TextDocument, cursorPosition: vscode.Position): scope | null {
const stack: scope[] = [];
const tkn = new tokenizer(document);
for (let tok = tkn.next(); tok !== null; tok = tkn.next()) {
switch (tok.tok) {
case "message": {
// take next token until reach left brace
let t = tkn.next();
for (; t !== null && t.tok !== "{"; t = tkn.next()) {}
stack.push(new scope("message", "{", position.from(t.pos)));
break;
}
case "enum": {
// take next token until reach left brace
let t = tkn.next();
for (; t !== null && t.tok !== "{"; t = tkn.next()) {}
stack.push(new scope("enum", "{", position.from(t.pos)));
break;
}
case "rpc": {
// take next token until reach left paren
let t = tkn.next();
for (; t !== null && t.tok !== "("; t = tkn.next()) {}
stack.push(new scope("rpc", "(", position.from(t.pos)));
break;
}
case "returns": {
// take next token until reach left paren
let t = tkn.next();
for (; t !== null && t.tok !== "("; t = tkn.next()) {}
stack.push(new scope("returns", "(", position.from(t.pos)));
break;
}
case "service": {
// take next token until reach left brace
let t = tkn.next();
for (; t !== null && t.tok !== "{"; t = tkn.next()) {}
stack.push(new scope("service", "{", position.from(t.pos)));
break;
}
case "(":
stack.push(new scope("", "(", position.from(tok.pos)));
break;
case "{":
stack.push(new scope("", "{", position.from(tok.pos)));
break;
case "}": {
// ๅน้
ๆ ้กถ็็ฌฆๅทๅนถไธๆฏๆไปฌๅ
ณๆณจ็ scope ็ฑปๅ (name!=='')๏ผๆญคๆถๆๅผๅงๆฃๆฅๅ
ๆ ไฝ็ฝฎๆฏไธๆฏๅจ scope ๅบ้ดๅ
const lastScope = stack[stack.length - 1];
if (lastScope.sym === "{") {
stack.pop();
if (lastScope.name !== "") {
// ๅ
ๆ ๅจ่ๅดๅ
็ดๆฅ่ฟๅ
// ๆณจๆ vscode ็่กๅทๆฏไป0ๅผๅง็๏ผprotobufjs ็ tokenize ่ฟๅ็่กๅทๆฏไป 1 ๅผๅง็
if (isContains(new position(cursorPosition.line + 1, cursorPosition.character), lastScope.pos, tok.pos)) {
lastScope.end = position.from(tok.pos);
return lastScope;
}
}
} else {
// mismatch!
return null;
}
break;
}
case ")": {
// ๅน้
ๆ ้กถ็็ฌฆๅทๅนถไธๆฏๆไปฌๅ
ณๆณจ็ scope ็ฑปๅ (name!=='')๏ผๆญคๆถๆๅผๅงๆฃๆฅๅ
ๆ ไฝ็ฝฎๆฏไธๆฏๅจ scope ๅบ้ดๅ
const lastScope = stack[stack.length - 1];
if (lastScope.sym === "(") {
stack.pop();
if (lastScope.name !== "") {
// ๅ
ๆ ๅจ่ๅดๅ
็ดๆฅ่ฟๅ
// ๆณจๆ vscode ็่กๅทๆฏไป0ๅผๅง็๏ผprotobufjs ็ tokenize ่ฟๅ็่กๅทๆฏไป 1 ๅผๅง็
const cursor = new position(cursorPosition.line + 1, cursorPosition.character);
if (isContains(cursor, lastScope.pos, tok.pos)) {
lastScope.end = position.from(tok.pos);
return lastScope;
}
// ้ๅฏนrpc ็ option
if (lastScope.name === "returns") {
if (tkn.peek().tok === "{") {
const t = tkn.next();
stack.push(new scope("rpcbody", "{", position.from(t.pos)));
}
}
}
} else {
// mismatch!
return null;
}
break;
}
}
}
while (stack.length > 0) {
const lastScope = stack.pop();
if (lastScope.name !== "") {
return lastScope;
}
}
return null;
}
|
#include <iostream>
#include <boost/process.hpp>
namespace bp = boost::process;
// Reads a command line from stdin and runs it, streaming the child's stdout
// back to our stdout.
int main() {
    std::string command;
    std::cout << "Enter the command to execute: ";
    std::getline(std::cin, command);
    try {
        bp::ipstream pipe_stream;
        // Launch the child with its stdout redirected into pipe_stream.
        // NOTE(review): the user-supplied command is executed as given —
        // only use with trusted input (command-injection risk otherwise).
        bp::child c(command, bp::std_out > pipe_stream);
        std::string line;
        // Stops at EOF, stream failure, OR the first empty output line —
        // blank lines in the child's output end the loop early.
        while (pipe_stream && std::getline(pipe_stream, line) && !line.empty()) {
            std::cout << line << std::endl;
        }
        c.wait();  // reap the child to avoid a zombie process
    } catch (const std::exception& e) {
        std::cerr << "Error executing command: " << e.what() << std::endl;
    }
    return 0;
}
|
package arez.spytools;
import arez.Arez;
import arez.spy.SpyEventHandler;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import javax.annotation.Nonnull;
import static org.realityforge.braincheck.Guards.*;
/**
* Abstract base class for processing spy events.
* Simplifies handling of events by delegating to a specific processor
* based on types of the events. Note that the type must be the concrete
* type of the subclass.
*/
public abstract class AbstractSpyEventProcessor
  implements SpyEventHandler
{
  /**
   * The processors that can be delegated to, keyed by concrete event class.
   */
  private final Map<Class<?>, BiConsumer<SpyUtil.NestingDelta, ?>> _processors = new HashMap<>();
  /**
   * The current nesting level. Incremented/decremented as nest-changing
   * events flow through {@link #onSpyEvent(Object)}.
   */
  private int _nestingLevel;
  /**
   * Method invoked by subclasses to register a processor for an event type.
   * At most one processor may be registered per type.
   *
   * @param <T> the event type.
   * @param type the type of the event to register.
   * @param processor the processor to handle event with.
   */
  protected final <T> void on( @Nonnull final Class<T> type,
                               @Nonnull final BiConsumer<SpyUtil.NestingDelta, T> processor )
  {
    if ( Arez.shouldCheckApiInvariants() )
    {
      apiInvariant( () -> !_processors.containsKey( type ),
                    () -> "Attempting to call AbstractSpyEventProcessor.on() to register a processor " +
                          "for type " + type + " but an existing processor already exists for type" );
    }
    _processors.put( type, processor );
  }
  /**
   * Handle the specified event by delegating to the registered processor.
   * Nesting bookkeeping: the level is decremented BEFORE dispatching a
   * DECREASE event and incremented AFTER dispatching an INCREASE event, so
   * the processor always observes the level of the enclosing scope.
   *
   * @param event the event that occurred.
   */
  @Override
  @SuppressWarnings( { "ConstantConditions", "unchecked" } )
  public final void onSpyEvent( @Nonnull final Object event )
  {
    assert null != event;
    // Lookup uses the concrete runtime class of the event, as documented
    // on the class javadoc.
    final BiConsumer<SpyUtil.NestingDelta, Object> processor =
      (BiConsumer<SpyUtil.NestingDelta, Object>) _processors.get( event.getClass() );
    if ( null != processor )
    {
      final SpyUtil.NestingDelta delta = getNestingDelta( event );
      if ( SpyUtil.NestingDelta.DECREASE == delta )
      {
        _nestingLevel -= 1;
        decreaseNestingLevel();
      }
      processor.accept( delta, event );
      if ( SpyUtil.NestingDelta.INCREASE == delta )
      {
        _nestingLevel += 1;
        increaseNestingLevel();
      }
    }
    else
    {
      handleUnhandledEvent( event );
    }
  }
  /**
   * Return the current nesting level.
   *
   * @return the current nesting level.
   */
  protected final int getNestingLevel()
  {
    return _nestingLevel;
  }
  /**
   * Hook method called when the nesting level increases.
   * Override as appropriate in subclasses.
   */
  protected void increaseNestingLevel()
  {
  }
  /**
   * Hook method called when the nesting level decreases.
   * Override as appropriate in subclasses.
   */
  protected void decreaseNestingLevel()
  {
  }
  /**
   * Handle the specified event that had no processors defined for it.
   * Default implementation ignores the event; override to log or fail.
   *
   * @param event the unhandled event.
   */
  protected void handleUnhandledEvent( @Nonnull final Object event )
  {
  }
  /**
   * Return the change in nesting level for event if any.
   * This method is used rather than directly deferring to {@link SpyUtil#getNestingDelta(Class)} so that
   * subclasses can handle custom events.
   *
   * @param event the event.
   * @return the delta in nesting level.
   */
  @Nonnull
  protected SpyUtil.NestingDelta getNestingDelta( @Nonnull final Object event )
  {
    return SpyUtil.getNestingDelta( event.getClass() );
  }
}
|
#!/bin/bash
# Build/refresh a conda environment and install the project plus horovod
# (MXNet-only) into it. Usage: prepare_env.sh <env_name>
env_name=$1
if [ -z "$env_name" ]; then
  echo "usage: $0 <env_name>" >&2
  exit 1
fi
echo "Preparing clean environment on $(hostname) in $(ls -id "$(pwd)")"
export LD_LIBRARY_PATH=/usr/local/cuda-10.0/lib64
# Pin this job to the GPU assigned by the CI executor.
export CUDA_VISIBLE_DEVICES=$EXECUTOR_NUMBER
export CONDA_ENVS_PATH=$PWD/conda
export CONDA_PKGS_DIRS=$PWD/conda/pkgs
export MXNET_HOME=$PWD/tests/data
# Build horovod with MXNet support only.
export HOROVOD_WITHOUT_TENSORFLOW=1
export HOROVOD_WITHOUT_PYTORCH=1
export HOROVOD_WITH_MXNET=1
make clean
conda env update --prune -p "conda/${env_name}" -f "env/${env_name}.yml"
# NOTE(review): `conda activate` only works in non-interactive shells after
# the conda hook is sourced (e.g. `source "$(conda info --base)/etc/profile.d/conda.sh"`).
conda activate "./conda/${env_name}"
conda list
printenv
pip install -v -e .
pip install horovod --no-cache-dir -U
python -m spacy download en
python -m spacy download de
python -m nltk.downloader all
|
/**
 * Render a number as a string, rounding to `decimal` places only when the
 * plain string form is "large" (longer than `decimal` characters) AND
 * actually contains a decimal point; integers and short values pass through
 * unchanged.
 */
export function formatFloatIfLarge(x: number, decimal: number): string {
    const asString = String(x);
    const hasFraction = asString.indexOf('.') > 0;
    return hasFraction && asString.length > decimal ? x.toFixed(decimal) : asString;
}
|
#!/bin/bash
#
# grc overides for ls
# Made possible through contributions from generous benefactors like
# `brew install coreutils`

# Only install the aliases when GNU coreutils' ls (gls) is runnable;
# otherwise the platform defaults are left untouched.
# l  = long, almost-all, human-readable sizes
# ll = long listing
# la = almost-all (skip . and ..)
if gls &>/dev/null
then
alias ls="gls -F --color"
alias l="gls -lAh --color"
alias ll="gls -l --color"
alias la='gls -A --color'
fi
|
package demo.sap.safetyandroid.test.pages;
import androidx.test.InstrumentationRegistry;
import androidx.test.uiautomator.UiDevice;
import androidx.test.uiautomator.UiObject;
import androidx.test.uiautomator.UiObjectNotFoundException;
import androidx.test.uiautomator.UiSelector;
import demo.sap.safetyandroid.test.core.AbstractLoginPage;
import demo.sap.safetyandroid.test.core.Credentials;
import demo.sap.safetyandroid.test.core.UIElements;
import demo.sap.safetyandroid.test.core.WizardDevice;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.RootMatchers.isDialog;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import demo.sap.safetyandroid.test.core.matcher.SimpleInputCellMatcher;
import static androidx.test.espresso.action.ViewActions.typeText;
public class LoginPage {
public static class BasicAuthPage extends AbstractLoginPage {
public BasicAuthPage() {
uiDevice = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation());
}
@Override
public void authenticate() {
// Click to the input field
onView(new SimpleInputCellMatcher(UIElements.LoginScreen.BasicAuthScreen.usernameText)).perform(typeText(Credentials.USERNAME));
onView(new SimpleInputCellMatcher(UIElements.LoginScreen.BasicAuthScreen.passwordText)).perform(typeText(Credentials.PASSWORD));
// Click Login on the dialog
onView(withId(UIElements.LoginScreen.BasicAuthScreen.okButton)).perform(click());
}
public void useWrongCredentials() {
UiObject usernameField = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.BasicAuthScreen.usernameID));
usernameField.waitForExists(WAIT_TIMEOUT);
// Click to the input field
WizardDevice.fillInputField(UIElements.LoginScreen.BasicAuthScreen.usernameText, Credentials.WRONGUSERNAME);
WizardDevice.fillInputField(UIElements.LoginScreen.BasicAuthScreen.passwordText, Credentials.WRONGPASSWORD);
// Click Login on the dialog
onView(withId(UIElements.LoginScreen.BasicAuthScreen.okButton)).inRoot(isDialog()).check(matches(isDisplayed())).perform(click());
}
}
public static class WebviewPage extends AbstractLoginPage {
public WebviewPage() {
uiDevice = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation());
}
@Override
public void authenticate() {
fillCredentials();
// Check whether it's oauth or not
UiObject authButton = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthAuthorizeButton));
if (authButton.waitForExists(WAIT_TIMEOUT)) {
// Oauth case
clickAuthorizeButton();
}
}
private void fillCredentials() {
UiObject usernameField = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthUsernameText));
usernameField.waitForExists(WAIT_TIMEOUT);
UiObject passwordField = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthPasswordText));
UiObject logonButton = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthLogonButton));
try {
usernameField.clearTextField();
usernameField.setText(Credentials.USERNAME);
passwordField.clearTextField();
passwordField.setText(Credentials.PASSWORD);
logonButton.click();
} catch (UiObjectNotFoundException e) {
// TODO error handling
}
}
private void useWrongCredentials() {
UiObject usernameField = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthUsernameText));
usernameField.waitForExists(WAIT_TIMEOUT);
UiObject passwordField = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthPasswordText));
UiObject logonButton = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthLogonButton));
try {
usernameField.clearTextField();
usernameField.setText(Credentials.WRONGUSERNAME);
passwordField.clearTextField();
passwordField.setText(<PASSWORD>);
logonButton.click();
} catch (UiObjectNotFoundException e) {
// TODO error handling
}
}
private void clickAuthorizeButton() {
UiObject authButton = uiDevice.findObject(new UiSelector()
.resourceId(UIElements.LoginScreen.OauthScreen.oauthAuthorizeButton));
try {
authButton.clickAndWaitForNewWindow();
} catch (UiObjectNotFoundException e) {
// TODO error handling
}
}
}
public static class NoAuthPage extends AbstractLoginPage {
@Override
public void authenticate() {
// in no-auth case we don't need to authenticate
}
}
}
|
<gh_stars>0
if (!Meteor.isClient) {
return;
}
var MESSAGE_QUERIES = 'messages:queries';
var MESSAGE_CURRENT = 'messages:current';
Template.messagesNew.helpers({
queries: function() {
return Session.get(MESSAGE_QUERIES);
}
});
Template.messagesNew.events({
'keyup #message-new-text': function(e) {
var $len = $('#message-new-text-length');
var $text = $(e.target);
var len = $text.val().length;
$len.html(len === 0 ? '' : len + (len === 1 ? ' char' : ' chars'));
},
'click .query-add': function(e) {
e.preventDefault();
var id = chance.word({ length:16 });
var queries = Session.get(MESSAGE_QUERIES);
if (!queries) {
queries = [];
}
queries.push({
id: id
});
Session.set(MESSAGE_QUERIES, queries);
},
'click .btn-remove': function(e) {
e.preventDefault();
e.stopImmediatePropagation();
var $target = Template.instance().$(e.currentTarget);
var id = $target.data('id'); // ************* This is wrong any click but first
TARGET = $target; // for debug only
var queries = Session.get(MESSAGE_QUERIES);
var toRemove = -1;
$target.blur();
//
queries.forEach(function(query, index) {
if (query.id === id) {
toRemove = index;
}
});
if (toRemove === -1) {
return console.warn('Nothing found with id:', id);
}
console.log('Removing query with id:', id, 'in position:', toRemove);
queries.splice(toRemove, 1);
Session.set(MESSAGE_QUERIES, queries);
}
});
|
def merge_arrays(arr1, arr2):
    """Merge two sorted lists into a single sorted list.

    Walks both inputs with independent cursors, repeatedly copying the
    smaller head element; on ties the element from ``arr2`` is taken first
    (matching the original comparison ``arr1[i] < arr2[j]``). Neither input
    is modified.
    """
    merged = []
    i = j = 0
    n1, n2 = len(arr1), len(arr2)
    while i < n1 and j < n2:
        if arr1[i] < arr2[j]:
            merged.append(arr1[i])
            i += 1
        else:
            merged.append(arr2[j])
            j += 1
    # At most one of the inputs still has a tail; slicing handles both.
    merged.extend(arr1[i:])
    merged.extend(arr2[j:])
    return merged
|
#!/bin/bash
# Watch the parent directory recursively; -o emits one event count per batch
# of filesystem changes, and xargs runs ./upload.sh once per batch.
fswatch -o -r ../ | xargs -n 1 ./upload.sh
|
#!/usr/bin/env bash
# Launch fairseq training for a BART/BERT-distilled en->de transformer.
src=en
tgt=de
bedropout=0.5
ARCH=transformer_wmt_en_de
ROOT=/apdcephfs/share_47076/elliottyan/co-work-projects/fairseq-bert
#### MODIFY ######
# Weight of the knowledge-distillation term in the loss.
KD_ALPHA=0.75
DATA_SIG=wmt14_en_de-bert-or-bart
MODEL_SIG=d512_bart_fill_kd_bart_decoder_init_parameter1_alpha_${KD_ALPHA}
#### MODIFY ######
DATAPATH=$ROOT/data-bin/$DATA_SIG
SAVEDIR=$ROOT/checkpoints/$DATA_SIG/$MODEL_SIG
mkdir -p $SAVEDIR
# Train on 4 GPUs.
export CUDA_VISIBLE_DEVICES=0,1,2,3
# export CUDA_VISIBLE_DEVICES=1
LC_ALL=en_US.UTF-8 python $ROOT/fairseq_cli/train.py $DATAPATH \
    -a $ARCH --optimizer adam --lr 0.0007 -s $src -t $tgt \
    --no-epoch-checkpoints --save-interval-updates 5000 \
    --dropout 0.1 --max-tokens 4000 --lr-scheduler inverse_sqrt --weight-decay 0.0001 \
    --adam-betas '(0.9,0.98)' --save-dir $SAVEDIR --label-smoothing 0.1 \
    --log-interval 100 --disable-validation \
    --fp16 --update-freq 1 --ddp-backend=no_c10d \
    --max-update 200000 --warmup-updates 4000 --warmup-init-lr '1e-07' \
    --criterion new_fill_distillation_loss \
    --denoising --text-filling --use-bartinput --bart-decoder --bart-decoder-init \
    --left-pad-source --mask-ratio 0.1 --random-ratio 0.0 \
    --insert-ratio 0.0 --rotate-ratio 0.0 --permute-sentence-ratio 0.0 \
    --kd-alpha $KD_ALPHA --bart-model-name $ROOT/pretrain_models/bart-base \
    --bert-model-name $ROOT/pretrain_models/bert-base-cased-new
# Alternative/optional flags kept for reference:
# --use-bertinput
# --share-all-embeddings
# --input-mapping
# --text-filling
# --bart-model-name $ROOT/pretrain_models/bart-base
# --denoising
|
package controllers
import (
"encoding/base64"
"encoding/json"
"fmt"
"html/template"
"net/http"
"os"
"path/filepath"
"strings"
ctx "github.com/gorilla/context"
"github.com/gorilla/csrf"
"github.com/gorilla/mux"
"github.com/gorilla/sessions"
"github.com/gorilla/websocket"
"github.com/prateeknischal/webtail/util"
)
var (
	// upgrader promotes plain HTTP requests to websocket connections,
	// using modest fixed read/write buffer sizes.
	upgrader = websocket.Upgrader{
		ReadBufferSize:  1024,
		WriteBufferSize: 1024,
	}
)
// RootHandler - http handler for handling / path. Renders the index template
// with the configured file list and a CSRF token.
func RootHandler(w http.ResponseWriter, r *http.Request) {
	// Custom delimiters so the template survives client-side {{ }} frameworks.
	t := template.New("index").Delims("<<", ">>")
	t, err := t.ParseFiles("templates/index.tmpl")
	// The original wrapped this in template.Must AND checked err afterwards;
	// Must would already have panicked, so the check was dead code.
	if err != nil {
		panic(err)
	}
	var fileList = make(map[string]interface{})
	fileList["FileList"] = util.Conf.Dir
	fileList[csrf.TemplateTag] = csrf.Token(r)
	fileList["token"] = csrf.Token(r)
	// Report (rather than silently drop) render failures.
	if err := t.Execute(w, fileList); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
// WSHandler - Websocket handler. Upgrades the connection and starts tailing
// the requested file, provided it is present in the configured index.
func WSHandler(w http.ResponseWriter, r *http.Request) {
	conn, err := upgrader.Upgrade(w, r, w.Header())
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		http.Error(w, "Could not open websocket connection", http.StatusBadRequest)
		return
	}
	filenameB, _ := base64.StdEncoding.DecodeString(mux.Vars(r)["b64file"])
	filename := string(filenameB)
	// sanitize the file if it is present in the index or not.
	filename = filepath.Clean(filename)
	ok := false
	for _, wFile := range util.Conf.Dir {
		if filename == wFile {
			ok = true
			break
		}
	}
	// If the file is found, only then start tailing the file.
	// This is to prevent arbitrary file access. This should take care of
	// stacking of filenames as it would first be searched as a string in
	// the index, if not found then rejected.
	if !ok {
		// After a successful upgrade the HTTP response is hijacked, so we
		// cannot write a status header any more (the original wrote 401 here,
		// even on success). Close the socket to reject the request instead.
		conn.Close()
		return
	}
	go util.TailFile(conn, filename)
}
// LoginHandler - handles the POST reques to /login.
// Validates credentials only when ForceAuth is enabled; on success stores the
// username in the session, otherwise redirects back with an error flag.
func LoginHandler(w http.ResponseWriter, r *http.Request) {
	session := ctx.Get(r, "session").(*sessions.Session)
	var isValid = false
	var username = "Anon"
	var err error
	if util.Conf.ForceAuth {
		isValid, username, err = util.Login(r)
		// NOTE(review): debug print of the login outcome; consider removing
		// or routing through a logger.
		fmt.Println(isValid, username)
	}
	if err != nil {
		fmt.Fprintf(os.Stderr, "Login Failure for %s: %s", username, err)
	}
	if isValid {
		session.Values["id"] = username
		session.Save(r, w)
		http.Redirect(w, r, "/", 302)
	} else {
		session.Save(r, w)
		http.Redirect(w, r, "/login?err=invalid", 302)
	}
}
// LoginPageHandler - GET response to login page.
func LoginPageHandler(w http.ResponseWriter, r *http.Request) {
	if util.Conf.ForceAuth == false {
		// Must return after redirecting; otherwise the login template below
		// is rendered on top of the redirect response.
		http.Redirect(w, r, "/", 302)
		return
	}
	t := template.New("login").Delims("<<", ">>")
	t, err := t.ParseFiles("templates/login.tmpl")
	// template.Must would have panicked before the original's err check ran;
	// panic directly on error instead.
	if err != nil {
		panic(err)
	}
	t.Execute(w, map[string]interface{}{
		csrf.TemplateTag: csrf.TemplateField(r),
	})
}
// LogoutHandler - handles logout requests by dropping the session identity.
func LogoutHandler(w http.ResponseWriter, r *http.Request) {
	if util.Conf.ForceAuth == false {
		// Must return after redirecting; otherwise the code below clears the
		// session and writes a second redirect to the same response.
		http.Redirect(w, r, "/", 302)
		return
	}
	session := ctx.Get(r, "session").(*sessions.Session)
	delete(session.Values, "id")
	session.Save(r, w)
	http.Redirect(w, r, "/login?logout=success", 302)
}
// AuthHandler - checks if user is logged in. Wraps a handler so that requests
// without a "user" in the request context are redirected to /login.
func AuthHandler(handler http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if u := ctx.Get(r, "user"); u != nil {
			handler.ServeHTTP(w, r)
		} else {
			http.Redirect(w, r, "/login", 302)
		}
	}
}
// GetContext wraps each request in a function which fills in the context for a given request.
// This includes setting the User and Session keys and values as necessary for use in later functions.
func GetContext(handler http.Handler) http.HandlerFunc {
	// Set the context here
	return func(w http.ResponseWriter, r *http.Request) {
		// Parse the request form
		err := r.ParseForm()
		if err != nil {
			http.Error(w, "Error parsing request", http.StatusInternalServerError)
			// Stop here: the original fell through and kept serving the
			// request after already writing a 500.
			return
		}
		// Set the context appropriately here.
		session, _ := util.Store.Get(r, "webtail")
		// Put the session in the context so that we can
		// reuse the values in different handlers
		ctx.Set(r, "session", session)
		if id, ok := session.Values["id"]; ok {
			ctx.Set(r, "user", id)
			ctx.Set(r, "isLoggedIn", true)
		} else {
			ctx.Set(r, "user", nil)
			ctx.Set(r, "isLoggedIn", false)
		}
		// If running on No-Login enforced mode then will set an anon context
		if !util.Conf.ForceAuth {
			ctx.Set(r, "user", "Anon")
			ctx.Set(r, "isLoggedIn", false)
		}
		w.Header().Set("X-CSRF-Token", csrf.Token(r))
		handler.ServeHTTP(w, r)
		// Remove context contents
		ctx.Clear(r)
	}
}
// UserDetails - returns user name who is logged in, as a small JSON payload.
func UserDetails(w http.ResponseWriter, r *http.Request) {
	// Use comma-ok assertions: "user" is set to nil on unauthenticated
	// requests, and the original bare .(string) assertion would panic then.
	username, _ := ctx.Get(r, "user").(string)
	isLoggedIn, _ := ctx.Get(r, "isLoggedIn").(bool)
	var resp = struct {
		Username   string `json:"username"`
		IsLoggedIn bool   `json:"isLoggedIn"`
	}{
		Username:   username,
		IsLoggedIn: isLoggedIn,
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(resp)
}
// CSRFExemptPrefixes - list of URL path prefixes that do not require CSRF
// protection (currently empty; add entries such as "/user" to exempt them).
var CSRFExemptPrefixes = []string{
	// "/user",
}
// CSRFExceptions - exempts ajax calls from csrf tokens.
// Skips CSRF validation for any request whose path matches one of the
// prefixes in CSRFExemptPrefixes.
func CSRFExceptions(handler http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		for _, prefix := range CSRFExemptPrefixes {
			if strings.HasPrefix(r.URL.Path, prefix) {
				r = csrf.UnsafeSkipCheck(r)
				break
			}
		}
		handler.ServeHTTP(w, r)
	}
}
|
import * as commandLineArgs from "command-line-args";
import * as semver from "semver";
import { isReleaseType } from "../release-type";
import { VersionBumpOptions } from "../types/version-bump-options";
import { ExitCode } from "./exit-code";
import { usageText } from "./help";
/**
 * The parsed command-line arguments
 */
export interface ParsedArgs {
  /** Show usage text and exit. */
  help?: boolean;
  /** Print the tool version and exit. */
  version?: boolean;
  /** Suppress informational output. */
  quiet?: boolean;
  /** Options forwarded to the version-bump implementation. */
  options: VersionBumpOptions;
}
/**
 * Parses the command-line arguments
 */
export function parseArgs(argv: string[]): ParsedArgs {
  try {
    let args = commandLineArgs(
      [
        { name: "preid", type: String },
        { name: "commit", alias: "c", type: String },
        { name: "tag", alias: "t", type: String },
        { name: "push", alias: "p", type: Boolean },
        { name: "all", alias: "a", type: Boolean },
        { name: "no-verify", type: Boolean },
        { name: "quiet", alias: "q", type: Boolean },
        { name: "version", alias: "v", type: Boolean },
        { name: "help", alias: "h", type: Boolean },
        { name: "ignore-scripts", type: Boolean },
        { name: "git-files", type: String, multiple: true },
        { name: "files", type: String, multiple: true, defaultOption: true },
      ],
      { argv }
    );
    let parsedArgs: ParsedArgs = {
      help: args.help as boolean,
      version: args.version as boolean,
      quiet: args.quiet as boolean,
      options: {
        preid: args.preid as string,
        commit: args.commit as string | boolean,
        tag: args.tag as string | boolean,
        push: args.push as boolean,
        all: args.all as boolean,
        noVerify: args["no-verify"] as boolean,
        files: args.files as string[],
        gitFiles: args["git-files"] as string[],
        ignoreScripts: args["ignore-scripts"] as boolean,
      }
    };
    // NOTE: commandLineArgs yields null (not undefined) for a String option
    // given with no value, which is what the three checks below rely on.
    // If --preid is used without an argument, then throw an error, since it's probably a mistake.
    // If they want to use the default value ("beta"), then they should not pass the argument at all
    if (args.preid === null) {
      throw new Error("The --preid option requires a value, such as \"alpha\", \"beta\", etc.");
    }
    // If --commit is used without an argument, then treat it as a boolean flag
    if (args.commit === null) {
      parsedArgs.options.commit = true;
    }
    // If --tag is used without an argument, then treat it as a boolean flag
    if (args.tag === null) {
      parsedArgs.options.tag = true;
    }
    // If a version number or release type was specified, then it will mistakenly be added to the "files" array
    if (parsedArgs.options.files && parsedArgs.options.files.length > 0) {
      let firstArg = parsedArgs.options.files[0];
      if (firstArg === "prompt" || isReleaseType(firstArg) || semver.valid(firstArg)) {
        parsedArgs.options.release = firstArg;
        parsedArgs.options.files.shift();
      }
    }
    return parsedArgs;
  }
  catch (error) {
    // There was an error parsing the command-line args
    return errorHandler(error as Error);
  }
}
/**
 * Prints the parsing error plus usage text, then terminates the process
 * with a non-zero exit code. Never returns.
 */
function errorHandler(error: Error): never {
  console.error(error.message);
  console.error(usageText);
  return process.exit(ExitCode.InvalidArgument);
}
|
#!/bin/bash
# Run the PHP email sender from its expected working directory (root only).
if [ "$EUID" -ne 0 ]
  then echo "Please run as root"
  exit
fi
SAVED_DIR=$(pwd)
# Make sure you change the directory if your local setup is different.
# Abort if the directory is missing so the script never runs from the
# wrong working directory.
cd /var/www/html/scripts || { echo "scripts directory not found" >&2; exit 1; }
php sendEmails.php
cd "${SAVED_DIR}" || exit 1
|
package de.ids_mannheim.korap.oauth2.constant;
/**
* Defines possible OAuth2 client types.
*
* Quoted from RFC 6749:
* <ul>
*
* <li> <b>Confidential clients</b> are clients capable of maintaining
* the confidentiality of their
* credentials (e.g., client implemented on a secure server with
* restricted access to the client credentials), or capable of secure
* client authentication using other means.
* </li>
*
* <li>
* <b>Public clients</b> are Clients incapable of maintaining the
* confidentiality of their credentials (e.g., clients executing on
* the device used by the resource owner, such as an installed
* native application or a web browser-based application), and
* incapable of secure client authentication via any other means.
* Mobile and Javascript apps are considered public clients.
* </li>
* </ul>
*
* @author margaretha
*
*/
public enum OAuth2ClientType {
    // CONFIDENTIAL: can keep its credentials secret (e.g. server-side apps).
    // PUBLIC: cannot keep credentials secret (e.g. browser-based/mobile apps).
    CONFIDENTIAL, PUBLIC;
}
|
// Fibonacci series
public class Fibonacci
{
    /// <summary>Prints the first 50 Fibonacci numbers, space-separated.</summary>
    public static void Main()
    {
        // Use long: fib(47) = 2,971,215,073 already overflows a 32-bit int,
        // and this loop prints terms up to fib(49).
        long n1 = 0, n2 = 1, n3;
        int i, number = 50;
        Console.Write(n1 + " " + n2 + " "); //printing 0 and 1
        for (i = 2; i < number; ++i)
        {
            n3 = n1 + n2;
            Console.Write(n3 + " ");
            n1 = n2;
            n2 = n3;
        }
    }
}
|
#!/bin/sh
# Terminate already running bar instances
killall -q polybar
# Wait until the processes have been shut down
while pgrep -u $USER -x polybar >/dev/null; do sleep 1; done
# Launch one bar per connected monitor when xrandr is available;
# fall back to a single bar otherwise. Stole from here:
# https://github.com/jaagr/polybar/issues/763#issuecomment-331604987
if type "xrandr"; then
  for m in $(xrandr --query | grep " connected" | cut -d" " -f1); do
    # MONITOR is read by the bar config to pick the output.
    export MONITOR=$m
    MONITOR=$m polybar --reload i3wmthemer_bar &
    unset MONITOR
  done
else
  polybar --reload i3wmthemer_bar
fi
echo "Bars launched..."
|
#!/bin/bash
# Install build dependencies, JDK 8 and the Android NDK for this project.
source scripts/helper.sh
DEPS=(automake make libtool)
if is_osx; then
    DEPS+=(gnu-sed)
else
    DEPS+=(wget sudo clang unzip)
fi
echo "Checking and installing dependencies '${DEPS[*]}'..."
# Check and install missing dependencies
if ! is_osx; then
    apt-get update
fi
for DEP in "${DEPS[@]}"; do
    check_dep "$DEP"
done
# Check for JDK. Compare versions numerically with sort -V: the original
# `[[ $JAVA_VERSION < "1.8.0" ]]` compares lexicographically, so e.g.
# "1.10.0" would wrongly sort before "1.8.0". An empty version (java not
# installed) also triggers installation.
JAVA_VERSION=$(java -version 2>&1 | awk -F '"' '/version/ {print $2}')
if [ -z "$JAVA_VERSION" ] || [ "$(printf '%s\n' "$JAVA_VERSION" 1.8.0 | sort -V | head -n1)" != "1.8.0" ]; then
    echo "Installing JDK 8..."
    install_java
else
    echo "JDK 8 has been installed at $JAVA_HOME"
fi
# Check for NDK
NDK_VERSION="r19c"
NDK_PATH=$(check_ndk_path $NDK_VERSION)
if [ "$NDK_PATH" == "" ]; then
    echo "Installing NDK..."
    pushd "$HOME" || exit
    install_ndk $NDK_VERSION
    popd || exit
else
    echo "NDK has been installed at $NDK_PATH"
fi
|
({
    // Builds a report comparing the raw `window` against the Locker-secured
    // window for the "apiviewer" namespace, then exposes it both on the
    // component attribute and on a well-known window global.
    init: function(cmp, event, helper) {
        var key = $A.lockerService.getKeyForNamespace("apiviewer");
        var secureWindow = $A.lockerService.getEnv(key);
        var report = helper.utils.tester.testObject(window, secureWindow);
        helper.utils.tester.sortReport(report);
        cmp.set("v.report", report);
        // Global handle read by the tab-tester tooling.
        window.__secureWindowTabTesterReport = report;
    }
})
|
#!/bin/sh
# Exit 1 when ~/.emacs.d already exists (install guard), 0 otherwise.
# $HOME is quoted so the test does not break on paths containing spaces.
[ -d "$HOME/.emacs.d" ] && exit 1
exit 0
|
#!/bin/sh
# Wait for MySQL/Kafka/Storm dependencies, then submit (or locally run) the
# monasca-thresh topology.
TOPOLOGY_NAME="thresh-cluster"
MYSQL_WAIT_RETRIES=${MYSQL_WAIT_RETRIES:-"24"}
MYSQL_WAIT_DELAY=${MYSQL_WAIT_DELAY:-"5"}
KAFKA_WAIT_RETRIES=${KAFKA_WAIT_RETRIES:-"24"}
KAFKA_WAIT_DELAY=${KAFKA_WAIT_DELAY:-"5"}
# Defaults added: these were previously used unset, so `seq ""` failed and
# the Storm wait loop below never executed.
STORM_WAIT_RETRIES=${STORM_WAIT_RETRIES:-"24"}
STORM_WAIT_DELAY=${STORM_WAIT_DELAY:-"5"}
STORM_WAIT_TIMEOUT=${STORM_WAIT_TIMEOUT:-"30"}
THRESH_STACK_SIZE=${THRESH_STACK_SIZE:-"1024k"}
echo "Waiting for MySQL to become available..."
success="false"
for i in $(seq "$MYSQL_WAIT_RETRIES"); do
  if mysqladmin status \
      --host="$MYSQL_DB_HOST" \
      --port="$MYSQL_DB_PORT" \
      --user="$MYSQL_DB_USERNAME" \
      --password="$MYSQL_DB_PASSWORD" \
      --connect_timeout=10; then
    echo "MySQL is available, continuing..."
    success="true"
    break
  else
    echo "Connection attempt $i of $MYSQL_WAIT_RETRIES failed"
    sleep "$MYSQL_WAIT_DELAY"
  fi
done
if [ "$success" != "true" ]; then
  echo "Unable to reach MySQL database! Exiting..."
  sleep 1
  exit 1
fi
if [ -n "$KAFKA_WAIT_FOR_TOPICS" ]; then
  echo "Waiting for Kafka topics to become available..."
  success="false"
  for i in $(seq "$KAFKA_WAIT_RETRIES"); do
    if python /kafka_wait_for_topics.py; then
      success="true"
      break
    else
      echo "Kafka not yet ready (attempt $i of $KAFKA_WAIT_RETRIES)"
      sleep "$KAFKA_WAIT_DELAY"
    fi
  done
  if [ "$success" != "true" ]; then
    echo "Kafka failed to become ready, exiting..."
    sleep 1
    exit 1
  fi
fi
if [ "${NO_STORM_CLUSTER}" = "true" ]; then
  echo "Using Thresh Config file /storm/conf/thresh-config.yml. Contents:"
  grep -vi password /storm/conf/thresh-config.yml
  # shellcheck disable=SC2086
  JAVAOPTS="-XX:MaxRAM=$(python /memory.py $WORKER_MAX_MB) -XX:+UseSerialGC -Xss$THRESH_STACK_SIZE"
  if [ "$LOCAL_JMX" = "true" ]; then
    JAVAOPTS="$JAVAOPTS -Dcom.sun.management.jmxremote=true"
    port="${LOCAL_JMX_PORT:-9090}"
    JAVAOPTS="$JAVAOPTS -Dcom.sun.management.jmxremote.port=$port"
    JAVAOPTS="$JAVAOPTS -Dcom.sun.management.jmxremote.rmi.port=$port"
    JAVAOPTS="$JAVAOPTS -Dcom.sun.management.jmxremote.ssl=false"
    JAVAOPTS="$JAVAOPTS -Dcom.sun.management.jmxremote.authenticate=false"
    JAVAOPTS="$JAVAOPTS -Dcom.sun.management.jmxremote.local.only=false"
  fi
  if [ -n "$LOG_CONFIG_FILE" ]; then
    JAVAOPTS="$JAVAOPTS -Dlog4j.configurationFile=$LOG_CONFIG_FILE"
  fi
  echo "Submitting storm topology as local cluster using JAVAOPTS of $JAVAOPTS"
  # shellcheck disable=SC2086
  java $JAVAOPTS -classpath "/monasca-thresh.jar:/storm/lib/*" monasca.thresh.ThresholdingEngine /storm/conf/thresh-config.yml thresh-cluster local
  exit $?
fi
echo "Waiting for storm to become available..."
success="false"
for i in $(seq "$STORM_WAIT_RETRIES"); do
  # NOTE(review): `timeout -t` is BusyBox syntax; GNU coreutils timeout
  # takes the duration positionally.
  if timeout -t "$STORM_WAIT_TIMEOUT" storm list; then
    echo "Storm is available, continuing..."
    success="true"
    break
  else
    echo "Connection attempt $i of $STORM_WAIT_RETRIES failed"
    sleep "$STORM_WAIT_DELAY"
  fi
done
if [ "$success" != "true" ]; then
  echo "Unable to connect to Storm! Exiting..."
  sleep 1
  exit 1
fi
topologies=$(storm list | awk '/-----/,0{if (!/-----/)print $1}')
found="false"
for topology in $topologies; do
  if [ "$topology" = "$TOPOLOGY_NAME" ]; then
    found="true"
    echo "Found existing storm topology with name: $topology"
    break
  fi
done
if [ "$found" = "true" ]; then
  echo "Storm topology already exists, will not submit again"
  # TODO handle upgrades
else
  echo "Using Thresh Config file /storm/conf/thresh-config.yml. Contents:"
  grep -vi password /storm/conf/thresh-config.yml
  echo "Submitting storm topology..."
  storm jar /monasca-thresh.jar \
    monasca.thresh.ThresholdingEngine \
    /storm/conf/thresh-config.yml \
    "$TOPOLOGY_NAME"
fi
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Type-check test.js by piping its own contents through `flow check-contents`
# and assert the check succeeds.
assert_ok "$FLOW" check-contents test.js < test.js
|
import { NestFactory } from '@nestjs/core';
import { ExpressAdapter } from '@nestjs/platform-express';
import * as compression from 'compression';
import * as morgan from 'morgan';
import { AppModule } from './app.module';
import { ConfigService } from './shared/services/config.service';
import { SharedModule } from './shared/shared.module';
import { setupSwagger } from './viveo-swagger';
/**
 * Creates and starts the Nest application: enables gzip compression and
 * request logging, exposes Swagger docs outside production, and listens on
 * the configured PORT.
 */
async function bootstrap() {
  const app = await NestFactory.create(AppModule, new ExpressAdapter());
  app.use(compression());
  app.use(morgan('combined'));
  const configService = app.select(SharedModule).get(ConfigService);
  if (['development', 'staging'].includes(configService.nodeEnv)) {
    setupSwagger(app);
  }
  const port = configService.getNumber('PORT');
  await app.listen(port);
  console.info(`server running on port ${port}`);
}
// Handle startup failures explicitly instead of leaving a floating promise
// with an unhandled rejection.
bootstrap().catch((error) => {
  console.error('Application failed to start', error);
  process.exit(1);
});
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
# Copyright (C) 2017-2020 The LineageOS Project
# Copyright (C) 2020 Raphielscape LLC. and Haruka LLC.
#
# SPDX-License-Identifier: Apache-2.0
#

# Proprietary-blob extraction script: pulls the files listed in
# proprietary-files.txt from a connected device (adb) or a dump directory
# and regenerates the vendor makefiles.

set -e

# Template placeholders -- must be filled in per device before use.
DEVICE=**** FILL IN DEVICE NAME ****
VENDOR=**** FILL IN VENDOR NAME ****

# Load extract_utils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
if [[ ! -d "${MY_DIR}" ]]; then MY_DIR="${PWD}"; fi

HENTAI_ROOT="${MY_DIR}/../../.."

HELPER="${HENTAI_ROOT}/vendor/hentai/build/tools/extract_utils.sh"
if [ ! -f "${HELPER}" ]; then
    echo "Unable to find helper script at ${HELPER}"
    exit 1
fi
source "${HELPER}"

# Default to sanitizing the vendor folder before extraction
CLEAN_VENDOR=true

KANG=
SECTION=

# Parse flags; any non-flag argument is taken as the extraction source
# (a dump directory). The default source is a connected adb device.
while [ "${#}" -gt 0 ]; do
    case "${1}" in
        -n | --no-cleanup )
            CLEAN_VENDOR=false
            ;;
        -k | --kang )
            KANG="--kang"
            ;;
        -s | --section )
            SECTION="${2}"; shift
            # Partial extraction: keep blobs outside the chosen section.
            CLEAN_VENDOR=false
            ;;
        * )
            SRC="${1}"
            ;;
    esac
    shift
done

if [ -z "${SRC}" ]; then
    SRC="adb"
fi

# Per-blob post-processing hook called by extract_utils for each file:
# ${1} is the blob's device path, ${2} the extracted file on disk.
function blob_fixup() {
    case "${1}" in
        vendor/lib/libsample1.so)
            # Retarget a hard-coded data directory inside the binary.
            sed -i 's|/data/misc/sample1|/data/misc/sample2|g' "${2}"
            ;;
        vendor/lib64/libsample2.so)
            "${PATCHELF}" --remove-needed "libsample3.so" "${2}"
            "${PATCHELF}" --add-needed "libsample4.so" "${2}"
            ;;
        vendor/lib/libsample5.so)
            "${PATCHELF}" --replace-needed "libsample6.so" "libsample7.so" "${2}"
            ;;
        vendor/lib/libsample7.so)
            "${PATCHELF}" --set-soname "libsample7.so" "${2}"
            ;;
    esac
}

# Initialize the helper
setup_vendor "${DEVICE}" "${VENDOR}" "${HENTAI_ROOT}" false "${CLEAN_VENDOR}"

extract "${MY_DIR}/proprietary-files.txt" "${SRC}" "${KANG}" --section "${SECTION}"

"${MY_DIR}/setup-makefiles.sh"
|
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# Small fully-connected regression network: 3 input features -> 1 output.
model = Sequential()
model.add(Dense(8, activation='relu', input_shape=(3,)))
model.add(Dense(4, activation='relu'))
model.add(Dense(1))  # linear activation: suitable for regression

# Mean-squared-error loss with Adam; track mean absolute error as a metric.
model.compile(optimizer='adam',
              loss='mse',
              metrics=['mae'])

# Loading the data
# NOTE(review): assumes the .npz archive contains 'x_train' shaped (N, 3)
# and a matching 'y_train' array -- confirm against the data producer.
file_name = "your_data_file.npz"
sp_data = np.load(file_name)
x_train = sp_data['x_train']
y_train = sp_data['y_train']

# Training the model
model.fit(x_train, y_train, epochs=5)
|
package input
import (
"context"
_ "embed"
"encoding/json"
"errors"
"fmt"
"strings"
"github.com/qri-io/jsonschema"
)
// Embedded JSON Schemas used to validate inbound documents.
//go:embed schemas/event.json
var embeddedSchemaEvent string

//go:embed schemas/manifest.json
var embeddedSchemaManifest string

// Schema aliases the third-party jsonschema type so the rest of the
// package does not depend on the library name directly.
type Schema = jsonschema.Schema

// schemaCollection bundles the parsed schemas for the two document kinds.
type schemaCollection struct {
	schemaManifest *Schema
	schemaEvent    *Schema
}
// stringToSchema parses a JSON Schema document from its source text.
// It panics on malformed input; the only callers pass compile-time
// embedded schemas, so a failure here is a programming error.
func stringToSchema(jsc string) *Schema {
	s := &Schema{}
	if err := json.Unmarshal([]byte(jsc), s); err != nil {
		panic(err)
	}
	return s
}
// formatJsonSchemaError renders a single jsonschema validation error as a
// numbered, human-readable line.
func formatJsonSchemaError(i int, e jsonschema.KeyError) string {
	// No trailing newline: validate() joins the individual messages with
	// "\n", so a trailing "\n" here produced blank lines between errors.
	m := "[JsonSchemaError #%d] at property '%s': %s"
	return fmt.Sprintf(m, i, e.PropertyPath, e.Message)
}
// validate checks data against the given schema and returns nil when it
// conforms. On failure it returns a single error whose message is the
// newline-joined list of all individual schema violations.
func validate(s *Schema, data []byte) error {
	ctx := context.Background()
	// ValidateBytes' own error return is ignored deliberately: a JSON parse
	// failure also surfaces as a KeyError in errs.
	if errs, _ := s.ValidateBytes(ctx, data); len(errs) > 0 {
		errorList := []string{}
		for i, err := range errs {
			errorList = append(errorList, formatJsonSchemaError(i+1, err))
		}
		return errors.New(strings.Join(errorList, "\n"))
	}
	return nil
}
// newSchemaCollection parses the embedded schemas once and returns the
// ready-to-use collection. Panics (via stringToSchema) if an embedded
// schema is malformed.
func newSchemaCollection() *schemaCollection {
	return &schemaCollection{
		schemaEvent:    stringToSchema(embeddedSchemaEvent),
		schemaManifest: stringToSchema(embeddedSchemaManifest),
	}
}
// validateEvent checks raw JSON against the embedded event schema.
func (c schemaCollection) validateEvent(data []byte) error {
	return validate(c.schemaEvent, data)
}
// validateManifest checks raw JSON against the embedded manifest schema.
func (c schemaCollection) validateManifest(data []byte) error {
	return validate(c.schemaManifest, data)
}
|
<reponame>world9604/DoubleUEyeD<filename>app/src/main/java/com/hongbog/util/HttpConnection.java
package com.hongbog.util;
import com.hongbog.dto.SensorDTO;
import okhttp3.Callback;
import okhttp3.MediaType;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
/**
 * Created by taein on 2018-07-06.
 *
 * Singleton helper that uploads a captured eye image together with its
 * sensor readings to the collection server as a multipart/form-data POST.
 */
public class HttpConnection {

    // NOTE(review): hard-coded LAN endpoint -- only reachable on the
    // development network; consider making this configurable.
    private static final String ADDRESS = "http://192.168.0.2:8080";
    private static final String MAPPING_NAME = "/uploadData.do";
    private final String URL = ADDRESS + MAPPING_NAME;

    // Shared OkHttp client, reused across requests for connection pooling.
    private OkHttpClient client;

    private static HttpConnection instance = new HttpConnection();

    /** @return the process-wide singleton instance. */
    public static HttpConnection getInstance() {
        return instance;
    }

    private HttpConnection(){ this.client = new OkHttpClient(); }

    /**
     * Asynchronously uploads one PNG image plus roll/pitch/yaw/brightness
     * sensor values; the outcome is delivered through {@code callback} on
     * OkHttp's dispatcher thread.
     *
     * @param BitmapBytes raw PNG bytes of the captured image
     * @param label       file/label name used for the "photo" part
     * @param sensorVales sensor readings attached as form fields
     * @param callback    invoked with the HTTP response or failure
     * @return always {@code null}; results arrive via the callback only
     */
    public String requestUploadPhoto(byte[] BitmapBytes, String label, SensorDTO sensorVales, Callback callback) {
        try {
            RequestBody requestBody = new MultipartBody.Builder().setType(MultipartBody.FORM)
                    .addFormDataPart("photo", label + ".png", RequestBody.create(MediaType.parse("image/png"), BitmapBytes))
                    .addFormDataPart("label", label)
                    .addFormDataPart("roll", sensorVales.getRoll())
                    .addFormDataPart("pitch", sensorVales.getPitch())
                    .addFormDataPart("yaw", sensorVales.getYaw())
                    .addFormDataPart("br", sensorVales.getBr())
                    .build();

            okhttp3.Request request = new okhttp3.Request.Builder()
                    .url(URL)
                    .post(requestBody)
                    .build();

            client.newCall(request).enqueue(callback);
        } catch (Exception e) {
            // NOTE(review): request-building failures are only logged, so the
            // caller cannot distinguish "enqueued" from "failed to build".
            e.printStackTrace();
        }
        return null;
    }
}
|
#!/usr/bin/env bash
# Build and launch the Electron app: clear stale build artifacts, run the
# sbt task that regenerates the output directory, then start Electron.
set -euxo pipefail

# rm -f succeeds even when a file is absent, so the cleanup cannot trip
# `set -e`; this replaces the repeated `rm file ||:` fallbacks.
rm -f output/main.js output/package.json output/preload.js output/renderer.js output/README.md

sbt electronOutput
electron output
|
import re

# Rosalind "RNA" problem: transcribe a DNA string into RNA by replacing
# every thymine (T) with uracil (U).
with open('/Users/jamison/Downloads/rosalind_rna.txt') as f:
    data = f.read()

# Python 3 print function -- the original used the Python 2 print
# statement, which is a SyntaxError on Python 3.
print(re.sub(r'T', r'U', data))
# Or alternatively: print(data.replace('T', 'U'))
|
#!/bin/bash
# Configure this host as a dual-stack router on eth4.

# Enable IPv4 forwarding globally and relax reverse-path filtering so
# asymmetric routes are not dropped.
sysctl -w net.ipv4.conf.all.forwarding=1
sysctl -w net.ipv4.conf.all.rp_filter=0

# Static IPv4 address and per-interface forwarding on eth4.
ip address add 1.0.1.18/24 dev eth4
sysctl -w net.ipv4.conf.eth4.forwarding=1
sysctl -w net.ipv4.conf.eth4.rp_filter=0

# Re-enable IPv6 on eth4 but disable SLAAC autoconfiguration, because the
# address is assigned statically below.
sysctl -w net.ipv6.conf.eth4.disable_ipv6=0
sysctl -w net.ipv6.conf.eth4.autoconf=0
ip address add 2000:0:1:1:201:ff:fe00:49/64 dev eth4
ip link set eth4 up
|
<filename>lcof_019/cpp_019/Solution1.h
//
// Created by ooooo on 2020/3/14.
//
#ifndef CPP_019__SOLUTION1_H_
#define CPP_019__SOLUTION1_H_
#include <iostream>
#include <vector>
#include <iostream>
#include <vector>
using namespace std;
class Solution {
public:
    // Recursive matcher: does s[i..] match p[j..]?
    // '.' matches any single character; "x*" matches zero or more of 'x'.
    bool dfs(int i, int j) {
        // Both exhausted: full match.
        if (i >= s.size() && j >= p.size()) return true;
        // Pattern exhausted but characters remain: no match.
        if (i < s.size() && j == p.size()) return false;
        // The character after p[j] either is a '*' or it is not.
        if (j + 1 < p.size() && p[j + 1] == '*') {
            if (p[j] == s[i] || (p[j] == '.' && i < s.size())) {
                // Three choices: match zero occurrences (skip the pair),
                // consume one char and stay on the pair, or consume one
                // char and move past the pair.
                return dfs(i, j + 2) || dfs(i + 1, j) || dfs(i + 1, j + 2);
            } else {
                // Ignore the "x*" pair entirely (zero occurrences).
                return dfs(i, j + 2);
            }
        }
        // Plain single-character match.
        if (p[j] == s[i] || (p[j] == '.' && i < s.size())) {
            return dfs(i + 1, j + 1);
        }
        return false;
    }

    // Matching state shared by the recursion.
    string s, p;

    // Returns true when the whole string s matches the whole pattern p.
    bool isMatch(string s, string p) {
        if (s.empty() && p.empty()) return true;
        this->s = s;
        this->p = p;
        return dfs(0, 0);
    }
};
#endif //CPP_019__SOLUTION1_H_
|
/**
 * Simple value class representing a rectangle and exposing its area.
 */
public class Rectangle {

    /** Length of the rectangle. */
    double length;

    /** Breadth of the rectangle. */
    double breadth;

    /**
     * Creates a rectangle with the given dimensions.
     *
     * @param length  length of the rectangle
     * @param breadth breadth of the rectangle
     */
    public Rectangle(double length, double breadth)
    {
        this.length = length;
        this.breadth = breadth;
    }

    /**
     * @return the area of this rectangle (length multiplied by breadth)
     */
    public double getArea()
    {
        return breadth * length;
    }
}
|
namespace uchan {

    interface PersistableConstructor {
        new(): Persistable;
    }

    export interface Persistable {
        fromObject(obj: any);

        toObject(): any;
    }

    /**
     * Prefixed wrapper around window.localStorage that (de)serializes
     * Persistable objects and notifies registered callbacks when another
     * document changes a watched key.
     */
    export class Persistence {
        prefix = 'uchan_';
        localStorage: Storage;
        // Change listeners keyed by the un-prefixed storage key.
        callbacks: { [key: string]: (() => void) [] } = {};

        constructor() {
            this.localStorage = window.localStorage;
            // "storage" fires when another document modifies the store.
            window.addEventListener("storage", (e) => this.onStorageChanged(e));
        }

        /** Serializes a single Persistable under the prefixed key. */
        persist(key: string, result: Persistable) {
            let finalKey = this.prefix + key;
            this.localStorage.setItem(finalKey, JSON.stringify(result.toObject()));
        }

        /** Serializes a list of Persistables as one JSON array. */
        persistList(key: string, result: Persistable[]) {
            let finalKey = this.prefix + key;
            let objects = [];
            for (let i = 0; i < result.length; i++) {
                objects.push(result[i].toObject());
            }
            this.localStorage.setItem(finalKey, JSON.stringify(objects));
        }

        /**
         * Rehydrates a single Persistable, or null when the key is absent.
         */
        retrieve(key: string, constructor: PersistableConstructor): Persistable {
            let finalKey = this.prefix + key;
            // getItem() yields null for a missing key; JSON.parse then
            // also yields null, which is handled below.
            let value = JSON.parse(this.localStorage.getItem(finalKey));
            if (value == null) {
                return null;
            }
            let result = new constructor();
            result.fromObject(value);
            return result;
        }

        /**
         * Rehydrates a list of Persistables; an absent key yields [].
         */
        retrieveList(key: string, constructor: PersistableConstructor): Persistable[] {
            let finalKey = this.prefix + key;
            let values = JSON.parse(this.localStorage.getItem(finalKey));
            if (values == null) {
                return [];
            }
            let results = [];
            for (let i = 0; i < values.length; i++) {
                let result = new constructor();
                result.fromObject(values[i]);
                results.push(result);
            }
            return results;
        }

        onStorageChanged(event: StorageEvent) {
            // Bug fix: event.key is null when the storage area is cleared
            // wholesale; guard before the prefix test to avoid a TypeError.
            if (event.key && event.key.indexOf(this.prefix) == 0) {
                let finalKey = event.key.substr(this.prefix.length);
                if (finalKey in this.callbacks) {
                    let callbacks = this.callbacks[finalKey];
                    for (let i = 0; i < callbacks.length; i++) {
                        callbacks[i]();
                    }
                }
            }
        }

        /** Registers a change listener for an un-prefixed key. */
        addCallback(name: string, func: () => void) {
            let list = this.callbacks[name];
            if (!list) {
                list = [];
                this.callbacks[name] = list;
            }
            list.push(func);
        }
    }
}
|
<reponame>morz/react-admin-import-csv<filename>src/csv-extractor.spec.ts<gh_stars>100-1000
import { processCsvData, getCsvData, processCsvFile } from "./csv-extractor";
import * as fs from "fs";
import * as path from "path";
/**
 * Opens a test fixture CSV as a read stream, resolving the path relative
 * to this spec file's directory.
 */
function getFile(relPath: string): fs.ReadStream {
  const testCsvPath = path.join(__dirname, relPath)
  const csvFile = fs.createReadStream(testCsvPath)
  return csvFile;
}
// Behavioural tests for the CSV extraction pipeline: raw row arrays
// (processCsvData), file streams (getCsvData), and the combined
// file-to-records path (processCsvFile).
describe("csv extractor", () => {
  test("processCsvData", () => {
    // First row is treated as the header; remaining rows become records.
    const output = processCsvData([
      ["id", "title"],
      ["1", "One"],
      ["2", "Two"]
    ]);
    expect(output).toHaveLength(2)
    expect(output[0]['id']).toBe("1")
  });
  test("getCsvData test1.csv", async () => {
    // getCsvData returns raw rows, header included (6 lines in fixture).
    const file = getFile('../test-csvs/test1.csv')
    const data = await getCsvData(file)
    expect(data).toHaveLength(6)
  });
  test("getCsvData test2.csv", async () => {
    const file = getFile('../test-csvs/test2.csv')
    const data = await getCsvData(file)
    expect(data).toHaveLength(6)
  });
  test("processCsvFile test1.csv", async () => {
    // processCsvFile consumes the header row, so one fewer than above.
    const file = getFile('../test-csvs/test1.csv')
    const data = await processCsvFile(file)
    expect(data).toHaveLength(5)
  });
  test("process csv data with nested object", () => {
    // Dotted header names expand into nested objects.
    const output = processCsvData([
      ["id", "item.title"],
      ["1", "One"],
      ["2", "Two"]
    ]);
    expect(output).toHaveLength(2)
    expect(output[0].id).toBe("1")
    expect(output[0].item.title).toBe("One")
  });
  test("process csv data with tab delimiter", async () => {
    const file = getFile('../test-csvs/test1.tsv')
    const output = await getCsvData(file, {
      delimiter: '\t',
    });
    expect(output).toHaveLength(6)
  });
});
|
<reponame>firatalcin/Patika-Java-Web-Development
import java.util.Scanner;
public class Main {

    /*
     * VAT (KDV) calculator.
     * Reads a price from the user and prints the VAT amount plus the
     * VAT-inclusive price.
     * Rule: prices from 0 to 1000 TL (inclusive) use an 18% VAT rate;
     * prices above 1000 TL use an 8% rate.
     */
    public static void main(String[] args) {
        // try-with-resources closes the Scanner when main finishes.
        try (Scanner scanner = new Scanner(System.in)) {
            System.out.println("Fiyat bilgisi giriniz: ");
            double fiyat = scanner.nextDouble();

            // 18% up to and including 1000 TL, otherwise 8%.
            double kdvOrani = (fiyat >= 0 && fiyat <= 1000) ? 0.18 : 0.08;
            double kdvTutari = fiyat * kdvOrani;
            // Reuse the already-computed VAT amount instead of recomputing
            // fiyat * kdvOrani a second time.
            double kdvliTutar = fiyat + kdvTutari;

            System.out.println("KDV'siz Fiyat: " + fiyat);
            System.out.println("KDV'li Fiyat: " + kdvliTutar);
            System.out.println("KDV Tutarฤฑ: " + kdvTutari);
        }
    }
}
|
background.js
// Inject a "Change Color" button at the end of the page body; clicking it
// repaints the page background orange (#ff8e17).
let button = document.createElement("button");
button.innerHTML = "Change Color";
let body = document.getElementsByTagName("body")[0];
body.appendChild(button);
button.addEventListener ("click", function() {
  body.style.backgroundColor = '#ff8e17';
});
manifest.json
{
"manifest_version": 2,
"name": "Background Color Changer",
"description": "An extension that change background color of the page when clicked",
"version": "1.0",
"browser_action": {
"default_icon": "icon.png",
"default_popup": "popup.html"
},
"permissions": [
"activeTab"
]
}
popup.html
<!DOCTYPE html>
<html>
<head>
<script src="background.js"></script>
</head>
<body>
</body>
</html>
|
#include "WPlanner.h"
// Entry point: constructs the weekly-planner application over its data
// file and returns the application's run-loop exit status.
int main(){
    ict::WPlanner app("WPlanner.txt");
    return app.run();
}
|
import React, { useState } from 'react';
const CreateUserForm = () => {
const [name, setName] = useState('');
const [email, setEmail] = useState('');
const [password, setPassword] = useState('');
const handleSubmit = (e) => {
e.preventDefault();
// create user
};
return (
<form onSubmit={handleSubmit}>
<label>Name</label>
<input type="text" value={name} onChange={(e) => setName(e.target.value)} />
<label>Email</label>
<input type="text" value={email} onChange={(e) => setEmail(e.target.value)} />
<label>Password</label>
<input
type="password"
value={password}
onChange={(e) => setPassword(e.target.value)}
required
minLength={8}
/>
<button type="submit">Create</button>
</form>
);
};
export default CreateUserForm;
|
/* **** Notes
Compare addresses.
Remarks:
Refer at fn. cmpr.
*/
# define CAR
# include "../../../incl/config.h"
/*
 * Recursively counts how many one-byte increments move `si` up to `di`,
 * i.e. the pointer distance (di - si) when si < di.
 * Returns 0 when either pointer is NULL or when si >= di.
 * NOTE(review): recursion depth equals the distance between the pointers,
 * so widely separated addresses could exhaust the stack -- confirm callers
 * only pass nearby addresses.
 */
signed(__cdecl compare(signed char(*di),signed char(*si))) {
    /* **** CODE/TEXT */
    if(!di) return(0x00);
    if(!si) return(0x00);
    /* Step si one byte toward di; stop once it has caught up. */
    if(si<(di)) si++;
    else return(0x00);
    return(0x01+(compare(di,si)));
}
|
const mongoose = require('mongoose');

// Schema for a marketplace item listed by a user.
const itemSchema = new mongoose.Schema({
  // Owning user, stored as a reference into the User collection.
  user: { type: mongoose.Schema.Types.ObjectId, ref: 'User'},
  itemName: { type: String },
  price: { type: Number },
  description: { type: String },
})

module.exports = mongoose.model('Item', itemSchema);
|
angular.module('cms.shared').factory('shared.vimeoService', [
    '$http',
    '$q',
    'shared.errorService',
function (
    $http,
    $q,
    errorService
    ) {

    var service = {},
        serviceUrl = 'https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F';

    /* QUERIES */

    /**
     * Looks up oEmbed metadata for a vimeo video by its numeric id.
     * Resolves with the parsed video info object, or rejects with a
     * { title, message, response } error object.
     */
    service.getVideoInfo = function (id) {
        // Use $http rather than a hand-rolled XMLHttpRequest: the injected
        // $http was previously unused, and a raw XHR resolves $q promises
        // outside Angular's digest cycle so results did not propagate
        // until the next digest.
        return $http.get(serviceUrl + id)
            .then(function (response) {
                // $http parses the JSON payload for us.
                return response.data;
            }, function (response) {
                var isUnexpectedError = false;
                var errorMsg;

                switch (response.status) {
                    case 404:
                        errorMsg = "You aren't able to access the video because of privacy or permissions issues, or because the video is still transcoding.";
                        break;
                    case 403:
                        errorMsg = "Embed permissions are disabled for this video, so you can't embed it.";
                        break;
                    default:
                        isUnexpectedError = true;
                        errorMsg = "Something unexpected happened whilst connecting to the Vimeo API.";
                }

                var error = {
                    title: 'Vimeo API Error',
                    message: errorMsg,
                    response: response
                };

                // Only unexpected failures are surfaced globally; known API
                // errors are left for the caller to present.
                if (isUnexpectedError) {
                    errorService.raise(error);
                }

                return $q.reject(error);
            });
    }

    return service;
}]);
/**
 * Allows a user to search/select a vimeo video. By default this maps to the VimeoVideo c#
 * object and allows editing of the title/description field, but if you set the cms-model-type attribute
 * to be 'id' then this will map to a simple id field and will not allow editing of the title/description fields.
 */
angular.module('cms.shared').directive('cmsFormFieldVimeo', [
    '_',
    'shared.pluginModulePath',
    'shared.pluginContentPath',
    'shared.modalDialogService',
    'shared.stringUtilities',
    'baseFormFieldFactory',
function (
    _,
    modulePath,
    contentPath,
    modalDialogService,
    stringUtilities,
    baseFormFieldFactory) {

    /* VARS */

    // Placeholder artwork shown when no video is selected.
    var assetReplacementPath = contentPath + 'img/AssetReplacement/',
        noImagePath = assetReplacementPath + 'vimeo-replacement.png',
        baseConfig = baseFormFieldFactory.defaultConfig;

    /* CONFIG */

    var config = {
        templateUrl: modulePath + 'UIComponents/FormFieldVimeo.html',
        scope: _.extend(baseFormFieldFactory.defaultConfig.scope, {
            modelType: '@cmsModelType'
        }),
        // Attributes copied straight through to the inner input.
        passThroughAttributes: [
            'required'
        ],
        link: link
    };

    return baseFormFieldFactory.create(config);

    /* LINK */

    function link(scope, el, attributes, controllers) {
        var vm = scope.vm,
            isRequired = _.has(attributes, 'required');

        init();
        return baseConfig.link(scope, el, attributes, controllers);

        /* INIT */

        function init() {
            vm.showPicker = showPicker;
            vm.remove = remove;
            // A required field must always hold a value, so "remove" is only
            // offered when the field is optional and currently populated.
            vm.isRemovable = vm.model && !isRequired;

            setButtonText();
        }

        /* EVENTS */

        function remove() {
            setVideo(null);
        }

        // Opens the picker dialog; the dialog calls back into setVideo.
        function showPicker() {
            modalDialogService.show({
                templateUrl: modulePath + 'UIComponents/VimeoPickerDialog.html',
                controller: 'VimeoPickerDialogController',
                options: {
                    currentVideo: _.clone(vm.model),
                    modelType: vm.modelType,
                    onSelected: setVideo
                }
            });
        }

        /**
         * Initialise the state when the video is changed
         */
        function setVideo(video) {
            if (video) {
                vm.isRemovable = !isRequired;
                vm.model = video;
            } else {
                vm.isRemovable = false;

                if (vm.model) {
                    vm.model = null;
                }
            }

            setButtonText();
        }

        /* Helpers */

        function setButtonText() {
            vm.buttonText = vm.model ? 'Change' : 'Select';
        }
    }
}]);
/**
 * Directive that allows a user to enter a vimeo Url or Id which will be
 * verified and then used to get the video information from vimeo, which
 * is then passed to the optional cmsOnVideoSelected scope function.
 * Does not support non-edit mode since so far it's only used in the
 * VimeoPickerDialog.
 */
angular.module('cms.shared').directive('cmsFormFieldVimeoId', [
    '_',
    'shared.pluginModulePath',
    'shared.LoadState',
    'shared.vimeoService',
    'shared.validationErrorService',
    'baseFormFieldFactory',
function (
    _,
    modulePath,
    LoadState,
    vimeoService,
    validationErrorService,
    baseFormFieldFactory) {

    var config = {
        templateUrl: modulePath + 'UIComponents/FormFieldVimeoId.html',
        scope: _.extend(baseFormFieldFactory.defaultConfig.scope, {
            onVideoSelected: '&cmsOnVideoSelected'
        }),
        passThroughAttributes: [
            'required'
        ],
        link: link
    };

    return baseFormFieldFactory.create(config);

    /* LINK */

    function link(scope, el, attributes, controllers) {
        var vm = scope.vm,
            isRequired = _.has(attributes, 'required'),
            formController = controllers[0];

        init();
        return baseFormFieldFactory.defaultConfig.link(scope, el, attributes, controllers);

        /* INIT */

        function init() {
            vm.setEditing = toggleEditing.bind(null, true);
            vm.updateVideoId = updateVideoId;
            vm.cancelEditing = cancelEditing;
            vm.updateIdLoadState = new LoadState();

            // Start in edit mode whenever there is no current video id.
            scope.$watch('vm.model', function (newValue) {
                toggleEditing(!newValue);
            });
        }

        /* ACTIONS */

        // Validates the typed url/id, resolves it against the vimeo API,
        // and updates the model when a video is found.
        function updateVideoId() {
            var inputId = vm.idOrUrlInput,
                videoId = parseVideoId(inputId);

            if (!inputId) {
                // Cleared input: clear the model too.
                vm.model = null;
                triggerOnVideoSelected(null);
            } else if (inputId && !videoId) {
                addError('The url/id is invalid');
            }
            else if (!videoId || videoId == vm.model) {
                // Unchanged id: nothing to look up.
                cancelEditing();
            } else {
                vm.updateIdLoadState.on();

                vimeoService
                    .getVideoInfo(videoId)
                    .then(onInfoLoaded)
                    .catch(onFail)
                    .finally(vm.updateIdLoadState.off);
            }

            function onFail(response) {
                addError(response.message);
            }

            function onInfoLoaded(info) {
                if (info) {
                    vm.model = vm.idOrUrlInput = info.video_id;
                    triggerOnVideoSelected(info);
                } else {
                    addError('Video not found');
                }
            }

            function triggerOnVideoSelected(info) {
                if (vm.onVideoSelected) vm.onVideoSelected({ model: info })
            }

            // Routes validation failures through the shared error service so
            // they display like any other form validation error.
            function addError(message) {
                validationErrorService.raise([{
                    properties: [vm.modelName],
                    message: message
                }]);
            }
        }

        function cancelEditing() {
            vm.idOrUrlInput = vm.model;
            vm.onChange();
            toggleEditing(false);
        }

        /* Helpers */

        function toggleEditing(isEditing) {
            vm.isEditing = isEditing;
        }

        // Accepts either a bare numeric id or any vimeo.com url form
        // (plain, channel, or group video urls); returns the numeric id.
        function parseVideoId(urlOrId) {
            var urlRegex = /^.*(vimeo\.com\/)((channels\/[A-z]+\/)|(groups\/[A-z]+\/videos\/))?([0-9]+)/,
                matches;

            if (!urlOrId) return;

            if (/^\d+$/.test(urlOrId)) {
                return urlOrId;
            }

            matches = urlRegex.exec(urlOrId);
            return matches && matches[5];
        }
    }
}]);
// Dialog controller backing VimeoPickerDialog.html: lets the user pick a
// video and hands the selection back via options.onSelected. When the
// model type is 'id', only the numeric id is exchanged with the caller.
angular.module('cms.shared').controller('VimeoPickerDialogController', [
    '$scope',
    'shared.LoadState',
    'shared.stringUtilities',
    'shared.vimeoService',
    'options',
    'close',
function (
    $scope,
    LoadState,
    stringUtilities,
    vimeoService,
    options,
    close) {

    var vm = $scope;
    init();

    /* INIT */

    function init() {
        vm.onOk = onOk;
        vm.onCancel = onCancel;
        vm.close = onCancel;
        vm.onVideoSelected = onVideoSelected;
        vm.isModelId = options.modelType === 'id';
        vm.loadState = new LoadState();

        if (vm.isModelId && options.currentVideo) {
            // Id-only callers pass a bare id, so the full video info has to
            // be fetched before the dialog can display it.
            vm.loadState.on();
            vimeoService
                .getVideoInfo(options.currentVideo)
                .then(onVideoSelected)
                .finally(vm.loadState.off);
        } else {
            vm.model = options.currentVideo;
        }
    }

    /* ACTIONS */

    // Maps the raw vimeo oEmbed payload onto the dialog's model shape.
    function onVideoSelected(model) {
        if (model) {
            vm.model = {
                id: model.video_id,
                title: model.title,
                description: stringUtilities.stripTags(model.description),
                width: model.width,
                height: model.height,
                uploadDate: model.upload_date,
                duration: model.duration,
                thumbnailUrl: model.thumbnail_url,
                thumbnailWidth: model.thumbnail_width,
                thumbnailHeight: model.thumbnail_height
            };
        } else {
            vm.model = null;
        }
    }

    function onCancel() {
        close();
    }

    function onOk() {
        // Id-mode callers receive only the numeric id; otherwise the whole
        // mapped model object is handed back.
        if (vm.model && vm.isModelId) {
            options.onSelected(vm.model.id);
        } else {
            options.onSelected(vm.model);
        }
        close();
    }
}]);
/**
 * Displays a vimeo video preview. Model can be an object with an id or the video id itself.
 */
angular.module('cms.shared').directive('cmsVimeoVideo', [
    '$sce',
    'shared.pluginModulePath',
    'shared.pluginContentPath',
    'shared.urlLibrary',
function (
    $sce,
    modulePath,
    contentPath,
    urlLibrary) {

    return {
        restrict: 'E',
        scope: {
            model: '=cmsModel'
        },
        templateUrl: modulePath + 'UIComponents/VimeoVideo.html',
        link: function (scope, el, attributes) {

            // Placeholder image shown while no video is bound.
            scope.replacementUrl = contentPath + 'img/AssetReplacement/vimeo-replacement.png';

            scope.$watch('model', function (model) {
                var id;

                if (model) {
                    // Accept either a video object ({ id: ... }) or a bare id.
                    id = model.id || model;
                    // trustAsResourceUrl allows the player url in an iframe src.
                    scope.videoUrl = $sce.trustAsResourceUrl('//player.vimeo.com/video/' + id)
                } else {
                    scope.videoUrl = null;
                }
            });
        }
    };
}]);
|
#!/bin/sh
# Container entrypoint: import GPG keys, register the scheduled backup
# job, and keep the container alive.

# Import GPG public keys
echo "Importing GPG public keys..."
gpg --import /keys/*

# Create and install crontab file
echo "Installing crontab..."
# Overwrite (not append): appending duplicated the cron entry every time
# the entrypoint re-ran against a persistent filesystem.
echo "$CRON_INTERVAL /backup.sh" > /backup.cron

echo "Launching crontab..."
crontab /backup.cron

# Block forever so the container's PID 1 stays up.
tail -f /dev/null
|
#!/usr/bin/env bash
#####################################################
# Updated by Afiniel for Yiimpool use...
#####################################################
# Builds a coin daemon from a github repository, installs the resulting
# binaries into /usr/bin, and restarts the wallet with a reindex.

source /etc/functions.sh
source /etc/yiimpool.conf
source $HOME/yiimpool/daemon_builder/.my.cnf
cd $HOME/yiimpool/daemon_builder

# Set what we need
now=$(date +"%m_%d_%Y")
set -e
NPROC=$(nproc)

# Bug fix: the existence tests below previously used single quotes around
# '$STORAGE_ROOT/...', which prevents parameter expansion, so the literal
# path was tested and the check always took the "missing" branch. This one
# also pointed at coin_builder while the mkdir used daemon_builder.
if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds" ]]; then
    sudo mkdir -p $STORAGE_ROOT/daemon_builder/temp_coin_builds
else
    echo "temp_coin_builds already exists.... Skipping"
fi

# Just double checking folder permissions
sudo setfacl -m u:$USER:rwx $STORAGE_ROOT/daemon_builder/temp_coin_builds
cd $STORAGE_ROOT/daemon_builder/temp_coin_builds

# Kill the old coin and get the github info
read -r -e -p "Enter the name of the coin : " coin
read -r -e -p "Paste the github link for the coin : " git_hub
read -r -e -p "Do you need to use a specific github branch of the coin (y/n) : " branch_git_hub
if [[ ("$branch_git_hub" == "y" || "$branch_git_hub" == "Y" || "$branch_git_hub" == "yes" || "$branch_git_hub" == "Yes" || "$branch_git_hub" == "YES") ]]; then
    read -r -e -p "Please enter the branch name exactly as in github, i.e. v2.5.1 : " branch_git_hub_ver
fi
read -r -e -p "Enter the coind name as it is in yiimp, example bitcoind : " pkillcoin

# Per-run build directory so repeated builds of the same coin don't clash.
coindir=$coin$now

# save last coin information in case coin build fails
echo '
lastcoin='"${coindir}"'
' | sudo -E tee $STORAGE_ROOT/daemon_builder/temp_coin_builds/.lastcoin.conf >/dev/null 2>&1

# Clone the coin
if [[ ! -e $coindir ]]; then
    git clone $git_hub $coindir
    cd "${coindir}"
    if [[ ("$branch_git_hub" == "y" || "$branch_git_hub" == "Y" || "$branch_git_hub" == "yes" || "$branch_git_hub" == "Yes" || "$branch_git_hub" == "YES") ]]; then
        git fetch
        git checkout "$branch_git_hub_ver"
    fi
else
    echo "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir} already exists.... Skipping"
    echo "If there was an error in the build use the build error options on the installer"
    exit 0
fi

# Build the coin under the proper configuration
if [[ ("$autogen" == "true") ]]; then
    if [[ ("$berkeley" == "4.8") ]]; then
        echo "Building using Berkeley 4.8..."
        basedir=$(pwd)
        sh autogen.sh
        # Quoting fix (was single-quoted, so the test always failed and the
        # chmod below was skipped even when the script existed).
        if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/share/genbuild.sh" ]]; then
            echo "genbuild.sh not found skipping"
        else
            sudo chmod 777 $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/share/genbuild.sh
        fi
        if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/leveldb/build_detect_platform" ]]; then
            echo "build_detect_platform not found skipping"
        else
            sudo chmod 777 $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/leveldb/build_detect_platform
        fi
        ./configure CPPFLAGS="-I${STORAGE_ROOT}/berkeley/db4/include -O2" LDFLAGS="-L${STORAGE_ROOT}/berkeley/db4/lib" --without-gui --disable-tests
    else
        echo "Building using Berkeley 5.1..."
        basedir=$(pwd)
        sh autogen.sh
        if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/share/genbuild.sh" ]]; then
            echo "genbuild.sh not found skipping"
        else
            sudo chmod 777 $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/share/genbuild.sh
        fi
        if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/leveldb/build_detect_platform" ]]; then
            echo "build_detect_platform not found skipping"
        else
            sudo chmod 777 $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/leveldb/build_detect_platform
        fi
        ./configure CPPFLAGS="-I${STORAGE_ROOT}/berkeley/db5/include -O2" LDFLAGS="-L${STORAGE_ROOT}/berkeley/db5/lib" --without-gui --disable-tests
    fi
    make -j$(nproc)
else
    echo "Building using makefile.unix method..."
    cd $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src
    if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/obj" ]]; then
        mkdir -p $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/obj
    else
        echo "Hey the developer did his job and the src/obj dir is there!"
    fi
    if [[ ! -e "$STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/obj/zerocoin" ]]; then
        mkdir -p $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/obj/zerocoin
    else
        echo "Wow even the /src/obj/zerocoin is there! Good job developer!"
    fi
    cd $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/leveldb
    sudo chmod +x build_detect_platform
    sudo make clean
    sudo make libleveldb.a libmemenv.a
    cd $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src
    sed -i '/USE_UPNP:=0/i BDB_LIB_PATH = /home/crypto-data/berkeley/db4/lib\nBDB_INCLUDE_PATH = /home/crypto-data/berkeley/db4/include\nOPENSSL_LIB_PATH = /home/crypto-data/openssl/lib\nOPENSSL_INCLUDE_PATH = /home/crypto-data/openssl/include' makefile.unix
    sed -i '/USE_UPNP:=1/i BDB_LIB_PATH = /home/crypto-data/berkeley/db4/lib\nBDB_INCLUDE_PATH = /home/crypto-data/berkeley/db4/include\nOPENSSL_LIB_PATH = /home/crypto-data/openssl/lib\nOPENSSL_INCLUDE_PATH = /home/crypto-data/openssl/include' makefile.unix
    make -j$NPROC -f makefile.unix USE_UPNP=-
fi
clear

# LS the SRC dir to have user input bitcoind and bitcoin-cli names
cd $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/
find . -maxdepth 1 -type f \( -perm -1 -o \( -perm -10 -o -perm -100 \) \) -printf "%f\n"
read -r -e -p "Please enter the coind name from the directory above, example bitcoind :" coind
read -r -e -p "Is there a coin-cli, example bitcoin-cli [y/N] :" ifcoincli
if [[ ("$ifcoincli" == "y" || "$ifcoincli" == "Y") ]]; then
    read -r -e -p "Please enter the coin-cli name :" coincli
fi
clear

# Strip and copy to /usr/bin
sudo pkill -9 ${pkillcoin}
sudo strip $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/${coind}
sudo cp $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/${coind} /usr/bin
if [[ ("$ifcoincli" == "y" || "$ifcoincli" == "Y") ]]; then
    sudo strip $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/${coincli}
    sudo cp $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}/src/${coincli} /usr/bin
fi

# Have user verify con.conf file and start coin
echo "I am now going to open nano, please verify if there any changes that are needed such as adding or removing addnodes."
read -n 1 -s -r -p "Press any key to continue"
sudo nano $STORAGE_ROOT/wallets/."${coind::-1}"/${coind::-1}.conf
clear

cd $HOME/yiimpool/daemon_builder
echo "Starting ${coind::-1}"
"${coind}" -datadir=$STORAGE_ROOT/wallets/."${coind::-1}" -conf="${coind::-1}.conf" -daemon -shrinkdebugfile -reindex

# If we made it this far everything built fine removing last coin.conf and build directory
sudo rm -r $STORAGE_ROOT/daemon_builder/temp_coin_builds/.lastcoin.conf
sudo rm -r $STORAGE_ROOT/daemon_builder/temp_coin_builds/${coindir}
sudo rm -r $HOME/yiimpool/daemon_builder/.my.cnf
clear
echo "Upgrade of ${coind::-1} is completed and running. The blockchain is being reindexed, it could be several minutes before you can connect to your coin."
echo Type daemonbuilder at anytime to install a new coin!
|
#: @hide_from_man_page
#: * `vendor-install` [<target>]
#:
#: Install Homebrew's portable Ruby.

# HOMEBREW_CURLRC, HOMEBREW_LIBRARY, HOMEBREW_STDERR is from the user environment
# HOMEBREW_CACHE, HOMEBREW_CURL, HOMEBREW_LINUX, HOMEBREW_LINUX_MINIMUM_GLIBC_VERSION, HOMEBREW_MACOS,
# HOMEBREW_MACOS_VERSION_NUMERIC and HOMEBREW_PROCESSOR are set by brew.sh
# shellcheck disable=SC2154
source "${HOMEBREW_LIBRARY}/Homebrew/utils/lock.sh"

VENDOR_DIR="${HOMEBREW_LIBRARY}/Homebrew/vendor"

# Built from https://github.com/Homebrew/homebrew-portable-ruby.
# Pick the bottle filename/sha for this OS + processor; platforms without
# a match leave ruby_FILENAME unset and no download URLs are configured.
if [[ -n "${HOMEBREW_MACOS}" ]]
then
  if [[ "${HOMEBREW_PROCESSOR}" == "Intel" ]]
  then
    ruby_FILENAME="portable-ruby-2.6.8.yosemite.bottle.tar.gz"
    ruby_SHA="0cb1cc7af109437fe0e020c9f3b7b95c3c709b140bde9f991ad2c1433496dd42"
  fi
elif [[ -n "${HOMEBREW_LINUX}" ]]
then
  case "${HOMEBREW_PROCESSOR}" in
    x86_64)
      ruby_FILENAME="portable-ruby-2.6.8.x86_64_linux.bottle.tar.gz"
      ruby_SHA="cd7fffb18ef9338baa670fc5e8fce99b0e3cc0f0fd7627bcbb56f3c8d54161d4"
      ;;
    *) ;;
  esac
fi

# Dynamic variables can't be detected by shellcheck
# shellcheck disable=SC2034
if [[ -n "${ruby_SHA}" && -n "${ruby_FILENAME}" ]]
then
  # Download mirrors in priority order: user-configured artifact/bottle
  # domains first, then the default GitHub-hosted locations.
  ruby_URLs=()
  if [[ -n "${HOMEBREW_ARTIFACT_DOMAIN}" ]]
  then
    ruby_URLs+=("${HOMEBREW_ARTIFACT_DOMAIN}/v2/homebrew/portable-ruby/portable-ruby/blobs/sha256:${ruby_SHA}")
  fi
  if [[ -n "${HOMEBREW_BOTTLE_DOMAIN}" ]]
  then
    ruby_URLs+=("${HOMEBREW_BOTTLE_DOMAIN}/bottles-portable-ruby/${ruby_FILENAME}")
  fi
  ruby_URLs+=(
    "https://ghcr.io/v2/homebrew/portable-ruby/portable-ruby/blobs/sha256:${ruby_SHA}"
    "https://github.com/Homebrew/homebrew-portable-ruby/releases/download/2.6.8/${ruby_FILENAME}"
  )
  ruby_URL="${ruby_URLs[0]}"
fi
# Ensure the host's glibc is new enough for Homebrew's vendored binaries.
# No-op on macOS (HOMEBREW_LINUX unset) or when no minimum is configured.
check_linux_glibc_version() {
  if [[ -z "${HOMEBREW_LINUX}" || -z "${HOMEBREW_LINUX_MINIMUM_GLIBC_VERSION}" ]]
  then
    return 0
  fi

  local glibc_version
  local glibc_version_major
  local glibc_version_minor

  local minimum_required_major="${HOMEBREW_LINUX_MINIMUM_GLIBC_VERSION%.*}"
  local minimum_required_minor="${HOMEBREW_LINUX_MINIMUM_GLIBC_VERSION#*.}"

  if [[ "$(/usr/bin/ldd --version)" =~ \ [0-9]\.[0-9]+ ]]
  then
    glibc_version="${BASH_REMATCH[0]// /}"
    glibc_version_major="${glibc_version%.*}"
    glibc_version_minor="${glibc_version#*.}"
    # Compare (major, minor) lexicographically. The previous expression
    # `major < min_major || minor < min_minor` wrongly rejected versions
    # like 3.0 against a 2.13 minimum: the minor component may only be
    # compared when the majors are equal.
    if ((glibc_version_major < minimum_required_major ||
         (glibc_version_major == minimum_required_major &&
          glibc_version_minor < minimum_required_minor)))
    then
      odie "Vendored tools require system Glibc ${HOMEBREW_LINUX_MINIMUM_GLIBC_VERSION} or later (yours is ${glibc_version})."
    fi
  else
    odie "Failed to detect system Glibc version."
  fi
}
# Execute the specified command, and suppress stderr unless HOMEBREW_STDERR is set.
quiet_stderr() {
  if [[ -n "${HOMEBREW_STDERR}" ]]
  then
    # Debugging aid: let the command's stderr through untouched.
    command "$@"
  else
    command "$@" 2>/dev/null
  fi
}
# Download the vendored archive described by the VENDOR_* globals
# (VENDOR_URL, VENDOR_URLs, VENDOR_NAME, VENDOR_SHA, CACHED_LOCATION set by
# homebrew-vendor-install) into HOMEBREW_CACHE, then verify its SHA-256.
# Tries each URL in turn, resuming partial downloads where possible.
fetch() {
  local -a curl_args
  local url
  local sha
  local first_try=1
  local vendor_locations
  local temporary_path

  curl_args=()
  # do not load .curlrc unless requested (must be the first argument)
  # HOMEBREW_CURLRC isn't misspelt here
  # shellcheck disable=SC2153
  if [[ -z "${HOMEBREW_CURLRC}" ]]
  then
    curl_args[${#curl_args[*]}]="-q"
  fi

  # Authorization is needed for GitHub Packages but harmless on GitHub Releases
  curl_args+=(
    --fail
    --remote-time
    --location
    --user-agent "${HOMEBREW_USER_AGENT_CURL}"
    --header "Authorization: Bearer QQ=="
  )

  # Progress display: silent when quiet, a bar unless verbose.
  if [[ -n "${HOMEBREW_QUIET}" ]]
  then
    curl_args[${#curl_args[*]}]="--silent"
  elif [[ -z "${HOMEBREW_VERBOSE}" ]]
  then
    curl_args[${#curl_args[*]}]="--progress-bar"
  fi

  # Mac OS X < 10.6 lacks modern TLS trust stores; fall back to --insecure.
  if [[ "${HOMEBREW_MACOS_VERSION_NUMERIC}" -lt "100600" ]]
  then
    curl_args[${#curl_args[*]}]="--insecure"
  fi

  # Download to a .incomplete file and only rename once fully fetched.
  temporary_path="${CACHED_LOCATION}.incomplete"

  mkdir -p "${HOMEBREW_CACHE}"
  [[ -n "${HOMEBREW_QUIET}" ]] || ohai "Downloading ${VENDOR_URL}" >&2
  if [[ -f "${CACHED_LOCATION}" ]]
  then
    [[ -n "${HOMEBREW_QUIET}" ]] || echo "Already downloaded: ${CACHED_LOCATION}" >&2
  else
    # Try each mirror until one produces a file.
    for url in "${VENDOR_URLs[@]}"
    do
      # Only announce fallback URLs; the primary one was announced above.
      [[ -n "${HOMEBREW_QUIET}" || -n "${first_try}" ]] || ohai "Downloading ${url}" >&2
      first_try=''
      if [[ -f "${temporary_path}" ]]
      then
        # HOMEBREW_CURL is set by brew.sh (and isn't mispelt here)
        # shellcheck disable=SC2153
        "${HOMEBREW_CURL}" "${curl_args[@]}" -C - "${url}" -o "${temporary_path}"
        # curl exit 33: server does not support resume; retry from scratch.
        if [[ $? -eq 33 ]]
        then
          [[ -n "${HOMEBREW_QUIET}" ]] || echo "Trying a full download" >&2
          rm -f "${temporary_path}"
          "${HOMEBREW_CURL}" "${curl_args[@]}" "${url}" -o "${temporary_path}"
        fi
      else
        "${HOMEBREW_CURL}" "${curl_args[@]}" "${url}" -o "${temporary_path}"
      fi
      [[ -f "${temporary_path}" ]] && break
    done

    if [[ ! -f "${temporary_path}" ]]
    then
      vendor_locations="$(printf " - %s\n" "${VENDOR_URLs[@]}")"
      odie <<EOS
Failed to download ${VENDOR_NAME} from the following locations:
${vendor_locations}

Do not file an issue on GitHub about this; you will need to figure out for
yourself what issue with your internet connection restricts your access to
GitHub (used for Homebrew updates and binary packages).
EOS
    fi

    # Don't let Ctrl-C interrupt the rename and leave a bad cache entry.
    trap '' SIGINT
    mv "${temporary_path}" "${CACHED_LOCATION}"
    trap - SIGINT
  fi

  # Compute the SHA-256 with whatever tool is available.
  if [[ -x "/usr/bin/shasum" ]]
  then
    sha="$(/usr/bin/shasum -a 256 "${CACHED_LOCATION}" | cut -d' ' -f1)"
  elif [[ -x "$(type -P sha256sum)" ]]
  then
    sha="$(sha256sum "${CACHED_LOCATION}" | cut -d' ' -f1)"
  elif [[ -x "$(type -P ruby)" ]]
  then
    # Last resort: any system ruby can hash the file for us.
    sha="$(
      ruby <<EOSCRIPT
require 'digest/sha2'
digest = Digest::SHA256.new
File.open('${CACHED_LOCATION}', 'rb') { |f| digest.update(f.read) }
puts digest.hexdigest
EOSCRIPT
    )"
  else
    odie "Cannot verify checksum ('shasum' or 'sha256sum' not found)!"
  fi

  if [[ "${sha}" != "${VENDOR_SHA}" ]]
  then
    odie <<EOS
Checksum mismatch.
Expected: ${VENDOR_SHA}
  Actual: ${sha}
 Archive: ${CACHED_LOCATION}
To retry an incomplete download, remove the file above.
EOS
  fi
}
# Unpack the downloaded archive into VENDOR_DIR/portable-NAME, smoke-test
# the new binary, and atomically switch the "current" symlink to it.
# Rolls back to the previous install when the new binary doesn't run.
install() {
  local tar_args
  if [[ -n "${HOMEBREW_VERBOSE}" ]]
  then
    tar_args="xvzf"
  else
    tar_args="xzf"
  fi

  mkdir -p "${VENDOR_DIR}/portable-${VENDOR_NAME}"
  safe_cd "${VENDOR_DIR}/portable-${VENDOR_NAME}"

  # Block Ctrl-C for the whole move/extract/verify sequence so we can't be
  # left in a half-installed state.
  trap '' SIGINT

  # Preserve any existing install until the replacement is verified.
  if [[ -d "${VENDOR_VERSION}" ]]
  then
    mv "${VENDOR_VERSION}" "${VENDOR_VERSION}.reinstall"
  fi

  safe_cd "${VENDOR_DIR}"
  [[ -n "${HOMEBREW_QUIET}" ]] || ohai "Pouring ${VENDOR_FILENAME}" >&2
  tar "${tar_args}" "${CACHED_LOCATION}"
  safe_cd "${VENDOR_DIR}/portable-${VENDOR_NAME}"

  # Smoke-test: the unpacked binary must at least report its version.
  if quiet_stderr "./${VENDOR_VERSION}/bin/${VENDOR_NAME}" --version >/dev/null
  then
    ln -sfn "${VENDOR_VERSION}" current
    if [[ -d "${VENDOR_VERSION}.reinstall" ]]
    then
      rm -rf "${VENDOR_VERSION}.reinstall"
    fi
  else
    # Roll back: remove the broken extract and restore the saved install.
    rm -rf "${VENDOR_VERSION}"
    if [[ -d "${VENDOR_VERSION}.reinstall" ]]
    then
      mv "${VENDOR_VERSION}.reinstall" "${VENDOR_VERSION}"
    fi
    odie "Failed to install ${VENDOR_NAME} ${VENDOR_VERSION}!"
  fi
  trap - SIGINT
}
# Entry point for `brew vendor-install NAME`: parses flags, resolves the
# per-vendor NAME_FILENAME/NAME_SHA/NAME_URL(s) variables defined at the
# top of this file, then fetches, verifies and installs the archive.
homebrew-vendor-install() {
  local option
  local url_var
  local sha_var
  # Bug fix: filename_var was assigned below without being declared,
  # leaking it into the global scope; declare it local like its siblings.
  local filename_var

  for option in "$@"
  do
    case "${option}" in
      -\? | -h | --help | --usage)
        brew help vendor-install
        exit $?
        ;;
      --verbose) HOMEBREW_VERBOSE=1 ;;
      --quiet) HOMEBREW_QUIET=1 ;;
      --debug) HOMEBREW_DEBUG=1 ;;
      --*) ;;
      -*)
        # Bundled short flags, e.g. -vd.
        [[ "${option}" == *v* ]] && HOMEBREW_VERBOSE=1
        [[ "${option}" == *q* ]] && HOMEBREW_QUIET=1
        [[ "${option}" == *d* ]] && HOMEBREW_DEBUG=1
        ;;
      *)
        [[ -n "${VENDOR_NAME}" ]] && odie "This command does not take multiple vendor targets!"
        VENDOR_NAME="${option}"
        ;;
    esac
  done

  [[ -z "${VENDOR_NAME}" ]] && odie "This command requires a vendor target!"
  [[ -n "${HOMEBREW_DEBUG}" ]] && set -x
  check_linux_glibc_version

  # Indirectly expand e.g. ruby_FILENAME / ruby_SHA / ruby_URL for
  # VENDOR_NAME=ruby.
  filename_var="${VENDOR_NAME}_FILENAME"
  sha_var="${VENDOR_NAME}_SHA"
  url_var="${VENDOR_NAME}_URL"
  VENDOR_FILENAME="${!filename_var}"
  VENDOR_SHA="${!sha_var}"
  VENDOR_URL="${!url_var}"
  VENDOR_VERSION="$(cat "${VENDOR_DIR}/portable-${VENDOR_NAME}-version")"

  if [[ -z "${VENDOR_URL}" || -z "${VENDOR_SHA}" ]]
  then
    odie "No Homebrew ${VENDOR_NAME} ${VENDOR_VERSION} available for ${HOMEBREW_PROCESSOR} processors!"
  fi

  # Expand the name to an array of variables
  # The array name must be "${VENDOR_NAME}_URLs"! Otherwise substitution errors will occur!
  # shellcheck disable=SC2086
  read -r -a VENDOR_URLs <<<"$(eval "echo "\$\{${url_var}s[@]\}"")"

  CACHED_LOCATION="${HOMEBREW_CACHE}/${VENDOR_FILENAME}"

  lock "vendor-install-${VENDOR_NAME}"
  fetch
  install
}
|
/**
 * Linear search demo.
 */
public class Search {
    /**
     * Scans {@code arr} from front to back for {@code value}.
     *
     * @param arr   the array to search
     * @param value the value to look for
     * @return the index of the first occurrence, or -1 if absent
     */
    public static int searchValue(int[] arr, int value) {
        int index = 0;
        while (index < arr.length) {
            if (arr[index] == value) {
                return index;
            }
            index++;
        }
        return -1;
    }

    /** Small demonstration of {@link #searchValue(int[], int)}. */
    public static void main(String[] args) {
        final int[] arr = {1, 2, 3, 7, 8, 9};
        final int value = 7;
        final int result = searchValue(arr, value);
        if (result == -1) {
            System.out.println("Value not found");
        } else {
            System.out.println("Value found at index " + result);
        }
    }
}
|
import React, { useState } from 'react';
import {
Button,
TextField,
Typography,
LinearProgress,
} from '@material-ui/core';
import { useStaticQuery, graphql } from 'gatsby';
import emailjs from 'emailjs-com';
import SendIcon from '@material-ui/icons/Send';
import Default from '../../layouts/Default';
import SEO from '../seo';
import './ContactPage.css';
const ContactPage = () => {
const [formData, setFormData] = useState({});
const onFormChange = (property) => ({ target: { value } }) => {
setFormData({
...formData,
[property]: value,
});
};
const isSubmitEnabled = () => {
if (!formData.name || !formData.name.trim().length) {
return false;
}
if (!formData.email || !formData.email.trim().length) {
return false;
}
if (!formData.phone || !formData.phone.trim().length) {
return false;
}
if (!formData.body || !formData.body.trim().length) {
return false;
}
return true;
};
const onSubmit = async (event) => {
event.preventDefault();
const now = new Date();
setFormData({
...formData,
isLoading: true,
});
if (isSubmitEnabled()) {
const data = {
...formData,
date: `${now.getDate()}/${now.getMonth()}/${now.getFullYear()}`,
time: `${now.getHours()}:${now.getMinutes()}:${now.getSeconds()}`,
};
try {
await emailjs.send(
'bitlogic',
'basetemplate',
data,
'user_qKgUsk9KEtGfO2BIQapOO'
);
setFormData({
...formData,
sent: true,
isLoading: false,
});
} catch (e) {
console.error(e);
setFormData({
...formData,
sent: false,
isLoading: false,
});
}
}
};
const {
allContactJson: { nodes },
} = useStaticQuery(graphql`
query ContactPage {
allContactJson {
nodes {
title
description
section
}
}
}
`);
if (!nodes || !nodes.length) {
return null;
}
const contactPageData = nodes[0];
return (
<Default className="ContactPage ContactPage__Container">
<SEO title="Contact" />
<h1 className="ContactPage__Title">{contactPageData.section}</h1>
<div className="ContactPage__Content">
<div className="ContactPage__FormContainer">
<div className="ContactPage__LeftContent">
<h2 className="ContactPage__Subtitle">{contactPageData.title}</h2>
<div className="ContactPage__Description">
{contactPageData.description}
</div>
</div>
<div className="ContactPage__RightContent">
<form onSubmit={onSubmit}>
<TextField
variant="outlined"
placeholder="Nombre y Apellido"
className="ContactPage__Input"
value={formData.name || ''}
onChange={onFormChange('name')}
required
/>
<TextField
variant="outlined"
placeholder="Telรฉfono"
className="ContactPage__Input"
value={formData.phone || ''}
onChange={onFormChange('phone')}
/>
<TextField
variant="outlined"
placeholder="Correo Electrรณnico"
className="ContactPage__Input"
value={formData.email || ''}
onChange={onFormChange('email')}
/>
<TextField
variant="outlined"
placeholder="Consulta"
multiline={true}
rows={5}
className="ContactPage__Input"
value={formData.body || ''}
onChange={onFormChange('body')}
/>
<div
className={`ContactPage__FormContainer__CTAContainer ${
formData.isLoading
? 'ContactPage__FormContainer__CTAContainer--loading'
: ''
}`}
>
<LinearProgress
color="primary"
className="ContactPage__FormContainer__CTAContainer__SubmitProgress"
/>
{!formData.sent && (
<Button
className="ContactPage__FormContainer__CTAContainer__SubmitButton"
disabled={!isSubmitEnabled()}
type="submit"
variant="contained"
color="secondary"
endIcon={<SendIcon />}
>
Enviar
</Button>
)}
{formData.sent && (
<Typography variant="body1">
Tu mensaje ha sido enviado con รฉxito. ยกGracias!
</Typography>
)}
</div>
</form>
</div>
</div>
</div>
</Default>
);
};
export default ContactPage;
|
<reponame>dawmlight/vendor_oh_fun
#!/usr/bin/env python
# coding: utf-8
#
# Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import platform
import shutil
import subprocess
import sys
import time
import base64
SOURCE_ROOT = "../../"
CMAKE_GEN_PATH = "cmake-build-debug"
WORK_DIR = ""
HCGEN = ""
TEMP_DIR = 'temp'
ERROR_COLOR_PREFIX = "\033[31m"
ERROR_COLOR_END = "\033[0m"
SHOULD_CLEAN_TEMP = True
PERFORMANCE_MAX_COMPILE_TIME_MS = 1000
def text_file_compare(file_a, file_target):
    """Compare two text files, ignoring CRLF/LF line-ending differences.

    Returns True when the golden file ``file_target`` does not exist
    (a missing golden means there is nothing to compare against).
    """
    if not os.path.exists(file_target):
        return True
    with open(file_a, 'r') as f_a:
        with open(file_target, 'r') as f_b:
            # Bug fix: the previous code used raw strings (r'\r\n'), which
            # replaced the literal four characters backslash-r-backslash-n
            # rather than CRLF line endings.  Use real escape sequences.
            a_content = f_a.read().replace('\r\n', '\n')
            b_content = f_b.read().replace('\r\n', '\n')
            return a_content == b_content
def binary_file_compare(file_a, file_target, skip_size=0,
                        target_base64_encode=False):
    """Byte-compare two files, optionally skipping a leading header.

    Returns True when ``file_target`` (the golden file) is absent.  When
    ``target_base64_encode`` is set, the target's contents are base64
    decoded before comparison.  The first ``skip_size`` bytes of both
    sides are ignored.
    """
    if not os.path.exists(file_target):
        return True
    with open(file_a, 'rb') as handle:
        actual = handle.read()
    with open(file_target, 'rb') as handle:
        expected = handle.read()
    if target_base64_encode:
        expected = base64.b64decode(expected)
    return actual[skip_size:] == expected[skip_size:]
def setup_hcgen_compiler():
    """Locate the hc-gen compiler and store its absolute path in HCGEN.

    Preference order:
      1. an explicit executable path passed as the first CLI argument
         (must contain 'hc-gen' in its path),
      2. an executable at the source root,
      3. the cmake build output directory.

    Exits the process with status 1 when no executable is found.
    """
    global SOURCE_ROOT
    global CMAKE_GEN_PATH
    global HCGEN
    if len(sys.argv) > 1:
        hcgen_path = os.path.abspath(sys.argv[1])
        if hcgen_path.find('hc-gen') >= 0 and os.access(hcgen_path, os.X_OK):
            HCGEN = hcgen_path
            print('use specified hsc:' + hcgen_path)
            return
    source_root = '../../'
    compiler_name = "hc-gen"
    if platform.system() == "Windows":
        source_root = source_root.replace("/", "\\")
        compiler_name += ".exe"
    source_root = os.path.abspath(os.path.join(WORK_DIR, source_root))
    hcgen = os.path.join(source_root, compiler_name)
    if not os.access(hcgen, os.X_OK):
        hcgen = os.path.join(source_root, CMAKE_GEN_PATH, compiler_name)
        if not os.access(hcgen, os.X_OK):
            print("Error: hcgen not found, please make first")
            # Bug fix: use sys.exit() rather than the interactive-only
            # exit() helper injected by the site module.
            sys.exit(1)
    HCGEN = hcgen
def index_case(case_path):
    """Return the sorted names of all case directories under ``case_path``.

    Plain files are ignored; only immediate subdirectories count as cases.
    """
    return sorted(
        entry for entry in os.listdir(case_path)
        if os.path.isdir(os.path.join(case_path, entry))
    )
def get_golden_compile_result(mode, case_name):
    """Read the golden compile result for ``case_name`` in ``mode``.

    The golden file stores the expected exit status on its first line
    (after a fixed prefix) and the expected console output on the rest.

    Returns a (exit_status, stripped_console_output) tuple.
    """
    status_prefix = '[compile exit status]:'
    output_prefix = '[compile console output]:\n'
    golden_path = os.path.join(WORK_DIR, case_name,
                               'golden_%s_compile_result.txt' % mode)
    with open(golden_path, 'r') as golden_file:
        status_line = golden_file.readline()
        exit_status = int(status_line[len(status_prefix):])
        console_output = golden_file.read()[len(output_prefix):]
    return exit_status, console_output.strip()
def compile_status_to_str(status):
    """Map a truthy ``status`` to 'success' and a falsy one to 'failed'.

    Callers pass raw shell exit statuses; see test_compile for how the
    resulting labels are used.
    """
    return 'success' if status else 'failed'
def test_compile(case_name, mode):
    """Compile one case with hcgen in 'text' or 'binary' mode and compare
    both the exit status and the normalised console output against the
    golden result.  Returns True when both match.
    """
    output_dir = os.path.join(WORK_DIR, TEMP_DIR, case_name)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    output_file = os.path.join(output_dir, 'golden')
    source_file = os.path.join(WORK_DIR, case_name, 'case.hcs')
    temp_dir = os.path.join(WORK_DIR, TEMP_DIR)
    # -t selects text (C source) output; the default is binary .hcb.
    if mode == 'text':
        command = "%s -o %s -t %s" % (HCGEN, output_file, source_file)
    else:
        command = "%s -o %s %s" % (HCGEN, output_file, source_file)
    status, output = subprocess.getstatusoutput(command)
    golden_status, golden_output = get_golden_compile_result(mode, case_name)
    if bool(status) != bool(golden_status):
        # NOTE(review): compile_status_to_str labels a non-zero exit status
        # 'success'; combined with passing (status, golden_status) in this
        # order the printed sentence happens to read correctly -- confirm
        # before changing either piece in isolation.
        print("%s mode: case %s expect compile %s but %s" %
              (mode, case_name, compile_status_to_str(status),
               compile_status_to_str(golden_status)))
        print("Console output :\n" + output)
        return False
    # Normalise absolute paths to "." and strip ANSI colour markers so the
    # output is comparable with the machine-independent golden text.
    output = output.replace(temp_dir, ".").replace(WORK_DIR, "."). \
        replace('\\', '/').replace(ERROR_COLOR_PREFIX, ""). \
        replace(ERROR_COLOR_END, "")
    if output.strip() != golden_output:
        print("output is different with golden for %s compile:" % mode)
        print("EXPECT:\n" + golden_output)
        print("ACTUAL:\n" + output.strip())
        return False
    return True
def binary_code_compile(case_name):
    """Binary-mode check for one case: compile, diff the generated .hcb
    against the golden copy, enforce the timing budget, then verify the
    decompile round-trip.  Returns True when everything passes.
    """
    compile_result = test_compile(case_name, 'binary')
    if not compile_result:
        return False
    compile_start_time = get_current_time_ms()
    case_hcb = os.path.join(WORK_DIR, TEMP_DIR, case_name, 'golden.hcb')
    golden_hcb = os.path.join(WORK_DIR, case_name, 'golden.hcb')
    hcb_header_size = 20  # hcb compare skip hcb header
    # Goldens are stored base64-encoded, hence the final True.
    output_compare = \
        binary_file_compare(case_hcb, golden_hcb, hcb_header_size, True)
    if not output_compare:
        print('Error: hcb output mismatch with golden')
        return False
    compile_finish_time = get_current_time_ms()
    compile_used_time = compile_finish_time - compile_start_time
    # NOTE(review): this window only times the golden comparison above, not
    # the hcgen invocation inside test_compile -- confirm that is intended
    # before relying on this as a "compile time" threshold.
    if compile_used_time > PERFORMANCE_MAX_COMPILE_TIME_MS:
        print('Error: compile time %d, out of threshold %d ms'
              % (compile_used_time, PERFORMANCE_MAX_COMPILE_TIME_MS))
        return False
    decompile_result = test_decompile(case_name)
    return decompile_result
def test_text_code_compile(case_name):
    """Text-mode check for one case: compile, then diff the generated .c
    and .h files against the golden copies.  Both diffs run even if the
    first fails so that all mismatches get reported.
    """
    compile_result = test_compile(case_name, 'text')
    if not compile_result:
        return False
    case_c_file = os.path.join(WORK_DIR, TEMP_DIR, case_name, 'golden.c')
    golden_c_file = os.path.join(WORK_DIR, case_name, 'golden.c.gen')
    c_file_compare = text_file_compare(case_c_file, golden_c_file)
    if not c_file_compare:
        print("Error: The generated C file mismatch with golden")
    case_header_file = os.path.join(WORK_DIR, TEMP_DIR, case_name, 'golden.h')
    golden_header_file = os.path.join(WORK_DIR, case_name, 'golden.h.gen')
    header_file_compare = \
        text_file_compare(case_header_file, golden_header_file)
    if not header_file_compare:
        print("Error: The generated header file mismatch with golden")
    return c_file_compare and header_file_compare
def test_decompile(case_name):
    """Decompile the case's generated .hcb back to .hcs and compare with
    the golden decompile output.  Treated as a pass when the case ships
    no golden.d.hcs.
    """
    golden_decompile_file_name = \
        os.path.join(WORK_DIR, case_name, 'golden.d.hcs')
    if not os.path.exists(golden_decompile_file_name):
        return True
    output_dir = os.path.join(WORK_DIR, TEMP_DIR, case_name)
    output_file = os.path.join(output_dir, 'case.hcs')
    source_file = os.path.join(output_dir, 'golden.hcb')
    command = "%s -o %s -d %s" % (HCGEN, output_file, source_file)
    status, output = subprocess.getstatusoutput(command)
    if status != 0:
        print('decompile fail')
        print(output)
        return False
    # NOTE(review): the tool was asked for 'case.hcs' but the comparison
    # reads 'case.d.hcs' -- presumably hcgen inserts the '.d' infix for
    # decompile output; confirm against the hcgen implementation.
    decompile_golden_result = binary_file_compare(
        os.path.join(output_dir, 'case.d.hcs'), golden_decompile_file_name)
    if not decompile_golden_result:
        print('Error: case %s decompile hcs mismatch with golden' % case_name)
        return False
    return True
def get_current_time_ms():
    """Return the current wall-clock time as whole milliseconds."""
    return round(time.time() * 1000)
def test_cases(cases):
    """Run binary- and text-mode checks for every case and print a
    gtest-style summary.  When any case fails, SHOULD_CLEAN_TEMP is
    cleared so the temp output is kept for debugging.
    """
    global SHOULD_CLEAN_TEMP
    print('[==========] running %d cases form hcgen test' % len(cases))
    failed_cases = []
    test_start_time = get_current_time_ms()
    for case in cases:
        case_start_time = get_current_time_ms()
        print('[ RUN ] %s' % case)
        # Both modes always run so a text failure is reported even when
        # the binary mode already failed.
        binary_compile_result = binary_code_compile(case)
        text_compile_result = test_text_code_compile(case)
        case_finish_time = get_current_time_ms()
        used_time_str = ' (%d ms)' % (case_finish_time - case_start_time)
        if (not binary_compile_result) or (not text_compile_result):
            print('[ ERROR ] %s%s' % (case, used_time_str))
            failed_cases.append(case)
        else:
            print('[ OK ] %s%s' % (case, used_time_str))
    test_finish_time = get_current_time_ms()
    print('\n[==========] running %d case (%d ms)'
          % (len(cases), test_finish_time - test_start_time))
    print('[ PASSED ] %d cases' % (len(cases) - len(failed_cases)))
    if len(failed_cases) > 0:
        SHOULD_CLEAN_TEMP = False
        print('[ FAILED ] %d cases, list below:' % len(failed_cases))
        for case in failed_cases:
            print('[ FAILED ] %s' % case)
def setup_work_dir():
    """Point the WORK_DIR global at the directory containing this script."""
    global WORK_DIR
    script_path = os.path.abspath(sys.argv[0])
    WORK_DIR = script_path[:script_path.rfind(os.sep)]
def test_setup():
    """Create the temp output directory used by the cases, if missing."""
    target = os.path.join(WORK_DIR, TEMP_DIR)
    if not os.path.exists(target):
        os.mkdir(target)
def test_teardown():
    """Remove the temp directory, unless a failure asked for it to be kept."""
    global SHOULD_CLEAN_TEMP
    if SHOULD_CLEAN_TEMP:
        temp_path = os.path.join(WORK_DIR, TEMP_DIR)
        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)
def clean_up():
    """Delete any stale temp directory left over from a previous run."""
    stale = os.path.join(WORK_DIR, TEMP_DIR)
    if os.path.exists(stale):
        shutil.rmtree(stale)
if __name__ == "__main__":
    # Resolve paths, start from a clean temp dir, locate the compiler,
    # then run every case directory found next to this script.
    setup_work_dir()
    clean_up()
    setup_hcgen_compiler()
    print("hcgen path : " + HCGEN)
    cases_list = index_case(WORK_DIR)
    test_setup()
    test_cases(cases_list)
    test_teardown()
|
#! /bin/bash
# Bash completion for a urfave/cli-style program: the program itself is
# invoked with --generate-bash-completion to produce candidate words.
# PROG defaults to this script's basename unless set by the caller.
: ${PROG:=$(basename ${BASH_SOURCE})}

_cli_bash_autocomplete() {
  # Skip completion entirely when the first word is "source".
  if [[ "${COMP_WORDS[0]}" != "source" ]]; then
    local cur opts base
    COMPREPLY=()
    cur="${COMP_WORDS[COMP_CWORD]}"
    # Include the partial flag token so the program can complete flag values.
    if [[ "$cur" == "-"* ]]; then
      opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} ${cur} --generate-bash-completion )
    else
      opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion )
    fi
    COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
    return 0
  fi
}

complete -o bashdefault -o default -o nospace -F _cli_bash_autocomplete $PROG
unset PROG
|
package io.opensphere.core.model.time;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import gnu.trove.list.TIntList;
/**
 * Unit tests for {@link BinaryTimeTree}: construction, insertion, range
 * counting, binning, node clearing and subdivision.  EasyMock providers
 * are used wherever only a {@code TimeSpan} is needed.
 */
public class BinaryTimeTreeTest
{
    /**
     * The object on which tests are performed.
     */
    private BinaryTimeTree<TimeSpanProvider> myTestObject;

    /**
     * Creates the resources needed to execute the tests.
     *
     * @throws java.lang.Exception if the resources cannot be initialized.
     */
    @Before
    public void setUp() throws Exception
    {
        myTestObject = new BinaryTimeTree<>();
    }

    /**
     * Test method to verify that there are no private methods in the
     * {@link BinaryTimeTree} class.
     */
    @Test
    public void testNonPrivateMethods()
    {
        Method[] declaredMethods = BinaryTimeTree.class.getDeclaredMethods();
        for (Method method : declaredMethods)
        {
            // Compiler-synthesized members ($... and lambda$...) are exempt.
            if (!method.getName().startsWith("$") && !method.getName().startsWith("lambda$"))
            {
                assertFalse(method.getName() + " is private. No private methods are permitted.",
                        Modifier.isPrivate(method.getModifiers()));
            }
        }
    }

    /**
     * Test method for {@link BinaryTimeTree#BinaryTimeTree()}.
     */
    @Test
    public void testBinaryTimeTree()
    {
        assertNotNull(myTestObject);
    }

    /**
     * Test method for {@link BinaryTimeTree#clear()}.
     */
    @Test
    public void testClear()
    {
        myTestObject.clear();
        assertEquals(0, myTestObject.size());
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#countInRange(io.opensphere.core.model.time.TimeSpan)}
     * .
     */
    // Counting on a never-inserted tree is expected to NPE.
    @Test(expected = NullPointerException.class)
    public void testCountInRangeEmptyNode()
    {
        assertEquals(0, myTestObject.countInRange(TimeSpan.ZERO));
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#countInRange(io.opensphere.core.model.time.TimeSpan)}
     * .
     */
    @Test
    public void testCountInRange()
    {
        TimeSpan span1 = new TimeSpanLongLong(0, 1);
        TimeSpan span2 = new TimeSpanLongLong(1, 2);
        TimeSpanProvider mock1 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock2 = createNiceMock(TimeSpanProvider.class);
        expect(mock1.getTimeSpan()).andReturn(span1).anyTimes();
        expect(mock2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(mock1, mock2);
        myTestObject.insert(Arrays.asList(mock1, mock2));
        // Only span1's provider falls inside [0, 1).
        assertEquals(1, myTestObject.countInRange(span1));
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#countsInBins(io.opensphere.core.model.time.TimeSpan, int)}
     * .
     */
    @Test
    public void testCountsInBins()
    {
        TimeSpan span1 = new TimeSpanLongLong(0, 100);
        TimeSpan span2 = new TimeSpanLongLong(0, 50);
        TimeSpanProvider mock1 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock2 = createNiceMock(TimeSpanProvider.class);
        expect(mock1.getTimeSpan()).andReturn(span1).anyTimes();
        expect(mock2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(mock1, mock2);
        myTestObject.insert(Arrays.asList(mock1, mock2));
        CountReport report = myTestObject.countsInBins(span1, 50);
        assertEquals(2, report.getMaxBinCount());
        assertEquals(1, report.getMinBinCount());
        // Golden value produced by the current binning implementation.
        assertEquals(76, report.getTotalCount());
    }

    /**
     * Test method for {@link BinaryTimeTree#countsInRanges(java.util.List)}.
     */
    @Test
    public void testCountsInRanges()
    {
        TimeSpan span1 = new TimeSpanLongLong(0, 100);
        TimeSpan span2 = new TimeSpanLongLong(0, 50);
        TimeSpanProvider mock1 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock2 = createNiceMock(TimeSpanProvider.class);
        expect(mock1.getTimeSpan()).andReturn(span1).anyTimes();
        expect(mock2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(mock1, mock2);
        myTestObject.insert(Arrays.asList(mock1, mock2));
        // One count entry per queried range.
        TIntList list = myTestObject.countsInRanges(Arrays.asList(span1, span2));
        assertEquals(2, list.size());
    }

    /**
     * Test method for {@link BinaryTimeTree#countsInRanges(java.util.List)}.
     */
    @Test
    public void testCountsInRangesEmpty()
    {
        TIntList list = myTestObject.countsInRanges(new ArrayList<>());
        assertTrue(list.isEmpty());
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#insert(io.opensphere.core.model.time.TimeSpanProvider)}
     * .
     */
    @Test
    public void testInsertE()
    {
        TimeSpan span1 = new TimeSpanLongLong(0, 1);
        TimeSpanProvider mock1 = createNiceMock(TimeSpanProvider.class);
        expect(mock1.getTimeSpan()).andReturn(span1).anyTimes();
        replay(mock1);
        myTestObject.insert(mock1);
        assertEquals(1, myTestObject.size());
    }

    /**
     * Test method for {@link BinaryTimeTree#insert(java.util.List)}.
     */
    @Test
    public void testInsertListOfE()
    {
        TimeSpan span1 = new TimeSpanLongLong(0, 1);
        TimeSpan span2 = new TimeSpanLongLong(1, 2);
        TimeSpanProvider mock1 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock2 = createNiceMock(TimeSpanProvider.class);
        expect(mock1.getTimeSpan()).andReturn(span1).anyTimes();
        expect(mock2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(mock1, mock2);
        myTestObject.insert(Arrays.asList(mock1, mock2));
        assertEquals(2, myTestObject.size());
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#internalSize(io.opensphere.core.model.time.BTreeNode)}
     * .
     */
    @Test
    public void testInternalSize()
    {
        BTreeNode<TimeSpanProvider> node = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode1 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode2 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode3 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode4 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode5 = new BTreeNode<>();
        // 2 + 3 + 2 values spread over three of the five sub-nodes.
        subNode1.getValues().add(createMock(TimeSpanProvider.class));
        subNode1.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode3.getValues().add(createMock(TimeSpanProvider.class));
        subNode3.getValues().add(createMock(TimeSpanProvider.class));
        node.setSubNodes(new ArrayList<>(Arrays.asList(subNode1, subNode2, subNode3, subNode4, subNode5)));
        node.setRange(new TimeSpanLongLong(0, 1000));
        // NOTE(review): provider1/provider2 below are never attached to the
        // node, so they do not contribute to the expected size of 7.
        TimeSpanProvider provider1 = createMock(TimeSpanProvider.class);
        TimeSpanProvider provider2 = createMock(TimeSpanProvider.class);
        TimeSpanLongLong span1 = new TimeSpanLongLong(10, 20);
        expect(provider1.getTimeSpan()).andReturn(span1).anyTimes();
        TimeSpanLongLong span2 = new TimeSpanLongLong(20, 200);
        expect(provider2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(provider1, provider2);
        assertEquals(7, myTestObject.internalSize(node));
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#maxValuesPerNodeInteral(io.opensphere.core.model.time.BTreeNode, int)}
     * .
     */
    @Test
    public void testMaxValuesPerNodeInteral()
    {
        TimeSpan span1 = new TimeSpanLongLong(0, 1);
        TimeSpan span2 = new TimeSpanLongLong(1, 2);
        TimeSpanProvider mock1 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock2 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock3 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock4 = createNiceMock(TimeSpanProvider.class);
        TimeSpanProvider mock5 = createNiceMock(TimeSpanProvider.class);
        expect(mock1.getTimeSpan()).andReturn(span1).anyTimes();
        expect(mock2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(mock1, mock2);
        BTreeNode<TimeSpanProvider> node = new BTreeNode<>();
        node.getValues().add(mock1);
        node.getValues().add(mock2);
        BTreeNode<TimeSpanProvider> subNode = new BTreeNode<>();
        subNode.getValues().add(mock3);
        subNode.getValues().add(mock4);
        subNode.getValues().add(mock5);
        node.getSubNodes().add(subNode);
        // The sub-node's 3 values exceed both the root's 2 and the floor of 1.
        assertEquals(3, myTestObject.maxValuesPerNodeInteral(node, 1));
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#maxValuesPerNodeInteral(io.opensphere.core.model.time.BTreeNode, int)}
     * .
     */
    @Test
    public void testMaxValuesPerNodeInteralNull()
    {
        assertEquals(0, myTestObject.maxValuesPerNodeInteral(null, 5));
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#maxValuesPerNodeInteral(io.opensphere.core.model.time.BTreeNode, int)}
     * .
     */
    @Test
    public void testMaxValuesPerNodeInteralEmpty()
    {
        assertEquals(5, myTestObject.maxValuesPerNodeInteral(new BTreeNode<>(), 5));
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#enlargeTimeRange(java.util.Collection, io.opensphere.core.model.time.TimeSpan)}
     * .
     */
    @Test
    public void testEnlargeTimeRange()
    {
        TimeSpan span = new TimeSpanLongLong(10, 20);
        TimeSpan span1 = new TimeSpanLongLong(0, 8);
        TimeSpan span2 = new TimeSpanLongLong(26, 50);
        // The result must cover the original span and all supplied spans.
        TimeSpan result = myTestObject.enlargeTimeRange(Arrays.asList(span1, span2), span);
        assertEquals(0, result.getStart());
        assertEquals(50, result.getEnd());
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#nodeClear(io.opensphere.core.model.time.BTreeNode)}
     * .
     */
    @Test
    public void testNodeClearWithSubnodes()
    {
        BTreeNode<TimeSpanProvider> node = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode1 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode2 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode3 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode4 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode5 = new BTreeNode<>();
        subNode1.getValues().add(createMock(TimeSpanProvider.class));
        subNode1.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode3.getValues().add(createMock(TimeSpanProvider.class));
        subNode3.getValues().add(createMock(TimeSpanProvider.class));
        // A sub-node with null values must also be handled during clear.
        subNode5.setValues(null);
        node.setSubNodes(new ArrayList<>(Arrays.asList(subNode1, subNode2, subNode3, subNode4, subNode5)));
        node.setValues(new ArrayList<>(Arrays.asList(createMock(TimeSpanProvider.class), createMock(TimeSpanProvider.class))));
        myTestObject.nodeClear(node);
        assertTrue(node.getValues().isEmpty());
        assertTrue(node.getSubNodes().isEmpty());
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#nodeClear(io.opensphere.core.model.time.BTreeNode)}
     * .
     */
    @Test
    public void testNodeClearWithNullValues()
    {
        BTreeNode<TimeSpanProvider> node = new BTreeNode<>();
        node.setValues(null);
        node.setSubNodes(null);
        myTestObject.nodeClear(node);
        assertNull(node.getValues());
        assertNull(node.getSubNodes());
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#nodeClear(io.opensphere.core.model.time.BTreeNode)}
     * .
     */
    @Test
    public void testNodeClearNull()
    {
        // no exceptions is passing
        myTestObject.nodeClear(null);
    }

    /**
     * Test method for
     * {@link BinaryTimeTree#subDivide(java.util.List, io.opensphere.core.model.time.BTreeNode)}
     * .
     */
    @Test
    public void testSubDivide()
    {
        // Force aggressive subdivision with a tiny capacity/branching config.
        myTestObject = new BinaryTimeTree<>(1, 1);
        BTreeNode<TimeSpanProvider> node = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode1 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode2 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode3 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode4 = new BTreeNode<>();
        BTreeNode<TimeSpanProvider> subNode5 = new BTreeNode<>();
        subNode1.getValues().add(createMock(TimeSpanProvider.class));
        subNode1.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode2.getValues().add(createMock(TimeSpanProvider.class));
        subNode3.getValues().add(createMock(TimeSpanProvider.class));
        subNode3.getValues().add(createMock(TimeSpanProvider.class));
        node.setSubNodes(new ArrayList<>(Arrays.asList(subNode1, subNode2, subNode3, subNode4, subNode5)));
        node.setRange(new TimeSpanLongLong(0, 1000));
        TimeSpanProvider provider1 = createMock(TimeSpanProvider.class);
        TimeSpanProvider provider2 = createMock(TimeSpanProvider.class);
        TimeSpanLongLong span1 = new TimeSpanLongLong(10, 20);
        expect(provider1.getTimeSpan()).andReturn(span1).anyTimes();
        TimeSpanLongLong span2 = new TimeSpanLongLong(20, 200);
        expect(provider2.getTimeSpan()).andReturn(span2).anyTimes();
        replay(provider1, provider2);
        List<TimeSpanProvider> values = new ArrayList<>(Arrays.asList(provider1, provider2));
        node.setValues(values);
        myTestObject.subDivide(values, node);
        // Golden value: 5 pre-existing sub-nodes plus those created by the
        // subdivision of the two providers.
        assertEquals(7, node.getSubNodes().size());
    }
}
|
//
//  MyWorkCircleCommentToolBar.h
//  czjxw
//
//  Created by zhangy on 15/11/17.
//  Copyright (c) 2015 mariocmy. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "XHMessageTextView.h"
#define kInputTextViewMinHeight 36
#define kInputTextViewMaxHeight 200
#define kHorizontalPadding 8
#define kVerticalPadding 5

@protocol MyWorkCircleCommentToolBarDelegate <NSObject>
@optional
/**
 *  Called when the text input view has begun editing.
 *
 *  @param messageInputTextView the input text view
 */
- (void)inputTextViewDidBeginEditing:(XHMessageTextView *)messageInputTextView;
/**
 *  Called when the text input view is about to begin editing.
 *
 *  @param messageInputTextView the input text view
 */
- (void)inputTextViewWillBeginEditing:(XHMessageTextView *)messageInputTextView;
/**
 *  Called when a text message is sent; may contain system emoji.
 *
 *  @param text the message text
 */
- (void)didSendText:(NSString *)text;
@required
/**
 *  Called when the toolbar's height changes to toHeight.
 */
- (void)didChangeFrameToHeight:(CGFloat)toHeight;
@end

@interface MyWorkCircleCommentToolBar : UIView
@property (nonatomic, weak) id <MyWorkCircleCommentToolBarDelegate> delegate;
/**
 *  Toolbar background image.
 */
@property (strong, nonatomic) UIImage *toolbarBackgroundImage;
/**
 *  Background image.
 */
@property (strong, nonatomic) UIImage *backgroundImage;
/**
 *  Emoji overlay view.
 */
@property (strong, nonatomic) UIView *faceView;
/**
 *  Text view used for composing text messages.
 */
@property (strong, nonatomic) XHMessageTextView *inputTextView;
/**
 *  Maximum height of the text input area.  Must be greater than
 *  kInputTextViewMinHeight and less than kInputTextViewMaxHeight,
 *  otherwise the setting is ignored.
 */
@property (nonatomic) CGFloat maxTextInputViewHeight;
/**
 *  Designated initializer.
 *
 *  @param frame position and size
 *
 *  @return a new comment toolbar
 */
- (instancetype)initWithFrame:(CGRect)frame;
/**
 *  Default toolbar height.
 *
 *  @return the default height
 */
+ (CGFloat)defaultHeight;

// Finish sending the current message and reset the input state.
- (void)finishSendMessage;
// Make the input text field the first responder (show the keyboard).
-(void)becomeFirstResponderForTextField;
@end
|
#include <cstddef>
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
#include "nifty/python/converter.hxx"
#include "nifty/graph/rag/grid_rag_features.hxx"
namespace py = pybind11;
namespace nifty{
namespace graph{
using namespace py;
// Registers "gridRagAccumulateLabels" for one RAG/label-type/dimension
// combination: given a RAG and a dense label volume, it produces one label
// per RAG node (see gridRagAccumulateLabels for the accumulation rule).
template<class RAG,class T,std::size_t DATA_DIM>
void exportGridRagAccumulateLabelsT(py::module & ragModule){

    // Name the lambda before registering it, for readability.
    auto accumulateLabels = [](
        const RAG & rag,
        nifty::marray::PyView<T, DATA_DIM> labels
    ){
        nifty::marray::PyView<T> nodeLabels({rag.numberOfNodes()});
        {
            // The heavy lifting is pure C++, so release the GIL meanwhile.
            py::gil_scoped_release allowThreads;
            gridRagAccumulateLabels(rag, labels, nodeLabels);
        }
        return nodeLabels;
    };

    ragModule.def("gridRagAccumulateLabels",
        accumulateLabels,
        py::arg("graph"), py::arg("labels")
    );
}
// Registers all graph-accumulator bindings on the given module.
void exportGraphAccumulator(py::module & ragModule) {

    // exportGridRagAccumulateLabels
    {
        typedef ExplicitLabelsGridRag<2, uint32_t> ExplicitLabelsGridRag2D;
        typedef ExplicitLabelsGridRag<3, uint32_t> ExplicitLabelsGridRag3D;

        // accumulate labels for the 2D and 3D explicit-label grid RAGs.
        // (Fixed: these two calls were previously duplicated, registering
        // identical pybind11 overloads of "gridRagAccumulateLabels" twice.)
        exportGridRagAccumulateLabelsT<ExplicitLabelsGridRag2D, uint32_t, 2>(ragModule);
        exportGridRagAccumulateLabelsT<ExplicitLabelsGridRag3D, uint32_t, 3>(ragModule);
    }
}
} // end namespace graph
} // end namespace nifty
|
<reponame>Walter1412/api_node_template
'use strict';
const { Model, DataTypes } = require('sequelize');
module.exports = sequelize => {
class UserAccount extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
UserAccount.init(
{
no: {
type: DataTypes.STRING,
allowNull: false,
unique: true,
},
account: {
type: DataTypes.STRING,
validate: {
notEmpty: true,
isEmail: true,
},
allowNull: false,
unique: true,
},
name: {
type: DataTypes.STRING,
allowNull: false,
},
password: {
type: DataTypes.STRING,
allowNull: false,
},
email: {
type: DataTypes.STRING,
validate: {
isEmil: true,
},
},
mobile:{
type:DataTypes.CHAR(10),
},
hasVerified: {
type: DataTypes.CHAR(10),
allowNull: false,
defaultValue: 0,
},
verificationTime: {
type: DataTypes.DATE,
},
actFlg: {
type: DataTypes.CHAR(10),
allowNull: false,
defaultValue: 0,
},
createdUser: {
type: DataTypes.INTEGER,
},
updatedUser: {
type: DataTypes.INTEGER,
},
salt: {
type: DataTypes.STRING,
},
},
{
sequelize,
modelName: 'UserAccount',
},
);
return UserAccount;
};
|
#!/bin/bash
#
# Exit 0 when no systemd units are in the "failed" state; otherwise print the
# failed units to stderr and exit 1.
#
# Count rows with --no-legend/--plain instead of grepping the localized,
# human-readable "N units listed" footer (which --quiet can suppress).
failed_unit_count=$(systemctl --failed --no-legend --plain | wc -l)

# Quote the variable so the test cannot break if the command produced nothing.
[ "${failed_unit_count:-0}" -eq 0 ] && exit 0

systemctl --failed >&2
# Fixed: previously the script exited with systemctl's (successful) status
# even when failed units existed, so it never signalled failure.
exit 1
|
'''
Created on Sep 2, 2012
@author: Luke
'''
from reader.language_tools import Greek
from reader.models import Lemma, Case, WordForm, WordDescription, Dialect
from reader.importer.LineImporter import LineImporter
import re
import logging
from time import time
from django.db import transaction
logger = logging.getLogger(__name__)
class DiogenesAnalysesImporter(LineImporter):
    """
    The Diogenes analyses importer imports the analyses file (greek-analyses.txt) from the Diogenes project.

    The import is broken down into the following steps:

    +------+--------------------+-----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------+
    | Step | Function           | Purpose         | Input                                                                                                                                                      |
    +------+--------------------+-----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------+
    | 1    | parse_entry        | Get the form    | a(/bra {537850 9 a(/bra_,a(/bra  favourite slave   fem nom/voc/acc dual}{537850 9 a(/bra_,a(/bra  favourite slave  fem nom/voc sg (attic doric aeolic)}     |
    | 2    | parse_descriptions | Parse the forms | {537850 9 a(/bra_,a(/bra  favourite slave   fem nom/voc/acc dual}{537850 9 a(/bra_,a(/bra  favourite slave  fem nom/voc sg (attic doric aeolic)}            |
    | 3    | parse_description  | Parse the form  | {537850 9 a(/bra_,a(/bra  favourite slave   fem nom/voc/acc dual}                                                                                          |
    +------+--------------------+-----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------+

    The import process consists of populating a series of models. The hierarchy looks something like:

    WordForm
      |- WordDescription -> Lemma
           |- Case
    """

    # Matches a single attribute token (e.g. "fem", "nom", "attic").
    PARSE_FIND_ATTRS = re.compile("[a-zA-Z0-9_]+")

    # Splits an entry into its individual {...} description groups.
    PARSE_ANALYSIS_DESCRIPTIONS_RE = re.compile("([{][^{]*)")

    # Parses one {...} description group into its named components.
    PARSE_ANALYSIS_DESCRIPTION_RE = re.compile(
        "[{]?(?P<reference_number>[0-9]+)\s+(?P<second_number>[0-9]+)\s+(?P<forms>[^\t]+)\t(?P<definition>.+)\t(?P<attrs>[^}]*)}(\[(?P<extra>[0-9]+)\])?")

    # Maps Diogenes attribute abbreviations to model vocabulary.
    CASE_MAP = {
        'nom': 'nominative',
        'voc': 'vocative',
        'acc': 'accusative',
        'dat': 'dative',
        'gen': 'genitive'
    }

    TENSE_MAP = {
        'aor': 'aorist',
        'pres': 'present',
        'imperf': 'imperfect',
        'fut': 'future',
        'perf': 'perfect',
        'futperf': 'future perfect',
        'plup': 'pluperfect'
    }

    NUMBER_MAP = {
        'sg': WordDescription.SINGULAR,
        'dual': WordDescription.DUAL,
        'pl': WordDescription.PLURAL
    }

    PERSON_MAP = {
        '1st': WordDescription.FIRST,
        '2nd': WordDescription.SECOND,
        '3rd': WordDescription.THIRD
    }

    MOOD_MAP = {
        "subj": "subjunctive",
        "ind": "indicative",
        "imperat": "imperative",
        "opt": "optative",
        "interrog": "interrogative"
    }

    VOICE_MAP = {
        "mid": WordDescription.MIDDLE,
        "pass": WordDescription.PASSIVE,
        "act": WordDescription.ACTIVE,
        "mp": WordDescription.MIDDLE_PASSIVE
    }

    PARTS_OF_SPEECH_MAP = {
        "conj": WordDescription.CONJUNCTION,
        "prep": WordDescription.PREPOSITION
    }

    CLITIC_MAP = {
        "proclitic": WordDescription.PROCLITIC,
        "enclitic": WordDescription.ENCLITIC
    }

    # Caches of all Case / Dialect rows; invalidated whenever a new one is created.
    cached_cases = None
    cached_dialects = None

    @classmethod
    @transaction.atomic
    def import_line(cls, entry, line_number=None, raise_exception_on_match_failure=False):
        """
        Parse an entry in the Diogenes greek-analyses.txt file.

        Example: 'ch/nta  {10100899 9 e)ch/nta_,e)k-a)nta/w   come opposite to       imperf ind act 3rd sg (attic epic ionic)}[32695571]

        Arguments:
        entry -- A line in the greek-analysis file
        line_number -- The line number associated with the entry
        raise_exception_on_match_failure -- If True, raise when a description does not match the expected format
        """
        # Get the form
        beta_code_string = entry[0:entry.find("\t")]

        # Strip out the exclamation marks which isn't necessary from what I can tell and just makes problems
        beta_code_string = beta_code_string.replace("!", "")

        # Find the lemma entry
        greek_code_string = Greek.beta_code_to_unicode(beta_code_string)

        # Cut out the stray apostraphe's
        greek_code_string = greek_code_string.replace("'", "")

        # Make the form
        word_form = WordForm()
        word_form.form = greek_code_string
        word_form.save()

        # Make the descriptions
        form_number = 0

        for desc in cls.PARSE_ANALYSIS_DESCRIPTIONS_RE.findall(entry):
            form_number = form_number + 1
            DiogenesAnalysesImporter.import_analysis_entry(
                desc, word_form, line_number, form_number, raise_exception_on_match_failure)

        # Log the line
        if line_number is not None and (line_number % 1000) == 0:
            logger.info("Importation progress, line_number=%i", line_number)

        return word_form

    @classmethod
    def get_lemma(cls, reference_number):
        """
        Get the lemma associated with the given reference number, or None if absent.

        Arguments:
        cls -- The class
        reference_number -- The reference number associated with the lemma
        """
        lemma = Lemma.objects.only("id").filter(
            reference_number=reference_number)[:1]

        if len(lemma) > 0:
            return lemma[0]

    @classmethod
    def import_analysis_entry(cls, desc, word_form, line_number=None, form_number=None, raise_exception_on_match_failure=False):
        """
        Import an entry from the Diogenes lemmata file.

        Arguments:
        cls -- The class
        desc -- A string with the part of the line that describes the given form (e.g. "{537850 9 a(/bra_,a(/bra  favourite slave   fem nom/voc/acc dual}")
        word_form -- The WordForm instance associated with the description
        line_number -- The line number that this entry is found on
        form_number -- The number of the form on this line (since each line can have several forms)
        raise_exception_on_match_failure -- Indicates if an exception should be raised if the line could not be matched to the regular expression
        """
        # Parse the description
        r = cls.PARSE_ANALYSIS_DESCRIPTION_RE.search(desc)

        # Stop if the regex did not match
        if r is None:
            if raise_exception_on_match_failure:
                raise Exception("Analysis entry does not match the regex, form=%s, line_number=%r, form_number=%r" % (
                    word_form.form, line_number, form_number))

            logger.warning("Analysis entry does not match the regex, form=%s, line_number=%r, form_number=%r" % (
                word_form.form, line_number, form_number))
            return

        d = r.groupdict()

        # Find the entry associated by the reference number
        reference_number = d['reference_number']
        lemma = cls.get_lemma(reference_number)

        # Stop if we couldn't find a matching lemma
        if lemma is None:
            logger.warning("Unable to find the lemma for an analysis entry, form=%s, reference_number=%r, line_number=%r, form_number=%r" % (
                word_form.form, reference_number, line_number, form_number))
        else:
            # Add the description of the form
            word_description = WordDescription(description=desc)
            word_description.word_form = word_form
            word_description.lemma = lemma
            word_description.meaning = d['definition']

            # Parse into a list of attributes
            attrs = cls.PARSE_FIND_ATTRS.findall(d['attrs'])

            # Update the word description with the data from the attributes.
            # Fixed: line_number was previously passed positionally, which bound
            # it to raise_on_unused_attributes and made any truthy line number
            # turn "unexpected attribute" warnings into exceptions.
            return cls.create_description_attributes(attrs, word_description, line_number=line_number)

    @classmethod
    def get_case(cls, case):
        """
        Get a case associated with the provided string, creating it if necessary.

        Arguments:
        cls -- The class
        case -- A name of a case.
        """
        if cls.cached_cases is None:
            cls.cached_cases = Case.objects.all()

        for c in cls.cached_cases:
            if c.name == case:
                return c

        # Create the new case
        new_case = Case(name=case)
        new_case.save()

        # Invalidate the cache so the new row is visible on the next lookup
        cls.cached_cases = None

        return new_case

    @classmethod
    def get_dialect(cls, dialect):
        """
        Get a dialect associated with the provided string, creating it if necessary.

        Arguments:
        cls -- The class
        dialect -- A name of a dialect.
        """
        if cls.cached_dialects is None:
            cls.cached_dialects = Dialect.objects.all()

        for c in cls.cached_dialects:
            if c.name == dialect:
                return c

        # Create the new dialect
        new_dialect = Dialect(name=dialect)
        new_dialect.save()

        # Invalidate the cache so the new row is visible on the next lookup
        cls.cached_dialects = None

        return new_dialect

    @staticmethod
    def set_part_of_speech(word_description, part_of_speech, raise_if_already_set=True, dont_set_if_already_set=True):
        """
        Set the part of speech.

        Arguments:
        word_description -- The word description instance to modify
        part_of_speech -- The part of speech to set
        raise_if_already_set -- Raise an exception if the part of speech was already set to something else
        dont_set_if_already_set -- If the part of speech was already set, then leave the existing value
        """
        if word_description.part_of_speech is not None and word_description.part_of_speech != part_of_speech and part_of_speech and raise_if_already_set:
            raise Exception("Part of speech was unexpected, is %i but was set to %i" % (
                word_description.part_of_speech, part_of_speech))

        if dont_set_if_already_set and word_description.part_of_speech is not None:
            pass
        else:
            word_description.part_of_speech = part_of_speech

    @classmethod
    def create_description_attributes(cls, attrs, word_description, raise_on_unused_attributes=False, line_number=None):
        """
        Update the description with attributes from the attrs.

        Arguments:
        cls -- The class
        attrs -- The list of attributes
        word_description -- The word description instance to modify
        raise_on_unused_attributes -- Raise an exception if an attribute is observed that is not recognized
        line_number -- The line number of the description we are populating
        """
        dialects = []
        cases = []

        # Go through the attributes and initialize the instance
        for a in attrs:

            # Handle gender
            if a == "fem":
                word_description.feminine = True
                cls.set_part_of_speech(word_description, WordDescription.NOUN)
            elif a == "masc":
                word_description.masculine = True
                cls.set_part_of_speech(word_description, WordDescription.NOUN)
            elif a == "neut":
                word_description.neuter = True
                cls.set_part_of_speech(word_description, WordDescription.NOUN)

            # Handle number
            elif a in cls.NUMBER_MAP:
                word_description.number = cls.NUMBER_MAP[a]

            # Handle dialects
            elif a in ["attic", "doric", "aeolic", "epic", "ionic", "homeric", "parad_form", "prose"]:
                dialects.append(cls.get_dialect(a))

            # Handle part of speech
            elif a in ["adverb", "adverbial"]:
                cls.set_part_of_speech(
                    word_description, WordDescription.ADVERB)

            # Handle number
            elif a in ["indeclform", "indecl"]:
                word_description.indeclinable = True

            # Handle person
            elif a in cls.PERSON_MAP:
                word_description.person = cls.PERSON_MAP[a]

            # Superlative
            elif a == "superl":
                word_description.superlative = WordDescription.REGULAR
            elif a == "irreg_superl":
                word_description.superlative = WordDescription.IRREGULAR

            # Comparative
            elif a == "comp":
                word_description.comparative = WordDescription.REGULAR
            elif a == "irreg_comp":
                word_description.comparative = WordDescription.IRREGULAR

            # Handle tenses
            elif a in cls.TENSE_MAP:
                word_description.tense = cls.TENSE_MAP[a]
                cls.set_part_of_speech(word_description, WordDescription.VERB)

            # Handle moods
            elif a in cls.MOOD_MAP:
                word_description.mood = cls.MOOD_MAP[a]
                cls.set_part_of_speech(word_description, WordDescription.VERB)

            elif a == "inf":
                word_description.infinitive = True
                cls.set_part_of_speech(word_description, WordDescription.VERB)

            elif a == "part":
                word_description.participle = True

            elif a == "expletive":
                word_description.expletive = True

            elif a == "poetic":
                word_description.poetic = True

            elif a in cls.PARTS_OF_SPEECH_MAP:
                cls.set_part_of_speech(
                    word_description, cls.PARTS_OF_SPEECH_MAP[a])

            elif a == "particle":
                word_description.particle = True

            elif a in cls.CLITIC_MAP:
                word_description.clitic = cls.CLITIC_MAP[a]

            elif a == "nu_movable":
                word_description.movable_nu = True

            elif a == "numeral":
                word_description.numeral = True

            elif a == "geog_name":
                word_description.geog_name = True

            elif a in ["a_priv", "exclam", "iota_intens", "contr", "alphabetic"]:
                # Recognized attributes that carry no model field; intentionally ignored
                pass

            elif a in cls.VOICE_MAP:
                word_description.voice = cls.VOICE_MAP[a]
                cls.set_part_of_speech(word_description, WordDescription.VERB)

            # Handle cases
            elif a in cls.CASE_MAP:
                cases.append(cls.get_case(a))
                cls.set_part_of_speech(
                    word_description, WordDescription.NOUN, dont_set_if_already_set=True)

            # Warn in the attribute was not processed
            elif raise_on_unused_attributes:
                if line_number is not None:
                    raise Exception(
                        "Attribute was not expected: attribute=%s, line_number=%i" % (a, line_number))
                else:
                    raise Exception(
                        "Attribute was not expected: attribute=%s" % a)

            else:
                if line_number is not None:
                    logger.warning(
                        "Attribute was not expected: attribute=%s, line_number=%i" % (a, line_number))
                else:
                    logger.warning("Attribute was not expected: attribute=%s" % a)

        # Save the description
        word_description.save()

        # Add the cases
        for case in cases:
            word_description.cases.add(case)

        # Add the dialects
        for dialect in dialects:
            word_description.dialects.add(dialect)

        return word_description
class DiogenesLemmataImporter():
    """
    The Diogenes Lemmata importer imports the lemmata file (greek-lemmata.txt) from the Diogenes project.
    """

    @staticmethod
    @transaction.atomic
    def parse_lemma(entry, line_number=None):
        """
        Parse an entry in the Diogenes greek-lemmata.txt file and save it as a Lemma.

        Example: 'ch/nta  {10100899 9 e)ch/nta_,e)k-a)nta/w   come opposite to       imperf ind act 3rd sg (attic epic ionic)}[32695571]

        Arguments:
        entry -- A line in the greek-lemmata file
        line_number -- The line number associated with the entry (currently unused)
        """
        split_entry = entry.split("\t")

        # Convert the beta-code to Greek
        lexical_form = Greek.beta_code_str_to_unicode(split_entry[0])

        # Cut out unnecessary apostrophe's
        lexical_form = lexical_form.replace("'", "")

        # Get the reference number
        reference_number = int(split_entry[1])

        # Make the entry
        lemma = Lemma(language="Greek")
        lemma.lexical_form = lexical_form
        lemma.reference_number = reference_number
        lemma.save()

        return lemma

    @staticmethod
    def import_file(file_name, return_created_objects=False, start_line_number=None):
        """
        Import every lemma in the given greek-lemmata file.

        Arguments:
        file_name -- The path of the file to import
        return_created_objects -- If True, return the list of created Lemma instances; otherwise return a count
        start_line_number -- If provided, skip all lines before this (1-based) line number
        """
        logger.debug("Importing file, file=\"%s\"", file_name)

        # Record the start time so that we can measure performance
        start_time = time()

        # If we are returning the objects, then initialize an array to store them. Otherwise, initialize the count.
        if return_created_objects:
            lemmas = []
        else:
            lemmas = 0

        # Use a context manager so the file handle is closed even if parsing fails
        # (replaces the previous manual try/finally bookkeeping).
        with open(file_name, 'r') as f:

            # Process each line; line numbers are 1-based
            for line_number, line in enumerate(f, start=1):

                # If we are importing starting from a particular line number, then skip lines until you get to this point
                if start_line_number is not None and line_number < start_line_number:
                    continue  # Skip this line

                # Import the line
                lemma = DiogenesLemmataImporter.parse_lemma(line, line_number)

                if return_created_objects:
                    lemmas.append(lemma)
                else:
                    lemmas = lemmas + 1

        logger.info("Import complete, duration=%i", time() - start_time)

        return lemmas
|
#!/bin/csh -f
#
# svn $Id: job_psas_sen.sh 2328 2014-01-23 20:16:18Z arango $
#######################################################################
# Copyright (c) 2002-2014 The ROMS/TOMS Group #
# Licensed under a MIT/X style license #
# See License_ROMS.txt #
#######################################################################
# #
# Strong/Weak constraint 4D-PSAS observation impact or sensitivity #
# job script: #
# #
# This script NEEDS to be run before any run: #
# #
# (1) It copies a new clean nonlinear model initial conditions #
# file. The nonlinear model is initialized from the #
# background or reference state. #
# (2) It copies Lanczos vectors from previous 4D-PSAS run. They #
# are stored in 4D-Var data assimilation file. #
# (3) It copies the adjoint sensitivy functional file for the #
# observation impact or sensitivity. #
# (4) Specify model, initial conditions, boundary conditions, and #
# surface forcing error convariance input standard deviations #
# files. #
# (5) Specify model, initial conditions, boundary conditions, and #
# surface forcing error convariance input/output normalization #
# factors files. #
# (6) Copy a clean copy of the observations NetCDF file. #
# (7) Create 4D-Var input script "psas.in" from template and #
# specify the error covariance standard deviation, error #
# covariance normalization factors, and observation files to #
# be used. #
# #
#######################################################################
# Set path definition to one directory up in the tree.

set Dir=`dirname ${PWD}`

# Set string manipulations perl script.
# (It replaces placeholder file names inside the 4D-Var input script.)

set SUBSTITUTE=${ROMS_ROOT}/ROMS/Bin/substitute

# Copy nonlinear model initial conditions file.

cp -p ${Dir}/Data/wc13_ini.nc wc13_ini.nc

# Copy Lanczos vectors from previous 4D-PSAS run. They are stored
# in 4D-Var data assimilation file.

cp -p ${Dir}/PSAS/wc13_mod.nc wc13_lcz.nc

# Copy adjoint sensitivity functional.

cp -p ${Dir}/Data/wc13_ads.nc wc13_ads.nc

# Set model, initial conditions, boundary conditions and surface
# forcing error covariance standard deviations files.

set STDnameM=${Dir}/Data/wc13_std_m.nc
set STDnameI=${Dir}/Data/wc13_std_i.nc
set STDnameB=${Dir}/Data/wc13_std_b.nc
set STDnameF=${Dir}/Data/wc13_std_f.nc

# Set model, initial conditions, boundary conditions and surface
# forcing error covariance normalization factors files.

set NRMnameM=${Dir}/Data/wc13_nrm_m.nc
set NRMnameI=${Dir}/Data/wc13_nrm_i.nc
set NRMnameB=${Dir}/Data/wc13_nrm_b.nc
set NRMnameF=${Dir}/Data/wc13_nrm_f.nc

# Set observations file.

set OBSname=wc13_obs.nc

# Get a clean copy of the observation file.  This is really
# important since this file is modified.

cp -p ${Dir}/Data/${OBSname} .

# Modify 4D-Var template input script and specify above files.
# Remove any stale script from a previous run before regenerating it.

set PSAS=psas.in
if (-e $PSAS) then
  /bin/rm $PSAS
endif
cp s4dvar.in $PSAS

# Substitute the placeholder names in the template with the actual files.

$SUBSTITUTE $PSAS ocean_std_m.nc $STDnameM
$SUBSTITUTE $PSAS ocean_std_i.nc $STDnameI
$SUBSTITUTE $PSAS ocean_std_b.nc $STDnameB
$SUBSTITUTE $PSAS ocean_std_f.nc $STDnameF
$SUBSTITUTE $PSAS ocean_nrm_m.nc $NRMnameM
$SUBSTITUTE $PSAS ocean_nrm_i.nc $NRMnameI
$SUBSTITUTE $PSAS ocean_nrm_b.nc $NRMnameB
$SUBSTITUTE $PSAS ocean_nrm_f.nc $NRMnameF
$SUBSTITUTE $PSAS ocean_obs.nc $OBSname
$SUBSTITUTE $PSAS ocean_hss.nc wc13_hss.nc
$SUBSTITUTE $PSAS ocean_lcz.nc wc13_lcz.nc
$SUBSTITUTE $PSAS ocean_mod.nc wc13_mod.nc
$SUBSTITUTE $PSAS ocean_err.nc wc13_err.nc
|
require 'rspec'
$:.unshift(File.dirname(__FILE__) + '/../lib')
require 'morpher_inflecter'
# Parse a JSON string into the corresponding Ruby object (Hash, Array, ...).
#
# NOTE(review): relies on the JSON constant being loaded by an earlier
# require (presumably via morpher_inflecter) -- 'json' is not required in
# this file directly; confirm.
def parsed_json(text)
  ::JSON.parse(text)
end
|
<gh_stars>100-1000
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Orbbec 3D
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
// Undeprecate CRT functions
#ifndef _CRT_SECURE_NO_DEPRECATE
#define _CRT_SECURE_NO_DEPRECATE 1
#endif
#include "hnd_hand_tracker.hpp"
#include "hnd_segmentation.hpp"
#include <astra/capi/streams/hand_types.h>
#include <astra/capi/astra_ctypes.h>
#include <astra_core/plugins/Plugin.hpp>
#include <Shiny.h>
namespace astra { namespace hand {
using namespace std;
// Wires the tracker to a depth stream on the given stream set, creates the
// output hand/debug streams, starts the depth and point producers, and
// subscribes this object to synchronized frames.
hand_tracker::hand_tracker(PluginServiceProxy& pluginService,
                           astra_streamset_t streamSet,
                           StreamDescription& depthDesc,
                           hand_settings& settings) :
    streamset_(plugins::get_uri_for_streamset(pluginService, streamSet)),
    reader_(streamset_.create_reader()),
    depthStream_(reader_.stream<DepthStream>(depthDesc.subtype())),
    settings_(settings),
    pluginService_(pluginService),
    // Depth/point processing happens at a reduced resolution for speed
    depthUtility_(settings.processingSizeWidth, settings.processingSizeHeight, settings.depthUtilitySettings),
    pointProcessor_(settings.pointProcessorSettings),
    processingSizeWidth_(settings.processingSizeWidth),
    processingSizeHeight_(settings.processingSizeHeight)
{
    PROFILE_FUNC();
    create_streams(pluginService_, streamSet);

    // Begin producing depth and world-point data, then listen for frames.
    depthStream_.start();
    reader_.stream<PointStream>().start();
    reader_.add_listener(*this);
}
// Releases the lazily-allocated world-point scratch buffer.
hand_tracker::~hand_tracker()
{
    PROFILE_FUNC();

    // delete[] on a null pointer is a no-op, so no explicit guard is needed.
    delete[] worldPoints_;
    worldPoints_ = nullptr;
}
// Creates the two output streams this plugin publishes: the hand-point
// stream and the RGB debug-image stream.
void hand_tracker::create_streams(PluginServiceProxy& pluginService, astra_streamset_t streamSet)
{
    PROFILE_FUNC();
    LOG_INFO("hand_tracker", "creating hand streams");

    handStream_ = std::unique_ptr<handstream>(
        plugins::make_stream<handstream>(pluginService, streamSet, ASTRA_HANDS_MAX_HAND_COUNT));

    // Debug image is RGB888, i.e. three bytes per pixel.
    const int bytesPerPixel = 3;
    debugimagestream_ = std::unique_ptr<debug_handstream>(
        plugins::make_stream<debug_handstream>(pluginService,
                                               streamSet,
                                               processingSizeWidth_,
                                               processingSizeHeight_,
                                               bytesPerPixel));
}
// Frame callback: runs tracking only when at least one consumer is
// connected to either output stream.
void hand_tracker::on_frame_ready(StreamReader& reader, Frame& frame)
{
    PROFILE_FUNC();

    const bool anyConsumers = handStream_->has_connections() ||
                              debugimagestream_->has_connections();
    if (anyConsumers)
    {
        const DepthFrame depthFrame = frame.get<DepthFrame>();
        const PointFrame pointFrame = frame.get<PointFrame>();
        update_tracking(depthFrame, pointFrame);
    }

    PROFILE_UPDATE();
}
// Resets all tracking state: the depth/velocity history in the depth
// utility and every tracked point in the point processor.
void hand_tracker::reset()
{
    PROFILE_FUNC();
    depthUtility_.reset();
    pointProcessor_.reset();
}
void hand_tracker::update_tracking(const DepthFrame& depthFrame, const PointFrame& pointFrame)
{
PROFILE_FUNC();
if (!debugimagestream_->pause_input())
{
depthUtility_.depth_to_velocity_signal(depthFrame, matDepth_, matDepthFullSize_, matVelocitySignal_);
}
track_points(matDepth_, matDepthFullSize_, matVelocitySignal_, pointFrame.data());
//use same frameIndex as source depth frame
astra_frame_index_t frameIndex = depthFrame.frame_index();
if (handStream_->has_connections())
{
generate_hand_frame(frameIndex);
}
if (debugimagestream_->has_connections())
{
generate_hand_debug_image_frame(frameIndex);
}
}
// Runs one full tracking pass over the (reduced-resolution) depth frame:
// clears all per-frame scratch buffers, updates the points already being
// tracked, searches the velocity signal for new candidate points, prunes
// stale points, and finally refines survivors at full depth resolution.
void hand_tracker::track_points(BitmapF& matDepth,
                                BitmapF& matDepthFullSize,
                                BitmapMask& matVelocitySignal,
                                const astra::Vector3f* fullSizeWorldPoints)
{
    PROFILE_FUNC();

    // --- Reset every per-frame scratch bitmap to the processing size. ---
    layerSegmentation_.recreate(matDepth.size());
    layerSegmentation_.fill(0);
    layerScore_.recreate(matDepth.size());
    layerScore_.fill(0.f);
    layerEdgeDistance_.recreate(matDepth.size());
    layerEdgeDistance_.fill(0.f);
    debugUpdateSegmentation_.recreate(matDepth.size());
    debugUpdateSegmentation_.fill(0);
    debugCreateSegmentation_.recreate(matDepth.size());
    debugCreateSegmentation_.fill(0);
    debugRefineSegmentation_.recreate(matDepth.size());
    debugRefineSegmentation_.fill(0);
    updateForegroundSearched_.recreate(matDepth.size());
    updateForegroundSearched_.fill(0);
    createForegroundSearched_.recreate(matDepth.size());
    createForegroundSearched_.fill(0);
    refineForegroundSearched_.recreate(matDepth.size());
    refineForegroundSearched_.fill(0);
    debugUpdateScore_.recreate(matDepth.size());
    debugUpdateScore_.fill(0.f);
    debugCreateScore_.recreate(matDepth.size());
    debugCreateScore_.fill(0.f);
    matDepthWindow_.recreate(matDepth.size());
    matDepthWindow_.fill(0.f);
    refineSegmentation_.recreate(matDepth.size());
    refineSegmentation_.fill(0);
    refineScore_.recreate(matDepth.size());
    refineScore_.fill(0.f);
    refineEdgeDistance_.recreate(matDepth.size());
    refineEdgeDistance_.fill(0.f);
    debugUpdateScoreValue_.recreate(matDepth.size());
    debugUpdateScoreValue_.fill(0.f);
    debugCreateScoreValue_.recreate(matDepth.size());
    debugCreateScoreValue_.fill(0.f);
    debugRefineScoreValue_.recreate(matDepth.size());
    debugRefineScoreValue_.fill(0.f);
    debugCreateTestPassMap_.recreate(matDepth.size());
    debugCreateTestPassMap_.fill(0);
    debugUpdateTestPassMap_.recreate(matDepth.size());
    debugUpdateTestPassMap_.fill(0);
    debugRefineTestPassMap_.recreate(matDepth.size());
    debugRefineTestPassMap_.fill(0);
    // NOTE(review): debugRefineScore_ is passed to refinementMatrices below
    // but is not recreated/cleared here, unlike its update/create
    // counterparts -- confirm whether that is intentional.

    // Lazily (re)allocate the world-point buffer when the processing
    // resolution changes.
    int numPoints = matDepth.width()* matDepth.height();
    if (worldPoints_ == nullptr || numWorldPoints_ != numPoints)
    {
        if (worldPoints_ != nullptr)
        {
            delete[] worldPoints_;
            worldPoints_ = nullptr;
        }
        numWorldPoints_ = numPoints;
        worldPoints_ = new astra::Vector3f[numPoints];
    }

    const conversion_cache_t depthToWorldData = depthStream_.depth_to_world_data();
    // Debug layers are only worth computing when someone is watching.
    bool debugLayersEnabled = debugimagestream_->has_connections();
    bool enabledTestPassMap = debugimagestream_->view_type() == DEBUG_HAND_VIEW_TEST_PASS_MAP;

    // Matrices bundle for the "update existing points" phase.
    tracking_matrices updateMatrices(matDepthFullSize,
                                     matDepth,
                                     matArea_,
                                     matAreaSqrt_,
                                     matVelocitySignal,
                                     updateForegroundSearched_,
                                     layerSegmentation_,
                                     layerScore_,
                                     layerEdgeDistance_,
                                     layerIntegralArea_,
                                     layerTestPassMap_,
                                     debugUpdateSegmentation_,
                                     debugUpdateScore_,
                                     debugUpdateScoreValue_,
                                     debugUpdateTestPassMap_,
                                     enabledTestPassMap,
                                     fullSizeWorldPoints,
                                     worldPoints_,
                                     debugLayersEnabled,
                                     depthStream_.coordinateMapper(),
                                     depthToWorldData);

    if (!debugimagestream_->pause_input())
    {
        pointProcessor_.initialize_common_calculations(updateMatrices);
    }

    //Update existing points first so that if we lose a point, we might recover it in the "add new" stage below
    //without having at least one frame of a lost point.
    pointProcessor_.update_tracked_points(updateMatrices);
    pointProcessor_.remove_duplicate_points();

    // Matrices bundle for the "create new points" phase.
    tracking_matrices createMatrices(matDepthFullSize,
                                     matDepth,
                                     matArea_,
                                     matAreaSqrt_,
                                     matVelocitySignal,
                                     createForegroundSearched_,
                                     layerSegmentation_,
                                     layerScore_,
                                     layerEdgeDistance_,
                                     layerIntegralArea_,
                                     layerTestPassMap_,
                                     debugCreateSegmentation_,
                                     debugCreateScore_,
                                     debugCreateScoreValue_,
                                     debugCreateTestPassMap_,
                                     enabledTestPassMap,
                                     fullSizeWorldPoints,
                                     worldPoints_,
                                     debugLayersEnabled,
                                     depthStream_.coordinateMapper(),
                                     depthToWorldData);

    //add new points (unless already tracking)
    if (!debugimagestream_->use_mouse_probe())
    {
        // Scan the velocity signal for seed pixels and try to start (or
        // re-acquire) a tracked point at each one.
        Point2i seedPosition;
        Point2i nextSearchStart(0, 0);
        while (segmentation::find_next_velocity_seed_pixel(matVelocitySignal, createForegroundSearched_, seedPosition, nextSearchStart))
        {
            pointProcessor_.update_tracked_or_create_new_point_from_seed(createMatrices, seedPosition);
        }
    }
    else
    {
        // Debug mode: seed from the mouse position instead of the signal.
        debug_spawn_point(createMatrices);
    }

    debug_probe_point(createMatrices);

    //remove old points
    pointProcessor_.remove_stale_or_dead_points();

    // Matrices bundle for the full-resolution refinement phase
    // (note: no debug layers, hence the literal false below).
    tracking_matrices refinementMatrices(matDepthFullSize,
                                         matDepthWindow_,
                                         matArea_,
                                         matAreaSqrt_,
                                         matVelocitySignal,
                                         refineForegroundSearched_,
                                         refineSegmentation_,
                                         refineScore_,
                                         refineEdgeDistance_,
                                         layerIntegralArea_,
                                         layerTestPassMap_,
                                         debugRefineSegmentation_,
                                         debugRefineScore_,
                                         debugRefineScoreValue_,
                                         debugRefineTestPassMap_,
                                         enabledTestPassMap,
                                         fullSizeWorldPoints,
                                         worldPoints_,
                                         false,
                                         depthStream_.coordinateMapper(),
                                         depthToWorldData);

    pointProcessor_.update_full_resolution_points(refinementMatrices);
    pointProcessor_.update_trajectories();
}
// Debug helper: when the mouse probe is enabled, re-runs the individual
// segmentation tests at the pixel under the mouse and logs depth, score,
// edge distance and an overall PASS/FAIL verdict.
void hand_tracker::debug_probe_point(tracking_matrices& matrices)
{
    if (!debugimagestream_->use_mouse_probe())
    {
        return;
    }

    Point2i probePosition = get_mouse_probe_position();

    BitmapF& matDepth = matrices.depth;

    // Sample the diagnostic values at the probed pixel.
    float depth = matDepth.at(probePosition);
    float score = debugCreateScoreValue_.at(probePosition);
    float edgeDist = layerEdgeDistance_.at(probePosition);

    auto segmentationSettings = settings_.pointProcessorSettings.segmentationSettings;

    // Log each individual test outcome; probe uses the "create" phase tests.
    const test_behavior outputTestLog = TEST_BEHAVIOR_LOG;
    const test_phase phase = TEST_PHASE_CREATE;

    bool validPointInRange = segmentation::test_point_in_range(matrices,
                                                               probePosition,
                                                               outputTestLog);
    bool validPointArea = false;
    bool validRadiusTest = false;
    bool validNaturalEdges = false;

    // The remaining tests are only meaningful for a pixel in range.
    if (validPointInRange)
    {
        validPointArea = segmentation::test_point_area_integral(matrices,
                                                                matrices.layerIntegralArea,
                                                                segmentationSettings.areaTestSettings,
                                                                probePosition,
                                                                phase,
                                                                outputTestLog);
        validRadiusTest = segmentation::test_foreground_radius_percentage(matrices,
                                                                          segmentationSettings.circumferenceTestSettings,
                                                                          probePosition,
                                                                          phase,
                                                                          outputTestLog);
        validNaturalEdges = segmentation::test_natural_edges(matrices,
                                                             segmentationSettings.naturalEdgeTestSettings,
                                                             probePosition,
                                                             phase,
                                                             outputTestLog);
    }

    bool allPointsPass = validPointInRange &&
                         validPointArea &&
                         validRadiusTest &&
                         validNaturalEdges;

    LOG_INFO("hand_tracker", "depth: %f score: %f edge %f tests: %s",
             depth,
             score,
             edgeDist,
             allPointsPass ? "PASS" : "FAIL");
}
// Debug helper: spawns (or updates) a tracked point from the mouse-derived
// seed position rather than from the velocity signal.
void hand_tracker::debug_spawn_point(tracking_matrices& matrices)
{
    // Refresh the shared per-frame calculations unless input is frozen.
    const bool inputPaused = debugimagestream_->pause_input();
    if (!inputPaused)
    {
        pointProcessor_.initialize_common_calculations(matrices);
    }

    const Point2i seed = get_spawn_position();
    pointProcessor_.update_tracked_or_create_new_point_from_seed(matrices, seed);
}
// Converts the normalized debug spawn position (mouse, or the locked spawn
// point) into pixel coordinates within the processing-sized image.
Point2i hand_tracker::get_spawn_position()
{
    auto normPosition = debugimagestream_->mouse_norm_position();
    if (debugimagestream_->spawn_point_locked())
    {
        normPosition = debugimagestream_->spawn_norm_position();
    }

    // Fixed: the upper clamp was the full width/height, which is one past
    // the last valid pixel index and could index out of bounds for a
    // normalized coordinate of 1.0; clamp to size - 1 instead.
    int x = MAX(0, MIN(processingSizeWidth_ - 1, normPosition.x * processingSizeWidth_));
    int y = MAX(0, MIN(processingSizeHeight_ - 1, normPosition.y * processingSizeHeight_));
    return Point2i(x, y);
}
// Converts the normalized mouse position into pixel coordinates within the
// processing-sized image; the result is used to index bitmaps directly
// (see debug_probe_point).
Point2i hand_tracker::get_mouse_probe_position()
{
    auto normPosition = debugimagestream_->mouse_norm_position();

    // Fixed: the upper clamp was the full width/height, which is one past
    // the last valid pixel index and could index out of bounds for a
    // normalized coordinate of 1.0; clamp to size - 1 instead.
    int x = MAX(0, MIN(processingSizeWidth_ - 1, normPosition.x * processingSizeWidth_));
    int y = MAX(0, MIN(processingSizeHeight_ - 1, normPosition.y * processingSizeHeight_));
    return Point2i(x, y);
}
// Publishes the current set of tracked points to the hand stream for
// the given frame index. If begin_write() returns nullptr (no writable
// frame available), the frame is skipped entirely.
void hand_tracker::generate_hand_frame(astra_frame_index_t frameIndex)
{
    PROFILE_FUNC();
    astra_handframe_wrapper_t* handFrame = handStream_->begin_write(frameIndex);
    if (handFrame != nullptr)
    {
        // The hand point array lives inline in the wrapper's frame_data.
        handFrame->frame.handpoints = reinterpret_cast<astra_handpoint_t*>(&(handFrame->frame_data));
        handFrame->frame.handCount = ASTRA_HANDS_MAX_HAND_COUNT;
        update_hand_frame(pointProcessor_.get_trackedPoints(), handFrame->frame);
        PROFILE_BEGIN(end_write);
        handStream_->end_write();
        PROFILE_END();
    }
}
// Publishes the debug visualization image for the given frame index.
// Skipped entirely when begin_write() yields no writable frame.
void hand_tracker::generate_hand_debug_image_frame(astra_frame_index_t frameIndex)
{
    PROFILE_FUNC();
    astra_imageframe_wrapper_t* debugimageframe = debugimagestream_->begin_write(frameIndex);
    if (debugimageframe != nullptr)
    {
        // Pixel data lives inline in the wrapper's frame_data.
        debugimageframe->frame.data = reinterpret_cast<uint8_t *>(&(debugimageframe->frame_data));
        // Metadata describes the processing-size RGB888 debug image.
        astra_image_metadata_t metadata;
        metadata.width = processingSizeWidth_;
        metadata.height = processingSizeHeight_;
        metadata.pixelFormat = astra_pixel_formats::ASTRA_PIXEL_FORMAT_RGB888;
        debugimageframe->frame.metadata = metadata;
        update_debug_image_frame(debugimageframe->frame);
        debugimagestream_->end_write();
    }
}
// Copies eligible internally tracked points into the public hand frame,
// then resets any remaining (unused) slots in the frame's point array.
//
// A point is published when:
//   - its status is tracking or lost, AND
//   - it is an active point, or a candidate point while the stream has
//     candidate reporting enabled.
// At most frame.handCount points are written.
void hand_tracker::update_hand_frame(vector<tracked_point>& internaltracked_points, _astra_handframe& frame)
{
    PROFILE_FUNC();
    int handIndex = 0;
    const int maxHandCount = frame.handCount;

    const bool includeCandidates = handStream_->include_candidate_points();

    // Iterate by reference: the original copied each tracked_point by
    // value every iteration and kept scanning even after the frame
    // was full.
    for (tracked_point& internalPoint : internaltracked_points)
    {
        if (handIndex >= maxHandCount)
        {
            // Frame is full; remaining internal points cannot be published.
            break;
        }

        const tracking_status status = internalPoint.trackingStatus;
        const tracked_point_type pointType = internalPoint.pointType;

        const bool includeByStatus = status == tracking_status::tracking ||
                                     status == tracking_status::lost;

        const bool includeByType = pointType == tracked_point_type::active_point ||
                                   (pointType == tracked_point_type::candidate_point && includeCandidates);

        if (includeByStatus && includeByType)
        {
            astra_handpoint_t& point = frame.handpoints[handIndex];
            ++handIndex;

            point.trackingId = internalPoint.trackingId;

            // Positions are reported in full-size depth coordinates.
            point.depthPosition.x = internalPoint.fullSizePosition.x;
            point.depthPosition.y = internalPoint.fullSizePosition.y;

            copy_position(internalPoint.fullSizeWorldPosition, point.worldPosition);
            copy_position(internalPoint.fullSizeWorldDeltaPosition, point.worldDeltaPosition);

            point.status = convert_hand_status(status, pointType);
        }
    }

    // Mark all unused slots as not tracking so consumers can skip them.
    for (int i = handIndex; i < maxHandCount; ++i)
    {
        astra_handpoint_t& point = frame.handpoints[i];
        reset_hand_point(point);
    }
}
// Copies a Vector3f into the public astra_vector3f_t representation,
// component by component.
// NOTE(review): source is never modified and could be taken by const
// reference, but that would require changing the declaration elsewhere.
void hand_tracker::copy_position(Vector3f& source, astra_vector3f_t& target)
{
    PROFILE_FUNC();
    target.x = source.x;
    target.y = source.y;
    target.z = source.z;
}
// Maps an internal (tracking_status, tracked_point_type) pair onto the
// public astra_handstatus_t enumeration.
//
// Candidate points are always reported as HAND_STATUS_CANDIDATE
// regardless of their internal tracking status; otherwise the status
// maps directly, with dead/not_tracking (and anything unknown)
// collapsing to HAND_STATUS_NOTTRACKING.
astra_handstatus_t hand_tracker::convert_hand_status(tracking_status status, tracked_point_type type)
{
    PROFILE_FUNC();
    if (type == tracked_point_type::candidate_point)
    {
        return HAND_STATUS_CANDIDATE;
    }
    // Note: the original had an unreachable `break` after each
    // `return`; the dead statements have been removed.
    switch (status)
    {
    case tracking_status::tracking:
        return HAND_STATUS_TRACKING;
    case tracking_status::lost:
        return HAND_STATUS_LOST;
    case tracking_status::dead:
    case tracking_status::not_tracking:
    default:
        return HAND_STATUS_NOTTRACKING;
    }
}
// Resets a hand point slot to the "not tracking" sentinel state
// (trackingId -1, zeroed positions) so consumers can distinguish
// unused slots from live points.
void hand_tracker::reset_hand_point(astra_handpoint_t& point)
{
    PROFILE_FUNC();
    point.trackingId = -1;
    point.status = HAND_STATUS_NOTTRACKING;
    point.depthPosition = astra_vector2i_t();
    point.worldPosition = astra_vector3f_t();
    point.worldDeltaPosition = astra_vector3f_t();
}
// Writes a single RGB pixel into the debug image frame.
//
// Points outside the frame bounds are silently ignored: callers such
// as the circle overlays can produce coordinates near or past the
// image edge, and the original unchecked write could corrupt adjacent
// rows or write out of bounds.
void mark_image_pixel(_astra_imageframe& imageFrame,
                      RgbPixel color,
                      astra::Vector2i p)
{
    PROFILE_FUNC();
    const int width = static_cast<int>(imageFrame.metadata.width);
    const int height = static_cast<int>(imageFrame.metadata.height);

    // Bounds guard: drop out-of-range points.
    if (p.x < 0 || p.x >= width || p.y < 0 || p.y >= height)
    {
        return;
    }

    RgbPixel* colorData = static_cast<RgbPixel*>(imageFrame.data);
    const int index = p.x + p.y * width;
    colorData[index] = color;
}
// Overlays the two circumference-test circles around the mouse probe
// position, plus a marker at the spawn position, onto the debug image.
void hand_tracker::overlay_circle(_astra_imageframe& imageFrame)
{
    PROFILE_FUNC();
    // Map from processing-size coordinates back to full-size depth space.
    float resizeFactor = matDepthFullSize_.width() / static_cast<float>(matDepth_.width());
    scaling_coordinate_mapper mapper(depthStream_.depth_to_world_data(), resizeFactor);
    RgbPixel color(255, 0, 255);
    auto segmentationSettings = settings_.pointProcessorSettings.segmentationSettings;
    float foregroundRadius1 = segmentationSettings.circumferenceTestSettings.foregroundRadius1;
    float foregroundRadius2 = segmentationSettings.circumferenceTestSettings.foregroundRadius2;
    Point2i probePosition = get_mouse_probe_position();
    std::vector<astra::Vector2i> points;
    // NOTE(review): `points` is reused for the second circle without an
    // explicit clear — this assumes get_circumference_points() clears or
    // overwrites the output vector. Confirm; otherwise the first
    // circle's points are drawn a second time (harmless, same color).
    segmentation::get_circumference_points(matDepth_, probePosition, foregroundRadius1, mapper, points);
    for (auto p : points)
    {
        mark_image_pixel(imageFrame, color, p);
    }
    segmentation::get_circumference_points(matDepth_, probePosition, foregroundRadius2, mapper, points);
    for (auto p : points)
    {
        mark_image_pixel(imageFrame, color, p);
    }
    // Mark the spawn position with a single pixel.
    Point2i spawnPosition = get_spawn_position();
    RgbPixel spawnColor(255, 0, 255);
    mark_image_pixel(imageFrame, spawnColor, Vector2i(spawnPosition.x, spawnPosition.y));
}
// Renders the currently selected debug view into colorFrame, then
// draws the applicable overlays on top:
//   - searched-pixel masks (create/update "searched" views only),
//   - the velocity foreground mask,
//   - the mouse-probe circles (when the probe is enabled),
//   - crosshairs for all tracked points.
//
// Changes vs. original: removed the unused local `testPassColor`, and
// renamed the local `maxVelocity_` to `maxVelocity` (the trailing
// underscore falsely suggested a member variable).
void hand_tracker::update_debug_image_frame(_astra_imageframe& colorFrame)
{
    PROFILE_FUNC();
    // Normalization ceiling for the velocity visualizations.
    float maxVelocity = 0.1;
    RgbPixel foregroundColor(0, 0, 255);
    RgbPixel searchedColor(128, 255, 0);
    RgbPixel searchedColor2(0, 128, 255);
    debug_handview_type view = debugimagestream_->view_type();
    // Base layer: one visualization per debug view type.
    switch (view)
    {
    case DEBUG_HAND_VIEW_DEPTH:
        debugVisualizer_.show_depth_matrix(matDepth_,
                                           colorFrame);
        break;
    case DEBUG_HAND_VIEW_DEPTH_MOD:
        debugVisualizer_.show_depth_matrix(depthUtility_.matDepthFilled(),
                                           colorFrame);
        break;
    case DEBUG_HAND_VIEW_DEPTH_AVG:
        debugVisualizer_.show_depth_matrix(depthUtility_.matDepthAvg(),
                                           colorFrame);
        break;
    case DEBUG_HAND_VIEW_VELOCITY:
        debugVisualizer_.show_velocity_matrix(depthUtility_.matDepthVel(),
                                              maxVelocity,
                                              colorFrame);
        break;
    case DEBUG_HAND_VIEW_FILTEREDVELOCITY:
        debugVisualizer_.show_velocity_matrix(depthUtility_.matDepthVelErode(),
                                              maxVelocity,
                                              colorFrame);
        break;
    case DEBUG_HAND_VIEW_UPDATE_SEGMENTATION:
        debugVisualizer_.show_norm_array<MaskType>(debugUpdateSegmentation_,
                                                   debugUpdateSegmentation_,
                                                   colorFrame);
        break;
    case DEBUG_HAND_VIEW_CREATE_SEGMENTATION:
        debugVisualizer_.show_norm_array<MaskType>(debugCreateSegmentation_,
                                                   debugCreateSegmentation_,
                                                   colorFrame);
        break;
    case DEBUG_HAND_VIEW_UPDATE_SEARCHED:
    case DEBUG_HAND_VIEW_CREATE_SEARCHED:
        // Searched views start from the plain depth image; the searched
        // masks are overlaid below.
        debugVisualizer_.show_depth_matrix(matDepth_,
                                           colorFrame);
        break;
    case DEBUG_HAND_VIEW_CREATE_SCORE:
        debugVisualizer_.show_norm_array<float>(debugCreateScore_,
                                                debugCreateSegmentation_,
                                                colorFrame);
        break;
    case DEBUG_HAND_VIEW_UPDATE_SCORE:
        debugVisualizer_.show_norm_array<float>(debugUpdateScore_,
                                                debugUpdateSegmentation_,
                                                colorFrame);
        break;
    case DEBUG_HAND_VIEW_HANDWINDOW:
        debugVisualizer_.show_depth_matrix(matDepthWindow_,
                                           colorFrame);
        break;
    case DEBUG_HAND_VIEW_TEST_PASS_MAP:
        debugVisualizer_.show_norm_array<MaskType>(debugCreateTestPassMap_,
                                                   debugCreateTestPassMap_,
                                                   colorFrame);
        break;
    }
    // Mask overlays are skipped on views where they would obscure the
    // visualization (score, window, filled/averaged depth, test map).
    if (view != DEBUG_HAND_VIEW_HANDWINDOW &&
        view != DEBUG_HAND_VIEW_CREATE_SCORE &&
        view != DEBUG_HAND_VIEW_UPDATE_SCORE &&
        view != DEBUG_HAND_VIEW_DEPTH_MOD &&
        view != DEBUG_HAND_VIEW_DEPTH_AVG &&
        view != DEBUG_HAND_VIEW_TEST_PASS_MAP)
    {
        if (view == DEBUG_HAND_VIEW_CREATE_SEARCHED)
        {
            debugVisualizer_.overlay_mask(createForegroundSearched_, colorFrame, searchedColor, pixel_type::searched);
            debugVisualizer_.overlay_mask(createForegroundSearched_, colorFrame, searchedColor2, pixel_type::searched_from_out_of_range);
        }
        else if (view == DEBUG_HAND_VIEW_UPDATE_SEARCHED)
        {
            debugVisualizer_.overlay_mask(updateForegroundSearched_, colorFrame, searchedColor, pixel_type::searched);
            debugVisualizer_.overlay_mask(updateForegroundSearched_, colorFrame, searchedColor2, pixel_type::searched_from_out_of_range);
        }
        debugVisualizer_.overlay_mask(matVelocitySignal_, colorFrame, foregroundColor, pixel_type::foreground);
    }
    if (debugimagestream_->use_mouse_probe())
    {
        overlay_circle(colorFrame);
    }
    debugVisualizer_.overlay_crosshairs(pointProcessor_.get_trackedPoints(), colorFrame);
}
}}
|
package time
// Copyright (c) 2018, Arm Limited and affiliates.
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import (
"crypto/tls"
"encoding/json"
"errors"
"fmt"
"net"
"net/http"
"sync"
"syscall"
"time"
"github.com/armPelionEdge/maestro/debugging"
"github.com/armPelionEdge/maestro/log"
)
// Simple sub package to handle getting the time from a server, using a
// a very basic /time GET.
const (
	// defaultCheckTimeInterval is how often the time is re-checked when
	// the config does not supply a usable interval.
	defaultCheckTimeInterval = time.Duration(time.Hour * 24)
	// initialBackoff is the first retry delay after a failed time fetch.
	initialBackoff = time.Duration(time.Second * 10)
	// maxBackoff caps the exponential retry backoff.
	maxBackoff = time.Duration(5) * time.Minute
	// a sanity check value. A time value below this is crazy.
	// recentTime is in milliseconds since the Unix epoch
	// (around Jan 1, 2018 — TODO confirm the exact intended cutoff).
	recentTime = int64(1514786400000)
	// SetTimeOk means the time was set correctly
	SetTimeOk = 1
	// TimedOut means there was no response from server
	TimedOut = 2
	// BadResponse means the response from server was not formatted correctly
	BadResponse = 3
	// InsaneResponse means the server provided a time value which is crazy
	InsaneResponse = 4
	// SycallFailed means the syscall failed to work to set the time.
	// NOTE(review): the name has a typo ("Sycall" vs "Syscall") but it is
	// an exported identifier; renaming it would break callers.
	SycallFailed = 5
)
// ClientConfig configures the time-fetching client.
//
// /time is tricky: if the system time is not sane, SSL certificate
// validation can break. So even when using SSL, the time value may need
// to be fetched with validation disabled. Sanity checks are applied to
// the returned value before it is used (see recentTime).
type ClientConfig struct {
	// // The RootCA option should be a PEM encoded root ca chain
	// // Use this if the server's TLS certificate is not signed
	// // by a certificate authority in the default list. If the
	// // server is signed by a certificate authority in the default
	// // list it can be omitted.
	// RootCA []byte // will be converted to byte array
	// RootCAString string `yaml:"root_ca"`
	// The ServerName is also only required if the root ca chain
	// is not in the default list. This option should be omitted
	// if RootCA is not specified. It should match the common name
	// of the server's certificate.
	// ServerName string `yaml:"server_name"`
	// NoValidate can be used in place of the RootCA and ServerName
	// for servers that are not signed by a well known certificate
	// authority. It will skip the authentication for the server. It
	// is not recommended outside of a test environment.
	NoValidate bool `yaml:"no_validate"`
	// NoTLS turns off encryption entirely.
	// It is only for testing.
	NoTLS bool `yaml:"no_tls"`
	// Pretend runs normally but does not actually set the system time.
	Pretend bool `yaml:"pretend"`
	// This is the PEM encoded SSL client certificate. This is required
	// for all https based client connections. It provides the relay identity
	// to the server
	// ClientCertificate []byte
	// ClientCertificateString string `yaml:"client_cert"`
	// // This is the PEM encoded SSL client private key. This is required
	// // for all https based client connections.
	// ClientKey []byte
	// ClientKeyString string `yaml:"client_key"`
	// Host is the hostname or IP address of the time server.
	Host string `yaml:"host"`
	// Port is the port of the time server.
	Port int `yaml:"port"`
	// CheckTimeInterval is how often to re-check the time, in seconds.
	CheckTimeInterval int `yaml:"check_time_interval"`
	// If this flag is set, client library logging will be printed
	//EnableLogging bool
	// number of buffers to hold. Remember, grease lib also holds its own buffers, so this should be minimal
	// (optional)
	//NumBuffers uint32 `yaml:"num_buffers"`
	// MaxBuffers is the max number of the said buffers
	// MaxBuffers uint32 `yaml:"max_buffers"`
	// // BufferSize is the size of each of these buffers in bytes
	// //BufferSize uint32 `yaml:"buffer_size"`
	// // SendSizeThreshold is the amount of bytes being held before the
	// // worker will start sending
	// SendSizeThreshold uint32 `yaml:"send_size_threshold"`
	// // SendTimeThreshold is the amount of time in milliseconds before the worker
	// // will start sending logs
	// SendTimeThreshold uint32 `yaml:"send_time_threshold"`
}
// TimeClient periodically fetches the time from a server and sets the
// system clock. Use NewClient to construct, Run/Stop to control, and
// StatusChannel to observe outcomes (SetTimeOk, TimedOut, ...).
type TimeClient struct {
	client    *http.Client
	tlsconfig tls.Config
	host      string
	port      int
	// url is the fully formed endpoint URL, built in Reconfigure.
	url           string
	statusChannel chan int
	// used to shutdown the client only
	stopChannel chan struct{}
	// running reports whether the worker goroutine is active; guarded
	// by locker.
	running           bool
	locker            sync.Mutex
	checkTimeInterval time.Duration
	pretend           bool
	// if true, the sender backs off "backoff" time
	backingOff bool
	backoff    time.Duration
}
// ClientError wraps a non-200 HTTP response from the time server as an
// error value (see newClientError).
type ClientError struct {
	StatusCode int
	Status     string
}
// Error implements the error interface, formatting the HTTP status
// code and status line.
func (err *ClientError) Error() string {
	msg := fmt.Sprintf("TIME Client Error: %d - %s", err.StatusCode, err.Status)
	return msg
}
// newClientError builds a ClientError from an HTTP response's status
// code and status line.
func newClientError(resp *http.Response) (ret *ClientError) {
	ret = &ClientError{
		StatusCode: resp.StatusCode,
		Status:     resp.Status,
	}
	return
}
// StatusChannel returns the status channel which can be used to know if
// time is set. If nothing reads the channel, the time will be set
// anyway, and a simple log message is printed out. ok reports whether
// the worker is currently running.
func (client *TimeClient) StatusChannel() (ok bool, status chan int) {
	client.locker.Lock()
	defer client.locker.Unlock()
	return client.running, client.statusChannel
}
// Run starts the client's background worker goroutine. Starting twice
// is safe: worker() itself refuses to run if one is already active.
func (client *TimeClient) Run() {
	go client.worker()
}
// Stop the current client's worker by closing its stop channel.
// NOTE(review): calling Stop twice while the worker is still marked
// running would close stopChannel twice and panic — confirm callers
// only call Stop once per Run. Also note stopChannel must have been
// initialized (see NewClient); close(nil) panics.
func (client *TimeClient) Stop() {
	client.locker.Lock()
	if client.running {
		close(client.stopChannel)
	}
	client.locker.Unlock()
}
// NewClient creates a new TimeClient and validates the config.
// ok is true (and err nil) when the config was accepted; ret is
// non-nil even on error, preserving the original contract.
func NewClient(config *ClientConfig) (ok bool, ret *TimeClient, err error) {
	ret = new(TimeClient)
	ret.statusChannel = make(chan int)
	// Initialize the stop channel here: Stop() closes it, and closing
	// a nil channel panics. The original never initialized it anywhere,
	// so Stop() on a running client would panic.
	ret.stopChannel = make(chan struct{})
	err = ret.Reconfigure(config)
	if err == nil {
		ok = true
	}
	return
}
// Reconfigure allows you to reconfigure the client. It rebuilds the
// HTTP client and endpoint URL from config; an error is returned when
// no Host is specified.
func (client *TimeClient) Reconfigure(config *ClientConfig) (err error) {
	client.pretend = config.Pretend
	client.host = config.Host
	client.port = config.Port
	client.checkTimeInterval = time.Duration(config.CheckTimeInterval) * time.Second
	// Floor the interval at 5 seconds. The original compared against the
	// untyped constant 5 — i.e. five *nanoseconds* — which only caught
	// non-positive configured values; the intent is clearly a seconds
	// floor.
	if client.checkTimeInterval < 5*time.Second {
		client.checkTimeInterval = defaultCheckTimeInterval
	}
	// TLS and non-TLS modes use identical transport tuning; build once.
	client.client = newTimeHTTPClient()
	if len(config.Host) == 0 {
		return errors.New("No Host field specified")
	}
	if !config.NoTLS {
		client.url = "https://" + config.Host + "/api/time"
	} else {
		// NOTE(review): the non-TLS URL carried no "/api/time" path in
		// the original; preserved as-is — confirm whether that is
		// intended (NoTLS is documented as test-only).
		client.url = "http://" + config.Host
	}
	return
}

// newTimeHTTPClient builds the http.Client used for time requests.
// Timeouts are deliberately generous: the /time endpoint may be hit
// while system time is still insane, which can slow TLS handshakes.
func newTimeHTTPClient() *http.Client {
	return &http.Client{
		Timeout: 35 * time.Second,
		Transport: &http.Transport{
			Dial: (&net.Dialer{
				Timeout:   30 * time.Second,
				KeepAlive: 30 * time.Second,
			}).Dial,
			MaxIdleConnsPerHost:   100,
			TLSHandshakeTimeout:   10 * time.Second,
			ResponseHeaderTimeout: 10 * time.Second,
			ExpectContinueTimeout: 1 * time.Second,
		},
	}
}
// // this is actually from the golang source - but is really knew - 1.10 only:
// // TimeToTimespec converts t into a Timespec.
// // On some 32-bit systems the range of valid Timespec values are smaller
// // than that of time.Time values. So if t is out of the valid range of
// // Timespec, it returns a zero Timespec and EINVAL.
// func TimeToTimespec(t time.Time) (Timespec, error) {
// sec := t.Unix()
// nsec := int64(t.Nanosecond())
// ts := setTimespec(sec, nsec)
// // Currently all targets have either int32 or int64 for Timespec.Sec.
// // If there were a new target with floating point type for it, we have
// // to consider the rounding error.
// if int64(ts.Sec) != sec {
// return Timespec{}, EINVAL
// }
// return ts, nil
// }
// timeResponse is the JSON payload returned by the time endpoint.
type timeResponse struct {
	// Time is in milliseconds since the Unix epoch (compared against
	// recentTime and used for the skew computation in worker()).
	Time int64 `json:"time"`
}
// getTime performs a single GET against the configured time endpoint.
//
// On failure it returns a non-nil err plus one of the errcode
// constants (TimedOut, BadResponse); on success it returns the decoded
// timeResponse.
//
// Fixes vs. original: the response body is now closed on *every* path
// (the original deferred Close only inside the error branch, leaking
// the body on each successful request), and transport errors from
// Do() now report errcode TimedOut instead of the undefined value 0.
func (client *TimeClient) getTime() (err error, errcode int, ret *timeResponse) {
	var req *http.Request
	var resp *http.Response
	debugging.DEBUG_OUT("TIME GET %s >>>\n", client.url)
	req, err = http.NewRequest("GET", client.url, nil)
	if err != nil {
		log.MaestroErrorf("Error on GET request: %s", err.Error())
		debugging.DEBUG_OUT("TIME ERROR: %s\n", err.Error())
		err = errors.New("Failed to create request")
		errcode = BadResponse
		return
	}
	resp, err = client.client.Do(req)
	// Always release the body when a response was returned.
	if resp != nil {
		defer resp.Body.Close()
	}
	debugging.DEBUG_OUT("TIME --> response +%v\n", resp)
	if err != nil {
		// Transport-level failure (timeout, DNS, connection refused...).
		errcode = TimedOut
		return
	}
	if resp == nil {
		err = errors.New("No response")
		errcode = TimedOut
		return
	}
	if resp.StatusCode != 200 {
		debugging.DEBUG_OUT("TIME bad response - creating error object\n")
		err = newClientError(resp)
		errcode = BadResponse
		return
	}
	ret = new(timeResponse)
	dec := json.NewDecoder(resp.Body)
	if dec != nil {
		err = dec.Decode(ret)
		if err != nil {
			err = errors.New("Bad response")
			errcode = BadResponse
			ret = nil
		}
	} else {
		err = errors.New("Failed to create decoder")
		errcode = BadResponse
	}
	return
}
// sendToStatusChannel performs a non-blocking send of val to the
// status channel. If no reader is waiting, the value is dropped with a
// warning so the worker is never stalled by an inattentive consumer.
func (client *TimeClient) sendToStatusChannel(val int) {
	select {
	case client.statusChannel <- val:
	default:
		log.MaestroWarnf("time status channel is blocking.")
	}
}
// periodically asks for the time
func (client *TimeClient) worker() {
client.locker.Lock()
if client.running {
client.locker.Unlock()
return
}
client.running = true
client.locker.Unlock()
timeout := time.Duration(1)
for {
select {
case <-client.stopChannel:
break
case <-time.After(timeout):
// run the client
err, errcode, timeresp := client.getTime()
timeout = client.checkTimeInterval
if err != nil {
if client.backingOff {
client.backoff = client.backoff * time.Duration(2)
} else {
client.backoff = initialBackoff
}
if client.backoff > maxBackoff {
client.backoff = maxBackoff
}
client.backingOff = true
timeout = client.backoff
// analyze errors and send to status channel
client.sendToStatusChannel(errcode)
} else {
client.backingOff = false
client.backoff = 0
client.locker.Lock()
timeout = client.checkTimeInterval
client.locker.Unlock()
// set the time
if timeresp.Time > recentTime {
timespec := timeToTimeval(timeresp.Time)
now := time.Now().UnixNano() / 1000000
log.MaestroInfof("Time: time being adjusted. Skew is %d ms", now-timeresp.Time)
if client.pretend {
log.MaestroInfof("Time: time would be set to %d s %d us - but prentending only.", timespec.Sec, timespec.Usec)
} else {
errno := syscall.Settimeofday(×pec)
if errno != nil {
log.MaestroErrorf("Time: settimeofday failed: %s", errno.Error())
client.sendToStatusChannel(SycallFailed)
} else {
log.MaestroSuccessf("Time: time of day updated.")
client.sendToStatusChannel(SetTimeOk)
}
}
} else {
log.MaestroErrorf("Time server reported INSANE time value (%ld) - ignoring.", timeresp.Time)
client.sendToStatusChannel(InsaneResponse)
}
// send to status channel
}
}
}
client.locker.Lock()
client.running = false
client.locker.Unlock()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.