text stringlengths 1 1.05M |
|---|
#!/bin/bash

# Self-update guard: fetch the latest copy of this script from the repo
# and abort (with instructions) if a newer version exists, so users never
# run a stale updater. Also refuses to run as root, because later steps
# chown files to the invoking user.
SCRIPT_VERSION="8"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh'

YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m'

TMP_FILE=$(mktemp -p "" "rmmupdate_XXXXXXXXXX")
curl -s -L "${SCRIPT_URL}" > "${TMP_FILE}"
NEW_VER=$(grep "^SCRIPT_VERSION" "$TMP_FILE" | awk -F'[="]' '{print $3}')

# Compare numerically. The previous lexicographic test ('\<') silently
# breaks once the remote version reaches two digits: "10" < "8" as
# strings, so users on v8 would never be told v10 exists.
# The -n guard skips the check when the download failed (empty NEW_VER).
if [ -n "${NEW_VER}" ] && [ "${SCRIPT_VERSION}" -lt "${NEW_VER}" ]; then
  printf >&2 "${YELLOW}A newer version of this update script is available.${NC}\n"
  printf >&2 "${YELLOW}Please download the latest version from ${GREEN}${SCRIPT_URL}${YELLOW} and re-run.${NC}\n"
  rm -f "$TMP_FILE"
  exit 1
fi
rm -f "$TMP_FILE"

# Running as root would leave files owned by root and break the services.
if [ "$EUID" -eq 0 ]; then
  echo -ne "\033[0;31mDo NOT run this script as root. Exiting.\e[0m\n"
  exit 1
fi
# added new celery queue 9-7-2020
# First-run installation of the dedicated celery queue ("wupdate") used
# for Windows-update jobs. Skipped entirely once the unit file exists.
if [ ! -f /etc/systemd/system/celery-winupdate.service ]; then
  # Write the systemd unit straight through tee. ${USER} expands now (the
  # service runs as the invoking user); the escaped \${...} celery
  # variables stay literal and are resolved by systemd from the
  # EnvironmentFile at service start.
  sudo tee /etc/systemd/system/celery-winupdate.service > /dev/null << EOF
[Unit]
Description=Celery WinUpdate Service
After=network.target
After=redis-server.service
[Service]
Type=forking
User=${USER}
Group=${USER}
EnvironmentFile=/etc/conf.d/celery-winupdate.conf
WorkingDirectory=/rmm/api/tacticalrmm
ExecStart=/bin/sh -c '\${CELERY_BIN} multi start \${CELERYD_NODES} -A \${CELERY_APP} --pidfile=\${CELERYD_PID_FILE} --logfile=\${CELERYD_LOG_FILE} --loglevel=\${CELERYD_LOG_LEVEL} -Q wupdate \${CELERYD_OPTS}'
ExecStop=/bin/sh -c '\${CELERY_BIN} multi stopwait \${CELERYD_NODES} --pidfile=\${CELERYD_PID_FILE}'
ExecReload=/bin/sh -c '\${CELERY_BIN} multi restart \${CELERYD_NODES} -A \${CELERY_APP} --pidfile=\${CELERYD_PID_FILE} --logfile=\${CELERYD_LOG_FILE} --loglevel=\${CELERYD_LOG_LEVEL} -Q wupdate \${CELERYD_OPTS}'
Restart=always
RestartSec=10s
[Install]
WantedBy=multi-user.target
EOF

  # Environment file consumed by the unit above. Quoted delimiter: the
  # content is fully static, so the written file is byte-identical.
  sudo tee /etc/conf.d/celery-winupdate.conf > /dev/null << 'EOF'
CELERYD_NODES="w2"
CELERY_BIN="/rmm/api/env/bin/celery"
CELERY_APP="tacticalrmm"
CELERYD_MULTI="multi"
CELERYD_OPTS="--time-limit=4000 --autoscale=40,1"
CELERYD_PID_FILE="/rmm/api/tacticalrmm/%n.pid"
CELERYD_LOG_FILE="/var/log/celery/%n%I.log"
CELERYD_LOG_LEVEL="ERROR"
EOF

  sudo systemctl daemon-reload
  sudo systemctl enable celery-winupdate
fi
# Stop every application service so code and static files can be swapped
# out safely.
for i in celery celerybeat celery-winupdate rmm nginx
do
sudo systemctl stop ${i}
done
# Re-assert ownership before touching the tree (the script refuses to run
# as root above, so $USER is the app user).
sudo chown ${USER}:${USER} -R /rmm
sudo chown ${USER}:${USER} /var/log/celery
sudo chown ${USER}:${USER} -R /srv/salt/
sudo chown ${USER}:${USER} -R /etc/conf.d/
sudo chown ${USER}:www-data /srv/salt/scripts/userdefined
# NOTE(review): $GROUP is never assigned in this script, so these expand
# to "user:" — chown then uses the user's login group. Confirm intended.
sudo chown -R $USER:$GROUP /home/${USER}/.npm
sudo chown -R $USER:$GROUP /home/${USER}/.config
sudo chown -R $USER:$GROUP /home/${USER}/.cache
sudo chmod 750 /srv/salt/scripts/userdefined
# Hard-reset the checkout to the tip of the develop branch, discarding any
# local changes and untracked files.
cd /rmm
git fetch origin develop
git reset --hard FETCH_HEAD
git clean -df
# Publish the salt modules and scripts shipped with the repo.
cp /rmm/_modules/* /srv/salt/_modules/
cp /rmm/scripts/* /srv/salt/scripts/
# Rebuild the Python virtualenv from scratch with pinned build tooling,
# then reinstall backend dependencies.
rm -rf /rmm/api/env
cd /rmm/api
python3 -m venv env
source /rmm/api/env/bin/activate
cd /rmm/api/tacticalrmm
pip install --no-cache-dir --upgrade pip
pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
pip install --no-cache-dir -r requirements.txt
# Django maintenance: project-defined pre/post hooks around migrations,
# token cleanup, salt-key repair and static-file collection.
python manage.py pre_update_tasks
python manage.py migrate
python manage.py delete_tokens
python manage.py fix_salt_key
python manage.py collectstatic --no-input
python manage.py post_update_tasks
deactivate
# Rebuild the frontend from a clean slate and deploy it to the webroot
# owned by nginx's www-data user.
rm -rf /rmm/web/node_modules
rm -rf /rmm/web/dist
cd /rmm/web
npm install
npm run build
sudo rm -rf /var/www/rmm/dist
sudo cp -pvr /rmm/web/dist /var/www/rmm/
sudo chown www-data:www-data -R /var/www/rmm/dist
# Bring the application services back up now that the new backend code
# and frontend build are in place.
for i in celery celerybeat celery-winupdate rmm nginx
do
sudo systemctl start ${i}
done

# Upgrade MeshCentral to the pinned release while its service is stopped,
# reinstalling its node_modules from scratch.
sudo systemctl stop meshcentral
sudo chown ${USER}:${USER} -R /meshcentral
cd /meshcentral
rm -rf node_modules/
npm install meshcentral@0.6.33
sudo systemctl start meshcentral
sleep 10

# Fixed: the final line previously ended in a stray '|' which turned the
# message into the left side of a broken pipeline.
printf >&2 "${GREEN}Update finished!${NC}\n"
# ---- MSLR-WEB30K data-preparation configuration ----
Data_path="./MSLR_30k_letor"      ## where to unzip the data
Data_folder="Fold1"               ## subfolder after unzip
Feature_number=136                ## number of features in the LETOR data
Prepro_fun=""                     ## optional preprocessing: "log" or "". Data is normalized to [-1, 1]; with "log" a log transform is applied first, then normalized.
prefix=""                         ## filename prefix, e.g. prefix=set1. for set1.train.txt
Data_zip_file=./MSLR-WEB30K.zip   ## path to the zipped data file

cd ../../

# Download MSLR-WEB30K dataset manually.
# See https://www.microsoft.com/en-us/research/project/mslr/ for the link.
# -p makes directory creation idempotent: re-running the script no longer
# fails/spams errors when the directories already exist.
mkdir -p "$Data_path"
mkdir -p "$Data_path/cleaned_data"        # data after cleaning
mkdir -p "$Data_path/normalized"          # data after normalization
mkdir -p "$Data_path/tmp_toy"             # toy set: 1% of the training data
mkdir -p "$Data_path/tmp_toy/data"
mkdir -p "$Data_path/tmp_toy/tmp"
mkdir -p "$Data_path/tmp_toy/tmp_data_toy"

unzip "$Data_zip_file" -d "$Data_path/"
# Locate the validation split. MSLR folds ship it as "vali.txt"; other
# LETOR datasets use "valid.txt"; if neither exists, carve 10% off the
# training set.
valid_name=$Data_path/$Data_folder/${prefix}vali.txt
if [ ! -f "$valid_name" ]
then
echo "no vali, try to find valid"
valid_name=$Data_path/$Data_folder/${prefix}valid.txt
if [ ! -f "$valid_name" ]
then
echo "no valid, we will split trian with default rate"
# Keep the original training file under *_orig, then split 10% into the
# validation file and the remainder back into train.txt.
mv $Data_path/$Data_folder/${prefix}train.txt $Data_path/$Data_folder/${prefix}train_orig.txt
python ./libsvm_tools/split_libsvm_data.py $Data_path/$Data_folder/${prefix}train_orig.txt ${valid_name} $Data_path/$Data_folder/${prefix}train.txt 0.1
fi
fi
# Clean each split. Last argument differs per the tool's CLI: 0 for
# train, 1 for valid/test — presumably whether to keep the train-side
# filtering; TODO confirm against clean_libsvm_file.py.
echo "begin cleaning"
python ./libsvm_tools/clean_libsvm_file.py $Data_path/$Data_folder/${prefix}train.txt $Data_path/cleaned_data/train.txt 0
python ./libsvm_tools/clean_libsvm_file.py ${valid_name} $Data_path/cleaned_data/valid.txt 1
python ./libsvm_tools/clean_libsvm_file.py $Data_path/$Data_folder/${prefix}test.txt $Data_path/cleaned_data/test.txt 1
# Normalize the data.
# Extract per-feature statistics (written to feature_scale.json) used to
# normalize all three splits consistently.
echo "extract statistics for normalization"
python ./libsvm_tools/extrac_feature_statistics.py $Data_path/cleaned_data/
# Normalize the data.
echo "begin normalization"
python ./libsvm_tools/normalize_feature.py $Data_path/cleaned_data/feature_scale.json $Data_path/cleaned_data/test.txt $Data_path/normalized/test.txt $Prepro_fun
python ./libsvm_tools/normalize_feature.py $Data_path/cleaned_data/feature_scale.json $Data_path/cleaned_data/train.txt $Data_path/normalized/train.txt $Prepro_fun
python ./libsvm_tools/normalize_feature.py $Data_path/cleaned_data/feature_scale.json $Data_path/cleaned_data/valid.txt $Data_path/normalized/valid.txt $Prepro_fun
# Sample 1% of the training data to build the initial ranker.
echo "sample 0.01 for intiial ranker"
python ./libsvm_tools/sample_libsvm_data.py $Data_path/normalized/train.txt $Data_path/normalized/sampled_train.txt 0.01
# Download SVMrank.
wget http://download.joachims.org/svm_rank/current/svm_rank_linux64.tar.gz
tar xvzf svm_rank_linux64.tar.gz
# Conduct initial ranking with SVMrank.
python ./libsvm_tools/initial_ranking_with_svm_rank.py \
./ \
$Data_path/normalized/sampled_train.txt \
$Data_path/normalized/valid.txt \
$Data_path/normalized/test.txt \
$Data_path/tmp/
./svm_rank_classify $Data_path/normalized/train.txt $Data_path/tmp/model.dat $Data_path/tmp/train.predict
# Prepare model input.
python ./libsvm_tools/prepare_exp_data_with_svmrank.py $Data_path/normalized/ $Data_path/tmp/ $Data_path/tmp_data/ $Feature_number
cp $Data_path/normalized/sampled_train.txt $Data_path/tmp_toy/data/train.txt
cp $Data_path/normalized/sampled_train.txt $Data_path/tmp_toy/data/valid.txt
cp $Data_path/normalized/sampled_train.txt $Data_path/tmp_toy/data/test.txt
./svm_rank_classify $Data_path/tmp_toy/data/train.txt $Data_path/tmp/model.dat $Data_path/tmp_toy/tmp/train.predict
./svm_rank_classify $Data_path/tmp_toy/data/valid.txt $Data_path/tmp/model.dat $Data_path/tmp_toy/tmp/valid.predict
./svm_rank_classify $Data_path/tmp_toy/data/test.txt $Data_path/tmp/model.dat $Data_path/tmp_toy/tmp/test.predict
python ./libsvm_tools/prepare_exp_data_with_svmrank.py $Data_path/tmp_toy/data/ $Data_path/tmp_toy/tmp/ $Data_path/tmp_toy/tmp_data_toy/ $Feature_number
export SETTING_ARGS="--data_dir=$Data_path/tmp_data/ --model_dir=$Data_path/tmp_model/ --output_dir=$Data_path/tmp_output/ --setting_file=./example/offline_setting/dla_exp_settings.json"
echo $SETTING_ARGS
# Run model
python main.py --max_train_iteration=1000 $SETTING_ARGS
Test model
python main.py --test_only=True $SETTING_ARGS |
#!/bin/bash

# Sync the eForm dashboard plugin (frontend sources, e2e tests, page
# objects, wdio config and backend plugin) from the plugin repository
# into the eform-angular-frontend working copy.
cd ~
pwd

plugin="Documents/workspace/microting/eform-angular-eform-dashboard-plugin"
frontend="Documents/workspace/microting/eform-angular-frontend"

# Remove the previously-synced copies from the frontend repo.
rm -fR "$frontend/eform-client/src/app/plugins/modules/eform-dashboard-pn"
rm -fR "$frontend/eform-client/e2e/Tests/eform-dashboard-settings"
rm -fR "$frontend/eform-client/e2e/Tests/eform-dashboard-general"
rm -fR "$frontend/eform-client/e2e/Page objects/eFormDashboard"
rm -fR "$frontend/eform-client/wdio-plugin-step2.conf.js"

# Copy fresh versions over from the plugin repository.
cp -av "$plugin/eform-client/src/app/plugins/modules/eform-dashboard-pn" "$frontend/eform-client/src/app/plugins/modules/eform-dashboard-pn"
cp -av "$plugin/eform-client/e2e/Tests/eform-dashboard-settings" "$frontend/eform-client/e2e/Tests/eform-dashboard-settings"
cp -av "$plugin/eform-client/e2e/Tests/eform-dashboard-general" "$frontend/eform-client/e2e/Tests/eform-dashboard-general"
cp -av "$plugin/eform-client/e2e/Page objects/eFormDashboard" "$frontend/eform-client/e2e/Page objects/eFormDashboard"
# The headless wdio config is intentionally installed under the
# non-headless name, matching the original script.
cp -av "$plugin/eform-client/wdio-headless-plugin-step2.conf.js" "$frontend/eform-client/wdio-plugin-step2.conf.js"

# Backend plugin.
rm -fR "$frontend/eFormAPI/Plugins/eFormDashboard.Pn"
cp -av "$plugin/eFormAPI/Plugins/eFormDashboard.Pn" "$frontend/eFormAPI/Plugins/eFormDashboard.Pn"
|
#!/bin/bash

# Submit TSP solver jobs to the Slurm queue. Only solvers listed in
# SOLVER are submitted; uncomment the full lists to enable more.
#SOLVER=("comp" "iter" "math" "heur" "tabu" "gene")
#GENETIC=("gen00" "gen01" "gen10" "gen11" "gen20" "gen21" "gen30" "gen31")
SOLVER=("comp")
path="/home/miliamikel/tsp/scripts"

for solver in "${SOLVER[@]}"; do
  case "$solver" in
    comp)
      echo "Compact model submitted"
      # These instances use the 32-core batch script...
      for m in 1 2 5 8; do
        sbatch "$path/${solver}32-tsp.slurm" -m "$m"
      done
      # ...and these the 64-core one.
      for m in 3 4 6 7; do
        sbatch "$path/${solver}64-tsp.slurm" -m "$m"
      done
      ;;
    iter)
      echo "Iterative model submitted"
      for m in 9 10 11; do
        sbatch "$path/${solver}-tsp.slurm" -m "$m"
      done
      ;;
    math)
      echo "Matheuristic model submitted"
      for m in 0 1; do
        sbatch "$path/${solver}-tsp.slurm" -m "$m"
      done
      ;;
    heur)
      for m in 0 1 2 3; do
        sbatch "$path/${solver}-tsp.slurm" -m "$m"
      done
      ;;
    tabu)
      sbatch "$path/${solver}-tsp.slurm"
      ;;
    gene)
      # GENETIC must be uncommented above for this branch to submit jobs.
      for genetic in "${GENETIC[@]}"; do
        sbatch "$path/${genetic}-tsp.slurm"
      done
      ;;
    *)
      echo "Solver not found"
      ;;
  esac
done
|
package br.com.hkp.whatsappwebfix.gui;
import br.com.hkp.whatsappwebfix.Updater;
import br.com.hkp.whatsappwebfix.WhatsAppEditor;
import static br.com.hkp.whatsappwebfix.global.Global.FILENAME_DIFF;
import static br.com.hkp.whatsappwebfix.global.Global.PASTA_BASE;
import static br.com.hkp.whatsappwebfix.global.Global.TARGET_ABSOLUTE_PATHNAME;
import br.com.hkp.whatsappwebfix.util.FileList;
import br.com.hkp.whatsappwebfix.util.NodeList;
import java.awt.BorderLayout;
import java.awt.Color;
import javax.swing.JFrame;
import javax.swing.JPanel;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridLayout;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URL;
import java.util.LinkedList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JProgressBar;
import javax.swing.JScrollPane;
/******************************************************************************
* A janela de interface da aplicacao FixGui.
*
* @author "<NAME>"
* @since 10 de janeiro de 2021 v1.0
* @version v1.0
*****************************************************************************/
public final class SelectFrame extends JFrame
{
    private final JPanel contentPane;
    private final JPanel panel;            // holds one checkbox per listed file
    private final JButton fixButton;
    private final JButton exitButton;
    private final JButton updateButton;
    private final JProgressBar jProgressBar;
    private final KeyListen keyListen;     // keyboard handler shared by all widgets
    private final FileList fileList;       // model backing the checkbox list
    // Flags guarding against concurrent fix/update runs and against exiting
    // mid-run. NOTE(review): assigned inside the handler constructors, and
    // naming violates the lowerCamelCase field convention — confirm before
    // renaming, they are referenced throughout the inner classes.
    private AtomicBoolean FixThreadRunning;
    private AtomicBoolean UpdateThreadRunning;

    /*[00]---------------------------------------------------------------------
    -------------------------------------------------------------------------*/
    /**
     * Builds the window, listing every fixable HTML file found in "dir".
     *
     * @param dir directory expected to contain the files to be fixed
     *
     * @throws IOException if no fixable file is found (or on IO error)
     */
    public SelectFrame(final File dir) throws IOException
    {
        super("Corrige os Selecionados");
        /*
        All HTML files in "dir" except corrected copies previously produced
        by this application (HtmlFilter excludes the *.fix suffix).
        */
        File[] listFiles = dir.listFiles(new HtmlFilter());
        if (listFiles.length == 0)
            throw new IOException
            (
                "Nenhum arquivo que possa ser corrigido foi encontrado!"
            );
        /*
        Backing model for the file list shown by this frame.
        */
        fileList = new FileList(this);
        /*
        Keyboard listener so the GUI answers keyboard commands; it receives
        "fileList" so it can select/deselect the listed files.
        */
        keyListen = new KeyListen(fileList);
        /*
        Identifies which listed files already have corrected copies on disk.
        */
        FixedFilter fixedFilter = new FixedFilter();
        setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        setPreferredSize(new Dimension(450, 400));
        contentPane = new JPanel();
        setContentPane(contentPane);
        JScrollPane scroll = new JScrollPane();
        panel = new JPanel();
        panel.setLayout(new GridLayout(0, 1, 0, 0));
        scroll.setViewportView(panel);
        scroll.setBorder(BorderFactory.createEtchedBorder());
        JPanel panelButtons = new JPanel(new BorderLayout());
        panelButtons.setBorder(BorderFactory.createEtchedBorder());
        /*
        Font for the button labels.
        */
        Font buttonFont = new Font(Font.MONOSPACED, Font.BOLD, 12);
        /*---------------------------------------------------------------------
        "Sair" (Exit) button
        ---------------------------------------------------------------------*/
        exitButton = new JButton("Sair ");
        exitButton.setMnemonic('s');
        exitButton.setFont(buttonFont);
        exitButton.addActionListener(new ExitButtonHandler());
        exitButton.setIcon(new ImageIcon(getClass().getResource("exit.png")));
        exitButton.addKeyListener(keyListen);
        /*---------------------------------------------------------------------
        "Pau na Maquina" (Go!) button — starts fixing the selected files
        ---------------------------------------------------------------------*/
        fixButton = new JButton("Pau na M\u00e1quina!");
        fixButton.setMnemonic('p');
        fixButton.setFont(buttonFont);
        fixButton.addActionListener(new FixButtonHandler());
        fixButton.setIcon(new ImageIcon(getClass().getResource("gear.png")));
        fixButton.addKeyListener(keyListen);
        /*---------------------------------------------------------------------
        "Atualizar" (Update) button — refreshes the emoji PNG library
        ---------------------------------------------------------------------*/
        updateButton = new JButton("Atualizar");
        updateButton.setMnemonic('a');
        updateButton.setFont(buttonFont);
        updateButton.addActionListener
        (
            new UpdateButtonHandler
            (
                new File(TARGET_ABSOLUTE_PATHNAME + '/' + PASTA_BASE)
            )
        );
        updateButton.setIcon
        (
            new ImageIcon(getClass().getResource("update.png"))
        );
        updateButton.addKeyListener(keyListen);
        /*--------------------------------------------------------------------*/
        panelButtons.add(exitButton, BorderLayout.WEST);
        panelButtons.add(fixButton, BorderLayout.CENTER);
        panelButtons.add(updateButton, BorderLayout.EAST);
        jProgressBar = new JProgressBar();
        jProgressBar.setBorder(BorderFactory.createEtchedBorder());
        jProgressBar.setVisible(false);
        contentPane.setLayout(new BorderLayout());
        contentPane.add(scroll, BorderLayout.CENTER);
        contentPane.add(panelButtons, BorderLayout.NORTH);
        contentPane.add(jProgressBar, BorderLayout.SOUTH);
        pack();
        /*
        Window icon; failure is non-fatal and only logged.
        */
        try
        {
            URL url = getClass().getResource("favicon.png");
            setIconImage(Toolkit.getDefaultToolkit().getImage(url));
        }
        catch (Exception e)
        {
            System.err.println(e);
        }
        setLocationRelativeTo(null); // open the window centered on screen
        addWindowListener(new CloseWindowHandler());
        /*
        Mark which listed files have already been fixed.
        */
        for (File file: listFiles)
        {
            fileList.addNode
            (
                new NodeList(fixedFilter.accept(null, file.getName()), file)
            );
        }//for
    }//constructor

    /*[01]---------------------------------------------------------------------
    -------------------------------------------------------------------------*/
    /**
     * Adds a checkbox to this window and attaches the keyboard listener to it.
     *
     * @param jc the checkbox.
     */
    public void addCheckBox(JCheckBox jc)
    {
        panel.add(jc);
        jc.addKeyListener(keyListen);
    }//addCheckBox()

    /*[02]---------------------------------------------------------------------
    -------------------------------------------------------------------------*/
    /**
     * Updates the progress bar with the current processing status.
     *
     * @param value number of files already processed
     */
    public void setProgressBarValue(final int value)
    {
        jProgressBar.setValue(value);
        jProgressBar.setString(String.valueOf(value));
    }//setProgressBarValue()

    /*[03]---------------------------------------------------------------------
    -------------------------------------------------------------------------*/
    /**
     * Makes the progress bar visible and configures its range.
     *
     * @param maximumValue maximum value of the bar — exactly the number of
     * files to be processed.
     */
    public void setProgressBarVisible(final int maximumValue)
    {
        jProgressBar.setStringPainted(true);
        jProgressBar.setForeground(Color.BLACK);
        jProgressBar.setValue(0);
        jProgressBar.setMaximum(maximumValue);
        jProgressBar.setVisible(true);
    }//setProgressBarVisible()

    /*[04]---------------------------------------------------------------------
    Fixes the files whose checkboxes are selected. Runs on the worker thread
    started by FixButtonHandler, never on the EDT.
    -------------------------------------------------------------------------*/
    private void Fix() throws IOException
    {
        /*
        Only the files whose checkbox is selected.
        */
        LinkedList<NodeList> listOfFilesToFixed = fileList.getList();
        /*
        Configure the progress bar for this run.
        */
        setProgressBarVisible(listOfFilesToFixed.size());
        int count = 0;
        setProgressBarValue(0);
        /*
        Fix each file and mark its node as done.
        */
        for(NodeList node: listOfFilesToFixed)
        {
            WhatsAppEditor w = new WhatsAppEditor(node.getFile());
            w.createNewFile();
            node.setFixed(true);
            setProgressBarValue(++count);
        }
        java.awt.Toolkit.getDefaultToolkit().beep(); // beep when finished
    }//Fix()

    /*[05]---------------------------------------------------------------------
    Exits the program only if no worker thread is running.
    -------------------------------------------------------------------------*/
    private void exit()
    {
        if (FixThreadRunning.get() || UpdateThreadRunning.get()) return;
        System.exit(0);
    }//exit()

    /*=========================================================================
     * Inner class: window-close handler.
    ==========================================================================*/
    /*-------------------------------------------------------------------------
    Exits the program when the main window is closed.
    -------------------------------------------------------------------------*/
    private final class CloseWindowHandler extends WindowAdapter
    {
        /*[01]------------------------------------------------------------------
        ----------------------------------------------------------------------*/
        /**
         * Exits the program if no worker thread is running.
         *
         * @param e n/a
         */
        @Override
        public void windowClosing(WindowEvent e)
        {
            exit();
        }//windowClosing()
    }//class CloseWindowHandler

    /*=========================================================================
     * Inner classes: button handlers.
    ==========================================================================*/
    /*------------------------------------------------------------------------
    Exits the program.
    ------------------------------------------------------------------------*/
    private final class ExitButtonHandler implements ActionListener
    {
        /*[01]------------------------------------------------------------------
        ----------------------------------------------------------------------*/
        /**
         * Exits the program if no worker thread is running.
         *
         * @param e n/a
         */
        @Override
        public void actionPerformed(ActionEvent e)
        {
            exit();
        }
    }//class ExitButtonHandler

    /*------------------------------------------------------------------------
    Fixes the selected files on a single-thread executor so the GUI stays
    responsive and at most one fix run is active at a time.
    ------------------------------------------------------------------------*/
    private final class FixButtonHandler implements ActionListener
    {
        private final ExecutorService executorService;

        /*[00]------------------------------------------------------------------
        ----------------------------------------------------------------------*/
        public FixButtonHandler()
        {
            // Pool of size 1: serializes fix runs.
            executorService = Executors.newFixedThreadPool(1);
            FixThreadRunning = new AtomicBoolean(false);
        }//constructor

        /*[01]-----------------------------------------------------------------
        ----------------------------------------------------------------------*/
        /**
         * Fixes the files that are selected.
         *
         * NOTE(review): the get()-then-set() pair below is not atomic; on the
         * single-threaded EDT this is benign, but compareAndSet(false, true)
         * would express the intent exactly.
         *
         * @param e click event of the "Pau na Maquina" button
         */
        @Override
        public void actionPerformed(ActionEvent e)
        {
            if (FixThreadRunning.get()) return;
            FixThreadRunning.set(true);
            executorService.execute
            (
                new Runnable()
                {
                    @Override
                    public void run()
                    {
                        try
                        {
                            Fix();
                        }
                        catch (IOException e)
                        {
                            // Error here is a project class, not java.lang.Error
                            Error.showErrorMsg(e, false);
                        }
                        finally
                        {
                            FixThreadRunning.set(false);
                        }
                    }//run()
                }//Runnable
            );
        }//actionPerformed()
    }//class FixButtonHandler

    /*------------------------------------------------------------------------
    Creates and runs a background thread that updates the emoji library.
    ------------------------------------------------------------------------*/
    private final class UpdateButtonHandler implements ActionListener
    {
        private final File pastaBase; // base folder the PNGs are downloaded to
        private final ExecutorService executorService;

        /*[00]-----------------------------------------------------------------
        ---------------------------------------------------------------------*/
        public UpdateButtonHandler(final File dir)
        {
            pastaBase = dir;
            /*
            Single-thread executor: prevents several simultaneous downloads
            from being launched.
            */
            executorService = Executors.newFixedThreadPool(1);
            UpdateThreadRunning = new AtomicBoolean(false);
        }//constructor

        /*[01]-----------------------------------------------------------------
        ---------------------------------------------------------------------*/
        /**
         * Updates the PNGs with a separate background thread.
         *
         * @param e click event of the "Atualizar" button.
         */
        @Override
        public void actionPerformed(ActionEvent e)
        {
            if (UpdateThreadRunning.get()) return;
            UpdateThreadRunning.set(true);
            executorService.execute
            (
                new Runnable()
                {
                    @Override
                    public void run()
                    {
                        try
                        {
                            Updater updater =
                                new Updater
                                (
                                    pastaBase,
                                    JFrame.DO_NOTHING_ON_CLOSE
                                );
                            updater.downloadPngs();
                        }
                        catch (IOException e)
                        {
                            Error.showErrorMsg(e, false);
                        }
                        finally
                        {
                            UpdateThreadRunning.set(false);
                        }
                    }//run()
                }//Runnable
            );
        }//actionPerformed()
    }//class UpdateButtonHandler

    /*=========================================================================
    Inner classes: file filters.
    =========================================================================*/
    // Suffix appended to corrected files, e.g. "name<diff>.html".
    private final String fixed = FILENAME_DIFF + ".html";

    /*-------------------------------------------------------------------------
    Returns true for files that already have a corrected copy on disk.
    -------------------------------------------------------------------------*/
    private final class FixedFilter implements FilenameFilter
    {
        private final String path = TARGET_ABSOLUTE_PATHNAME + '/';

        @Override
        public boolean accept(File dir, String filename)
        {
            // "dir" is ignored; existence is checked under the target path.
            return new File(path + filename.replace(".html", fixed)).exists();
        }//accept()
    }//class FixedFilter

    /*************************************************************************/
    /*-------------------------------------------------------------------------
    Returns true for every HTML file except those carrying the corrected
    suffix in their name.
    -------------------------------------------------------------------------*/
    private final class HtmlFilter implements FilenameFilter
    {
        @Override
        public boolean accept(File dir, String filename)
        {
            return (!filename.endsWith(fixed)) && (filename.endsWith(".html"));
        }//accept()
    }//class HtmlFilter
}//class SelectFrame
// DataModel.java
import java.util.ArrayList;
import java.util.List;
/**
 * Maintains a list of change listeners and notifies them on demand
 * (observer pattern).
 */
public class DataModel {

    // Registered observers, notified in registration order.
    private List<onChangeListener> listeners;

    public DataModel() {
        this.listeners = new ArrayList<>();
    }

    /** Registers a listener to be notified on future change events. */
    public void addListener(onChangeListener listener) {
        listeners.add(listener);
    }

    /** Unregisters a previously added listener (no-op if absent). */
    public void removeListener(onChangeListener listener) {
        listeners.remove(listener);
    }

    /**
     * Notifies every registered listener of a change.
     *
     * Iterates over a snapshot of the listener list so that a callback may
     * call addListener/removeListener (e.g. a listener removing itself)
     * without triggering a ConcurrentModificationException, which the
     * previous direct iteration would have thrown.
     */
    public void notifyListeners() {
        for (onChangeListener listener : new ArrayList<>(listeners)) {
            listener.onChange();
        }
    }
}
// Example.java
public class Example {
public static void main(String[] args) {
DataModel dataModel = new DataModel();
onChangeListener listener = new onChangeListener() {
@Override
public void onChange() {
System.out.println("Change event occurred");
}
};
dataModel.addListener(listener);
// Simulate a change event
dataModel.notifyListeners();
}
} |
package elasta.eventbus;
import elasta.core.promise.intfs.Promise;
import io.vertx.core.eventbus.*;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import lombok.Builder;
import lombok.Value;
import java.util.Objects;
/**
 * Promise-based wrapper around the Vert.x event bus: send/publish plus
 * request-reply variants that resolve with the reply {@link Message}.
 */
public interface SimpleEventBus {

    /** Point-to-point send (fire-and-forget). Returns this for chaining. */
    SimpleEventBus send(Params params);

    /** Point-to-point send; the promise resolves with the typed reply. */
    <T> Promise<Message<T>> sendAndReceive(Params params);

    /** Convenience variant of {@link #sendAndReceive} for JsonObject replies. */
    Promise<Message<JsonObject>> sendAndReceiveJsonObject(Params params);

    /** Convenience variant of {@link #sendAndReceive} for JsonArray replies. */
    Promise<Message<JsonArray>> sendAndReceiveJsonArray(Params params);

    /** Broadcast to every consumer registered on the address. */
    SimpleEventBus publish(Params params);

    /**
     * Immutable parameter object shared by all bus operations.
     * Lombok's @Builder generates a builder backed by the explicit
     * constructor below, so builder-built instances get the same
     * null-checks and the DeliveryOptions default.
     */
    @Value
    @Builder
    public final class Params {

        final String address;          // target event-bus address (required)
        final Object message;          // payload (required)
        final DeliveryOptions options; // never null after construction

        /**
         * @param address target event-bus address; must not be null
         * @param message payload; must not be null
         * @param options delivery options; null is replaced by defaults
         */
        public Params(String address, Object message, DeliveryOptions options) {
            Objects.requireNonNull(address);
            Objects.requireNonNull(message);
            this.address = address;
            this.message = message;
            this.options = (options == null) ? new DeliveryOptions() : options;
        }
    }
}
|
package com.io.routesapp.ui.places.placeInformation;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RatingBar;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.material.floatingactionbutton.FloatingActionButton;
import com.google.android.material.textfield.TextInputEditText;
import com.io.routesapp.MainActivity;
import com.io.routesapp.R;
import com.io.routesapp.ui.places.model.Place;
import com.io.routesapp.ui.places.model.PlaceReview;
import com.io.routesapp.ui.places.model.PlaceReviewAdapter;
import org.json.JSONException;
import java.util.ArrayList;
import java.util.Objects;
//Class displaying place details, location and reviews
// Displays a place's details, map location and user reviews.
public class PlaceInformationFragment extends Fragment {

    RecyclerView mRecyclerView;
    PlaceReviewAdapter placeReviewAdapter;
    RecyclerView.LayoutManager layoutManager;
    ArrayList<PlaceReview> reviewsList;   // never null after onCreate (see fallback)
    FloatingActionButton addFAB;          // reveals the review input field
    View reviewField;
    TextView firstComment;                // "be the first to comment" hint
    int id;                               // place id passed via fragment arguments
    Place place;                          // may be null if the load failed

    // Centers the map on the place and drops a marker. Guarded against a
    // null place: the data load in onCreate may have failed.
    private OnMapReadyCallback callback = new OnMapReadyCallback() {
        @Override
        public void onMapReady(GoogleMap googleMap) {
            if (place == null) {
                return; // load failed earlier; previously this was an NPE
            }
            LatLng coords = new LatLng(place.getLatitude(), place.getLongitude());
            googleMap.addMarker(new MarkerOptions().position(coords).title(place.getName()));
            googleMap.moveCamera(CameraUpdateFactory.newLatLngZoom(coords, 15));
        }
    };

    /**
     * Loads the place and its reviews via the shared HTTP client.
     * On failure, a toast is shown and the fields are left in a safe state
     * instead of null (the old code crashed later in onCreateView).
     */
    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        id = requireArguments().getInt("id");
        try {
            place = MainActivity.HTTPClient.getPlace(id);
            reviewsList = MainActivity.HTTPClient.getPlaceReviews(id);
        } catch (JSONException | InterruptedException e) {
            e.printStackTrace();
            Toast.makeText(getContext(), "Error while loading page...", Toast.LENGTH_SHORT).show();
        }
        // Fall back to an empty list so onCreateView never dereferences null.
        if (reviewsList == null) {
            reviewsList = new ArrayList<>();
        }
    }

    /** Builds the view: review list, add/send FABs and the input field. */
    public View onCreateView(@NonNull LayoutInflater inflater,
                             ViewGroup container, Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.fragment_place_information, container, false);
        mRecyclerView = root.findViewById(R.id.review_list);
        layoutManager = new LinearLayoutManager(getActivity());

        // Show the "first comment" hint only when there are no reviews yet.
        firstComment = root.findViewById(R.id.firstComment);
        firstComment.setVisibility(View.INVISIBLE);
        if (reviewsList.isEmpty()){
            firstComment.setVisibility(View.VISIBLE);
        }

        placeReviewAdapter = new PlaceReviewAdapter(reviewsList);
        mRecyclerView.setAdapter(placeReviewAdapter);

        // "+" FAB swaps itself for the review input field.
        addFAB = root.findViewById(R.id.add_fab);
        reviewField = root.findViewById(R.id.review_field);
        addFAB.setOnClickListener(v -> {
            reviewField.setVisibility(View.VISIBLE);
            addFAB.setVisibility(View.GONE);
        });

        // NOTE(review): the rating bar's value is never read — the review is
        // submitted without a rating. Confirm whether that is intended.
        RatingBar placeRatingBar = root.findViewById(R.id.place_rating_bar);
        final TextInputEditText reviewText = root.findViewById(R.id.review_text);
        final FloatingActionButton sendFAB = root.findViewById(R.id.send_fab);

        // Enable "send" once the review text is non-empty.
        reviewText.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }
            @Override
            public void afterTextChanged(Editable s) {
                if (!Objects.requireNonNull(reviewText.getText()).toString().isEmpty()){
                    sendFAB.setEnabled(true);
                }
            }
        });

        // Submit the review and restore the initial button state.
        sendFAB.setOnClickListener(v -> {
            MainActivity.HTTPClient.addPlaceReview(
                new PlaceReview(
                    String.valueOf(id),
                    MainActivity.getLoggedInUser().getUsername(),
                    Objects.requireNonNull(reviewText.getText()).toString()
                )
            );
            reviewField.setVisibility(View.GONE);
            firstComment.setVisibility(View.INVISIBLE);
            sendFAB.setVisibility(View.VISIBLE);
            Toast.makeText(getContext(), "Your review will be added!", Toast.LENGTH_SHORT).show();
        });

        return root;
    }

    /** Hooks the map callback once the child map fragment is available. */
    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        SupportMapFragment mapFragment =
            (SupportMapFragment) getChildFragmentManager().findFragmentById(R.id.map);
        if (mapFragment != null) {
            mapFragment.getMapAsync(callback);
        }
    }
}
|
<filename>src/listeners/guild/guildDelete.js
'use strict';
const { SimplicityEmbed, SimplicityListener } = require('@structures');
class GuildDeleteListener extends SimplicityListener {
constructor(client) {
super(client);
}
async on(client, guild) {
const owner = guild.owner;
this.sendPrivateMessage('GUILD_LEAVE_CHANNEL',
new SimplicityEmbed({ author: owner.user })
.addField('Guild Name', guild.name, true)
.addField('Guild ID', guild.id, true)
.addField('Member Count', guild.memberCount)
.setThumbnail(guild));
if (client.database) {
await client.database.guilds.remove(guild.id);
await client.database.joinLeaveGuild.model.create({
date_at: new Date(),
guild_id: guild.id,
type: 'LEAVE',
});
}
}
}
module.exports = GuildDeleteListener;
|
#!/bin/bash
# Fail on any command error and trace every command for CI logs.
set -o errexit
set -x
# Build defaults; refined below based on CI environment variables
# (KERNEL, DPDK, DPDK_SHARED, AFXDP, CC, M32, TESTSUITE, KERNEL_LIST).
CFLAGS_FOR_OVS="-g -O2"
SPARSE_FLAGS=""
EXTRA_OPTS="--enable-Werror"
TARGET="x86_64-native-linuxapp-gcc"
# Download and partially build a Linux kernel tree matching the requested
# major version ($1), either to compile the OVS out-of-tree datapath
# against it, or (with AFXDP set) to install kernel headers and libbpf.
function install_kernel()
{
    # Map the requested version to the kernel.org directory prefix.
    # NOTE(review): the regex dots are unescaped, so e.g. "50" also
    # matches ^5.* -- harmless for the version strings CI passes in.
    if [[ "$1" =~ ^5.* ]]; then
        PREFIX="v5.x"
    elif [[ "$1" =~ ^4.* ]]; then
        PREFIX="v4.x"
    elif [[ "$1" =~ ^3.* ]]; then
        PREFIX="v3.x"
    else
        PREFIX="v2.6/longterm/v2.6.32"
    fi
    base_url="https://cdn.kernel.org/pub/linux/kernel/${PREFIX}"
    # Download page with list of all available kernel versions.
    wget ${base_url}/
    # Uncompress in case server returned gzipped page.
    (file index* | grep ASCII) || (mv index* index.new.gz && gunzip index*)
    # Get version of the latest stable release: escape dots in "$1", then
    # pick the highest matching patch level from the index page.
    hi_ver=$(echo ${1} | sed 's/\./\\\./')
    lo_ver=$(cat ./index* | grep -P -o "${hi_ver}\.[0-9]+" | \
             sed 's/.*\..*\.\(.*\)/\1/' | sort -h | tail -1)
    version="${1}.${lo_ver}"
    rm -rf index* linux-*
    url="${base_url}/linux-${version}.tar.xz"
    # Download kernel sources. Try direct link on CDN failure.
    wget ${url} || wget ${url} || wget ${url/cdn/www}
    tar xvf linux-${version}.tar.xz > /dev/null
    pushd linux-${version}
    make allmodconfig
    # Cannot use CONFIG_KCOV: -fsanitize-coverage=trace-pc is not supported by compiler
    sed -i 's/CONFIG_KCOV=y/CONFIG_KCOV=n/' .config
    # stack validation depends on tools/objtool, but objtool does not compile on travis.
    # It is giving following error.
    # >>> GEN arch/x86/insn/inat-tables.c
    # >>> Semantic error at 40: Unknown imm opnd: AL
    # So for now disable stack-validation for the build.
    sed -i 's/CONFIG_STACK_VALIDATION=y/CONFIG_STACK_VALIDATION=n/' .config
    make oldconfig
    # Older kernels do not include openvswitch
    if [ -d "net/openvswitch" ]; then
        make net/openvswitch/
    else
        make net/bridge/
    fi
    if [ "$AFXDP" ]; then
        # AF_XDP path: install headers and libbpf system-wide instead of
        # building against the tree.
        sudo make headers_install INSTALL_HDR_PATH=/usr
        pushd tools/lib/bpf/
        # Bulding with gcc because there are some issues in make files
        # that breaks building libbpf with clang on Travis.
        CC=gcc sudo make install
        CC=gcc sudo make install_headers
        sudo ldconfig
        popd
        # The Linux kernel defines __always_inline in stddef.h (283d7573), and
        # sys/cdefs.h tries to re-define it. Older libc-dev package in xenial
        # doesn't have a fix for this issue. Applying it manually.
        sudo sed -i '/^# define __always_inline .*/i # undef __always_inline' \
            /usr/include/x86_64-linux-gnu/sys/cdefs.h || true
        EXTRA_OPTS="${EXTRA_OPTS} --enable-afxdp"
    else
        # Kernel-module path: point OVS configure at this source tree.
        EXTRA_OPTS="${EXTRA_OPTS} --with-linux=$(pwd)"
        echo "Installed kernel source in $(pwd)"
    fi
    popd
}
# Fetch and build DPDK.  $1 is either a release version ("19.11") or a git
# ref ("refs/*/..."); release builds are cached across CI runs via
# ${VERSION_FILE}.  Appends --with-dpdk to EXTRA_OPTS.
function install_dpdk()
{
    local DPDK_VER=$1
    local VERSION_FILE="dpdk-dir/travis-dpdk-cache-version"
    # "${DPDK_VER##refs/*/}" strips a leading refs/<kind>/ -- if that
    # changes the string, we were handed a git ref, not a release tarball.
    if [ "${DPDK_VER##refs/*/}" != "${DPDK_VER}" ]; then
        # Avoid using cache for git tree build.
        rm -rf dpdk-dir
        DPDK_GIT=${DPDK_GIT:-https://dpdk.org/git/dpdk}
        git clone --single-branch $DPDK_GIT dpdk-dir -b "${DPDK_VER##refs/*/}"
        pushd dpdk-dir
        git log -1 --oneline
    else
        # Release build: reuse the cached tree when the version matches.
        if [ -f "${VERSION_FILE}" ]; then
            VER=$(cat ${VERSION_FILE})
            if [ "${VER}" = "${DPDK_VER}" ]; then
                EXTRA_OPTS="${EXTRA_OPTS} --with-dpdk=$(pwd)/dpdk-dir/build"
                echo "Found cached DPDK ${VER} build in $(pwd)/dpdk-dir"
                return
            fi
        fi
        # No cache or version mismatch.
        rm -rf dpdk-dir
        wget https://fast.dpdk.org/rel/dpdk-$1.tar.xz
        tar xvf dpdk-$1.tar.xz > /dev/null
        DIR_NAME=$(tar -tf dpdk-$1.tar.xz | head -1 | cut -f1 -d"/")
        mv ${DIR_NAME} dpdk-dir
        pushd dpdk-dir
    fi
    make config CC=gcc T=$TARGET
    if [ "$DPDK_SHARED" ]; then
        sed -i '/CONFIG_RTE_BUILD_SHARED_LIB=n/s/=n/=y/' build/.config
        export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/$TARGET/lib
    fi
    # Disable building DPDK kernel modules. Not needed for OVS build or tests.
    sed -i '/CONFIG_RTE_EAL_IGB_UIO=y/s/=y/=n/' build/.config
    sed -i '/CONFIG_RTE_KNI_KMOD=y/s/=y/=n/' build/.config
    # Enable pdump support in DPDK.
    sed -i '/CONFIG_RTE_LIBRTE_PMD_PCAP=n/s/=n/=y/' build/.config
    sed -i '/CONFIG_RTE_LIBRTE_PDUMP=n/s/=n/=y/' build/.config
    make -j4 CC=gcc EXTRA_CFLAGS='-fPIC'
    EXTRA_OPTS="$EXTRA_OPTS --with-dpdk=$(pwd)/build"
    echo "Installed DPDK source in $(pwd)"
    popd
    # Record the built version for the cache check above.
    echo "${DPDK_VER}" > ${VERSION_FILE}
}
# Bootstrap and configure the OVS source tree.  Arguments are forwarded to
# ./configure.  On failure, dump config.log for CI debugging and abort.
function configure_ovs()
{
    ./boot.sh
    # "$@" (not $*): forwards each argument verbatim instead of re-joining
    # and re-splitting them on IFS (ShellCheck SC2048).
    ./configure CFLAGS="${CFLAGS_FOR_OVS}" "$@" || { cat config.log; exit 1; }
}
# Build OVS.  When building against a kernel tree (and not AF_XDP), only
# the out-of-tree datapath module is compiled; otherwise the full tree is
# built and config.log is dumped on failure.
function build_ovs()
{
    local KERNEL=$1

    configure_ovs $OPTS
    make selinux-policy

    if [ -n "${KERNEL}" ] && [ -z "${AFXDP}" ]; then
        pushd datapath
        make -j4
        popd
    else
        make -j4 || { cat config.log; exit 1; }
    fi
}
# ---- main driver: assemble options from the CI matrix and build --------
if [ "$KERNEL" ]; then
    install_kernel $KERNEL
fi
if [ "$DPDK" ] || [ "$DPDK_SHARED" ]; then
    if [ -z "$DPDK_VER" ]; then
        DPDK_VER="19.11"
    fi
    install_dpdk $DPDK_VER
    # Enable pdump support in OVS.
    EXTRA_OPTS="${EXTRA_OPTS} --enable-dpdk-pdump"
    if [ "$CC" = "clang" ]; then
        # Disregard cast alignment errors until DPDK is fixed
        CFLAGS_FOR_OVS="${CFLAGS_FOR_OVS} -Wno-cast-align"
    fi
fi
if [ "$CC" = "clang" ]; then
    CFLAGS_FOR_OVS="${CFLAGS_FOR_OVS} -Wno-error=unused-command-line-argument"
elif [ "$M32" ]; then
    # Not using sparse for 32bit builds on 64bit machine.
    # Adding m32 flag directly to CC to avoid any posiible issues with API/ABI
    # difference on 'configure' and 'make' stages.
    export CC="$CC -m32"
else
    # Default: gcc 64-bit builds run the sparse static checker.
    OPTS="--enable-sparse"
    if [ "$AFXDP" ]; then
        # netdev-afxdp uses memset for 64M for umem initialization.
        SPARSE_FLAGS="${SPARSE_FLAGS} -Wno-memcpy-max-count"
    fi
    CFLAGS_FOR_OVS="${CFLAGS_FOR_OVS} ${SPARSE_FLAGS}"
fi
# Preserve the caller-supplied options so per-kernel iterations below can
# rebuild OPTS from a clean base.
save_OPTS="${OPTS} $*"
OPTS="${EXTRA_OPTS} ${save_OPTS}"
if [ "$TESTSUITE" ]; then
    # 'distcheck' will reconfigure with required options.
    # Now we only need to prepare the Makefile without sparse-wrapped CC.
    configure_ovs
    export DISTCHECK_CONFIGURE_FLAGS="$OPTS"
    if ! make distcheck CFLAGS="${CFLAGS_FOR_OVS}" \
        TESTSUITEFLAGS=-j4 RECHECK=yes; then
        # testsuite.log is necessary for debugging.
        cat */_build/sub/tests/testsuite.log
        exit 1
    fi
else
    if [ -z "${KERNEL_LIST}" ]; then build_ovs ${KERNEL};
    else
        # Build once per kernel version, resetting options each time.
        save_EXTRA_OPTS="${EXTRA_OPTS}"
        for KERNEL in ${KERNEL_LIST}; do
            echo "=============================="
            echo "Building with kernel ${KERNEL}"
            echo "=============================="
            EXTRA_OPTS="${save_EXTRA_OPTS}"
            install_kernel ${KERNEL}
            OPTS="${EXTRA_OPTS} ${save_OPTS}"
            build_ovs ${KERNEL}
            make distclean
        done
    fi
fi
exit 0
|
class Person:
    """Simple value holder for a person's basic attributes."""

    def __init__(self, name, age, gender):
        """Store the supplied name, age and gender on the instance."""
        self.name = name
        self.age = age
        self.gender = gender

    def get_name(self):
        """Return the person's name."""
        return self.name

    def get_age(self):
        """Return the person's age."""
        return self.age

    def get_gender(self):
        """Return the person's gender."""
        return self.gender
#!/bin/bash
# Upload a backup file to Jianguoyun WebDAV storage via cadaver.
# https://www.jianshu.com/p/d72462b6c67c
if [[ -z "$1" ]]; then
    echo "Usage: $0 \"backupfile\""
else
    # Feed the commands to cadaver through a here-document.  The original
    # built a string with embedded "\n" and passed it through an unquoted
    # `echo -e`, which word-split filenames containing whitespace and
    # collapsed their spacing.
    cadaver https://dav.jianguoyun.com/dav/Minecraft/ <<EOF
put $1
bye
EOF
fi
|
<filename>game_handler.go
package main
import (
"net/http"
"github.com/gorilla/mux"
)
// gameHandler serves game routes, dispatching on the HTTP method.
// PUT, DELETE and POST are currently empty stubs that fall through and
// reply 200 with an empty body; any other unknown method gets 405.
func gameHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodPut:
	case http.MethodGet:
		// Both route variables must be present, otherwise 404.
		vars := mux.Vars(r)
		date := vars["date"]
		id := vars["id"]
		if id == "" || date == "" {
			w.WriteHeader(http.StatusNotFound)
		} else {
			t := renderPage(PAGE_GAMES)
			if t == nil {
				w.WriteHeader(http.StatusNotFound)
			} else {
				// NOTE(review): Execute's error is ignored -- a failure
				// mid-write cannot change the status code anyway, but
				// consider logging it.
				t.Execute(w, nil)
			}
		}
	case http.MethodDelete:
	case http.MethodPost:
	default:
		w.WriteHeader(http.StatusMethodNotAllowed)
	}
} // gameHandler
|
<filename>src/components/Work/Work.js
import React, { useContext } from "react";
import { motion, AnimatePresence } from "framer-motion";
// Styles And Modules
import styles from "./Work.module.scss";
import WorkImages from "./WorkImages";
import { WorkContext } from "../../contexts/WorkContext";
// Work section: renders the currently selected work item (name,
// description, external link, images) from WorkContext, with prev/next
// navigation and an animated transition between items.
// NOTE(review): the SVG "works-name" attributes below look like mangled
// "data-name" attributes from the icon export -- confirm against the
// original asset.
const Work = () => {
  const { work, prevWork, nextWork } = useContext(WorkContext);
  return (
    <div className={styles.Work} id="Work">
      <div className={styles.Container}>
        <div className={styles.Title}>
          <div className={styles.TitleContent}>
            <p>Work</p>
            <div className={styles.TitleBar}></div>
          </div>
        </div>
        {/* Fade/slide the details block whenever the selected work changes. */}
        <AnimatePresence exitBeforeEnter>
          <motion.div
            className={styles.Details}
            exit={{ opacity: 0, y: 10 }}
            initial={{ opacity: 0, y: 10 }}
            animate={{ opacity: 1, y: 0 }}
            transition={{ duration: 0.8 }}
          >
            <motion.p className={styles.Name}>{work["name"]}</motion.p>
            <motion.p className={styles.Description}>
              {work["description"]}
            </motion.p>
            <motion.div className={styles.Links}>
              <div className={styles.Link}>
                <p>{work["linkname"]}</p>
                <a
                  target="_blank"
                  rel="noopener noreferrer"
                  href={work["link"]}
                >
                  <svg
                    xmlns="http://www.w3.org/2000/svg"
                    width="30.621"
                    height="30.621"
                    viewBox="0 0 30.621 30.621"
                  >
                    <g
                      id="Icon_feather-external-link"
                      works-name="Icon feather-external-link"
                      transform="translate(-3 -2.379)"
                    >
                      <path
                        id="Path_153"
                        works-name="Path 153"
                        d="M27,19.5v9a3,3,0,0,1-3,3H7.5a3,3,0,0,1-3-3V12a3,3,0,0,1,3-3h9"
                        fill="none"
                        stroke="#bcbabc"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                        strokeWidth="3"
                      />
                      <path
                        id="Path_154"
                        works-name="Path 154"
                        d="M22.5,4.5h9v9"
                        fill="none"
                        stroke="#bcbabc"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                        strokeWidth="3"
                      />
                      <path
                        id="Path_155"
                        works-name="Path 155"
                        d="M15,21,31.5,4.5"
                        fill="none"
                        stroke="#bcbabc"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                        strokeWidth="3"
                      />
                    </g>
                  </svg>
                </a>
              </div>
            </motion.div>
            {/* Compact link shown on mobile layouts instead of the icon. */}
            <a
              target="_blank"
              rel="noopener noreferrer"
              href={work["link"]}
              className={styles.LinkMobile}
            >
              <p>view</p>
            </a>
          </motion.div>
        </AnimatePresence>
        <WorkImages images={work["images"]} />
        <div className={styles.Navigation}>
          <p className={styles.NavIndicator}>{work["id"]}</p>
          <div className={styles.NavLinks}>
            <p className={styles.NavLink} onClick={prevWork}>
              prev
            </p>
            <p className={styles.NavLink} onClick={nextWork}>
              next
            </p>
          </div>
        </div>
      </div>
    </div>
  );
};

export default Work;
|
<filename>assets/js/canvas.js<gh_stars>0
$(document).ready(function() {
    // Wire up all CRUD handlers and load the initial table once the DOM
    // is ready.
    addRecord();
    saveRecord();
    getTable();
    getRecord();
    deleteRecord();
})
// Save (add / delete) Record in the Database
function saveRecord() {
    // Dispatch the modal's register button to insert or update, based on
    // the action recorded in the hidden #modal_accion field.
    $(document).on('click', '#btn_register', function() {
        var accion = $('#modal_accion').val();
        if (accion === 'add') {
            insertRecord();
        } else if (accion === 'update') {
            updateRecord();
        }
    });
    // Reset the form whenever the modal is dismissed.
    $(document).on('click', '#btn_close', function() {
        $('form').trigger('reset');
    });
}
function addRecord() {
    // Open the creation modal with a clean slate: clear the status and
    // message areas, reset the form and hide stale validation markers.
    $('button#btn_add').on('click', function() {
        $('#status').html('').removeClass('alert alert-warning alert-danger alert-success');
        $('#message').html('').removeClass('alert alert-warning');
        $('form').trigger('reset');
        $(".no_valido").hide();
        $('#detail_modal').modal('show');
        $('#modal_accion').val('add');
        $("#lbl_detail_modal").html("Crear Lienzo");
    });
}
// Insert Record in the Database
function insertRecord() {
    // Collect form values; optional fields fall back to null so the
    // backend can store NULLs.
    var desc_canvas = $('#desc_canvas').val();
    var tamano_x = $('#tamano_x').val();
    var tamano_y = $('#tamano_y').val();
    var color = ($('#color').val() === '') ? null : $('#color').val();
    var estilo = ($('#estilo').val() === '') ? null : $('#estilo').val();
    var id_status = $('#id_status').val();
    // Validate required fields before posting.
    var datosValidos = validateData(desc_canvas, tamano_x, tamano_y, id_status);
    if (datosValidos.status){
        $.ajax({
            url : 'CanvasFunc.php',
            method: 'post',
            data:{accion:'insertRecord',desc_canvas:desc_canvas,tamano_x:tamano_x,tamano_y:tamano_y,color:color,
                estilo:estilo,id_status:id_status},
            success: function(data) {
                data = $.parseJSON(data);
                if(data.status) {
                    // Inserted: show success, refresh the table, close modal.
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-success');
                    getTable();
                    $('#detail_modal').modal('hide');
                } else {
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-danger');
                }
            }
        })
    } else {
        // Validation failed: surface the warning inside the modal.
        $('#message').html(datosValidos.mensaje);
        $('#message').addClass('alert alert-warning');
    }
}
// Validate the canvas form fields.  Shows the matching inline error helper
// for every invalid field and returns {status, mensaje}.
function validateData(desc_canvas, tamano_x, tamano_y, id_status) {
    var ok = true;
    $(".no_valido").hide();
    if (desc_canvas == '') {
        ok = false;
        $('#hlpNombreNOK').show();
    }
    if (tamano_y == '') {
        ok = false;
        $('#hlpAltoNOK').show();
    }
    if (tamano_x == '') {
        ok = false;
        $('#hlpAnchoNOK').show();
    }
    if (id_status == 0) {
        ok = false;
        $('#hlpEstatusNOK').show();
    }
    var mensaje = ok ? 'Datos OK' : 'Favor de proporcionar la información en rojo';
    return {status: ok, mensaje: mensaje};
}
// Display table
function getTable() {
    // Fetch the rendered table HTML and swap it into #table.  An empty
    // result set is reported as success with an empty row.
    $.ajax({
        url: 'CanvasFunc.php',
        method: 'post',
        data:{accion:'getTable'},
        success: function(data) {
            data = $.parseJSON(data);
            if(data.status) {
                $('#table').html(data.html);
            } else {
                $('#status').html(data.mensaje);
                if (data.vacio){
                    // No rows: clear the table and show a success banner.
                    $('#table').html('<tr></tr>');
                    $('#status').addClass('alert alert-success');
                } else {
                    $('#status').addClass('alert alert-danger');
                }
            }
        }
    })
}
//Get Particular Record
function getRecord()
{
    // Load one record (id taken from the clicked row's data-id attribute)
    // into the modal form for editing.
    $(document).on('click','#btn_edit',function() {
        $('#status').html('');
        $('#status').removeClass('alert alert-warning alert-danger alert-success');
        $('#message').html('');
        $('#message').removeClass('alert alert-warning');
        var id_canvas = $(this).attr('data-id');
        $.ajax({
            url : 'CanvasFunc.php',
            method: 'post',
            data:{accion:'getRecord',id_canvas:id_canvas},
            dataType: 'JSON',
            success: function(data){
                if(data.status) {
                    // Populate form fields with the fetched record.
                    $('#id_canvas').val(data.data['id_canvas']);
                    $('#desc_canvas').val(data.data['desc_canvas']);
                    $('#tamano_x').val(data.data['tamano_x']);
                    $('#tamano_y').val(data.data['tamano_y']);
                    $('#color').val(data.data['color']);
                    $('#estilo').val(data.data['estilo']);
                    $('#id_status').val(data.data['id_status']);
                    // Switch the modal into "update" mode.
                    $(".no_valido").hide();
                    $('#detail_modal').modal('show');
                    $("#lbl_detail_modal").html("Editar Lienzo");
                    $('#modal_accion').val('update');
                } else {
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-danger');
                }
            }
        })
    })
}
// Update Record
function updateRecord() {
    // Mirror of insertRecord(), but includes the record id and posts the
    // 'updateRecord' action.
    var id_canvas = $('#id_canvas').val();
    var desc_canvas = $('#desc_canvas').val();
    var tamano_x = $('#tamano_x').val();
    var tamano_y = $('#tamano_y').val();
    var color = ($('#color').val() === '') ? null : $('#color').val();
    var estilo = ($('#estilo').val() === '') ? null : $('#estilo').val();
    var id_status = $('#id_status').val();
    // Validate required fields before posting.
    var datosValidos = validateData(desc_canvas, tamano_x, tamano_y, id_status);
    if (datosValidos.status){
        $.ajax({
            url: 'CanvasFunc.php',
            method: 'post',
            data:{accion:'updateRecord',id_canvas:id_canvas,desc_canvas:desc_canvas,tamano_x:tamano_x,
                tamano_y:tamano_y,color:color,estilo:estilo,id_status:id_status},
            success: function(data) {
                data = $.parseJSON(data);
                if(data.status) {
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-success');
                    getTable();
                    $('#detail_modal').modal('hide');
                } else {
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-danger');
                }
            }
        })
    } else {
        $('#message').html(datosValidos.mensaje);
        $('#message').addClass('alert alert-warning');
    }
}
// Delete Function
function deleteRecord() {
    // Id of the record pending deletion; set when a row's delete button is
    // clicked and read by the confirmation handler below.
    var id_canvas = null;
    $(document).on('click', '#btn_delete', function() {
        id_canvas = $(this).attr('data-id1');
        $('#status').html('');
        $('#status').removeClass('alert alert-warning alert-danger alert-success');
        $('#delete').modal('show');
    });
    // Bind the confirmation handler ONCE.  The original nested this binding
    // inside the '#btn_delete' handler, which stacked one duplicate handler
    // per click and fired multiple AJAX delete requests on confirmation.
    $(document).on('click', '#btn_delete_record', function() {
        $.ajax({
            url : 'CanvasFunc.php',
            method: 'post',
            data:{accion:'deleteRecord',id_canvas:id_canvas},
            success: function(data) {
                data = $.parseJSON(data);
                if(data.status) {
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-success');
                    getTable();
                    $('form').trigger('reset');
                    $('#delete').modal('hide');
                } else {
                    $('#status').html(data.mensaje);
                    $('#status').addClass('alert alert-danger');
                }
            }
        })
    });
}
|
// Babel helper: guard against using `this` in a transpiled subclass before
// super() has run; returns the value unchanged when it is initialised.
function _assertThisInitialized(self) {
  if (self === void 0) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  return self;
}
// Babel helper implementing loose-mode class inheritance: chain the
// prototypes, restore the constructor reference, and inherit statics via
// __proto__.
function _inheritsLoose(subClass, superClass) {
  subClass.prototype = Object.create(superClass.prototype);
  subClass.prototype.constructor = subClass;
  subClass.__proto__ = superClass;
}
// Babel helper: set `key` on `obj`.  Uses defineProperty for keys already
// present (so attributes are rewritten as enumerable/configurable/writable),
// plain assignment otherwise; returns the object for chaining.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
import React, { createRef, Component } from 'react';
import { storiesOf } from '@storybook/react';
import { Box, Button, Drop, Grommet } from 'grommet';
import { grommet } from 'grommet/themes';
// Babel-transpiled (loose mode) class component demonstrating nested
// progressive Drops: a button opens a Drop whose content can open a
// second, inner Drop aligned to its target.
var ProgressiveDrop =
/*#__PURE__*/
function (_Component) {
  _inheritsLoose(ProgressiveDrop, _Component);

  function ProgressiveDrop() {
    var _this;

    // Forward all constructor arguments to React.Component.
    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }

    _this = _Component.call.apply(_Component, [this].concat(args)) || this;

    // Refs used as positioning targets for the outer and inner Drops.
    _defineProperty(_assertThisInitialized(_this), "boxRef", createRef());

    _defineProperty(_assertThisInitialized(_this), "innerBoxRef", createRef());

    _defineProperty(_assertThisInitialized(_this), "state", {
      openDrop: false,
      openInnerDrop: false,
      interactedWithInnerButton: false
    });

    // Closing the outer drop also closes the inner one.
    _defineProperty(_assertThisInitialized(_this), "onCloseDrop", function () {
      return _this.setState({
        openDrop: false,
        openInnerDrop: false
      });
    });

    _defineProperty(_assertThisInitialized(_this), "onOpenDrop", function () {
      return _this.setState({
        openDrop: true,
        openInnerDrop: false
      });
    });

    return _this;
  }

  var _proto = ProgressiveDrop.prototype;

  _proto.render = function render() {
    var _this2 = this;

    var _this$state = this.state,
        openDrop = _this$state.openDrop,
        openInnerDrop = _this$state.openInnerDrop,
        interactedWithInnerButton = _this$state.interactedWithInnerButton;
    return React.createElement(Grommet, {
      theme: grommet,
      full: true
    }, React.createElement(Box, {
      fill: true,
      align: "center",
      justify: "center"
    }, React.createElement(Button, {
      ref: this.boxRef,
      primary: true,
      label: "Click me",
      onClick: this.onOpenDrop
    }), openDrop && React.createElement(Drop, {
      target: this.boxRef.current,
      onClickOutside: this.onCloseDrop,
      onEsc: this.onCloseDrop
    }, React.createElement(Box, {
      pad: "large",
      ref: this.innerBoxRef
    }, React.createElement(Button, {
      primary: true,
      label: "Click me again",
      onClick: function onClick() {
        return _this2.setState({
          openInnerDrop: true
        });
      }
    })), openInnerDrop && React.createElement(Drop, {
      target: this.innerBoxRef.current,
      onClickOutside: function onClickOutside() {
        return _this2.setState({
          openInnerDrop: false
        });
      },
      onEsc: function onEsc() {
        return _this2.setState({
          openInnerDrop: false
        });
      },
      align: {
        top: 'bottom',
        right: 'right'
      }
    }, React.createElement(Box, {
      pad: "large"
    }, React.createElement(Button, {
      primary: true,
      label: interactedWithInnerButton ? 'Good job!' : 'You can interact with me',
      onClick: function onClick() {
        return _this2.setState({
          interactedWithInnerButton: true
        });
      }
    }))))));
  };

  return ProgressiveDrop;
}(Component);
// Register the story under Drop > Progressive in Storybook.
storiesOf('Drop', module).add('Progressive', function () {
  return React.createElement(ProgressiveDrop, null);
});
# Use a [Trie](https://github.com/pytries/marisa-trie)
# Benchmark: build a marisa-trie from the key column of a CSV, then time
# membership lookups for real keys ("hits") and random UUIDs ("misses").
import gc
import os
import marisa_trie
import pathlib
import psutil
import string
import uuid
from datetime import datetime as dt
### edit me - start
key_column = 1
value_column = 2
source_file_name = 'd:/test/source.csv'
sample_file_name = 'd:/test/sample.csv'
### edit me - end
source_file_name = pathlib.Path(source_file_name)
sample_file_name = pathlib.Path(sample_file_name)
print('Loading dictionary...')
t1 = dt.utcnow()
def gen_keys(source_file_name):
    """Yield the key column of every CSV row (naive split, no quoting)."""
    with open(source_file_name, 'r', encoding = 'utf-8', newline = '') as source_file:
        for line in source_file:
            yield line.strip().split(',')[key_column]
mydict = marisa_trie.Trie(gen_keys(source_file_name))
# Second pass collects the value column.  Iterate the file directly rather
# than readlines() so the raw lines are not materialised a second time.
with open(source_file_name, 'r', encoding = 'utf-8', newline = '') as source_file:
    values = [line.strip().split(',')[value_column] for line in source_file]
print(dt.utcnow() - t1)
gc.collect()
pid = os.getpid()
py = psutil.Process(pid)
print('{ram} MB used so far'.format(ram = int(py.memory_info().vms/1024/1024)))
print('Loading real keys (hits)...')
t1 = dt.utcnow()
with open(sample_file_name, 'r', encoding = 'utf-8', newline = '') as sample_file:
    hits = [line.strip() for line in sample_file]
print(dt.utcnow() - t1)
print('Creating fake keys (misses)...')
t1 = dt.utcnow()
misses = [str(uuid.uuid4()) for hit in hits]
print(dt.utcnow() - t1)
print('Timing samples (hits)...')
t1 = dt.utcnow()
for key in hits:
    if key in mydict:
        # NOTE(review): mydict[key] is the trie-assigned id, while `values`
        # is in file order -- confirm the ids match line order before using
        # this mapping outside of timing measurements.
        value = values[mydict[key]]
    else:
        raise Exception('Could not find expected key: {key}'.format(key = key))
t2 = dt.utcnow() - t1
print(t2)
print('Retrieval (hits): {cnt} k / sec'.format(cnt = int(len(hits)/t2.total_seconds()/1000)))
print('Timing samples (misses)...')
t1 = dt.utcnow()
for key in misses:
    if key in mydict:
        raise Exception('Found unexpected key: {key}'.format(key = key))
t2 = dt.utcnow() - t1
print(t2)
print('Retrieval (misses): {cnt} k / sec'.format(cnt = int(len(misses)/t2.total_seconds()/1000)))
|
#!/usr/bin/env bash
# Comment/uncomment the following code lines according to your needs
# Stop all containers
# NOTE(review): `docker stop`/`docker rm` error out when no containers
# exist (empty argument list) -- harmless here since the script does not
# set -e, but expect noise in the output.
docker stop $(docker ps -a -q)
# Delete all containers
docker rm $(docker ps -a -q)
# Delete all images
# docker rmi $(docker images -q)
# Start stardog container
echo "Starting stardog container"
docker run -itd -v /data/qanary:/stardog-4.1.1/qanary -p 5820:4000 --net="host" --name stardog qanary/stardog
# Start qapipeline container
echo "Starting qapipeline container"
docker run -itd -p 8080:5000 --net="host" --name qapipeline qanary/qapipeline
# Start containers for qanary components
echo "Starting agdistis-ned component container"
docker run -d -P --net="host" --name agdistis-ned -t qanary/agdistis-ned
# echo "Starting alchemy-nerd component container"
# docker run -d -P --net="host" --name alchemy-nerd -t qanary/alchemy-nerd
# echo "Starting dbpedia-spotlight-ned component container"
# docker run -d -P --net="host" --name dbpedia-spotlight-ned -t qanary/dbpedia-spotlight-ned
# echo "Starting dbpedia-spotlight-ner component container"
# docker run -d -P --net="host" --name dbpedia-spotlight-ner -t qanary/dbpedia-spotlight-ner
# echo "Starting fox-ner component container"
# docker run -d -P --net="host" --name fox-ner -t qanary/fox-ner
# echo "Starting lucene-linker-nerd component container"
# docker run -d -P --net="host" --name lucene-linker-nerd -t qanary/lucene-linker-nerd
# echo "Starting stanford-ner component container"
# docker run -d -P --net="host" --name stanford-ner -t qanary/stanford-ner
|
#!/bin/bash
# Delete RDF triples listed in tab-separated .txt files from a Virtuoso
# store via isql.  Each data line is: subject<TAB>object<TAB>predicate.
user="vspdemo"
echo "Please enter your SPARQL Password: "
read -sr SPARQL_PASSWORD
#Goes through all .txt files in current dir and deletes all triples within them
#txt files need to end in an empty line
for file in *.txt; do
echo -e "\n|||---Processing $file---|||"
##Read from file and delimit on tabs (any number of them) and read through array
while IFS=$'\t' read -r -a arr; do
if [ ! -z "${arr[0]}" ] && [ ! -z "${arr[1]}" ] && [ ! -z "${arr[2]}" ] ; then #To stop from grabbing blank lines in arg file given
# Subjects use underscores instead of spaces in the graph.
one_word_subj="${arr[0]// /_}"
# NOTE(review): field values are interpolated directly into the SPARQL
# statement -- only safe for trusted input files.  The <<-EOF heredoc
# strips leading *tabs* only; keep the terminator tab-indented (or at
# column 0) when editing.
../../../../../../../bin/isql 1111 $user $SPARQL_PASSWORD <<-EOF
sparql PREFIX meta: <http://modernism.uvic.ca/metadata#> PREFIX pref1: <http://localhost:8890/limo#> DELETE DATA FROM <http://localhost:8890/bestDataProduction> { meta:$one_word_subj pref1:${arr[2]} "${arr[1]}" } ;
EXIT;
EOF
fi
done < "$file"
done
echo -e "\nThank you for cleaning our data."
|
class TrafficIntersection:
    """Aggregates car counts from the traffic-light agents of a schedule.

    Lights with unique_id 0 or 1 feed the left counter; unique_id 2 feeds
    the right counter.  Only agents with agent_type == 1 are considered.
    """

    def __init__(self, schedule):
        self.schedule = schedule
        self.count_left = 0
        self.count_right = 0

    def update_counts(self):
        """Recompute both counters from the schedule's current agents."""
        left = 0
        right = 0
        for agent in self.schedule.agents:
            if agent.agent_type != 1:
                continue
            if agent.unique_id in (0, 1):
                left += agent.count_cars()
            elif agent.unique_id == 2:
                right += agent.count_cars()
        self.count_left = left
        self.count_right = right

    def get_left_count(self):
        """Return the last computed left-side car count."""
        return self.count_left

    def get_right_count(self):
        """Return the last computed right-side car count."""
        return self.count_right
#!/bin/sh
# Build the Ubuntu 16.04 desktop image from the local Dockerfile, passing
# the base image repository/tag as build arguments.
docker build --file Dockerfile --build-arg repository=ubuntu --build-arg tag=16.04 -t ubuntu-desktop:16.04 .
|
#!/bin/bash
# Serve the site locally: install the build config, start jekyll in the
# background, then open the local URL in Safari (macOS).
# Use the config_build for _config.yml
# uses scholar plugin
cp ./build/config_build.yml _config.yml
sleep 1
# bundle exec jekyll serve --incremental &
bundle exec jekyll serve &
# jekyll serve &
# Give the server a moment to come up before opening the browser.
sleep 3
open -a Safari http://127.0.0.1:4000
# To view build times
# bundle exec jekyll serve --profile
# :set spell spelllang=en_gb
# if already in use
# ps aux | grep jekyll
# kill -9 "PID"
# Errors due to OS updates.
# e.g.: can't find gem jekyll (>= 0.a). Depending on machine:
# If rails not installed,
# sudo gem install rails
# bundle update
# sudo gem install jekyll bundler
# sudo rm -rf _site
# sudo chmod -R 777 /usr/local/lib/ruby/gems/
# gem install bundler jekyll
# bundle add webrick
$(function () {
    // Banner admin grid: server-side paged list of banners with image and
    // date formatting and a status badge column.
    $("#jqGrid").jqGrid({
        url: '../banner/list',
        datatype: "json",
        colModel: [
            {label: 'id', name: 'id', index: 'id', key: true, hidden: true},
            {label: '标题', name: 'title', index: 'title', width: 80},
            {label: '内容', name: 'content', index: 'content', width: 80},
            {label: '链接', name: 'link', index: 'link', width: 80},
            {
                label: '图片', name: 'imageUrl', index: 'imageUrl', width: 80, formatter: function (value) {
                    return transImg(value);
                }
            },
            {
                label: '创建时间', name: 'createTime', index: 'createTime', width: 80, formatter: function (value) {
                    return transDate(value);
                }
            },
            {
                // 0 renders a red "disabled" badge, anything else green "normal".
                label: '状态', name: 'status', index: 'status', width: 80, formatter: function (value) {
                    return value === 0 ?
                        '<span class="label label-danger">禁用</span>' :
                        '<span class="label label-success">正常</span>';
                }
            }],
        viewrecords: true,
        height: 385,
        rowNum: 7,
        rowList: [10, 30, 50],
        rownumbers: true,
        rownumWidth: 25,
        autowidth: true,
        multiselect: true,
        pager: "#jqGridPager",
        // Map the server's page envelope onto jqGrid's reader fields.
        jsonReader: {
            root: "page.list",
            page: "page.currPage",
            total: "page.totalPage",
            records: "page.totalCount"
        },
        prmNames: {
            page: "page",
            rows: "limit",
            order: "order"
        },
        gridComplete: function () {
            // Hide the grid's bottom horizontal scrollbar.
            $("#jqGrid").closest(".ui-jqgrid-bdiv").css({"overflow-x": "hidden"});
        }
    });
});
// View-model for the banner admin page.  Most handlers are stubs still to
// be implemented; the jqGrid above handles the listing itself.
var vm = new Vue({
    el: '#rrapp',
    data: {
        showList: true,
        title: null,
        banner: {},
        ruleValidate: {
            title: [
                {required: true, message: '广告名称不能为空', trigger: 'blur'}
            ],
            // NOTE(review): the grid column uses "imageUrl" while this rule
            // targets "image_url" -- confirm which name the form field binds.
            image_url: [
                {required: true, message: '图片不能为空', trigger: 'blur'}
            ]
        },
        // Search query bound to the filter input.
        q: {
            title: ''
        }
    },
    methods: {
        query: function () {
            vm.reload();
        },
        add: function () {
        },
        update: function (event) {
        },
        saveOrUpdate: function (event) {
        },
        del: function (event) {
        },
        getInfo: function (id) {
        },
        reload: function (event) {
        },
        handleSuccess: function (res, file) {
        },
        handleFormatError: function (file) {
        },
        handleMaxSize: function (file) {
        },
        handleSubmit: function (name) {
        },
        handleReset: function (name) {
        },
        eyeImage: function () {
        }
    }
});
<reponame>anotheria/moskito-control
package org.moskito.control.ui.resource.accumulators;
import org.moskito.control.connectors.response.ConnectorAccumulatorResponse;
import org.moskito.control.connectors.response.ConnectorAccumulatorsNamesResponse;
import org.moskito.control.core.ComponentRepository;
import org.moskito.control.core.Component;
import org.moskito.control.common.AccumulatorDataItem;
import org.moskito.control.core.chart.Chart;
import org.moskito.control.core.inspection.ComponentInspectionDataProvider;
import org.moskito.control.ui.action.MainViewAction;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.*;
/**
* REST resource used to list of accumulator chart beans.
* @author strel
*/
@Path("/accumulators")
@Produces(MediaType.APPLICATION_JSON)
public class AccumulatorResource {

    /**
     * Returns the alphabetically sorted list of accumulator names
     * available for the given component.
     */
    @GET
    @Path("/{component}")
    public AccumulatorsListBean componentAccumulatorNames(@PathParam("component") String componentName){
        Component component = ComponentRepository.getInstance().getComponent(componentName);
        ComponentInspectionDataProvider provider = new ComponentInspectionDataProvider();
        ConnectorAccumulatorsNamesResponse response = provider.provideAccumulatorsNames(component);
        Collections.sort(response.getNames());
        AccumulatorsListBean componentAccumulators = new AccumulatorsListBean();
        componentAccumulators.setNames(response.getNames());
        componentAccumulators.setComponentName(componentName);
        return componentAccumulators;
    }

    /**
     * Builds chart data for the requested accumulators of a component.
     * Returns an empty response when no accumulators are requested.
     */
    @POST
    @Path("/charts")
    @Consumes(MediaType.APPLICATION_JSON)
    public AccumulatorChartsListResponse accumulatorCharts(AccumulatorChartsParameters params) {
        Component component = ComponentRepository.getInstance().getComponent(params.getComponent());
        ArrayList<String> accumulators = params.getAccumulators();
        if (accumulators == null || accumulators.isEmpty()) {
            return new AccumulatorChartsListResponse();
        }
        ComponentInspectionDataProvider provider = new ComponentInspectionDataProvider();
        ConnectorAccumulatorResponse accumulatorResponse = provider.provideAccumulatorsCharts(component, accumulators);
        LinkedList<Chart> chartBeans = new LinkedList<>();
        Collection<String> names = accumulatorResponse.getNames();
        for (String name : names) {
            List<AccumulatorDataItem> line = accumulatorResponse.getLine(name);
            String accumulatorName = name+"-"+component.getName(); // to avoid same accumulators ids for multiple components
            Chart chart = new Chart(accumulatorName, -1);
            chart.addLine(component.getName(), accumulatorName);
            chart.notifyNewData(component.getName(), accumulatorName, line);
            chartBeans.add(chart);
        }
        // Present charts in a stable, name-sorted order.
        Collections.sort(chartBeans, new Comparator<Chart>() {
            @Override
            public int compare(Chart chart, Chart another) {
                return chart.getName().compareTo(another.getName());
            }
        });
        AccumulatorChartsListResponse response = new AccumulatorChartsListResponse();
        response.setCharts(MainViewAction.prepareChartData(chartBeans));
        return response;
    }
}
|
<reponame>seek-oss/scoobie<gh_stars>1-10
import 'braid-design-system/reset';
import 'loki/configure-react';
import { Alert, Stack, Text } from 'braid-design-system';
import React from 'react';
import { ComponentProps } from 'react';
import { BraidArgs, defaultArgTypes, defaultArgs } from '../storybook/controls';
import { BraidStorybookProvider, withRouter } from '../storybook/decorators';
import { InternalLink as Component } from './InternalLink';
// Storybook metadata for the standalone InternalLink story.
export default {
  args: {
    braidThemeName: defaultArgs.braidThemeName,
    href: 'page#id',
    reset: true,
  },
  argTypes: {
    braidThemeName: defaultArgTypes.braidThemeName,
  },
  component: Component,
  decorators: [withRouter],
  title: 'Standalone/InternalLink',
};

type Args = ComponentProps<typeof Component> & BraidArgs;

// Extra text controls rendered inside the linked Alert.
interface CustomArgs extends Args {
  line1: string;
  line2: string;
}

// Wraps a whole Alert in an InternalLink to show that complex children are
// supported.
export const InternalLink = ({
  braidThemeName,
  line1,
  line2,
  ...args
}: CustomArgs) => (
  <BraidStorybookProvider braidThemeName={braidThemeName}>
    <Component {...args}>
      <Alert tone="caution">
        <Stack space="gutter">
          <Text>{line1}</Text>
          <Text size="small">{line2}</Text>
        </Stack>
      </Alert>
    </Component>
  </BraidStorybookProvider>
);

InternalLink.args = {
  line1: 'InternalLink supports complex components.',
  line2: 'For example, this whole Alert is a link!',
};

InternalLink.argTypes = {
  line1: { control: { type: 'text' } },
  line2: { control: { type: 'text' } },
};

InternalLink.storyName = 'InternalLink';
|
#!/bin/bash
#
# Copyright (c) 2016-2018, Linaro Limited
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Replay udp64.pcap through odp_l3fwd using DPDK pcap vdevs and verify all
# input bytes were forwarded.  Backticks replaced with $(...) and file
# arguments quoted; behavior is otherwise unchanged.

# TEST_DIR stays unquoted on purpose: when unset it must vanish from the
# search-path list rather than become an empty argument.
PCAP_IN=$(find . ${TEST_DIR} $(dirname "$0") -name udp64.pcap -print -quit)
PCAP_OUT="pcapout.pcap"
# NOTE(review): ${PCAP_EMPTY} is never created here -- presumably provided
# by the test environment; confirm before relying on it.
PCAP_EMPTY="empty.pcap"
PCAP_IN_SIZE=$(stat -c %s "${PCAP_IN}")
echo "using PCAP_IN = ${PCAP_IN}, PCAP_OUT = ${PCAP_OUT}"

# Port 0 replays the input capture, port 1 records forwarded traffic.
export ODP_PLATFORM_PARAMS="--no-pci \
    --vdev net_pcap0,rx_pcap=${PCAP_IN},tx_pcap=/dev/null \
    --vdev net_pcap1,rx_pcap=${PCAP_EMPTY},tx_pcap=${PCAP_OUT}"

./odp_l3fwd${EXEEXT} -i 0,1 \
    -r "10.0.0.0/24,1" -d 30
STATUS=$?

PCAP_OUT_SIZE=$(stat -c %s "${PCAP_OUT}")
rm -f "${PCAP_OUT}"

# Pass only if the app exited cleanly and every input byte came back out.
if [ ${STATUS} -ne 0 ] || [ "${PCAP_IN_SIZE}" -ne "${PCAP_OUT_SIZE}" ]; then
    echo "Error: status ${STATUS}, in:${PCAP_IN_SIZE} out:${PCAP_OUT_SIZE}"
    exit 1
fi
echo "Pass: status ${STATUS}, in:${PCAP_IN_SIZE} out:${PCAP_OUT_SIZE}"
exit 0
#!/bin/bash
# Reclaim disk space: remove superseded install-only packages (old kernels
# and the like), then drop all cached dnf metadata and packages.
set -euo pipefail
dnf remove --oldinstallonly --assumeyes
dnf clean all
import boto3

# SNS client used to deliver alarm notifications.
sns = boto3.client('sns')

# Create the SNS topic. create_topic returns a response dict, so extract the
# ARN string (the original passed the whole dict where an ARN is required).
sns_topic_arn = sns.create_topic(Name='PageLoadAlarmNotifications')['TopicArn']

# Subscribe an email endpoint to the topic.
sns.subscribe(
    TopicArn=sns_topic_arn,
    Protocol='email',
    Endpoint='<enter_email_address_here>',
)

# Create a CloudWatch alarm that fires when the page does not return HTTP 200.
cloudwatch = boto3.client('cloudwatch')
alarm_name = 'PageLoadAlarm'
web_page_url = '<enter_web_page_url_here>'
cloudwatch.put_metric_alarm(
    AlarmName=alarm_name,
    MetricName='PageLoad',
    Namespace='PageLoad',
    Dimensions=[{'Name': 'WebPageUrl', 'Value': web_page_url}],
    ComparisonOperator='NotEqualToThreshold',
    # NOTE(review): DatapointsToAlarm (2) exceeds EvaluationPeriods (1);
    # CloudWatch requires M <= N for "M out of N" alarms — confirm intent.
    EvaluationPeriods=1,
    DatapointsToAlarm=2,
    Period=30,
    Threshold=200,
    # Must be the string 'missing'; the bare name MISSING was a NameError.
    TreatMissingData='missing',
    Statistic='Maximum',
    AlarmActions=[sns_topic_arn],
)
<reponame>mighteejim/manager
// Action types for the authentication store.
export const SET_TOKEN = '@@authentication/SET_TOKEN';
export const LOGOUT = '@@authentication/LOGOUT';

// Build the action that stores an OAuth token and its granted scopes.
export const setToken = (token, scopes) => ({ type: SET_TOKEN, token, scopes });

// Build the action that clears the current session.
export const logout = () => ({ type: LOGOUT });
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for CESA-2009:1130
#
# Security announcement date: 2009-06-26 14:06:51 UTC
# Script generation date: 2017-01-01 21:10:00 UTC
#
# Operating System: CentOS 5
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - kdegraphics-devel.i386:3.5.4-13.el5_3
# - kdegraphics.x86_64:3.5.4-13.el5_3
# - kdegraphics-devel.x86_64:3.5.4-13.el5_3
#
# Last versions recommended by security team:
# - kdegraphics-devel.i386:3.5.4-17.el5_5.1
# - kdegraphics.x86_64:3.5.4-17.el5_5.1
# - kdegraphics-devel.x86_64:3.5.4-17.el5_5.1
#
# CVE List:
# - CVE-2009-0945
# - CVE-2009-1709
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Install the fixed builds listed above. yum expects name-version-release.arch;
# the previous "name.arch-version" form was not a valid package specification.
sudo yum install -y kdegraphics-devel-3.5.4-13.el5_3.i386
sudo yum install -y kdegraphics-3.5.4-13.el5_3.x86_64
sudo yum install -y kdegraphics-devel-3.5.4-13.el5_3.x86_64
|
<reponame>erlandsona/attendance-tracker-4-nss
# A tardy (late-arrival) record.
# Linked to students through the students_tardies join table
# (many-to-many: one tardy event can involve many students).
class Tardy < ActiveRecord::Base
  has_many :students_tardies
  has_many :students, through: :students_tardies
end
|
<reponame>PavliukKonstantin/learn-python<gh_stars>0
# ДНК и РНК это последовательности нуклеотидов.
#
# Четыре нуклеотида в ДНК это аденин (A), цитозин (C), гуанин (G) и тимин (T).
#
# Четыре нуклеотида в РНК это аденин (A), цитозин (C), гуанин (G) и урацил (U).
#
# Цепь РНК составляется на основе цепи ДНК последовательной
# заменой каждого нуклеотида:
#
# G -> C
# C -> G
# T -> A
# A -> U
# to_rna.py
# Напишите функцию to_rna, которая принимает на вход цепь ДНК
# и возвращает соответствующую цепь РНК (совершает транскрипцию РНК).
def to_rna(dna):
    """Transcribe a DNA strand into its RNA complement.

    Nucleotides map G->C, C->G, T->A, A->U.

    Args:
        dna: string of DNA nucleotides ("G", "C", "T", "A").

    Returns:
        The complementary RNA strand.

    Raises:
        KeyError: if dna contains a character outside "GCTA".
    """
    complement = {
        "G": "C",
        "C": "G",
        "T": "A",
        "A": "U",
    }
    # Generator expression feeds join directly; no intermediate list needed.
    return "".join(complement[nucleotide] for nucleotide in dna)


print(to_rna('ACGTGGTCTTAA'))
def test_to_rna():
    """Spot-check to_rna on single nucleotides and a full strand."""
    cases = {
        "C": "G",
        "G": "C",
        "T": "A",
        "A": "U",
        "ACGTGGTCTTAA": "UGCACCAGAAUU",
    }
    for dna, expected_rna in cases.items():
        assert to_rna(dna) == expected_rna


test_to_rna()
|
import requests
from bs4 import BeautifulSoup

# Fetch a page and print every text node it contains.
url = 'https://example.com/'

# Bounded timeout prevents a hung connection; raise_for_status fails fast on
# HTTP errors instead of silently parsing an error body (original had neither).
page = requests.get(url, timeout=10)
page.raise_for_status()

soup = BeautifulSoup(page.content, 'lxml')

# find_all(string=True) yields all text nodes; 'string' is the modern name
# for the deprecated 'text' keyword argument.
text = soup.find_all(string=True)
for t in text:
    print(t)
# Compile the foo.domain Java module and package it into $MLIB.
MJARS="$PWD/convert/out"
MLIB="$PWD/mlib"
cd foo.domain || exit 1
# compile — $(find ...) is deliberately unquoted so each file becomes its own
# argument; assumes no whitespace in source paths (TODO: confirm).
javac -d build \
	--module-source-path src/main/java \
	--module-path "$MJARS" \
	$(find src/main/java -name "*.java")
cd build/foo.domain || exit 1
# jar
jar cf "$MLIB/foo.domain.jar" *
|
// +build windows
package main
import "fmt"
// bannerGet returns the multi-line ASCII-art startup banner, followed by the
// author credit and a warning that some option combinations can weaken the
// generated pass-phrases.
func bannerGet() string {
	// NOTE(review): Sprintf has no format verbs here, so it returns the raw
	// string unchanged; kept as-is so the file's fmt import stays used.
	banner := fmt.Sprintf(`
 ___________________ __ __
 / ____/ ____/ ____/ | / /___ _________/ /____
 / __/ / /_ / /_ | | /| / / __ \/ ___/ __ / ___/
 / /___/ __/ / __/ | |/ |/ / /_/ / / / /_/ (__ )
 /_____/_/ /_/ |__/|__/\____/_/ \__,_/____/ `)
	banner += "\n" + `Author: <NAME> (@sh3r4)
 License: MIT
 Warning: Some of the following options when used in combination can
 significantly weaken the pass-phrases generated.
 You probably know what you are doing though, yeah?`
	banner += "\n\n"
	return banner
}
|
<reponame>litaiqing/brushJs<filename>brush-assist.js<gh_stars>1-10
/**
* brush-assist.js
* brush.js辅助js。
* Created by litaiqing on 2016-10-27.
* email : <EMAIL>
*/
$().ready(function () {
    // Global registry of template sources captured from the page.
    window.brush_templ = {};
    $.extend({
        // Strip newlines, HTML/JS comments and inter-tag whitespace so a
        // template can be stored and re-rendered compactly.
        cut_html: function (html) {
            return html.replace(/\n+/g, "")
                .replace(/<!--.*?-->/ig, "")
                .replace(/\/\*.*?\*\//ig, "")
                .replace(/[ ]+</ig, "<")
                .trim();
        },
        depot: {
            // Key prefix namespacing template entries inside brush_templ.
            root: 'b-view-',
            /**
             * Save a template.
             */
            set: function (key, value) {
                brush_templ[$.depot.root + key] = value;
            },
            /**
             * Read a template.
             */
            get: function (key) {
                return brush_templ[$.depot.root + key];
            },
            /**
             * Remove a template.
             */
            rm: function (key) {
                try {
                    delete brush_templ[$.depot.root + key];
                } catch (e) {
                    // Fall back to nulling the slot when delete is refused.
                    brush_templ[$.depot.root + key] = null;
                }
            }
        }
    });
    $.extend($.brush, {
        // Quick refresh: re-render the stored template `name` with `data`
        // and append the rendered markup into the matching [b-view] element.
        flush: function (name, data, i) {
            var dataObj = $.depot.get(name);
            $.brush.paste(dataObj.html, data, {
                i: i,
                name: dataObj.alias,
                callback: function (source, obj, setting) {
                    $('[b-view="' + name + '"]').html($('[b-view="' + name + '"]').html() + source)
                    return source;
                }
            })
        }
    });
    /**
     * Load all templates on the page: each [b-view="name as alias"] element
     * is compacted, stored in the depot, then emptied.
     */
    $('[b-view]').each(function () {
        var $view = $(this);
        var attr = $view.attr('b-view').split(' as ');
        // The template's data object defaults to the alias "data".
        attr[1] = attr[1] || 'data';
        var html = $.cut_html($view.html());
        $.depot.set(attr[0], {
            key: attr[0],
            alias: attr[1],
            html: html,
        });
        $view.empty();
    });
});
#!/usr/bin/env bash
# Launch the rate-my-area Play application, wiring secrets in from the
# environment. Each value is quoted to prevent word-splitting if a secret
# contains spaces; exec replaces this shell so signals reach the JVM directly.
exec /usr/src/rate-my-area/rate-my-area-1.0-SNAPSHOT/bin/rate-my-area \
	-Dpidfile.path=/dev/null \
	-Ds3-access-key="$S3_ACCESS_KEY" \
	-Ds3-secret-key="$S3_SECRET_KEY" \
	-Dplay.http.secret.key="$PLAY_SECRET_KEY" \
	-Dslick.dbs.default.db.url="$DB_URL" \
	-Dslick.dbs.default.db.user="$DB_USER" \
	-Dslick.dbs.default.db.password="$DB_PASSWORD"
|
/**
 * Mutable user contract exposing name, age and email accessors.
 * Interface members are implicitly public, so the redundant modifiers
 * were dropped.
 */
public interface User {
    /** Sets the user's display name. */
    void setName(String name);

    /** Returns the user's display name. */
    String getName();

    /** Sets the user's age in years. */
    void setAge(int age);

    /** Returns the user's age in years. */
    int getAge();

    /** Sets the user's email address. */
    void setEmail(String email);

    /** Returns the user's email address. */
    String getEmail();
}
<filename>tests/xjwt_tests.h
/**
* Copyright 2017, ScaleFT Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _xjwt_tests_h_
#define _xjwt_tests_h_

#include <stdarg.h>
#include <stddef.h>
#include <setjmp.h>
#include <stdio.h>

#include "cmockery.h"

/*
 * Test-module boilerplate.  A test file brackets its cases with
 * XJWT_TESTS_START(module) / XJWT_TESTS_ENTRY(case) / XJWT_TESTS_END().
 * When XJWT_TEST_ALL is defined the file expands to an xjwt_tests_<module>()
 * function for a combined runner; otherwise it expands to its own main().
 */
#ifdef XJWT_TEST_ALL
#define XJWT_TESTS_START(module) \
  int xjwt_tests_##module() { \
    int rv = 0; \
    const UnitTest tests[] = {
#define XJWT_TESTS_ENTRY(entry) unit_test(entry),
#define XJWT_TESTS_END() \
  } \
  ; \
  rv = run_tests(tests); \
  return rv; \
  }
#else
#define XJWT_TESTS_START(module) \
  int main(int argc, char *argv[]) { \
    const UnitTest tests[] = {
#define XJWT_TESTS_ENTRY(entry) unit_test(entry),
#define XJWT_TESTS_END() \
  } \
  ; \
  xjwt_tests_setup(); \
  return run_tests(tests); \
  }
#endif

/* Route assertions through cmockery's mock_assert so tests can intercept
 * expected failures. */
#undef XJWT_ASSERT
#define XJWT_ASSERT(expression) \
  mock_assert((int)(expression), #expression, __FILE__, __LINE__);

/* Assert an xjwt_error_t-returning expression succeeds; prints err->msg
 * before the (mock) assertion fires on failure. */
#define XJWT_NO_ERROR(expression) \
  do { \
    xjwt_error_t *xjwt__xx__err = NULL; \
    xjwt__xx__err = (expression); \
    if (xjwt__xx__err != XJWT_SUCCESS) { \
      fprintf(stderr, "xjwt_error: %s\n", xjwt__xx__err->msg); \
    } \
    mock_assert(xjwt__xx__err == XJWT_SUCCESS, #expression, __FILE__, \
                __LINE__); \
  } while (0)

/* Assert the expression fails.  NOTE(review): on the unexpected-success path
 * this prints xjwt__xx__err->msg while the value equals XJWT_SUCCESS — if
 * XJWT_SUCCESS is NULL that fprintf dereferences NULL; confirm upstream. */
#define XJWT_FAIL(expression) \
  do { \
    xjwt_error_t *xjwt__xx__err = NULL; \
    xjwt__xx__err = (expression); \
    if (xjwt__xx__err == XJWT_SUCCESS) { \
      fprintf(stderr, "xjwt_error: %s\n", xjwt__xx__err->msg); \
    } \
    mock_assert(xjwt__xx__err != XJWT_SUCCESS, #expression, __FILE__, \
                __LINE__); \
    xjwt_error_destroy(xjwt__xx__err); \
  } while (0)

/* Forward-declare the per-module test entry points used by the runner. */
#define XJWT_TEST_MODULE(name) int xjwt_tests_##name();
XJWT_TEST_MODULE(keyset)
XJWT_TEST_MODULE(split)
XJWT_TEST_MODULE(verify)

#define PATHMAX 1024

/* Paths populated by xjwt_tests_setup(); used to locate fixture files. */
extern char executable_path[PATHMAX];
extern char testdir_path[PATHMAX];

void xjwt_tests_setup();
/* Load fixture file fname; the implementation supplies the buffer and its
 * length via *outbuf / *outlen. */
void xjwt_load_fixture(const char *fname, char **outbuf, size_t *outlen);

#endif
|
<gh_stars>0
package sandbox
object Main extends App {
  println("Hello " + "Cats!")

  // Import the implicit instances defined with `implicit` below.
  // Imported inside this object rather than at global level.
  import JsonWriterInstances._
  import JsonSyntax._

  val json: Json = Json.toJson(Person("tomoya", "<EMAIL>"))

  val person = Person("tomoya", "<EMAIL>")
  person.toJson

  // Some(null) exercises the Option syntax together with the Null writer.
  val personOpt = Some(null)
  println(personOpt.toJson)
}
// JSON AST: a closed set of value shapes.
sealed trait Json
final case class JsObject(get: Map[String, String]) extends Json
final case class JsString(get: String) extends Json
final case class JsNumber(get: Int) extends Json
case object JsNull extends Json

// Type class: defining a JsonWriter[A] instance lets any A — whatever Json
// subclass it maps to — be serialised.
trait JsonWriter[A] {
  def write(value: A): Json
}

final case class Person(name: String, email: String)
// Instances of the JsonWriter type class defined above.
// Wrapping them in an object means a single import (JsonWriterInstances._)
// brings every implicit into scope.
object JsonWriterInstances {
  // JsonWriter has a single abstract method, so each instance can be
  // expressed as a lambda (SAM conversion).
  implicit val stringWriter: JsonWriter[String] =
    (value: String) => JsString(value)

  implicit val personWriter: JsonWriter[Person] =
    (value: Person) =>
      JsObject(
        Map(
          "name" -> value.name,
          "email" -> value.email
        )
      )

  implicit val nullWriter: JsonWriter[Null] =
    (_: Null) => JsNull
}
object Json {
  // The JsonWriter[A] in implicit scope performs the conversion; the context
  // bound desugars to the same implicit parameter the original declared.
  def toJson[A: JsonWriter](value: A): Json =
    implicitly[JsonWriter[A]].write(value)
}
object JsonSyntax {
  // Extension-method syntax: importing JsonSyntax._ adds .toJson to any A
  // that has a JsonWriter instance in scope.
  implicit class RichJson[A](value: A) {
    def toJson(implicit w: JsonWriter[A]) = {
      w.write(value)
    }
  }

  // Option variant: Some(a) is written with A's writer, None becomes JsNull.
  implicit class RichJson2[A](option: Option[A]) {
    def toJson(implicit w: JsonWriter[A]) = {
      option match {
        case Some(aValue) => w.write(aValue)
        case None => JsNull
      }
    }
  }
}
|
#!/usr/bin/env bash
# _sash_choose_a_directory(dir: Option<String>, use_new: Option<(0 || 1)>) -> String
#
# Modifies Variables: None
#
# Lists all directories in the dir you pass in, and lets the user choose one.
# Thus allowing a user to choose a directory.
#
# Note "use_new" has the unfortunate side effect of assuming no one has a
# folder called "New" in the directory you're looking in because that's
# what we return when there is a new option.
_sash_choose_a_directory() {
  local dir="${1:-.}"
  local use_new="${2:-1}"
  local option
  local array_of_lines

  # One directory per line; mapfile avoids the word-splitting bug the old
  # unquoted $(...) array assignment had with names containing spaces.
  # The greps drop the search root itself from the listing.
  mapfile -t array_of_lines < <(find "$dir" -maxdepth 1 -type d | grep -v "^\.$" | grep -v "^$dir$")

  # NOTE(review): this branch looks inverted relative to the doc comment
  # (use_new=1 is the default yet suppresses the "New" entry) — preserved
  # as-is; confirm against callers before changing.
  if [[ "$use_new" -eq "1" ]]; then
    option="$(_sash_choose_from_options "${array_of_lines[@]}")"
  else
    option="$(_sash_choose_from_options "${array_of_lines[@]}" New)"
  fi
  echo "$option"
}
echo " _____ _ ";
echo " | __ \ | | ";
echo " | | | | _____ _| |_ ___ _ __ ";
echo " | | | |/ _ \ \/ / __/ _ \ '__| ";
echo " | |__| | __/> <| || __/ | ";
echo " |_____/ \___/_/\_\\__\___|_| _ _ ";
echo " |_ _| | | | | (_) ";
echo " | | _ __ __| |_ _ ___| |_ _ __ _ ___ ___ ";
echo " | | | '_ \ / _\` | | | / __| __| '__| |/ _ \/ __|";
echo " _| |_| | | | (_| | |_| \__ \ |_| | | | __/\__ \ ";
echo " |_____|_| |_|\__,_|\__,_|___/\__|_| |_|\___||___/ ";
echo " ";
echo " ";
echo " "
printf "Welcome to GoPiGo Browser Streaming Bot Installer.\nPlease ensure internet connectivity before running this script.\n
NOTE: Raspberry Pi wil reboot after completion."
echo "Must be running as Root user"
echo " "
echo "Press ENTER to begin..."
read
echo " "
echo "Check for internet connectivity..."
echo "=================================="
wget -q --tries=2 --timeout=20 http://google.com
if [ $? -eq 0 ]; then
	echo "Connected"
else
	echo "Unable to Connect, try again !!!"
	# Exit non-zero so callers can detect failure (the original used exit 0).
	exit 1
fi

# Install mjpeg streamer http://blog.miguelgrinberg.com/post/how-to-build-and-run-mjpg-streamer-on-the-raspberry-pi
sudo apt-get update
# -y keeps apt-get non-interactive so this unattended script cannot stall.
sudo apt-get install -y libjpeg8-dev imagemagick libv4l-dev
sudo ln -s /usr/include/linux/videodev2.h /usr/include/linux/videodev.h
wget http://sourceforge.net/code-snapshots/svn/m/mj/mjpg-streamer/code/mjpg-streamer-code-182.zip
unzip mjpg-streamer-code-182.zip
cd mjpg-streamer-code-182/mjpg-streamer
make mjpg_streamer input_file.so output_http.so
sudo cp mjpg_streamer /usr/local/bin
sudo cp output_http.so input_file.so /usr/local/lib/
sudo cp -R www /usr/local/www
mkdir /tmp/stream
cd ../../
# (dropped stray "rm -rf mjpg-streamer-182": that directory is never created)
rm -rf mjpg-streamer-code-182
# index.html is the page saved by the earlier connectivity wget.
rm -f index.html
rm -f mjpg-streamer-code-182.zip

# Build the Raspberry Pi userland libraries needed by the camera streamer.
git clone https://github.com/DexterInd/userland.git
cd userland
mkdir build
cd build
sudo apt-get install -y gcc build-essential cmake vlc rpi-update
cmake ../
make
sudo make install
cd ../../
rm -R userland

# Build and install the Dexter Industries camera streamer.
git clone https://bitbucket.org/DexterIndustries/raspberry_pi_camera_streamer.git
cd raspberry_pi_camera_streamer
mkdir build
cd build
cmake ../
make
sudo make install
cd ../../
rm -R raspberry_pi_camera_streamer

# Python web/streaming dependencies.
sudo pip install tornado
git clone https://github.com/DexterInd/sockjs-tornado
cd sockjs-tornado
sudo python setup.py install
cd ..
rm -R sockjs-tornado

echo " "
echo "Restarting"
echo "3"
sleep 1
echo "2"
sleep 1
echo "1"
sleep 1
shutdown -r now
|
#!/bin/bash -e

# pi-gen stage script: point apt at the Raspbian/Raspberry Pi mirrors and
# bring the image up to date inside the chroot.

install -m 644 files/sources.list "${ROOTFS_DIR}/etc/apt/"
install -m 644 files/raspi.list "${ROOTFS_DIR}/etc/apt/sources.list.d/"

# Substitute the release-codename placeholder in both apt source lists.
sed -i "s/RELEASE/${RELEASE}/g" "${ROOTFS_DIR}/etc/apt/sources.list"
sed -i "s/RELEASE/${RELEASE}/g" "${ROOTFS_DIR}/etc/apt/sources.list.d/raspi.list"

# Install (or remove) the apt proxy drop-in depending on APT_PROXY.
if [ -n "$APT_PROXY" ]; then
	install -m 644 files/51cache "${ROOTFS_DIR}/etc/apt/apt.conf.d/51cache"
	sed "${ROOTFS_DIR}/etc/apt/apt.conf.d/51cache" -i -e "s|APT_PROXY|${APT_PROXY}|"
else
	rm -f "${ROOTFS_DIR}/etc/apt/apt.conf.d/51cache"
fi

# Trust the Raspberry Pi archive signing key inside the image.
on_chroot apt-key add - < files/raspberrypi.gpg.key

on_chroot << EOF
# is it necessary ?
# dpkg --add-architecture armhf
apt-get update
apt-get dist-upgrade -y
EOF
|
<filename>acmicpc/5565/5565.py
# BOJ 5565: the first line is the grand total of ten receipts; the next nine
# lines are the visible receipts. The missing receipt is the difference.
total = int(input())
total -= sum(int(input()) for _ in range(9))
print(total)
|
#!/bin/bash
# Thin launcher for the girder CLI. PYTHON_BIN may name an alternative
# interpreter and may intentionally carry extra arguments, hence no quotes
# around $BIN; exec hands the process over so signals reach girder directly.
BIN=${PYTHON_BIN:-python}
exec $BIN -m girder "$@"
|
<filename>SMS/src/SMS/wwwroot/lib/jquery/src/effects/animatedSelector.min.js
// Generated, minified jQuery AMD module (the :animated selector filter) with
// its source-map pointer — do not edit by hand; change the unminified source.
define(["../core","../selector","../effects"],function(e){e.expr.filters.animated=function(n){return e.grep(e.timers,function(e){return n===e.elem}).length}});
//# sourceMappingURL=animatedSelector.min.js.map
|
#!/bin/sh

# Resolve the real location of this script (it may be invoked via a symlink)
# so the admin-cli jar can be found relative to it.
case "$(uname)" in
	CYGWIN*)
		# Fixed: the original 'CFILE = ...' (spaces around =) tried to run a
		# command named CFILE instead of assigning the variable.
		CFILE=$(cygpath "$0")
		RESOLVED_NAME=$(readlink -f "$CFILE")
		;;
	Darwin*)
		# macOS readlink has no -f; this follows a single symlink level only.
		RESOLVED_NAME=$(readlink "$0")
		;;
	FreeBSD)
		RESOLVED_NAME=$(readlink -f "$0")
		;;
	Linux)
		RESOLVED_NAME=$(readlink -f "$0")
		;;
esac

# Fall back to $0 when the script was not a symlink (readlink printed nothing).
if [ "x$RESOLVED_NAME" = "x" ]; then
	RESOLVED_NAME="$0"
fi

DIRNAME=$(dirname "$RESOLVED_NAME")

# Uncomment out these lines if you are integrating with `kcinit`
#if [ "$1" = "config" ]; then
#  java $KC_OPTS -cp $DIRNAME/client/keycloak-admin-cli-9.0.3.jar org.keycloak.client.admin.cli.KcAdmMain "$@"
#else
#  java $KC_OPTS -cp $DIRNAME/client/keycloak-admin-cli-9.0.3.jar org.keycloak.client.admin.cli.KcAdmMain "$@" --noconfig --token $(kcinit token admin-cli) --server $(kcinit show server)
#fi

# Remove the next line if you have enabled kcinit
# ($KC_OPTS stays unquoted on purpose: it may hold several JVM flags.)
java $KC_OPTS -cp "$DIRNAME/client/keycloak-admin-cli-9.0.3.jar" org.keycloak.client.admin.cli.KcAdmMain "$@"
|
#! /bin/sh

# Abort immediately if either invocation fails.
set -e

# Smoke-test looking for checkpoint races with and without data as part of
# running "make check".
# First run: default (no data); second run (-d): with data.
$TEST_WRAPPER ./test_wt3363_checkpoint_op_races
$TEST_WRAPPER ./test_wt3363_checkpoint_op_races -d
|
import fs from 'fs'
import path from 'path'
import * as Utils from './utils'
import { DEFAULT_CONFIG, CHROMIUM, FIREFOX } from './constants'
import type { BrowserType, JestPlaywrightConfig } from '../types/global'
// Helpers under test, pulled off the Utils namespace once for brevity.
const {
  readConfig,
  getBrowserType,
  getDeviceType,
  checkBrowserEnv,
  checkDeviceEnv,
  getPlaywrightInstance,
  getDisplayName,
  getSkipFlag,
  getBrowserOptions,
} = Utils

// Reset the module registry so each test's jest.mock/doMock is isolated.
beforeEach(() => {
  jest.resetModules()
})

// readConfig merges jest-playwright.config.{js,cjs} (virtual-mocked here)
// over the package defaults, optionally overridden by a second argument.
describe('readConfig', () => {
  it('should return the default configuration if there was no separate configuration specified', async () => {
    jest.mock(
      path.join(__dirname, '..', 'jest-playwright.config.js'),
      () => ({}),
      { virtual: true },
    )
    const config = await readConfig()
    expect(config).toMatchObject(DEFAULT_CONFIG)
  })
  it('should overwrite with a custom configuration', async () => {
    const configObject = {
      launchOptions: {
        headless: true,
      },
      browsers: ['chromium'],
      contextOptions: {
        viewport: {
          width: 800,
          height: 640,
        },
        ignoreHTTPSErrors: true,
      },
    }
    jest.mock(
      path.join(__dirname, '..', 'jest-playwright.config.js'),
      () => configObject,
      { virtual: true },
    )
    const config = await readConfig()
    expect(config).toMatchObject(configObject)
  })
  it('should overwrite config if the second param is passed', async () => {
    const configObject = {
      launchOptions: {
        headless: true,
      },
      browsers: ['chromium'],
    }
    jest.mock(
      path.join(__dirname, '..', 'jest-playwright.config.js'),
      () => ({
        launchOptions: {
          headless: true,
        },
        browsers: ['webkit'],
      }),
      { virtual: true },
    )
    const config = await readConfig(
      process.cwd(),
      configObject as JestPlaywrightConfig,
    )
    expect(config).toMatchObject(configObject)
  })
  it('should overwrite with a custom configuration and spread the "launchOptions" and "contextOptions" setting', async () => {
    const configObject = {
      launchOptions: {
        headless: true,
      },
      contextOptions: {
        foo: true,
      },
    }
    jest.mock(
      path.join(__dirname, '..', 'jest-playwright.config.js'),
      () => configObject,
      { virtual: true },
    )
    const config = await readConfig()
    const expectedConfig = {
      ...configObject,
      browsers: ['chromium'],
      exitOnPageError: true,
    }
    expect(config).toMatchObject(expectedConfig)
  })
  it('should throw error if JEST_PLAYWRIGHT_CONFIG is defined but does not exist', async () => {
    process.env.JEST_PLAYWRIGHT_CONFIG = 'unreached.js'
    let error
    try {
      await readConfig()
    } catch (e) {
      error = e
    }
    expect(error).toBeTruthy()
    delete process.env.JEST_PLAYWRIGHT_CONFIG
  })
  it('should check cjs config if npm_package_type is module', async () => {
    process.env.npm_package_type = 'module'
    const configPath = path.join(__dirname, '..', 'jest-playwright.config.cjs')
    const configObject = {
      browsers: ['webkit'],
      launchOptions: {
        headless: true,
      },
      contextOptions: {
        foo: true,
      },
    }
    // The .cjs file must actually exist for readConfig to pick it up; the
    // contents come from the virtual mock below.
    fs.writeFileSync(configPath, '')
    jest.mock(
      path.join(__dirname, '..', 'jest-playwright.config.cjs'),
      () => configObject,
      {
        virtual: true,
      },
    )
    const expectedConfig = {
      ...configObject,
      exitOnPageError: true,
    }
    const config = await readConfig()
    expect(config).toMatchObject(expectedConfig)
    delete process.env.npm_package_type
    fs.unlinkSync(configPath)
  })
})

// getDisplayName formats the "browser: X device: Y" label for test output.
describe('getDisplayName', () => {
  it('should return right display name for passed browser', () => {
    expect(getDisplayName('chromium', null)).toBe('browser: chromium')
  })
  it('should return right display name for passed browser and device', () => {
    expect(getDisplayName('chromium', 'iPhone 6')).toBe(
      'browser: chromium device: iPhone 6',
    )
  })
  it('should return right display name for passed browser and custom device', () => {
    const customDevice = {
      name: 'Custom device',
      viewport: { width: 1920, height: 1080 },
      deviceScaleFactor: 1,
      isMobile: false,
      hasTouch: false,
    }
    expect(getDisplayName('chromium', customDevice)).toBe(
      'browser: chromium device: Custom device',
    )
  })
})

// The BROWSER env var takes precedence over the argument.
describe('getBrowserType', () => {
  it('should return "chromium" as default', async () => {
    const browserType = getBrowserType()
    expect(browserType).toBe(CHROMIUM)
  })
  it('should return BROWSER if defined', async () => {
    process.env.BROWSER = 'webkit'
    const browserType = getBrowserType('firefox')
    expect(browserType).toBe(process.env.BROWSER)
    delete process.env.BROWSER
  })
})

// Per-browser options override the root launch options.
describe('getBrowserOptions', () => {
  it('should return undefined for empty options', async () => {
    const options = getBrowserOptions(CHROMIUM)
    expect(options).toBe(undefined)
  })
  it('should return root options', async () => {
    const launchOptions = { headless: false }
    const options = getBrowserOptions(CHROMIUM, launchOptions)
    expect(options).toBe(launchOptions)
  })
  it('should return options for defined browser', async () => {
    const launchOptions = { headless: false, chromium: { headless: true } }
    const options = getBrowserOptions(CHROMIUM, launchOptions)
    expect(options).toStrictEqual({ headless: true })
  })
  it('should return root options for other browser', async () => {
    const launchOptions = { headless: false, chromium: { headless: true } }
    const options = getBrowserOptions(FIREFOX, launchOptions)
    expect(options).toStrictEqual({ headless: false })
  })
})

describe('getDeviceType', () => {
  it('should return "null" when there is no device', async () => {
    const device = getDeviceType(null)
    expect(device).toBe(null)
  })
  // NOTE(review): the description says BROWSER but the test exercises the
  // DEVICE env var — likely a copy-paste in the test name; confirm upstream.
  it('should return BROWSER if defined', async () => {
    process.env.DEVICE = 'iPhone 11'
    const device = getDeviceType(null)
    expect(device).toBe(process.env.DEVICE)
    delete process.env.DEVICE
  })
})

describe('checkBrowserEnv', () => {
  it('should throw Error with unknown type', async () => {
    const browserType = getBrowserType('unknown' as BrowserType)
    expect(() => checkBrowserEnv(browserType)).toThrow()
  })
})

describe('checkDeviceEnv', () => {
  it('should throw Error with unknown type', async () => {
    const device = 'unknown'
    const devices = ['iPhone 11', 'Pixel 2', 'Nexus 4']
    expect(() => checkDeviceEnv(device, devices)).toThrow()
  })
})

// getSkipFlag decides whether a browser/device combination should be skipped.
describe('getSkipFlag', () => {
  it('should return true if skipOption.browsers includes browserName', async () => {
    const skipOptions = { browsers: [CHROMIUM as BrowserType] }
    const skipFlag = getSkipFlag(skipOptions, CHROMIUM, null)
    expect(skipFlag).toBe(true)
  })
  it('should return false if skipOption.browsers does not include browserName', async () => {
    const skipOptions = { browsers: [CHROMIUM as BrowserType] }
    const skipFlag = getSkipFlag(skipOptions, FIREFOX, null)
    expect(skipFlag).toBe(false)
  })
  // NOTE(review): the next two test names appear swapped — this one uses a
  // RegExp while the following one uses an array; confirm upstream.
  it('should return true if skipOption.browser includes browserName & skipOption.devices includes deviceName', async () => {
    const skipOptions = {
      browsers: [CHROMIUM as BrowserType],
      devices: /Pixel/,
    }
    const skipFlag = getSkipFlag(skipOptions, CHROMIUM, 'Pixel 2')
    expect(skipFlag).toBe(true)
  })
  it('should return true if skipOption.devices is RegExp and match to deviceName', async () => {
    const skipOptions = {
      browsers: [CHROMIUM as BrowserType],
      devices: ['Pixel 2'],
    }
    const skipFlag = getSkipFlag(skipOptions, CHROMIUM, 'Pixel 2')
    expect(skipFlag).toBe(true)
  })
  it('should return false if skipOption.browser does not include browserName & skipOption.devices includes deviceName', async () => {
    const skipOptions = {
      browsers: [CHROMIUM as BrowserType],
      devices: ['Pixel 2'],
    }
    const skipFlag = getSkipFlag(skipOptions, FIREFOX, 'Pixel 2')
    expect(skipFlag).toBe(false)
  })
  it('should return false if skipOption.browser does not includes browserName & skipOption.devices does not include deviceName', async () => {
    const skipOptions = {
      browsers: [CHROMIUM as BrowserType],
      devices: ['Pixel 2'],
    }
    const skipFlag = getSkipFlag(skipOptions, FIREFOX, null)
    expect(skipFlag).toBe(false)
  })
})

// getPlaywrightInstance resolves the per-browser playwright package.
describe('getPlaywrightInstance', () => {
  it('should return specified instance from playwright package', async () => {
    jest.doMock('playwright', () => ({
      firefox: 'firefox',
      chromium: 'chromium',
    }))
    const { instance } = getPlaywrightInstance('firefox')
    expect(instance).toEqual('firefox')
  })
  it('should return specified instance from specified playwright package', () => {
    jest.doMock('playwright-chromium', () => ({
      chromium: 'chromium',
    }))
    const { instance } = getPlaywrightInstance('chromium')
    expect(instance).toEqual('chromium')
  })
  it('should throw error when playwright package is not provided', () => {
    jest.doMock('playwright', () => ({
      chromium: 'chromium',
    }))
    const getMissedPlaywrightInstance = () => getPlaywrightInstance('firefox')
    expect(getMissedPlaywrightInstance).toThrowError(
      'jest-playwright-preset: Cannot find playwright package to use firefox',
    )
  })
})
|
<filename>veriloggen/thread/__init__.py
from __future__ import absolute_import
from __future__ import print_function
from .thread import reset, embed_thread, Thread, TmpThread
from .pool import ThreadPool, to_thread_pool
from .stream import Stream, TmpStream
from .ttypes import __intrinsics__
from .ttypes import *
from .ram import *
from .fifo import *
from .axi import *
from . import fixed
|
// Summarise enrolment across study programs: the grand total plus a
// per-program (nama_prodi -> jumlah_mahasiswa) breakdown.
function analyzeStudentDistribution(programs) {
  let totalStudents = 0;
  const distribution = {};
  programs.forEach((program) => {
    totalStudents += program.jumlah_mahasiswa;
    distribution[program.nama_prodi] = program.jumlah_mahasiswa;
  });
  return {
    total_students: totalStudents,
    distribution: distribution,
  };
}

// Example usage
const programs = [
  { nama_prodi: "Computer Science", jumlah_mahasiswa: 80 },
  { nama_prodi: "Electrical Engineering", jumlah_mahasiswa: 60 },
  { nama_prodi: "Mechanical Engineering", jumlah_mahasiswa: 40 },
  { nama_prodi: "Civil Engineering", jumlah_mahasiswa: 70 }
];
console.log(analyzeStudentDistribution(programs));
def longest_common_substring(string1, string2):
    """Return the longest contiguous substring shared by both strings.

    Classic dynamic-programming table: lengths[i + 1][j + 1] holds the length
    of the common suffix ending at string1[i] / string2[j]. Ties keep the
    first (leftmost in string1) occurrence, since only a strictly longer run
    replaces the current best.
    """
    rows, cols = len(string1), len(string2)
    lengths = [[0] * (cols + 1) for _ in range(rows + 1)]
    best_len = 0
    best_end = 0  # index one past the end of the best match in string1
    for i, ch1 in enumerate(string1):
        for j, ch2 in enumerate(string2):
            if ch1 == ch2:
                run = lengths[i][j] + 1
                lengths[i + 1][j + 1] = run
                if run > best_len:
                    best_len = run
                    best_end = i + 1
    return string1[best_end - best_len:best_end]


string1 = "abcdjfgh"
string2 = "abcdjfeeee"
print(longest_common_substring(string1, string2))
<filename>src/source_controller/coreservice/service/service_initfunc.go<gh_stars>1-10
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package service
import (
"net/http"
)
// initModelClassification registers the CRUD routes for model classifications.
func (s *coreService) initModelClassification() {
	s.addAction(http.MethodPost, "/create/model/classification", s.CreateOneModelClassification, nil)
	s.addAction(http.MethodPost, "/createmany/model/classification", s.CreateManyModelClassification, nil)
	s.addAction(http.MethodPost, "/setmany/model/classification", s.SetManyModelClassification, nil)
	s.addAction(http.MethodPost, "/set/model/classification", s.SetOneModelClassification, nil)
	s.addAction(http.MethodPut, "/update/model/classification", s.UpdateModelClassification, nil)
	s.addAction(http.MethodDelete, "/delete/model/classification", s.DeleteModelClassification, nil)
	s.addAction(http.MethodPost, "/read/model/classification", s.SearchModelClassification, nil)
}
// initModel registers the routes for models themselves, their attribute
// groups, and their attributes.
func (s *coreService) initModel() {
	s.addAction(http.MethodPost, "/create/model", s.CreateModel, nil)
	s.addAction(http.MethodPost, "/set/model", s.SetModel, nil)
	s.addAction(http.MethodPut, "/update/model", s.UpdateModel, nil)
	s.addAction(http.MethodDelete, "/delete/model", s.DeleteModel, nil)
	s.addAction(http.MethodDelete, "/delete/model/{id}/cascade", s.CascadeDeleteModel, nil)
	s.addAction(http.MethodPost, "/read/model", s.SearchModel, nil)
	s.addAction(http.MethodGet, "/read/model/statistics", s.GetModelStatistics, nil)

	// init model attribute groups methods
	s.addAction(http.MethodPost, "/create/model/{bk_obj_id}/group", s.CreateModelAttributeGroup, nil)
	s.addAction(http.MethodPost, "/set/model/{bk_obj_id}/group", s.SetModelAttributeGroup, nil)
	s.addAction(http.MethodPut, "/update/model/{bk_obj_id}/group", s.UpdateModelAttributeGroup, nil)
	s.addAction(http.MethodPut, "/update/model/group", s.UpdateModelAttributeGroupByCondition, nil)
	s.addAction(http.MethodDelete, "/delete/model/{bk_obj_id}/group", s.DeleteModelAttributeGroup, nil)
	s.addAction(http.MethodDelete, "/delete/model/group", s.DeleteModelAttributeGroupByCondition, nil)
	s.addAction(http.MethodPost, "/read/model/{bk_obj_id}/group", s.SearchModelAttributeGroup, nil)
	s.addAction(http.MethodPost, "/read/model/group", s.SearchModelAttributeGroupByCondition, nil)

	// init attributes methods
	s.addAction(http.MethodPost, "/create/model/{bk_obj_id}/attributes", s.CreateModelAttributes, nil)
	s.addAction(http.MethodPost, "/set/model/{bk_obj_id}/attributes", s.SetModelAttributes, nil)
	s.addAction(http.MethodPut, "/update/model/{bk_obj_id}/attributes", s.UpdateModelAttributes, nil)
	s.addAction(http.MethodPost, "/update/model/{bk_obj_id}/attributes/index", s.UpdateModelAttributesIndex, nil)
	s.addAction(http.MethodPut, "/update/model/attributes", s.UpdateModelAttributesByCondition, nil)
	s.addAction(http.MethodDelete, "/delete/model/{bk_obj_id}/attributes", s.DeleteModelAttribute, nil)
	s.addAction(http.MethodPost, "/read/model/{bk_obj_id}/attributes", s.SearchModelAttributes, nil)
	s.addAction(http.MethodPost, "/read/model/attributes", s.SearchModelAttributesByCondition, nil)
}
// initAttrUnique registers routes for model attribute uniqueness rules.
func (s *coreService) initAttrUnique() {
	s.addAction(http.MethodPost, "/read/model/attributes/unique", s.SearchModelAttrUnique, nil)
	s.addAction(http.MethodPost, "/create/model/{bk_obj_id}/attributes/unique", s.CreateModelAttrUnique, nil)
	s.addAction(http.MethodPut, "/update/model/{bk_obj_id}/attributes/unique/{id}", s.UpdateModelAttrUnique, nil)
	s.addAction(http.MethodDelete, "/delete/model/{bk_obj_id}/attributes/unique/{id}", s.DeleteModelAttrUnique, nil)
}
// initModelInstances registers CRUD routes for instances of a model.
func (s *coreService) initModelInstances() {
	s.addAction(http.MethodPost, "/create/model/{bk_obj_id}/instance", s.CreateOneModelInstance, nil)
	s.addAction(http.MethodPost, "/createmany/model/{bk_obj_id}/instance", s.CreateManyModelInstances, nil)
	s.addAction(http.MethodPut, "/update/model/{bk_obj_id}/instance", s.UpdateModelInstances, nil)
	s.addAction(http.MethodPost, "/read/model/{bk_obj_id}/instances", s.SearchModelInstances, nil)
	s.addAction(http.MethodDelete, "/delete/model/{bk_obj_id}/instance", s.DeleteModelInstances, nil)
	s.addAction(http.MethodDelete, "/delete/model/{bk_obj_id}/instance/cascade", s.CascadeDeleteModelInstances, nil)
}
// initAssociationKind registers CRUD routes for association kinds.
func (s *coreService) initAssociationKind() {
	s.addAction(http.MethodPost, "/create/associationkind", s.CreateOneAssociationKind, nil)
	s.addAction(http.MethodPost, "/createmany/associationkind", s.CreateManyAssociationKind, nil)
	s.addAction(http.MethodPost, "/set/associationkind", s.SetOneAssociationKind, nil)
	s.addAction(http.MethodPost, "/setmany/associationkind", s.SetManyAssociationKind, nil)
	s.addAction(http.MethodPut, "/update/associationkind", s.UpdateAssociationKind, nil)
	s.addAction(http.MethodDelete, "/delete/associationkind", s.DeleteAssociationKind, nil)
	s.addAction(http.MethodDelete, "/delete/associationkind/cascade", s.CascadeDeleteAssociationKind, nil)
	s.addAction(http.MethodPost, "/read/associationkind", s.SearchAssociationKind, nil)
}
// initModelAssociation registers CRUD routes for model-to-model associations.
func (s *coreService) initModelAssociation() {
	s.addAction(http.MethodPost, "/create/modelassociation", s.CreateModelAssociation, nil)
	s.addAction(http.MethodPost, "/create/mainlinemodelassociation", s.CreateMainlineModelAssociation, nil)
	s.addAction(http.MethodPost, "/set/modelassociation", s.SetModelAssociation, nil)
	s.addAction(http.MethodPut, "/update/modelassociation", s.UpdateModelAssociation, nil)
	s.addAction(http.MethodPost, "/read/modelassociation", s.SearchModelAssociation, nil)
	s.addAction(http.MethodDelete, "/delete/modelassociation", s.DeleteModelAssociation, nil)
	// NOTE(review): the cascade route is wired to the same handler as the
	// plain delete (unlike association kinds, which have a dedicated
	// CascadeDelete handler) — possible copy-paste; confirm upstream.
	s.addAction(http.MethodDelete, "/delete/modelassociation/cascade", s.DeleteModelAssociation, nil)
}
// initInstanceAssociation registers CRUD routes for associations between
// model instances (single and batch create, search, delete).
func (s *coreService) initInstanceAssociation() {
s.addAction(http.MethodPost, "/create/instanceassociation", s.CreateOneInstanceAssociation, nil)
s.addAction(http.MethodPost, "/createmany/instanceassociation", s.CreateManyInstanceAssociation, nil)
s.addAction(http.MethodPost, "/read/instanceassociation", s.SearchInstanceAssociation, nil)
s.addAction(http.MethodDelete, "/delete/instanceassociation", s.DeleteInstanceAssociation, nil)
}
// initMainline registers read-only routes for the mainline model topology and
// the per-business instance topology.
func (s *coreService) initMainline() {
// add handler for model topo and business topo
s.addAction(http.MethodPost, "/read/mainline/model", s.SearchMainlineModelTopo, nil)
s.addAction(http.MethodPost, "/read/mainline/instance/{bk_biz_id}", s.SearchMainlineInstanceTopo, nil)
}
// host registers routes for host lifecycle and module relations, host locks,
// user APIs (saved queries), per-user customizations, and host favourites.
func (s *coreService) host() {
s.addAction(http.MethodPost, "/set/module/host/relation/inner/module", s.TransferHostToInnerModule, nil)
s.addAction(http.MethodPost, "/set/module/host/relation/module", s.TransferHostToNormalModule, nil)
s.addAction(http.MethodPost, "/set/module/host/relation/cross/business", s.TransferHostToAnotherBusiness, nil)
s.addAction(http.MethodDelete, "/delete/host", s.DeleteHostFromSystem, nil)
s.addAction(http.MethodDelete, "/delete/host/host_module_relations", s.RemoveFromModule, nil)
s.addAction(http.MethodPost, "/read/module/host/relation", s.GetHostModuleRelation, nil)
// NOTE(review): "indentifier" is a typo in the route path ("identifier");
// kept as-is because clients depend on the exact URL.
s.addAction(http.MethodPost, "/read/host/indentifier", s.HostIdentifier, nil)
s.addAction(http.MethodGet, "/find/host/{bk_host_id}", s.GetHostByID, nil)
s.addAction(http.MethodPost, "/findmany/hosts/search", s.GetHosts, nil)
s.addAction(http.MethodGet, "/find/host/snapshot/{bk_host_id}", s.GetHostSnap, nil)
// host lock management
s.addAction(http.MethodPost, "/find/host/lock", s.LockHost, nil)
s.addAction(http.MethodDelete, "/delete/host/lock", s.UnlockHost, nil)
s.addAction(http.MethodPost, "/findmany/host/lock/search", s.QueryLockHost, nil)
// user API (saved host queries)
s.addAction(http.MethodPost, "/create/userapi", s.AddUserConfig, nil)
s.addAction(http.MethodPut, "/update/userapi/{bk_biz_id}/{id}", s.UpdateUserConfig, nil)
s.addAction(http.MethodDelete, "/delete/userapi/{bk_biz_id}/{id}", s.DeleteUserConfig, nil)
s.addAction(http.MethodPost, "/findmany/userapi/search", s.GetUserConfig, nil)
s.addAction(http.MethodGet, "/find/userapi/detail/{bk_biz_id}/{id}", s.UserConfigDetail, nil)
// per-user customization
s.addAction(http.MethodPost, "/create/usercustom/{bk_user}", s.AddUserCustom, nil)
s.addAction(http.MethodPut, "/update/usercustom/{bk_user}/{id}", s.UpdateUserCustomByID, nil)
s.addAction(http.MethodGet, "/find/usercustom/user/search/{bk_user}", s.GetUserCustomByUser, nil)
s.addAction(http.MethodPost, "/find/usercustom/default/search/{bk_user}", s.GetDefaultUserCustom, nil)
// host favourites
s.addAction(http.MethodPost, "/create/hosts/favorites/{user}", s.AddHostFavourite, nil)
s.addAction(http.MethodPut, "/update/hosts/favorites/{user}/{id}", s.UpdateHostFavouriteByID, nil)
s.addAction(http.MethodDelete, "/delete/hosts/favorites/{user}/{id}", s.DeleteHostFavouriteByID, nil)
s.addAction(http.MethodPost, "/findmany/hosts/favorites/search/{user}", s.ListHostFavourites, nil)
s.addAction(http.MethodGet, "/find/hosts/favorites/search/{user}/{id}", s.GetHostFavouriteByID, nil)
s.addAction(http.MethodPost, "/findmany/meta/hosts/modules/search", s.GetHostModulesIDs, nil)
s.addAction(http.MethodPost, "/findmany/hosts/list_hosts", s.ListHosts, nil)
s.addAction(http.MethodPut, "/updatemany/hosts/cloudarea_field", s.UpdateHostCloudAreaField, nil)
}
// initCloudSync registers routes for cloud sync tasks, resource confirms,
// and their history records.
// NOTE(review): unlike sibling groups, update uses POST (not PUT) and search
// uses "/search/..." (not "/read/...") — confirm this divergence is intended.
func (s *coreService) initCloudSync() {
s.addAction(http.MethodPost, "/create/cloud/sync/task", s.CreateCloudSyncTask, nil)
s.addAction(http.MethodDelete, "/delete/cloud/sync/task/{taskID}", s.DeleteCloudSyncTask, nil)
s.addAction(http.MethodPost, "/update/cloud/sync/task", s.UpdateCloudSyncTask, nil)
s.addAction(http.MethodPost, "/search/cloud/sync/task", s.SearchCloudSyncTask, nil)
s.addAction(http.MethodPost, "/create/cloud/confirm", s.CreateConfirm, nil)
s.addAction(http.MethodPost, "/check/cloud/task/name", s.CheckTaskNameUnique, nil)
s.addAction(http.MethodDelete, "/delete/cloud/confirm/{taskID}", s.DeleteConfirm, nil)
s.addAction(http.MethodPost, "/search/cloud/confirm", s.SearchConfirm, nil)
s.addAction(http.MethodPost, "/create/cloud/sync/history", s.CreateSyncHistory, nil)
s.addAction(http.MethodPost, "/search/cloud/sync/history", s.SearchSyncHistory, nil)
s.addAction(http.MethodPost, "/create/cloud/confirm/history", s.CreateConfirmHistory, nil)
s.addAction(http.MethodPost, "/search/cloud/confirm/history", s.SearchConfirmHistory, nil)
}
// audit registers routes for writing and searching audit log entries.
func (s *coreService) audit() {
s.addAction(http.MethodPost, "/create/auditlog", s.CreateAuditLog, nil)
s.addAction(http.MethodPost, "/read/auditlog", s.SearchAuditLog, nil)
}
// initOperation registers routes for operation-statistics charts: CRUD,
// chart data queries, chart positions, and the timer-driven data refresh.
func (s *coreService) initOperation() {
s.addAction(http.MethodPost, "/create/operation/chart", s.CreateOperationChart, nil)
s.addAction(http.MethodPost, "/findmany/operation/chart", s.SearchChartWithPosition, nil)
s.addAction(http.MethodPost, "/update/operation/chart", s.UpdateOperationChart, nil)
s.addAction(http.MethodDelete, "/delete/operation/chart/{id}", s.DeleteOperationChart, nil)
s.addAction(http.MethodPost, "/find/operation/chart/common", s.SearchChartCommon, nil)
s.addAction(http.MethodPost, "/find/operation/inst/count", s.SearchInstCount, nil)
s.addAction(http.MethodPost, "/find/operation/chart/data", s.SearchChartData, nil)
s.addAction(http.MethodPost, "/update/operation/chart/position", s.UpdateChartPosition, nil)
s.addAction(http.MethodPost, "/find/operation/timer/chart/data", s.SearchTimerChartData, nil)
s.addAction(http.MethodPost, "/start/operation/chart/timer", s.TimerFreshData, nil)
}
// label registers batch add/remove routes for resource labels.
func (s *coreService) label() {
s.addAction(http.MethodPost, "/createmany/labels", s.AddLabels, nil)
s.addAction(http.MethodDelete, "/deletemany/labels", s.RemoveLabels, nil)
}
// topographics registers search/update routes for topology-graphics data.
func (s *coreService) topographics() {
s.addAction(http.MethodPost, "/topographics/search", s.SearchTopoGraphics, nil)
s.addAction(http.MethodPost, "/topographics/update", s.UpdateTopoGraphics, nil)
}
// ccSystem registers the route for reading system-level user configuration.
func (s *coreService) ccSystem() {
s.addAction(http.MethodPost, "/find/system/user_config", s.GetSystemUserConfig, nil)
}
// initService is the aggregate entry point that wires up every route group of
// the core service. Some called initializers (e.g. initModelClassification,
// initProcess) are defined elsewhere in this file.
func (s *coreService) initService() {
s.initModelClassification()
s.initModel()
s.initAssociationKind()
s.initAttrUnique()
s.initModelAssociation()
s.initModelInstances()
s.initInstanceAssociation()
s.initDataSynchronize()
s.initMainline()
s.host()
s.audit()
s.initProcess()
s.initOperation()
s.initCloudSync()
s.label()
s.topographics()
s.ccSystem()
s.initSetTemplate()
s.initHostApplyRule()
}
|
<reponame>mcjazzyfunky/js-bling<gh_stars>0
import { defineComponent, mount, createElement as htm } from 'js-surface';
// CounterLabel: stateless display component that renders its numeric `value`
// prop in bold inside a <label>.
const CounterLabel = defineComponent({
  name: 'CounterLabel',
  properties: {
    value: {
      type: Number
    }
  },
  render({ props }) {
    const boldValue = htm('b', null, props.value);
    return htm('label', null, boldValue);
  }
});
// --------------------------------------------------------------------
// Counter intents
// Intent symbols dispatched via `send(...)` by the Counter component.
const
// State transitions
INCREASE_COUNTER = Symbol('increaseCounter'),
RESET_COUNTER = Symbol('resetCounter'),
// Interactions (aka side effects)
LOG = Symbol('log');
// Counter: a self-contained counter with "-" and "+" buttons. State lives in
// `counterValue`; transitions are dispatched via `send` using the
// INCREASE_COUNTER / RESET_COUNTER symbols, and LOG side effects are handled
// by the middleware returned from initMiddleware.
const Counter = defineComponent({
name: 'Counter',
properties: {
initValue: {
type: Number,
defaultValue: 0
},
style: {
type: Object,
defaultValue: null
}
},
// Initial component state derived from props.
initState({ props }) {
return { counterValue: props.initValue };
},
// Pure state transitions, keyed by intent symbol; each returns a
// state -> partial-state updater.
stateReducer: {
[INCREASE_COUNTER](delta) {
return state => ({ counterValue: state.counterValue + delta });
},
[RESET_COUNTER](value) {
// `state` is intentionally unused: the counter resets unconditionally.
return state => ({ counterValue: value });
}
},
// Side-effect handlers; LOG writes to the console.
initMiddleware({ send }) {
return {
[LOG](msg, params) {
console.log(msg, JSON.stringify(params));
// alert(msg + JSON.stringify(params));
}
}
},
methods: {
// Public method invoked via ref by CounterCtrl; resets the counter to n.
resetCounter(n) {
return ({ props, state, send }) => {
send(RESET_COUNTER, n);
};
}
},
// The lifecycle hooks below only log their invocation. `arguments[0]` is
// the full params object the framework passes to the hook.
needsUpdate({ send }) {
send(LOG, 'check whether update needed', arguments[0]);
return true;
},
onNextProps({ send }) {
send(LOG, 'next props for Counter - params:', arguments[0]);
},
onWillMount({ send }) {
send(LOG, 'will mount Counter - params:', arguments[0]);
},
onDidMount({ send }) {
send(LOG, 'did mount Counter - params:', arguments[0]);
},
onWillUpdate({ send }) {
send(LOG, 'will update Counter - params:', arguments[0]);
},
onDidUpdate({ send }) {
send(LOG, 'did update Counter - params:', arguments[0]);
},
onWillUnmount({ send }) {
send(LOG, 'will unmount Counter - params:', arguments[0]);
},
// Renders: [-] [value] [+]; the buttons dispatch INCREASE_COUNTER with
// delta -1 / +1.
render({ props, state, send }) {
return (
htm('span',
{style: props.style},
htm('button',
{ onClick: () => send(INCREASE_COUNTER, -1) },
'-'),
htm('div',
{ style: {width: '30px', display: 'inline-block', textAlign: 'center' }},
CounterLabel({value: state.counterValue})),
htm('button',
{ onClick: () => send(INCREASE_COUNTER, 1) } ,
'+'))
);
}
});
// --------------------------------------------------------------------
// CounterCtrl: hosts a Counter instance (captured via ref) and two buttons
// that reset it to 0 and to 100 through its public resetCounter method.
const CounterCtrl = defineComponent({
  name: 'CounterCtrl',
  render({ send }) {
    let counterInstance = null;
    // Dispatch a reset of the hosted counter to the given value.
    const resetTo = n => send(() => counterInstance.resetCounter(n));
    return (
      htm("div",
        null,
        htm('button', { onClick: () => resetTo(0) }, 'Reset to 0'),
        ' ',
        Counter({ref: it => counterInstance = it, style: {margin: '0 20px'}}),
        ' ',
        htm('button', { onClick: () => resetTo(100) }, 'Reset to 100')));
  }
});
// Mount the root component; 'main-content' is presumably a DOM element id —
// TODO confirm against the host page.
mount(CounterCtrl(), 'main-content');
/*
setTimeout(() => {
mount(htm('div', null, 'done'), 'main-content');
}, 4000);
*/
|
<reponame>h0lyalg0rithm/Atlas-iOS
//
// ATLUIParticipant.h
// Atlas
//
// Created by <NAME> on 8/29/14.
// Copyright (c) 2014 Layer, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <Foundation/Foundation.h>
#import "ATLAvatarItem.h"
/**
@abstract The `ATLParticipant` protocol must be adopted by objects wishing to represent Layer
participants in the user interface.
*/
// Conformers must also satisfy ATLAvatarItem (imported above) so the UI can
// render an avatar for the participant.
@protocol ATLParticipant <NSObject, ATLAvatarItem>
/**
@abstract The first name of the participant as it should be presented in the user interface.
*/
@property (nonatomic, readonly) NSString *firstName;
/**
@abstract The last name of the participant as it should be presented in the user interface.
*/
@property (nonatomic, readonly) NSString *lastName;
/**
@abstract The full name of the participant as it should be presented in the user interface.
*/
@property (nonatomic, readonly) NSString *fullName;
/**
@abstract The unique identifier of the participant as it should be used for Layer addressing.
@discussion This identifier is issued by the Layer identity provider backend.
*/
@property (nonatomic, readonly) NSString *participantIdentifier;
@end
|
['296', '280', '581', '302', '026', '225', '163', '870', '035', '427', '789', '488', '974', '400', '149', '720', '915', '487', '206', '569'] |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.context;
import com.microsoft.alm.core.webapi.CoreHttpClient;
import com.microsoft.alm.core.webapi.model.TeamProjectCollection;
import com.microsoft.alm.core.webapi.model.TeamProjectCollectionReference;
import com.microsoft.alm.core.webapi.model.TeamProjectReference;
import com.microsoft.alm.plugin.AbstractTest;
import com.microsoft.alm.plugin.authentication.AuthHelper;
import com.microsoft.alm.plugin.authentication.AuthenticationInfo;
import com.microsoft.alm.plugin.authentication.AuthenticationProvider;
import com.microsoft.alm.plugin.context.rest.ConnectionData;
import com.microsoft.alm.plugin.context.rest.LocationServiceData;
import com.microsoft.alm.plugin.context.rest.ServiceDefinition;
import com.microsoft.alm.plugin.context.rest.VstsHttpClient;
import com.microsoft.alm.plugin.context.rest.VstsInfo;
import com.microsoft.alm.plugin.context.rest.VstsUserInfo;
import com.microsoft.alm.plugin.exceptions.TeamServicesException;
import com.microsoft.alm.sourcecontrol.webapi.GitHttpClient;
import com.microsoft.alm.sourcecontrol.webapi.model.GitRepository;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import javax.ws.rs.client.Client;
import java.net.URI;
import java.util.Collection;
import java.util.UUID;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.when;
@RunWith(PowerMockRunner.class)
@PrepareForTest({VstsHttpClient.class, AuthHelper.class})
public class ServerContextManagerTest extends AbstractTest {
@Before
public void setupLocalTests() {
MockitoAnnotations.initMocks(this);
}
@Test
public void testEmptyManager() {
ServerContextManager manager = new ServerContextManager();
Assert.assertEquals(0, manager.getAllServerContexts().size());
Assert.assertNull(manager.getLastUsedContext());
manager.clearLastUsedContext();
ServerContext context = manager.get("foo");
Assert.assertNull(context);
manager.remove("foo");
}
@Test
public void testAdd() {
ServerContextManager manager = new ServerContextManager();
Assert.assertEquals(0, manager.getAllServerContexts().size());
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").build();
manager.add(context);
Assert.assertEquals(context, manager.getLastUsedContext());
Assert.assertEquals(1, manager.getAllServerContexts().size());
ServerContext _context = manager.get(context.getUri().toString());
Assert.assertEquals(context, _context);
// add a second context
ServerContext context2 = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server2/path2").build();
manager.add(context2);
Assert.assertEquals(context2, manager.getLastUsedContext());
Assert.assertEquals(2, manager.getAllServerContexts().size());
ServerContext _context2 = manager.get(context2.getUri().toString());
Assert.assertEquals(context2, _context2);
// add a third context that has a very similar URI
ServerContext context3 = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server2/path2/3").build();
manager.add(context3);
Assert.assertEquals(context3, manager.getLastUsedContext());
Assert.assertEquals(3, manager.getAllServerContexts().size());
ServerContext _context3 = manager.get(context3.getUri().toString());
Assert.assertEquals(context3, _context3);
}
@Test
public void testAddDuplicate() {
ServerContextManager manager = new ServerContextManager();
Assert.assertEquals(0, manager.getAllServerContexts().size());
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").build();
manager.add(context);
Assert.assertEquals(context, manager.getLastUsedContext());
Assert.assertEquals(1, manager.getAllServerContexts().size());
ServerContext _context = manager.get(context.getUri().toString());
Assert.assertEquals(context, _context);
// add a second context that has the SAME URI
ServerContext context2 = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").build();
manager.add(context2);
Assert.assertEquals(context2, manager.getLastUsedContext());
Assert.assertEquals(1, manager.getAllServerContexts().size());
ServerContext _context2 = manager.get(context2.getUri().toString());
Assert.assertEquals(context2, _context2);
Assert.assertNotEquals(context, _context2);
// add a third with upper case URI
ServerContext context3 = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("HTTP://SERVER/PATH").build();
manager.add(context3);
Assert.assertEquals(context3, manager.getLastUsedContext());
Assert.assertEquals(1, manager.getAllServerContexts().size());
ServerContext _context3 = manager.get(context3.getUri().toString());
Assert.assertEquals(context3, _context3);
Assert.assertNotEquals(context, _context3);
}
@Test
public void testRemove() {
ServerContextManager manager = new ServerContextManager();
Assert.assertEquals(0, manager.getAllServerContexts().size());
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").build();
manager.add(context);
ServerContext context2 = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server2/path2").build();
manager.add(context2);
ServerContext context3 = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server2/path2/3").build();
manager.add(context3);
Assert.assertEquals(context3, manager.getLastUsedContext());
Assert.assertEquals(3, manager.getAllServerContexts().size());
// Remove context2 and make sure 1 and 3 are left
manager.remove(context2.getUri().toString());
Assert.assertEquals(2, manager.getAllServerContexts().size());
ServerContext _context2 = manager.get(context2.getUri().toString());
Assert.assertNull(_context2);
ServerContext _context = manager.get(context.getUri().toString());
Assert.assertEquals(context, _context);
ServerContext _context3 = manager.get(context3.getUri().toString());
Assert.assertEquals(context3, _context3);
Assert.assertEquals(context3, manager.getLastUsedContext());
// Remove 3 and assure 1 is left
manager.remove(context3.getUri().toString());
Assert.assertEquals(1, manager.getAllServerContexts().size());
_context3 = manager.get(context3.getUri().toString());
Assert.assertNull(_context3);
_context = manager.get(context.getUri().toString());
Assert.assertEquals(context, _context);
Assert.assertNull(manager.getLastUsedContext());
// Remove the last one and make sure they are all gone
manager.remove(context.getUri().toString());
Assert.assertEquals(0, manager.getAllServerContexts().size());
_context = manager.get(context.getUri().toString());
Assert.assertNull(_context);
Assert.assertNull(manager.getLastUsedContext());
// Make sure calling remove with null doesn't throw
manager.remove(null);
}
@Test
public void testValidateServerConnection_VSTS() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ConnectionData connectionData = new ConnectionData();
connectionData.setAuthenticatedUser(new VstsUserInfo());
connectionData.setAuthorizedUser(new VstsUserInfo());
connectionData.setInstanceId(UUID.randomUUID());
connectionData.setLocationServiceData(new LocationServiceData());
ServiceDefinition definition = new ServiceDefinition();
definition.setServiceType("distributedtask");
connectionData.getLocationServiceData().setServiceDefinitions(new ServiceDefinition[]{definition});
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(any(Client.class), anyString(), Matchers.eq(ConnectionData.class)))
.thenReturn(connectionData);
final Client client = Mockito.mock(Client.class);
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.VSO).uri("https://server.visualstudio.com").buildWithClient(client);
// Make sure it doesn't throw for a 2015 server
manager.validateServerConnection(context);
}
@Test
public void testValidateServerConnection_2015Server() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ConnectionData connectionData = new ConnectionData();
connectionData.setAuthenticatedUser(new VstsUserInfo());
connectionData.setAuthorizedUser(new VstsUserInfo());
connectionData.setInstanceId(UUID.randomUUID());
connectionData.setLocationServiceData(new LocationServiceData());
ServiceDefinition definition = new ServiceDefinition();
definition.setServiceType("distributedtask");
connectionData.getLocationServiceData().setServiceDefinitions(new ServiceDefinition[]{definition});
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(any(Client.class), anyString(), Matchers.eq(ConnectionData.class)))
.thenReturn(connectionData);
final Client client = Mockito.mock(Client.class);
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").buildWithClient(client);
// Make sure it doesn't throw for a 2015 server
manager.validateServerConnection(context);
}
@Test
public void testValidateServerConnection_2013Server() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ConnectionData connectionData = new ConnectionData();
connectionData.setAuthenticatedUser(new VstsUserInfo());
connectionData.setAuthorizedUser(new VstsUserInfo());
connectionData.setInstanceId(UUID.randomUUID());
connectionData.setLocationServiceData(new LocationServiceData());
ServiceDefinition definition = new ServiceDefinition();
definition.setServiceType("doesntExist");
connectionData.getLocationServiceData().setServiceDefinitions(new ServiceDefinition[]{definition});
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(any(Client.class), anyString(), Matchers.eq(ConnectionData.class)))
.thenReturn(connectionData);
final Client client = Mockito.mock(Client.class);
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").buildWithClient(client);
// Make sure we get unsupported version
try {
manager.validateServerConnection(context);
Assert.fail("should not get here");
} catch (TeamServicesException ex) {
Assert.assertEquals(TeamServicesException.KEY_TFS_UNSUPPORTED_VERSION, ex.getMessage());
}
}
@Test
public void testValidateServerConnection_404() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(any(Client.class), anyString(), Matchers.eq(ConnectionData.class)))
.thenThrow(new VstsHttpClient.VstsHttpClientException(404, "message", null));
final Client client = Mockito.mock(Client.class);
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").buildWithClient(client);
// Make sure we get unsupported version
try {
manager.validateServerConnection(context);
Assert.fail("should not get here");
} catch (TeamServicesException ex) {
Assert.assertEquals(TeamServicesException.KEY_TFS_UNSUPPORTED_VERSION, ex.getMessage());
}
}
@Test
public void testValidateServerConnection_justVSTSRemoteURL() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ConnectionData connectionData = new ConnectionData();
connectionData.setAuthenticatedUser(new VstsUserInfo());
connectionData.setAuthorizedUser(new VstsUserInfo());
connectionData.setInstanceId(UUID.randomUUID());
connectionData.setLocationServiceData(new LocationServiceData());
ServiceDefinition definition = new ServiceDefinition();
definition.setServiceType("distributedtask");
connectionData.getLocationServiceData().setServiceDefinitions(new ServiceDefinition[]{definition});
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(any(Client.class), anyString(), Matchers.eq(ConnectionData.class)))
.thenReturn(connectionData);
final TeamProjectCollection collection = new TeamProjectCollection();
collection.setName("coll1");
collection.setId(UUID.randomUUID());
collection.setUrl("https://server.visualstudio.com/coll1");
final GitRepository repo = new GitRepository();
repo.setName("repo1");
final Client client = Mockito.mock(Client.class);
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.VSO).uri("https://server.visualstudio.com/project/_git/repo").buildWithClient(client);
final GitHttpClient gitHttpClient = Mockito.mock(GitHttpClient.class);
when(gitHttpClient.getRepository(anyString(), anyString())).thenReturn(repo);
final CoreHttpClient coreHttpClient = Mockito.mock(CoreHttpClient.class);
when(coreHttpClient.getProjectCollection(anyString())).thenReturn(collection);
final MyValidator myValidator = new MyValidator(context, gitHttpClient, coreHttpClient, collection);
// test the code path when we just provide the remote URL
manager.validateServerConnection(context, myValidator);
}
@Test
public void testValidateServerConnection_justTFSRemoteURL() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ConnectionData connectionData = new ConnectionData();
connectionData.setAuthenticatedUser(new VstsUserInfo());
connectionData.setAuthorizedUser(new VstsUserInfo());
connectionData.setInstanceId(UUID.randomUUID());
connectionData.setLocationServiceData(new LocationServiceData());
ServiceDefinition definition = new ServiceDefinition();
definition.setServiceType("distributedtask");
connectionData.getLocationServiceData().setServiceDefinitions(new ServiceDefinition[]{definition});
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(any(Client.class), anyString(), Matchers.eq(ConnectionData.class)))
.thenReturn(connectionData);
final TeamProjectCollection collection = new TeamProjectCollection();
collection.setName("coll1");
collection.setId(UUID.randomUUID());
collection.setUrl("https://server:8080/coll1");
final GitRepository repo = new GitRepository();
repo.setName("repo1");
final Client client = Mockito.mock(Client.class);
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.VSO).uri("https://server:8080/project/_git/repo").buildWithClient(client);
final GitHttpClient gitHttpClient = Mockito.mock(GitHttpClient.class);
when(gitHttpClient.getRepository(anyString(), anyString())).thenReturn(repo);
final CoreHttpClient coreHttpClient = Mockito.mock(CoreHttpClient.class);
when(coreHttpClient.getProjectCollection(anyString())).thenReturn(collection);
final MyValidator myValidator = new MyValidator(context, gitHttpClient, coreHttpClient, collection);
// test the code path when we just provide the remote URL
manager.validateServerConnection(context, myValidator);
}
@Test
public void testValidateGitUrl() {
final Client client = Mockito.mock(Client.class);
final ServerContext context = new ServerContextBuilder().type(ServerContext.Type.VSO).uri("https://dev.azure.com/username").buildWithClient(client);
final ServerContextManager.Validator validator = new ServerContextManager.Validator(context);
// We should test that the actual API URL used for VstsInto retrieval doesn't contain a user name; otherwise the
// HTTP client will use wrong credentials.
final String serverUrl = "https://dev.azure.com/username";
final String repositoryUrl = "https://username@dev.azure.com/username/projectname/_git/repositoryname";
final String repositoryInfoApiUrl = "https://dev.azure.com/username/projectname/_git/repositoryname/vsts/info";
final GitRepository repository = new GitRepository();
repository.setRemoteUrl(repositoryUrl);
repository.setProjectReference(new TeamProjectReference());
final VstsInfo vstsInfo = new VstsInfo();
vstsInfo.setServerUrl(serverUrl);
vstsInfo.setCollectionReference(new TeamProjectCollectionReference());
vstsInfo.setRepository(repository);
PowerMockito.mockStatic(VstsHttpClient.class);
when(VstsHttpClient.sendRequest(client, repositoryInfoApiUrl, VstsInfo.class)).thenReturn(vstsInfo);
Assert.assertTrue(validator.validateGitUrl(repositoryUrl));
Assert.assertEquals("https://dev.azure.com/username", validator.getServerUrl());
}
@Test
public void activeTfsContext() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri("http://server/path").build();
manager.add(context);
Assert.assertEquals(context, manager.getLastUsedContext());
Assert.assertTrue(manager.lastUsedContextIsTFS());
Assert.assertFalse(manager.lastUsedContextIsEmpty());
manager.clearLastUsedContext();
Assert.assertNull(manager.getLastUsedContext());
Assert.assertTrue(manager.lastUsedContextIsEmpty());
Assert.assertFalse(manager.lastUsedContextIsTFS());
}
@Test
public void activeVsoContext() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.VSO).uri("http://server/path").build();
manager.add(context);
Assert.assertEquals(context, manager.getLastUsedContext());
manager.clearLastUsedContext();
Assert.assertNull(manager.getLastUsedContext());
ServerContext context2 = new ServerContextBuilder().type(ServerContext.Type.VSO_DEPLOYMENT).build();
try {
manager.add(context2);
} catch (AssertionError ex) { /* correct */ }
}
@Test
public void getServerContext() {
ServerContextManager manager = new ServerContextManager();
String uri = "http://server/path";
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri(uri).build();
manager.add(context);
ServerContext testContext = manager.get(uri);
Assert.assertNotNull(testContext);
Assert.assertEquals(uri, testContext.getUri().toString().toLowerCase());
Collection<ServerContext> contexts = manager.getAllServerContexts();
Assert.assertEquals(1, contexts.size());
Assert.assertEquals(uri, contexts.iterator().next().getUri().toString().toLowerCase());
}
@Test
public void clearServerContext() {
ServerContextManager manager = new ServerContextManager();
String uri = "http://server/path";
Assert.assertNull(manager.getLastUsedContext());
ServerContext context = new ServerContextBuilder().type(ServerContext.Type.TFS).uri(uri).build();
manager.add(context);
manager.remove(uri);
Assert.assertNull(manager.getLastUsedContext());
ServerContext testContext = manager.get(uri);
Assert.assertNull(testContext);
}
/**
* This test avoids the problems with authenticating by asking for the context
* that is already the active context.
*/
@Test
public void getAuthenticatedContext_simplest() {
ServerContextManager manager = new ServerContextManager();
Assert.assertNull(manager.getLastUsedContext());
URI gitUri = URI.create("http://server/_git/repo1");
AuthenticationInfo info = new AuthenticationInfo("", "", "", "");
TeamProjectCollectionReference collection = new TeamProjectCollectionReference();
TeamProjectReference project = new TeamProjectReference();
GitRepository repo = new GitRepository();
repo.setRemoteUrl(gitUri.toString());
ServerContext context = new ServerContext(ServerContext.Type.TFS, info, UUID.randomUUID(), gitUri, gitUri, null, collection, project, repo);
manager.add(context);
ServerContext testContext = manager.getAuthenticatedContext(gitUri.toString(), true);
Assert.assertNotNull(testContext);
Assert.assertEquals(gitUri, testContext.getUri());
}
@Test
public void testUpdateAuthenticationInfo() {
String serverURL1 = "http://server:8080/project";
String serverURL2 = "http://server2:8080/project";
String serverURL3 = "http://server:8080/project/_git/repo";
AuthenticationInfo authInfo = new AuthenticationInfo("user", "pass", serverURL1, "user");
PowerMockito.mockStatic(AuthHelper.class);
when(AuthHelper.getAuthenticationInfoSynchronously(any(AuthenticationProvider.class), anyString())).thenReturn(authInfo);
ServerContextManager manager = new ServerContextManager();
AuthenticationInfo authInfo1 = new AuthenticationInfo("user1", "<PASSWORD>1", serverURL1, "user1");
ServerContext context1 = new ServerContext(ServerContext.Type.TFS, authInfo1, UUID.randomUUID(), URI.create(serverURL1), URI.create(serverURL1), null, null, null, null);
AuthenticationInfo authInfo2 = new AuthenticationInfo("user2", "<PASSWORD>", serverURL2, "user2");
ServerContext context2 = new ServerContext(ServerContext.Type.TFS, authInfo2, UUID.randomUUID(), URI.create(serverURL2), URI.create(serverURL2), null, null, null, null);
ServerContext context3 = new ServerContext(ServerContext.Type.TFS, authInfo1, UUID.randomUUID(), URI.create(serverURL3), URI.create(serverURL1), null, null, null, null);
manager.add(context1);
manager.add(context2);
manager.add(context3);
Assert.assertEquals(3, manager.getAllServerContexts().size());
manager.updateAuthenticationInfo(serverURL1);
Assert.assertEquals(3, manager.getAllServerContexts().size());
Assert.assertEquals(authInfo, manager.get(serverURL1).getAuthenticationInfo());
Assert.assertNotEquals(authInfo, manager.get(serverURL2).getAuthenticationInfo());
Assert.assertEquals(authInfo, manager.get(serverURL3).getAuthenticationInfo());
}
/**
 * Test double for ServerContextManager.Validator that returns canned HTTP
 * clients and a canned collection instead of hitting a real TFS server,
 * so validation logic can be exercised offline.
 */
private class MyValidator extends ServerContextManager.Validator {
    // Canned clients/collection returned by the overridden factory methods.
    final GitHttpClient gitHttpClient;
    final CoreHttpClient coreHttpClient;
    final TeamProjectCollection collection;

    public MyValidator(final ServerContext context, final GitHttpClient gitHttpClient, final CoreHttpClient coreHttpClient, final TeamProjectCollection collection) {
        super(context);
        this.gitHttpClient = gitHttpClient;
        this.coreHttpClient = coreHttpClient;
        this.collection = collection;
    }

    /** Returns the canned client; jaxrsClient and baseUrl are ignored. */
    @Override
    protected GitHttpClient getGitHttpClient(Client jaxrsClient, URI baseUrl) {
        return gitHttpClient;
    }

    /** Returns the canned client; jaxrsClient and baseUrl are ignored. */
    @Override
    protected CoreHttpClient getCoreHttpClient(Client jaxrsClient, URI baseUrl) {
        return coreHttpClient;
    }

    /** Returns the canned collection; no server round-trip is made. */
    @Override
    protected TeamProjectCollection getCollectionFromServer(ServerContext context, String collectionName) {
        return collection;
    }
}
}
|
#!/bin/bash
# SLURM job script: self-supervised monocular scene-flow training on the
# KITTI raw split (no-image-loss variant). For SLURM cluster only.
#SBATCH --output=/scratch_net/phon/majing/src/log/%j.out
#SBATCH --gres=gpu:1
#SBATCH --mem=50G
#SBATCH --mail-type=ALL
#SBATCH --constraint='turing|titan_xp'

source /scratch_net/phon/majing/anaconda3/etc/profile.d/conda.sh
conda activate self-mono

# Strict mode enabled only after conda activation, since conda's shell hooks
# are not clean under `set -u`. Aborts the job on any failed command.
set -euo pipefail

# experiments and datasets meta
KITTI_RAW_HOME="/scratch_net/phon/majing/datasets/kitti_full/"
#KITTI_RAW_HOME="/disk_hdd/kitti_full/"
EXPERIMENTS_HOME="/scratch_net/phon/majing/src/exps"

# model
MODEL=MonoSF_Full

# save path (None = start from scratch; see commented resume line below)
CHECKPOINT=None

# Loss and Augmentation
Train_Dataset=KITTI_Raw_KittiSplit_Train_mnsf
Train_Augmentation=Augmentation_SceneFlow
Train_Loss_Function=Loss_SceneFlow_SelfSup_NoImg

Valid_Dataset=KITTI_Raw_KittiSplit_Valid_mnsf
Valid_Augmentation=Augmentation_Resize_Only
Valid_Loss_Function=Loss_SceneFlow_SelfSup

ALIAS="-kitti-selfsup-noimg-"
# NOTE(review): TIME is computed but never used; it was presumably meant to be
# part of SAVE_PATH to keep runs unique — confirm intended layout.
TIME=$(date +"%Y%m%d-%H%M%S")
SAVE_PATH="${EXPERIMENTS_HOME}/${ALIAS}/"
#CHECKPOINT="$EXPERIMENTS_HOME/$ALIAS/checkpoint_latest.ckpt"

# All expansions quoted; the original also ended with a dangling `\` after the
# last flag, which has been removed.
python ../../main.py \
    --batch_size=4 \
    --batch_size_val=1 \
    --checkpoint="$CHECKPOINT" \
    --lr_scheduler=MultiStepLR \
    --lr_scheduler_gamma=0.5 \
    --lr_scheduler_milestones="[23, 39, 47, 54]" \
    --model="$MODEL" \
    --num_workers=16 \
    --optimizer=Adam \
    --optimizer_lr=2e-4 \
    --save="$SAVE_PATH" \
    --total_epochs=62 \
    --training_augmentation="$Train_Augmentation" \
    --training_augmentation_photometric=True \
    --training_dataset="$Train_Dataset" \
    --training_dataset_root="$KITTI_RAW_HOME" \
    --training_dataset_flip_augmentations=True \
    --training_dataset_preprocessing_crop=True \
    --training_dataset_num_examples=-1 \
    --training_key=total_loss \
    --training_loss="$Train_Loss_Function" \
    --validation_augmentation="$Valid_Augmentation" \
    --validation_dataset="$Valid_Dataset" \
    --validation_dataset_root="$KITTI_RAW_HOME" \
    --validation_dataset_preprocessing_crop=False \
    --validation_key=total_loss \
    --validation_loss="$Valid_Loss_Function"
package goacmedns
// Account is a struct that holds the registration response from an ACME-DNS
// server. It represents an API username/key that can be used to update TXT
// records for the account's subdomain.
type Account struct {
	// FullDomain is the fully-qualified domain the TXT records live under.
	FullDomain string
	// SubDomain is the account's subdomain label on the ACME-DNS server.
	SubDomain string
	// Username is the API username issued at registration.
	Username string
	// Password is the API key issued at registration; treat as a secret.
	Password string
}
|
<gh_stars>1-10
"""This module contains common logging functions. It works almost the same way as normal python3 logging module does."""
import sys
from micropython import const
from stylization import style_func_stream
# Numeric severity levels, mirroring CPython's logging module. MicroPython's
# const() lets the compiler inline these values.
CRITICAL = const(50)
ERROR = const(40)
WARNING = const(30)
INFO = const(20)
DEBUG = const(10)
NOTSET = const(0)

# Short display names used as the message prefix. Levels missing from this
# map fall back to "LVL<n>" (see Logger._level_str).
_level_dict = {
    CRITICAL: "CRIT",
    ERROR: "ERROR",
    WARNING: "WARN",
    INFO: "INFO",
    DEBUG: "DEBUG",
}

# Default output stream; replaceable via basic_config(stream=...).
_stream = sys.stderr
def _colorize(fg, stylename=None, bg=None):
    """Decorator to choose whether to colorize output stream or not."""
    def decorator(func):
        def wrapper(*args, **kwds):
            # When colorizing is disabled, call the target directly;
            # otherwise wrap it with the stream-styling helper first.
            if _colorize_enable:
                styled = style_func_stream(
                    _stream, fg=fg, stylename=stylename, bg=bg)(func)
                styled(*args, **kwds)
            else:
                func(*args, **kwds)
        return wrapper
    return decorator
class Logger:
    """Named logger writing level-prefixed messages to the module stream.

    Mirrors a small subset of CPython's ``logging.Logger`` API for
    MicroPython. Instances are normally obtained via ``get_logger``.
    """

    # Per-instance threshold. NOTSET (0) is falsy, so the
    # `self.level or _level` checks below fall back to the module default.
    level = NOTSET

    def __init__(self, name: str) -> None:
        # Logger name; included in every emitted line. May be None for the
        # implicit root logger used by the module-level info()/debug().
        self.name = name

    def _level_str(self, level: int) -> str:
        """Get level name by its int level number."""
        l = _level_dict.get(level)
        if l is not None:
            return l
        # Unknown numeric levels render generically, e.g. "LVL15".
        return "LVL%s" % level

    def set_level(self, level: int) -> None:
        """Set logging level."""
        self.level = level

    def is_enabled_for(self, level: int) -> bool:
        """Return True if a message at `level` would actually be emitted.

        Formatting of message arguments is deferred until it cannot be avoided.
        However, computing the arguments passed to the logging method can also be expensive,
        and you may want to avoid doing it if the logger will just throw away your event.
        Example usage:
        if logger.isEnabledFor(logging.DEBUG):
        logger.debug('Message with %s, %s', expensive_func1(),
        expensive_func2())"""
        return level >= (self.level or _level)

    def log(self, level: int, msg: str, *args) -> None:
        """Write message to output stream."""
        if level >= (self.level or _level):
            # %-format only when args are given, so messages containing a
            # literal '%' are safe when logged without arguments.
            msg_string = msg if not args else msg % args
            _stream.write("%s:%s:%s\n" % (self._level_str(level), self.name, msg_string))

    @_colorize(fg="green")
    def debug(self, msg: str, *args) -> None:
        """Display debug level message."""
        self.log(DEBUG, msg, *args)

    @_colorize(fg="blue")
    def info(self, msg: str, *args) -> None:
        """Display info level message."""
        self.log(INFO, msg, *args)

    @_colorize(fg="yellow")
    def warning(self, msg: str, *args) -> None:
        """Display warning level message."""
        self.log(WARNING, msg, *args)

    @_colorize(fg="red")
    def error(self, msg: str, *args) -> None:
        """Display error level message."""
        self.log(ERROR, msg, *args)

    @_colorize(fg="red", stylename="bold", bg="black")
    def critical(self, msg: str, *args):
        """Display critical level message."""
        self.log(CRITICAL, msg, *args)

    @_colorize(fg="red")
    def exception(self, e, msg: str, *args) -> None:
        """Display exception message and exception trace."""
        self.log(ERROR, msg, *args)
        # sys.print_exception is MicroPython's traceback printer.
        sys.print_exception(e, _stream)
# Module-wide default level, used when a Logger instance's level is NOTSET.
_level = INFO
# When True, messages are styled through the _colorize decorator.
_colorize_enable = True
# Registry of loggers keyed by name (None acts as the implicit root logger).
_loggers = {}
def get_logger(name) -> Logger:
    """Get logger by name or create it by name if it does not exist."""
    try:
        return _loggers[name]
    except KeyError:
        logger = _loggers[name] = Logger(name)
        return logger
def info(msg: str, *args) -> None:
    """Create info message with no logger name specified."""
    # Delegates to the logger registered under the name None ("root").
    get_logger(None).info(msg, *args)
def debug(msg: str, *args) -> None:
    """Create debug message with no logger name specified."""
    # Delegates to the logger registered under the name None ("root").
    get_logger(None).debug(msg, *args)
def basic_config(level: int = INFO, filename=None, stream=None, format=None, colorize: bool = True) -> None:
    """Function sets basic config setting of logging module.

    :param level: module-wide default level for loggers with level NOTSET.
    :param filename: unsupported on this platform; raises ValueError.
    :param stream: optional replacement output stream (falsy keeps current).
    :param format: unsupported on this platform; raises ValueError.
    :param colorize: enable/disable colored output via _colorize.
    :raises ValueError: for the unsupported filename/format arguments.
    """
    global _level, _stream, _colorize_enable
    # FIX: validate unsupported options BEFORE mutating any global state.
    # Previously _level/_colorize_enable (and possibly _stream) were already
    # overwritten when the ValueError was raised, leaving the module
    # half-configured after a failed call.
    if filename is not None:
        raise ValueError("filename argument is not supported on this platform.")
    if format is not None:
        raise ValueError("format argument is not supported on this platform.")
    _level = level
    _colorize_enable = colorize
    if stream:
        _stream = stream
|
<filename>full/src/main/java/de/ids_mannheim/korap/oauth2/service/OAuth2TokenService.java
package de.ids_mannheim.korap.oauth2.service;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import de.ids_mannheim.korap.authentication.AuthenticationManager;
import de.ids_mannheim.korap.config.Attributes;
import de.ids_mannheim.korap.config.FullConfiguration;
import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.exceptions.StatusCodes;
import de.ids_mannheim.korap.oauth2.constant.OAuth2Error;
import de.ids_mannheim.korap.oauth2.entity.Authorization;
/**
 * OAuth2TokenService manages the business logic for requesting and
 * creating OAuth2 access tokens.
 *
 * @author margaretha
 *
 */
@Service
public class OAuth2TokenService {

    @Autowired
    protected OAuth2ClientService clientService;
    @Autowired
    private OAuth2AuthorizationService authorizationService;
    @Autowired
    protected OAuth2ScopeServiceImpl scopeService;
    @Autowired
    protected FullConfiguration config;
    @Autowired
    private AuthenticationManager authenticationManager;

    /**
     * RFC 6749:
     * If the client type is confidential or the client was issued
     * client credentials, the client MUST authenticate with the
     * authorization server.
     *
     * @param authorizationCode
     * @param redirectURI
     *            required if included in the authorization request
     * @param clientId
     *            required if there is no authorization header
     * @param clientSecret
     *            client_secret, required if client_secret was issued
     *            for the client in client registration.
     * @return the verified authorization
     * @throws KustvaktException
     *             if client authentication or authorization
     *             verification fails; the failed attempt is counted
     *             before the exception is rethrown
     */
    protected Authorization retrieveAuthorization (
            String authorizationCode, String redirectURI, String clientId,
            String clientSecret) throws KustvaktException {
        Authorization auth =
                authorizationService.retrieveAuthorization(authorizationCode);
        try {
            clientService.authenticateClient(clientId, clientSecret);
            return authorizationService.verifyAuthorization(auth, clientId,
                    redirectURI);
        }
        catch (KustvaktException e) {
            // Record the failed verification attempt before propagating.
            authorizationService.addTotalAttempts(auth);
            throw e;
        }
    }

    /**
     * Authenticates a user with the configured OAuth2 password
     * authentication method and reports the time of authentication.
     *
     * @param username the user's name; must not be null or empty
     * @param password the user's password; must not be null or empty
     * @param scopes optional set of requested scopes
     * @return the time of successful authentication
     * @throws KustvaktException
     *             if a credential is missing or authentication fails
     */
    public ZonedDateTime authenticateUser (String username, String password,
            Set<String> scopes) throws KustvaktException {
        requireNonEmpty(username, "username is missing.");
        requireNonEmpty(password, "password is missing");

        Map<String, Object> attributes = new HashMap<>();
        if (scopes != null && !scopes.isEmpty()) {
            attributes.put(Attributes.SCOPE, scopes);
        }
        authenticationManager.authenticate(
                config.getOAuth2passwordAuthentication(), username, password,
                attributes);
        return ZonedDateTime.now(ZoneId.of(Attributes.DEFAULT_TIME_ZONE));
    }

    // Throws MISSING_PARAMETER / INVALID_REQUEST when the value is absent.
    private static void requireNonEmpty (String value, String message)
            throws KustvaktException {
        if (value == null || value.isEmpty()) {
            throw new KustvaktException(StatusCodes.MISSING_PARAMETER,
                    message, OAuth2Error.INVALID_REQUEST);
        }
    }
}
|
package types
import (
"errors"
"math/big"
"sort"
"github.com/ethereum/go-ethereum/common"
)
var (
	// errInsufficientBalance is returned when an undelegation exceeds the
	// currently delegated amount.
	errInsufficientBalance = errors.New("Insufficient balance to undelegate")
	// errInvalidAmount is returned for zero or negative undelegation amounts.
	errInvalidAmount = errors.New("Invalid amount, must be positive")
)

const (
	// LockPeriodInEpoch is the number of epochs a undelegated token needs to be before it's released to the delegator's balance
	LockPeriodInEpoch = 7
)

// Delegation represents the bond with tokens held by an account. It is
// owned by one delegator, and is associated with the voting power of one
// validator.
type Delegation struct {
	DelegatorAddress common.Address `json:"delegator_address" yaml:"delegator_address"`
	// Amount is the currently delegated token amount.
	Amount *big.Int `json:"amount" yaml:"amount"`
	// Reward is the accumulated, not-yet-withdrawn reward.
	Reward *big.Int `json:"reward" yaml:"reward"`
	// Undelegations is kept sorted by Epoch in increasing order (see Undelegate).
	Undelegations []*Undelegation `json:"undelegations" yaml:"undelegations"`
}

// Undelegation represents one undelegation entry
type Undelegation struct {
	// Amount is the token amount pending release.
	Amount *big.Int
	// Epoch is the epoch at which the undelegation was requested.
	Epoch *big.Int
}

// DelegationIndex stored the index of a delegation in the validator's delegation list
type DelegationIndex struct {
	ValidatorAddress common.Address
	Index            uint64
}
// NewDelegation creates a new delegation object with the given delegator
// and initial amount; Reward and Undelegations are left at zero values.
func NewDelegation(delegatorAddr common.Address,
	amount *big.Int) Delegation {
	d := Delegation{}
	d.DelegatorAddress = delegatorAddr
	d.Amount = amount
	return d
}
// Undelegate - append entry to the undelegation
//
// The amount must be positive and no greater than the currently delegated
// amount. If an undelegation entry already exists for this epoch, the
// amount is merged into it; otherwise a new entry is appended and the
// list is re-sorted by epoch in increasing order.
func (d *Delegation) Undelegate(epoch *big.Int, amt *big.Int) error {
	if amt.Sign() <= 0 {
		return errInvalidAmount
	}
	if d.Amount.Cmp(amt) < 0 {
		return errInsufficientBalance
	}
	d.Amount.Sub(d.Amount, amt)

	// Merge into an existing entry for the same epoch when possible.
	for _, entry := range d.Undelegations {
		if entry.Epoch.Cmp(epoch) == 0 {
			entry.Amount.Add(entry.Amount, amt)
			return nil
		}
	}

	// CLEANUP: the previous version tracked a redundant `exist` flag whose
	// true case was unreachable after the loop (the loop returned instead),
	// making `if !exist` dead weight.
	d.Undelegations = append(d.Undelegations, &Undelegation{Amount: amt, Epoch: epoch})
	// Always sort the undelegations by epoch in increasing order.
	sort.SliceStable(
		d.Undelegations,
		func(i, j int) bool { return d.Undelegations[i].Epoch.Cmp(d.Undelegations[j].Epoch) < 0 },
	)
	return nil
}
// TotalInUndelegation - return the total amount of token in undelegation (locking period)
func (d *Delegation) TotalInUndelegation() *big.Int {
	sum := new(big.Int)
	for _, undelegation := range d.Undelegations {
		sum.Add(sum, undelegation.Amount)
	}
	return sum
}
// DeleteEntry - delete an entry from the undelegation
//
// Removes every undelegation entry whose epoch equals the given epoch.
//
// BUG FIX: the previous implementation initialized `entries` to a non-nil
// empty slice, so when NO entry matched, the `entries != nil` guard was
// still true and d.Undelegations was replaced by the empty slice — wiping
// all undelegations. It also built the result with append on sub-slices of
// d.Undelegations, which mutates the shared backing array. This version
// filters into a fresh slice instead.
func (d *Delegation) DeleteEntry(epoch *big.Int) {
	kept := make([]*Undelegation, 0, len(d.Undelegations))
	for _, entry := range d.Undelegations {
		if entry.Epoch.Cmp(epoch) != 0 {
			kept = append(kept, entry)
		}
	}
	d.Undelegations = kept
}
// RemoveUnlockedUndelegations removes all fully unlocked undelegations and returns the total sum
func (d *Delegation) RemoveUnlockedUndelegations(curEpoch *big.Int) *big.Int {
	released := big.NewInt(0)
	removed := 0
	// Entries are sorted by epoch in increasing order, so scanning stops at
	// the first entry that is still inside the lock period.
	for _, entry := range d.Undelegations {
		elapsed := new(big.Int).Sub(curEpoch, entry.Epoch)
		if elapsed.Int64() <= LockPeriodInEpoch {
			break
		}
		// Unlocked: need to wait at least LockPeriodInEpoch epochs to withdraw.
		released.Add(released, entry.Amount)
		removed++
	}
	d.Undelegations = d.Undelegations[removed:]
	return released
}
|
package awx
import (
"testing"
"time"
)
// TestListInventories verifies that InventoriesService.ListInventories
// decodes the mocked AWX /api/v2/inventories/ response into the expected
// Inventory slice. The fixture below mirrors a recorded server response and
// is compared verbatim by checkAPICallResult.
func TestListInventories(t *testing.T) {
	var (
		// Expected decoded result for the "Demo Inventory" query.
		expectListInventoriesResponse = []*Inventory{
			{
				ID:   1,
				Type: "inventory",
				URL:  "/api/v2/inventories/1/",
				Related: &Related{
					CreatedBy:              "/api/v2/users/1/",
					ModifiedBy:             "/api/v2/users/1/",
					JobTemplates:           "/api/v2/inventories/1/job_templates/",
					VariableData:           "/api/v2/inventories/1/variable_data/",
					RootGroups:             "/api/v2/inventories/1/root_groups/",
					ObjectRoles:            "/api/v2/inventories/1/object_roles/",
					AdHocCommands:          "/api/v2/inventories/1/ad_hoc_commands/",
					Script:                 "/api/v2/inventories/1/script/",
					Tree:                   "/api/v2/inventories/1/tree/",
					AccessList:             "/api/v2/inventories/1/access_list/",
					ActivityStream:         "/api/v2/inventories/1/activity_stream/",
					InstanceGroups:         "/api/v2/inventories/1/instance_groups/",
					Hosts:                  "/api/v2/inventories/1/hosts/",
					Groups:                 "/api/v2/inventories/1/groups/",
					Copy:                   "/api/v2/inventories/1/copy/",
					UpdateInventorySources: "/api/v2/inventories/1/update_inventory_sources/",
					InventorySources:       "/api/v2/inventories/1/inventory_sources/",
					Organization:           "/api/v2/organizations/1/",
				},
				SummaryFields: &Summary{
					Organization: &OrgnizationSummary{
						ID:          1,
						Name:        "Default",
						Description: "",
					},
					CreatedBy: &ByUserSummary{
						ID:        1,
						Username:  "admin",
						FirstName: "",
						LastName:  "",
					},
					ModifiedBy: &ByUserSummary{
						ID:        1,
						Username:  "admin",
						FirstName: "",
						LastName:  "",
					},
					ObjectRoles: &ObjectRoles{
						UseRole: &ApplyRole{
							ID:          23,
							Description: "Can use the inventory in a job template",
							Name:        "Use",
						},
						AdminRole: &ApplyRole{
							ID:          21,
							Description: "Can manage all aspects of the inventory",
							Name:        "Admin",
						},
						AdhocRole: &ApplyRole{
							ID:          20,
							Description: "May run ad hoc commands on an inventory",
							Name:        "Ad Hoc",
						},
						UpdateRole: &ApplyRole{
							ID:          24,
							Description: "May update project or inventory or group using the configured source update system",
							Name:        "Update",
						},
						ReadRole: &ApplyRole{
							ID:          22,
							Description: "May view settings for the inventory",
							Name:        "Read",
						},
					},
					UserCapabilities: &UserCapabilities{
						Edit:   true,
						Copy:   true,
						Adhoc:  true,
						Delete: true,
					},
				},
				// Timestamps parsed inline; time.Parse errors are ignored here
				// because the fixture strings are known-valid RFC3339.
				Created: func() time.Time {
					t, _ := time.Parse(time.RFC3339, "2018-05-21T01:34:35.657185Z")
					return t
				}(),
				Modified: func() time.Time {
					t, _ := time.Parse(time.RFC3339, "2018-05-30T09:42:22.412749Z")
					return t
				}(),
				Name:                         "Demo Inventory",
				Description:                  "",
				Organization:                 1,
				Kind:                         "",
				HostFilter:                   nil,
				Variables:                    "",
				HasActiveFailures:            false,
				TotalHosts:                   2,
				HostsWithActiveFailures:      0,
				TotalGroups:                  0,
				GroupsWithActiveFailures:     0,
				HasInventorySources:          false,
				TotalInventorySources:        0,
				InventorySourcesWithFailures: 0,
				InsightsCredential:           nil,
				PendingDeletion:              false,
			},
		}
	)
	// Exercise the client against the test server and compare the result.
	awx := NewAWX(testAwxHost, testAwxUserName, testAwxPasswd, nil)
	result, _, err := awx.InventoriesService.ListInventories(map[string]string{
		"name": "Demo Inventory",
	})
	if err != nil {
		t.Fatalf("ListInventories err: %s", err)
	} else {
		checkAPICallResult(t, expectListInventoriesResponse, result)
		t.Log("ListInventories passed!")
	}
}
// TestCreateInventory verifies that InventoriesService.CreateInventory posts
// the given payload and decodes the mocked creation response (inventory
// ID 6) into the expected Inventory struct.
func TestCreateInventory(t *testing.T) {
	var (
		expectCreateInventoryResponse = &Inventory{
			ID:   6,
			Type: "inventory",
			URL:  "/api/v2/inventories/6/",
			Related: &Related{
				NamedURL:               "/api/v2/inventories/TestInventory++Default/",
				CreatedBy:              "/api/v2/users/1/",
				ModifiedBy:             "/api/v2/users/1/",
				JobTemplates:           "/api/v2/inventories/6/job_templates/",
				VariableData:           "/api/v2/inventories/6/variable_data/",
				RootGroups:             "/api/v2/inventories/6/root_groups/",
				ObjectRoles:            "/api/v2/inventories/6/object_roles/",
				AdHocCommands:          "/api/v2/inventories/6/ad_hoc_commands/",
				Script:                 "/api/v2/inventories/6/script/",
				Tree:                   "/api/v2/inventories/6/tree/",
				AccessList:             "/api/v2/inventories/6/access_list/",
				ActivityStream:         "/api/v2/inventories/6/activity_stream/",
				InstanceGroups:         "/api/v2/inventories/6/instance_groups/",
				Hosts:                  "/api/v2/inventories/6/hosts/",
				Groups:                 "/api/v2/inventories/6/groups/",
				Copy:                   "/api/v2/inventories/6/copy/",
				UpdateInventorySources: "/api/v2/inventories/6/update_inventory_sources/",
				InventorySources:       "/api/v2/inventories/6/inventory_sources/",
				Organization:           "/api/v2/organizations/1/",
			},
			SummaryFields: &Summary{
				Organization: &OrgnizationSummary{
					ID:          1,
					Name:        "Default",
					Description: "",
				},
				CreatedBy: &ByUserSummary{
					ID:        1,
					Username:  "admin",
					FirstName: "",
					LastName:  "",
				},
				ModifiedBy: &ByUserSummary{
					ID:        1,
					Username:  "admin",
					FirstName: "",
					LastName:  "",
				},
				ObjectRoles: &ObjectRoles{
					UseRole: &ApplyRole{
						ID:          80,
						Description: "Can use the inventory in a job template",
						Name:        "Use",
					},
					AdminRole: &ApplyRole{
						ID:          78,
						Description: "Can manage all aspects of the inventory",
						Name:        "Admin",
					},
					AdhocRole: &ApplyRole{
						ID:          77,
						Description: "May run ad hoc commands on an inventory",
						Name:        "Ad Hoc",
					},
					UpdateRole: &ApplyRole{
						ID:          81,
						Description: "May update project or inventory or group using the configured source update system",
						Name:        "Update",
					},
					ReadRole: &ApplyRole{
						ID:          79,
						Description: "May view settings for the inventory",
						Name:        "Read",
					},
				},
				UserCapabilities: &UserCapabilities{
					Edit:   true,
					Copy:   true,
					Adhoc:  true,
					Delete: true,
				},
			},
			// Fixture timestamps are known-valid RFC3339 strings.
			Created: func() time.Time {
				t, _ := time.Parse(time.RFC3339, "2018-08-13T01:59:47.160127Z")
				return t
			}(),
			Modified: func() time.Time {
				t, _ := time.Parse(time.RFC3339, "2018-08-13T01:59:47.160140Z")
				return t
			}(),
			Name:                         "TestInventory",
			Description:                  "for testing CreateInventory api",
			Organization:                 1,
			Kind:                         "",
			HostFilter:                   nil,
			Variables:                    "",
			HasActiveFailures:            false,
			TotalHosts:                   0,
			HostsWithActiveFailures:      0,
			TotalGroups:                  0,
			GroupsWithActiveFailures:     0,
			HasInventorySources:          false,
			TotalInventorySources:        0,
			InventorySourcesWithFailures: 0,
			InsightsCredential:           nil,
			PendingDeletion:              false,
		}
	)
	// Create the inventory through the client and compare the decoded result.
	awx := NewAWX(testAwxHost, testAwxUserName, testAwxPasswd, nil)
	result, err := awx.InventoriesService.CreateInventory(map[string]interface{}{
		"name":         "TestInventory",
		"description":  "for testing CreateInventory api",
		"organization": 1,
		"kind":         "",
		"host_filter":  "",
		"variables":    "",
	}, map[string]string{})
	if err != nil {
		t.Fatalf("CreateInventory err: %s", err)
	} else {
		checkAPICallResult(t, expectCreateInventoryResponse, result)
		t.Log("CreateInventory passed!")
	}
}
// TestUpdateInventory verifies that InventoriesService.UpdateInventory
// patches inventory 6 and decodes the mocked response (renamed to
// "TestInventory-update1") into the expected Inventory struct.
func TestUpdateInventory(t *testing.T) {
	var (
		expectUpdateInventoryResponse = &Inventory{
			ID:   6,
			Type: "inventory",
			URL:  "/api/v2/inventories/6/",
			Related: &Related{
				NamedURL:               "/api/v2/inventories/TestInventory-update1++Default/",
				CreatedBy:              "/api/v2/users/1/",
				ModifiedBy:             "/api/v2/users/1/",
				JobTemplates:           "/api/v2/inventories/6/job_templates/",
				VariableData:           "/api/v2/inventories/6/variable_data/",
				RootGroups:             "/api/v2/inventories/6/root_groups/",
				ObjectRoles:            "/api/v2/inventories/6/object_roles/",
				AdHocCommands:          "/api/v2/inventories/6/ad_hoc_commands/",
				Script:                 "/api/v2/inventories/6/script/",
				Tree:                   "/api/v2/inventories/6/tree/",
				AccessList:             "/api/v2/inventories/6/access_list/",
				ActivityStream:         "/api/v2/inventories/6/activity_stream/",
				InstanceGroups:         "/api/v2/inventories/6/instance_groups/",
				Hosts:                  "/api/v2/inventories/6/hosts/",
				Groups:                 "/api/v2/inventories/6/groups/",
				Copy:                   "/api/v2/inventories/6/copy/",
				UpdateInventorySources: "/api/v2/inventories/6/update_inventory_sources/",
				InventorySources:       "/api/v2/inventories/6/inventory_sources/",
				Organization:           "/api/v2/organizations/1/",
			},
			SummaryFields: &Summary{
				Organization: &OrgnizationSummary{
					ID:          1,
					Name:        "Default",
					Description: "",
				},
				CreatedBy: &ByUserSummary{
					ID:        1,
					Username:  "admin",
					FirstName: "",
					LastName:  "",
				},
				ModifiedBy: &ByUserSummary{
					ID:        1,
					Username:  "admin",
					FirstName: "",
					LastName:  "",
				},
				ObjectRoles: &ObjectRoles{
					UseRole: &ApplyRole{
						ID:          80,
						Description: "Can use the inventory in a job template",
						Name:        "Use",
					},
					AdminRole: &ApplyRole{
						ID:          78,
						Description: "Can manage all aspects of the inventory",
						Name:        "Admin",
					},
					AdhocRole: &ApplyRole{
						ID:          77,
						Description: "May run ad hoc commands on an inventory",
						Name:        "Ad Hoc",
					},
					UpdateRole: &ApplyRole{
						ID:          81,
						Description: "May update project or inventory or group using the configured source update system",
						Name:        "Update",
					},
					ReadRole: &ApplyRole{
						ID:          79,
						Description: "May view settings for the inventory",
						Name:        "Read",
					},
				},
				UserCapabilities: &UserCapabilities{
					Edit:   true,
					Copy:   true,
					Adhoc:  true,
					Delete: true,
				},
			},
			// Fixture timestamps are known-valid RFC3339 strings.
			Created: func() time.Time {
				t, _ := time.Parse(time.RFC3339, "2018-08-13T01:59:47.160127Z")
				return t
			}(),
			Modified: func() time.Time {
				t, _ := time.Parse(time.RFC3339, "2018-08-13T01:59:47.160140Z")
				return t
			}(),
			Name:                         "TestInventory-update1",
			Description:                  "for testing UpdateInventory api",
			Organization:                 1,
			Kind:                         "",
			HostFilter:                   nil,
			Variables:                    "",
			HasActiveFailures:            false,
			TotalHosts:                   0,
			HostsWithActiveFailures:      0,
			TotalGroups:                  0,
			GroupsWithActiveFailures:     0,
			HasInventorySources:          false,
			TotalInventorySources:        0,
			InventorySourcesWithFailures: 0,
			InsightsCredential:           nil,
			PendingDeletion:              false,
		}
	)
	// Update inventory 6 through the client and compare the decoded result.
	awx := NewAWX(testAwxHost, testAwxUserName, testAwxPasswd, nil)
	result, err := awx.InventoriesService.UpdateInventory(6, map[string]interface{}{
		"name":         "TestInventory-update1",
		"description":  "for testing UpdateInventory api",
		"organization": 1,
		"kind":         "",
		"host_filter":  "",
		"variables":    "",
	}, map[string]string{})
	if err != nil {
		t.Fatalf("UpdateInventory err: %s", err)
	} else {
		checkAPICallResult(t, expectUpdateInventoryResponse, result)
		t.Log("UpdateInventory passed!")
	}
}
// TestGetInventory verifies that InventoriesService.GetInventory fetches
// inventory 1 and decodes the mocked response into the expected struct.
// NOTE(review): the fixture's ObjectRoles IDs (77-81) differ from the
// ListInventories fixture (20-24) for the same inventory ID 1 — presumably
// copied from the Create/Update fixtures; confirm against the mock data.
func TestGetInventory(t *testing.T) {
	var (
		expectGetInventoryResponse = &Inventory{
			ID:   1,
			Type: "inventory",
			URL:  "/api/v2/inventories/1/",
			Related: &Related{
				CreatedBy:              "/api/v2/users/1/",
				ModifiedBy:             "/api/v2/users/1/",
				JobTemplates:           "/api/v2/inventories/1/job_templates/",
				VariableData:           "/api/v2/inventories/1/variable_data/",
				RootGroups:             "/api/v2/inventories/1/root_groups/",
				ObjectRoles:            "/api/v2/inventories/1/object_roles/",
				AdHocCommands:          "/api/v2/inventories/1/ad_hoc_commands/",
				Script:                 "/api/v2/inventories/1/script/",
				Tree:                   "/api/v2/inventories/1/tree/",
				AccessList:             "/api/v2/inventories/1/access_list/",
				ActivityStream:         "/api/v2/inventories/1/activity_stream/",
				InstanceGroups:         "/api/v2/inventories/1/instance_groups/",
				Hosts:                  "/api/v2/inventories/1/hosts/",
				Groups:                 "/api/v2/inventories/1/groups/",
				Copy:                   "/api/v2/inventories/1/copy/",
				UpdateInventorySources: "/api/v2/inventories/1/update_inventory_sources/",
				InventorySources:       "/api/v2/inventories/1/inventory_sources/",
				Organization:           "/api/v2/organizations/1/",
			},
			SummaryFields: &Summary{
				Organization: &OrgnizationSummary{
					ID:          1,
					Name:        "Default",
					Description: "",
				},
				CreatedBy: &ByUserSummary{
					ID:        1,
					Username:  "admin",
					FirstName: "",
					LastName:  "",
				},
				ModifiedBy: &ByUserSummary{
					ID:        1,
					Username:  "admin",
					FirstName: "",
					LastName:  "",
				},
				ObjectRoles: &ObjectRoles{
					UseRole: &ApplyRole{
						ID:          80,
						Description: "Can use the inventory in a job template",
						Name:        "Use",
					},
					AdminRole: &ApplyRole{
						ID:          78,
						Description: "Can manage all aspects of the inventory",
						Name:        "Admin",
					},
					AdhocRole: &ApplyRole{
						ID:          77,
						Description: "May run ad hoc commands on an inventory",
						Name:        "Ad Hoc",
					},
					UpdateRole: &ApplyRole{
						ID:          81,
						Description: "May update project or inventory or group using the configured source update system",
						Name:        "Update",
					},
					ReadRole: &ApplyRole{
						ID:          79,
						Description: "May view settings for the inventory",
						Name:        "Read",
					},
				},
				UserCapabilities: &UserCapabilities{
					Edit:   true,
					Copy:   true,
					Adhoc:  true,
					Delete: true,
				},
			},
			// Fixture timestamps are known-valid RFC3339 strings.
			Created: func() time.Time {
				t, _ := time.Parse(time.RFC3339, "2018-05-21T01:34:35.657185Z")
				return t
			}(),
			Modified: func() time.Time {
				t, _ := time.Parse(time.RFC3339, "2018-05-30T09:42:22.412749Z")
				return t
			}(),
			Name:                         "Demo Inventory",
			Description:                  "",
			Organization:                 1,
			Kind:                         "",
			HostFilter:                   nil,
			Variables:                    "",
			HasActiveFailures:            false,
			TotalHosts:                   2,
			HostsWithActiveFailures:      0,
			TotalGroups:                  0,
			GroupsWithActiveFailures:     0,
			HasInventorySources:          false,
			TotalInventorySources:        0,
			InventorySourcesWithFailures: 0,
			InsightsCredential:           nil,
			PendingDeletion:              false,
		}
	)
	// Fetch inventory 1 and compare the decoded result with the fixture.
	awx := NewAWX(testAwxHost, testAwxUserName, testAwxPasswd, nil)
	result, err := awx.InventoriesService.GetInventory(1, map[string]string{})
	if err != nil {
		t.Fatalf("GetInventory err: %s", err)
	} else {
		checkAPICallResult(t, expectGetInventoryResponse, result)
		t.Log("GetInventory passed!")
	}
}
// TestDeleteInventory verifies that InventoriesService.DeleteInventory
// succeeds against the mock server and yields an empty Inventory result.
func TestDeleteInventory(t *testing.T) {
	expected := &Inventory{}
	awx := NewAWX(testAwxHost, testAwxUserName, testAwxPasswd, nil)
	result, err := awx.InventoriesService.DeleteInventory(1)
	if err != nil {
		t.Fatalf("DeleteInventory err: %s", err)
	} else {
		checkAPICallResult(t, expected, result)
		t.Log("DeleteInventory passed!")
	}
}
// TestSyncInventorySourcesByInventoryID verifies that triggering a sync of
// all inventory sources for inventory 1 decodes the mocked response into
// the expected InventoryUpdate slice (one started update, ID 305).
func TestSyncInventorySourcesByInventoryID(t *testing.T) {
	var (
		expectSyncInventorySourcesByInventoryIDResponse = []*InventoryUpdate{
			{
				InventorySource: 10,
				Status:          "started",
				ID:              305,
				Type:            "inventory_update",
				URL:             "/api/v2/inventory_updates/305/",
				Related: &Related{
					CreatedBy:          "/api/v2/users/5/",
					ModifiedBy:         "/api/v2/users/5/",
					UnifiedJobTemplate: "/api/v2/inventory_sources/10/",
					Stdout:             "/api/v2/inventory_updates/305/stdout/",
					InventorySource:    "/api/v2/inventory_sources/10/",
					Cancel:             "/api/v2/inventory_updates/305/cancel/",
					Notifications:      "/api/v2/inventory_updates/305/notifications/",
					Events:             "/api/v2/inventory_updates/305/events/",
					Inventory:          "/api/v2/inventories/1/",
					Credentials:        "/api/v2/inventory_updates/305/credentials/",
				},
				SummaryFields: &Summary{
					Organization: &OrgnizationSummary{
						ID:          1,
						Name:        "Default",
						Description: "",
					},
					Inventory: &Inventory{
						ID:                           1,
						Name:                         "Default",
						Description:                  "",
						HasActiveFailures:            true,
						TotalHosts:                   7,
						HostsWithActiveFailures:      6,
						TotalGroups:                  14,
						HasInventorySources:          true,
						TotalInventorySources:        1,
						InventorySourcesWithFailures: 0,
						OrganizationID:               1,
						Kind:                         "",
					},
					UnifiedJobTemplate: &UnifiedJobTemplate{
						ID:             10,
						Name:           "Default",
						Description:    "",
						UnifiedJobType: "inventory_update",
					},
					InventorySource: &InventorySource{
						Source: "scm",
						// Fixture timestamp is a known-valid RFC3339 string.
						LastUpdated: func() time.Time {
							t, _ := time.Parse(time.RFC3339, "2021-07-30T10:12:44.553099Z")
							return t
						}(),
						Status: "pending",
					},
					CreatedBy: &ByUserSummary{
						ID:        5,
						Username:  "admin",
						FirstName: "",
						LastName:  "",
					},
					ModifiedBy: &ByUserSummary{
						ID:        5,
						Username:  "admin",
						FirstName: "",
						LastName:  "",
					},
					UserCapabilities: &UserCapabilities{
						Delete: true,
						Start:  true,
					},
					Credentials: []Credential{},
				},
				Created: func() time.Time {
					t, _ := time.Parse(time.RFC3339, "2021-08-02T01:45:22.144755Z")
					return t
				}(),
				Modified: func() time.Time {
					t, _ := time.Parse(time.RFC3339, "2021-08-02T01:45:22.157220Z")
					return t
				}(),
				Name:                    "Default",
				Description:             "",
				Source:                  "scm",
				SourcePath:              "",
				SourceScript:            nil,
				SourceVars:              "",
				Credential:              nil,
				EnabledVar:              "",
				EnabledValue:            "",
				HostFilter:              "",
				Overwrite:               true,
				OverwriteVars:           true,
				CustomVirtualenv:        nil,
				Timeout:                 0,
				Verbosity:               2,
				UnifiedJobTemplate:      10,
				LaunchType:              "manual",
				Failed:                  false,
				Started:                 nil,
				Finished:                nil,
				CanceledOn:              nil,
				Elapsed:                 0.0,
				JobArgs:                 "",
				JobCwd:                  "",
				JobEnv:                  nil,
				JobExplanation:          "",
				ExecutionNode:           "",
				ResultTraceback:         "",
				EventProcessingFinished: false,
				Inventory:               1,
				LicenseError:            false,
				OrgHostLimitError:       false,
				SourceProjectUpdate:     nil,
				SourceProject:           nil,
				InventoryUpdate:         305,
			},
		}
	)
	// Trigger the sync and compare the decoded result with the fixture.
	awx := NewAWX(testAwxHost, testAwxUserName, testAwxPasswd, nil)
	result, err := awx.InventoriesService.SyncInventorySourcesByInventoryID(1)
	if err != nil {
		t.Fatalf("SyncInventorySourcesByInventoryID err: %s", err)
	} else {
		checkAPICallResult(t, expectSyncInventorySourcesByInventoryIDResponse, result)
		t.Log("SyncInventorySourcesByInventoryID passed!")
	}
}
|
import { filter } from 'lodash';
import { COLOR_STATUS, STRUCTURE_TASK } from './constants';
import { startOneTask, stopOneTask, downloadOneTask, deleteOneTask } from './services';
// Map a task status string to the label style used by the task table.
// Known statuses take their color from COLOR_STATUS; anything else
// (including undefined) yields an empty style object.
export function fStatus(status) {
  const knownStatuses = [
    'WAITING',
    'DONE',
    'ERROR',
    'CANCEL',
    'MISSED',
    'IDLE',
    'PROCESS'
  ];
  if (knownStatuses.includes(status)) {
    return {
      borderColor: COLOR_STATUS[status],
      color: COLOR_STATUS[status]
    };
  }
  return {};
}
// Resolve a task's display label from its internal name.
// FIX: the previous version copied STRUCTURE_TASK, filtered it, and read
// `temp[0].display`, which threw a TypeError for unknown names. `find`
// avoids the copy and the guard returns undefined instead of crashing.
export function fNameTask(nameTask) {
  const entry = STRUCTURE_TASK.find((item) => item.name === nameTask);
  return entry ? entry.display : undefined;
}
// Compare two objects on the `orderBy` property for a descending sort:
// -1 when a > b, 1 when a < b, 0 when equal.
export function descendingComparator(a, b, orderBy) {
  const left = a[orderBy];
  const right = b[orderBy];
  if (right < left) return -1;
  if (right > left) return 1;
  return 0;
}
// Build a comparator for the given direction ('desc' or anything else for
// ascending) on the `orderBy` property.
export function getComparator(order, orderBy) {
  if (order === 'desc') {
    return (a, b) => descendingComparator(a, b, orderBy);
  }
  return (a, b) => -descendingComparator(a, b, orderBy);
}
// Stable-sort `array` with `comparator`, then (optionally) filter by a
// case-insensitive substring match on each item's `name`.
// FIX: the previous version filtered the ORIGINAL unsorted array when a
// query was given, silently discarding the sort it had just computed. It
// also used lodash `filter` where native Array.prototype.filter suffices.
export function applySortFilter(array, comparator, query) {
  // Decorate with the original index so comparator ties keep input order.
  const stabilizedThis = array.map((el, index) => [el, index]);
  stabilizedThis.sort((a, b) => {
    const order = comparator(a[0], b[0]);
    if (order !== 0) return order;
    return a[1] - b[1];
  });
  const sorted = stabilizedThis.map((el) => el[0]);
  if (query) {
    const needle = query.toLowerCase();
    return sorted.filter((item) => item.name.toLowerCase().indexOf(needle) !== -1);
  }
  return sorted;
}
// Trigger a browser download of `data` as a CSV named `${id}.csv`.
// FIX: the Promise executor previously ignored `resolve`, so the returned
// promise never settled and any `await createFileDownload(...)` (see
// handleDownload) hung forever. The promise now resolves after the
// synthetic click.
export function createFileDownload(data, id) {
  return new Promise((resolve) => {
    const aTag = document.createElement('a');
    aTag.href = `data:attachment/csv,${encodeURI(data)}`;
    aTag.download = `${id}.csv`;
    aTag.target = '_blank';
    aTag.click();
    resolve();
  });
}
// Start one task and log the outcome. A falsy response logs nothing,
// matching the service's "no data" behavior; request errors log a failure.
export async function handlePlay(id) {
  try {
    const response = await startOneTask(id);
    if (!response) return;
    console.log(response.result === 'ok' ? 'Play ok' : 'Play fail');
  } catch (err) {
    console.log('Play fail');
  }
}
// Stop one task and log the outcome. A falsy response logs nothing,
// matching the service's "no data" behavior; request errors log a failure.
export async function handleStop(id) {
  try {
    const response = await stopOneTask(id);
    if (!response) return;
    console.log(response.result === 'ok' ? 'Stop ok' : 'Stop fail');
  } catch (err) {
    console.log('Stop fail');
  }
}
// Download one task's data as a CSV file.
// FIX: failure logs previously said 'Stop fail' — copy-pasted from
// handleStop — which made download errors indistinguishable in the console.
export async function handleDownload(id) {
  try {
    const resData = await downloadOneTask(id);
    if (resData) {
      await createFileDownload(resData, id);
    } else {
      console.log('Download fail');
    }
  } catch (err) {
    console.log('Download fail');
  }
}
// Delete one task and log the outcome.
// FIX: logs previously said 'Stop ok'/'Stop fail' — copy-pasted from
// handleStop — which made delete results indistinguishable in the console.
export async function handleDelete(id) {
  try {
    const resData = await deleteOneTask(id);
    if (resData) {
      if (resData.result === 'ok') {
        console.log('Delete ok');
      } else {
        console.log('Delete fail');
      }
    }
  } catch (err) {
    console.log('Delete fail');
  }
}
|
#!/bin/bash
# Publish the package: commit and push everything, build egg + sdist, and
# upload the distributions to PyPI via twine.
set -euo pipefail

# Github
git add --all
# `|| true` keeps the original best-effort behavior: `git commit` exits
# non-zero when there is nothing to commit, which must not abort the upload
# now that `set -e` is active.
git commit -am 'pypi upload' || true
git push

# make an egg
python setup.py bdist_egg

# make pypi dist
python setup.py sdist

# upload pypi
pip install twine
twine upload -u sequencecentral --skip-existing dist/*
/**
 * Split a compact date string into its parts.
 * Assumes DDMMYYYY layout (day, month, then year) — confirm with callers.
 *
 * @param string $inputDate compact date string
 * @return array{day: string, month: string, year: string}
 */
function convertDate($inputDate) {
    return [
        'day'   => substr($inputDate, 0, 2),
        'month' => substr($inputDate, 2, 2),
        'year'  => substr($inputDate, 4)
    ];
}
<reponame>joaomoreno/vscode-go
// Type definitions for diff-match-patch v1.0.0
// Project: https://www.npmjs.com/package/diff-match-patch
// Definitions by: Asana <https://asana.com>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module "diff-match-patch" {
    /** A single diff tuple: [operation, text]; operation is DIFF_DELETE/INSERT/EQUAL. */
    type Diff = [number, string];

    /** A patch: a list of diffs plus the coordinates and lengths they apply at. */
    export class Patch {
        diffs: Diff[];
        start1: number;
        start2: number;
        length1: number;
        length2: number;
    }

    export class diff_match_patch {
        // NOTE(review): unusual static factory — the upstream library is normally
        // constructed via `new diff_match_patch()`; confirm this member exists.
        static new (): diff_match_patch;
        Diff_Timeout: number;
        Diff_EditCost: number;
        Match_Threshold: number;
        Match_Distance: number;
        Patch_DeleteThreshold: number;
        Patch_Margin: number;
        Match_MaxBits: number;
        diff_main(text1: string, text2: string, opt_checklines?: boolean, opt_deadline?: number): Diff[];
        diff_commonPrefix(text1: string, text2: string): number;
        diff_commonSuffix(text1: string, text2: string): number;
        diff_cleanupSemantic(diffs: Diff[]): void;
        diff_cleanupSemanticLossless(diffs: Diff[]): void;
        diff_cleanupEfficiency(diffs: Diff[]): void;
        diff_cleanupMerge(diffs: Diff[]): void;
        diff_xIndex(diffs: Diff[], loc: number): number;
        diff_prettyHtml(diffs: Diff[]): string;
        diff_text1(diffs: Diff[]): string;
        diff_text2(diffs: Diff[]): string;
        diff_levenshtein(diffs: Diff[]): number;
        diff_toDelta(diffs: Diff[]): string;
        diff_fromDelta(text1: string, delta: string): Diff[];
        // Fixed: `text1` previously had no annotation (implicit any).
        patch_make(text1: string, text2: string): Patch[];
        patch_deepCopy(patches: Patch[]): Patch[];
        patch_apply(patches: Patch[], text: string): [string, boolean[]];
        patch_fromText(text: string): Patch[];
        patch_toText(patches: Patch[]): string;
    }

    export var DIFF_DELETE: number;
    export var DIFF_INSERT: number;
    export var DIFF_EQUAL: number;
}
|
# platform = Red Hat Enterprise Linux 7
# CAUTION: This remediation script will remove telnet
# from the system, and may remove any packages
# that depend on telnet. Execute this
# remediation AFTER testing on a non-production
# system!
# Include source function library.
. /usr/share/scap-security-guide/remediation_functions
# package_command is provided by the sourced library above; 'remove'
# presumably uninstalls the package via the platform package manager —
# confirm against the library's definition.
package_command remove telnet
|
import lxml.etree as ET
def extract_image_urls(xml_data):
    """Return the ``src`` attribute of every <img class='play-long-image'>
    element found in ``xml_data`` (an XML/HTML byte string or document)."""
    document = ET.fromstring(xml_data)
    matches = document.xpath(r"//img[@class='play-long-image']/@src")
    return matches
# Open an interactive psql session against the local PostgreSQL server.
# NOTE(review): -U sets the *username*; 'thermometer_readings' reads like a
# database name — confirm whether '-d thermometer_readings' was intended.
psql -h localhost -p 5432 -U thermometer_readings
|
#!/bin/sh
#
# script to install amuse build system from scratch
#
# author: Arjen van Elteren
# date : 2009 - 05 -18
#
# Prerequisites directory check.
# Expansions are quoted so paths containing spaces (or an unset PREFIX)
# cannot break the tests, and diagnostics go to stderr.
if [ -z "${PREFIX}" ]; then
  echo "The PREFIX variable is not set, please set it to an user directory" >&2
  exit 1
fi
if [ ! -d "${PREFIX}" ]; then
  echo "${PREFIX} directory does not exist, please create it first!" >&2
  exit 1
fi
# Python version selection (default to Python 2).
# "${1-}" expands to empty when no argument was given, so the test is safe
# even under `set -u`.
if [ "${1-}" = "python3" ]
then
  APPVER=3.6.5
else
  APPVER=2.7.15
fi

# Python download coordinates
APPFILE="Python-${APPVER}.tgz"
APP_DIR="Python-${APPVER}"
URL="https://www.python.org/ftp/python/${APPVER}/${APPFILE}"

# OpenSSL download coordinates
OPENSSLVERSION="1.1.0g"
OPENSSLFILE="openssl-${OPENSSLVERSION}.tar.gz"
OPENSSLURL="https://www.openssl.org/source/old/1.1.0/${OPENSSLFILE}"
OPENSSLDIR="openssl-${OPENSSLVERSION}"
# Setting up the directory structure.
# mkdir -p makes the script re-runnable (plain mkdir failed when the
# directories already existed), and a failed cd now aborts instead of
# continuing in the wrong directory.
INSTALL_DIR="${PREFIX}/install"
mkdir -p "${INSTALL_DIR}"
cd "${INSTALL_DIR}" || exit 1
DOWNLOAD_DIR="${INSTALL_DIR}/_downloaded"
mkdir -p "${DOWNLOAD_DIR}"
BUILD_DIR="${INSTALL_DIR}/_build"
mkdir -p "${BUILD_DIR}"
SOURCE_DIR="${INSTALL_DIR}/_source"
mkdir -p "${SOURCE_DIR}"
# Download
echo "Downloading"
cd "${DOWNLOAD_DIR}" || exit 1
if [ -e "${APPFILE}" ] ; then
  echo "Python already downloaded"
else
  echo "Downloading Python"
  # command -v is the portable way to test for a tool (vs. 'which').
  if command -v curl >/dev/null; then
    curl -L -O "${URL}" || exit 1
  else
    wget "${URL}" || exit 1
  fi
fi
if [ -e "${OPENSSLFILE}" ] ; then
  echo "OpenSSL already downloaded"
else
  echo "Downloading OpenSSL"
  if command -v curl >/dev/null; then
    curl -L -O "${OPENSSLURL}" || exit 1
  else
    wget "${OPENSSLURL}" || exit 1
  fi
fi
cd .. || exit 1
echo "Done"

# Unpack
echo "Unpacking"
cd "${SOURCE_DIR}" || exit 1
rm -Rf "${APP_DIR}"
echo "Unpacking Python..."
# Both extractions now abort on failure (previously only OpenSSL was checked).
tar -xf "${DOWNLOAD_DIR}/${APPFILE}" || exit $?
cd "${SOURCE_DIR}" || exit 1
rm -Rf "${OPENSSLDIR}"
echo "Unpacking OpenSSL..."
tar -xf "${DOWNLOAD_DIR}/${OPENSSLFILE}" || exit $?
cd .. || exit 1
echo "Done"
# Build
echo "Building"
MACHINE=$( (uname -m) 2>/dev/null )
PLATFORM=$(uname)

echo "Building OpenSSL"
cd "${SOURCE_DIR}/${OPENSSLDIR}" || exit 1
# NOTE: '=' (not '==') is the portable string comparison for [ ] under
# #!/bin/sh — '==' fails on non-bash shells such as dash.
if [ "${PLATFORM}" = 'Darwin' ]; then
  if [ "${MACHINE}" = 'x86_64' ]; then
    ./Configure darwin64-x86_64-cc \
      --prefix="${PREFIX}" \
      --openssldir="${PREFIX}/openssl" \
      --shared
  else
    ./Configure darwin64-i386-cc \
      --prefix="${PREFIX}" \
      --openssldir="${PREFIX}/openssl" \
      --shared
  fi
else
  ./config \
    --prefix="${PREFIX}" \
    --openssldir="${PREFIX}/openssl" \
    --shared
fi
# Abort on build failure instead of attempting to build Python against a
# half-installed OpenSSL.
make || exit 1
make install || exit 1

echo "Build Python"
cd "${BUILD_DIR}" || exit 1
rm -Rf "${APP_DIR}"
mkdir "${APP_DIR}"
cd "${APP_DIR}" || exit 1
if [ "$PLATFORM" = 'Darwin' ] ; then
  "${SOURCE_DIR}/${APP_DIR}/configure" \
    --with-dyld \
    --prefix="${PREFIX}" \
    --enable-unicode=ucs4 \
    --program-suffix=.exe ;
else
  "${SOURCE_DIR}/${APP_DIR}/configure" \
    --prefix="${PREFIX}" \
    --libdir="${PREFIX}/lib" \
    --enable-shared \
    --enable-unicode=ucs4 \
    --program-suffix=.exe ;
fi
make || exit 1
make install || exit 1
echo "Done"
echo "Install complete"
|
#!/usr/bin/env bash
# Stop the running server using the PID recorded by the start script.
pidfile="target/universal/stage/server.pid"
if [ ! -f "$pidfile" ]; then
  echo "PID file $pidfile not found; is the server running?" >&2
  exit 1
fi
# NOTE: SIGKILL (-9) gives the server no chance to clean up; kept for
# backward compatibility, but consider a plain TERM first.
kill -9 "$(cat "$pidfile")"
|
import React, {useState} from 'react';
const App = () => {
const [selectedCountry, setSelectedCountry] = useState(null);
const countries = [
{name: 'Australia', capital: 'Canberra'},
{name: 'Canada', capital: 'Ottawa'},
{name: 'China', capital: 'Beijing'},
{name: 'Denmark', capital: 'Copenhagen'},
{name: 'France', capital: 'Paris'},
];
return (
<div className="App">
<h1>Select a country</h1>
<select
onChange={e => setSelectedCountry(e.target.value)}
value={selectedCountry}
>
<option>--- select a country ---</option>
{countries.map(country => (
<option key={country.name}>{country.name}</option>
))}
</select>
{selectedCountry && (
<>
<h2>Selected country</h2>
<p>Name: {selectedCountry}</p>
<p>
Capital:{" "}
{
countries.find(country => country.name === selectedCountry)
.capital
}
</p>
</>
)}
</div>
);
};
export default App; |
def userInput():
    """Prompt repeatedly for values and collect them into a list.

    Reading stops when the user types 'done'. Entries are returned as the
    raw strings typed (no numeric conversion is performed).
    """
    collected = []
    while True:
        value = input("Enter the next number (or type 'done' to finish):")
        if value == "done":
            break
        collected.append(value)
    return collected
<filename>src/components/Content/Box/DocBox.tsx
import React, { useEffect, useRef, useState } from 'react';
import { useRouter } from 'next/router';
import { IFrontMatter, IH2 } from '@/types';
import { generateToc, getTimeString } from '@/utils';
import {
Box, ContentBox, HeaderBox, Toc
} from '@/components/Content';
// Props for DocBox: rendered page content plus optional front matter and an
// error flag used for the not-found state.
interface IDocBox {
  children: React.ReactNode;
  frontMatter?: IFrontMatter;
  error?: boolean;
}

// Layout wrapper for a documentation page: header, optional table of
// contents, and the content box.
export const DocBox = ({ children, frontMatter, error = false, }: IDocBox) => {
  const [ toc, setToc, ] = useState<IH2[]>([]);
  const tocRef = useRef<HTMLDivElement>(null);
  const router = useRouter();

  useEffect(() => {
    if (!error) {
      // Rebuild the table of contents from the rendered content whenever the
      // route (and therefore the document) changes.
      const contents = tocRef.current;
      const h2Data: IH2[] = [];
      setToc(generateToc(contents, h2Data));
    }
    // NOTE(review): `error` is read inside the effect but omitted from the
    // dependency list — confirm this is intentional.
  }, [ router.asPath, ]);

  // NOTE(review): the `as number` cast assumes updatedAt is always present —
  // confirm upstream, since frontMatter itself is optional.
  const updateTime = getTimeString(frontMatter?.updatedAt as number);
  const category = frontMatter?.categories === ''
    ? '없음'
    : frontMatter?.categories;

  return (
    <>
      <div id='doc-content'>
        <HeaderBox
          category={category}
          error={error}
        >
          {frontMatter?.title}
        </HeaderBox>
        {toc.length !== 0 && (
          <Box mt={10} mb={10}>
            <Toc toc={toc} />
          </Box>
        )}
        <ContentBox
          error={error}
          updateTime={updateTime}
          tocRef={tocRef}
          mt={10}
          mb={10}
        >
          {children}
        </ContentBox>
      </div>
    </>
  );
};
|
// Central color palette for the UI. Values are hex colors;
// NAV_PROFILE_BACKGROUND carries a trailing alpha component.
export const colors = {
  WHITE: '#ffffff',
  YELLOW: '#F2C94C',
  ORANGE: '#f2994a',
  LIGHT_RED: '#EED4D4',
  RED: '#EB9387',
  DARK_RED: '#AE2727',
  PURPLE: '#a98abf',
  LIGHT_BLUE: '#d3e3f5',
  MEDIUM_BLUE: '#2175cb',
  IMAGE_BLUE: '#2D9CDB',
  NAV_BACKGROUND: '#f7f7f7',
  NAV_PROFILE_BACKGROUND: '#D3E3F566',
  GRAY: '#999999',
  LIGHTER_GRAY: '#e5e5e5',
  LIGHT_GRAY: '#bdbdbd',
  DARK_GRAY: '#828282',
  LIGHT_GREEN: '#D4EEDF',
  GREEN: '#3faa6d',
}
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Generate vendor makefiles for the lenovo zoom proprietary blobs.
# Abort on any error.
set -e

DEVICE=zoom
VENDOR=lenovo

# Load extractutils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
# BASH_SOURCE has no '/' when the script is run from its own directory.
if [[ ! -d "$MY_DIR" ]]; then MY_DIR="$PWD"; fi

CM_ROOT="$MY_DIR"/../../..

HELPER="$CM_ROOT"/vendor/cm/build/tools/extract_utils.sh
if [ ! -f "$HELPER" ]; then
    echo "Unable to find helper script at $HELPER"
    exit 1
fi
. "$HELPER"

# Initialize the helper
setup_vendor "$DEVICE" "$VENDOR" "$CM_ROOT"

# Copyright headers and guards
write_headers

# The blobs
write_makefiles "$MY_DIR"/proprietary-files.txt

# We are done!
write_footers
#!/bin/sh
# Copyright (C) 2012 The Regents of The University California.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Remove build artifacts, test workspaces and editor droppings from the
# checkout that contains this script.
DEVDIR="$(dirname "$0")"
BINDIR="$(dirname "$DEVDIR")"
FWDIR="$(dirname "$BINDIR")"

# All rm targets are quoted so a checkout path containing spaces cannot make
# 'rm -rf' operate on unintended arguments. (Globs stay outside the quotes.)
rm -rf "$FWDIR/run-tests-from-scratch-workspace"
rm -rf "$FWDIR/test_warehouses"
rm -rf "$FWDIR/conf/shark-env.sh"
rm -rf "$FWDIR/metastore_db"
rm -rf "$FWDIR/derby.log"
rm -rf "$FWDIR/project/target" "$FWDIR/project/project/target"
rm -rf "$FWDIR/target/resolution-cache"
rm -rf "$FWDIR/target/streams"
rm -rf "$FWDIR"/target/scala-*/cache
rm -rf "$FWDIR"/target/scala-*/classes
rm -rf "$FWDIR"/target/scala-*/test-classes
find "$FWDIR" -name ".DS_Store" -exec rm {} \;
find "$FWDIR" -name ".history" -exec rm {} \;
|
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.engine.audio;
import org.terasology.gestalt.assets.Asset;
import org.terasology.gestalt.assets.AssetData;
import org.terasology.gestalt.assets.AssetType;
import org.terasology.gestalt.assets.DisposableResource;
import org.terasology.gestalt.assets.ResourceUrn;
/**
 * Base type for playable audio assets. Concrete subclasses expose the raw
 * audio properties (channel count, sampling rate, buffer size) and the
 * playback entry points.
 *
 * @param <T> The asset data type this asset is loaded from
 */
public abstract class Sound<T extends AssetData> extends Asset<T> implements org.terasology.nui.asset.Sound {

    /**
     * The constructor for an asset. It is suggested that implementing classes provide a constructor taking both the urn, and an initial AssetData to load.
     *
     * @param urn The urn identifying the asset.
     * @param assetType The asset type this asset belongs to.
     * @param resource The disposable resource associated with this asset.
     */
    protected Sound(ResourceUrn urn, AssetType<?, T> assetType, DisposableResource resource) {
        super(urn, assetType,resource);
    }

    /**
     * The constructor for an asset. It is suggested that implementing classes provide a constructor taking both the urn, and an initial AssetData to load.
     *
     * @param urn The urn identifying the asset.
     * @param assetType The asset type this asset belongs to.
     */
    protected Sound(ResourceUrn urn, AssetType<?, T> assetType) {
        super(urn, assetType);
    }

    /**
     * @return channels amount of sound (1 - mono, 2 - stereo)
     */
    public abstract int getChannels();

    /**
     * @return sampling rate of sound (example 44100)
     */
    public abstract int getSamplingRate();

    /**
     * @return the size of the sound buffer
     */
    public abstract int getBufferSize();

    /**
     * Plays the sound at full volume.
     */
    public abstract void play();

    /**
     * Plays the sound at the given volume.
     * @param volume the playback volume
     */
    public abstract void play(float volume);
}
|
/*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.gateway.tests.http2;
import java.time.Duration;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import reactor.core.publisher.Hooks;
import reactor.core.publisher.Mono;
import reactor.netty.http.Http2SslContextSpec;
import reactor.netty.http.HttpProtocol;
import reactor.netty.http.client.HttpClient;
import reactor.netty.resources.ConnectionProvider;
import reactor.test.StepVerifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.system.CapturedOutput;
import org.springframework.boot.test.system.OutputCaptureExtension;
import org.springframework.boot.test.web.server.LocalServerPort;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.web.reactive.function.client.WebClient;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
/**
* @author <NAME>
*/
@ExtendWith(OutputCaptureExtension.class)
@SpringBootTest(classes = Http2Application.class, webEnvironment = WebEnvironment.RANDOM_PORT)
@DirtiesContext
public class Http2ApplicationTests {

    /** Random port the embedded server was started on (see RANDOM_PORT above). */
    @LocalServerPort
    int port;

    @Test
    public void http2Works(CapturedOutput output) {
        // Operator debugging yields readable reactive stack traces on failure.
        Hooks.onOperatorDebug();
        String uri = "https://localhost:" + port + "/myprefix/hello";
        String expected = "Hello";
        assertResponse(uri, expected);
        // The ALPN negotiation line and the HTTP/2 connection preface
        // ("PRI * HTTP/2.0") in the captured log prove the exchange used h2.
        Assertions.assertThat(output).contains("Negotiated application-level protocol [h2]", "PRI * HTTP/2.0");
    }

    /**
     * Issues a GET against {@code uri} and asserts an HTTP 200 response whose
     * body equals {@code expected}.
     *
     * @param uri the absolute URI to request
     * @param expected the expected response body
     */
    public static void assertResponse(String uri, String expected) {
        WebClient client = WebClient.builder().clientConnector(new ReactorClientHttpConnector(getHttpClient())).build();
        Mono<ResponseEntity<String>> responseEntityMono = client.get().uri(uri).retrieve().toEntity(String.class);
        StepVerifier.create(responseEntityMono).assertNext(entity -> {
            assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
            assertThat(entity.getBody()).isEqualTo(expected);
        }).expectComplete().verify();
    }

    /**
     * Builds a reactor-netty client speaking HTTP/1.1 and h2, trusting any
     * certificate (InsecureTrustManagerFactory) since the test server's
     * certificate is not expected to be CA-signed.
     *
     * @return the configured client
     */
    static HttpClient getHttpClient() {
        return HttpClient
                .create(ConnectionProvider.builder("test").maxConnections(100)
                        .pendingAcquireTimeout(Duration.ofMillis(0)).pendingAcquireMaxCount(-1).build())
                .protocol(HttpProtocol.HTTP11, HttpProtocol.H2).secure(sslContextSpec -> {
                    Http2SslContextSpec clientSslCtxt = Http2SslContextSpec.forClient()
                            .configure(builder -> builder.trustManager(InsecureTrustManagerFactory.INSTANCE));
                    sslContextSpec.sslContext(clientSslCtxt);
                });
    }
}
|
#!/bin/bash
# Use tesseract's text2image --find_fonts to list fonts that fully cover
# (min_coverage 1) the eng.rupee training text, then strip the coverage
# details from the "raw" lines and quote each font name into a list file.
# Note: '|&' (pipe stderr too) is a bashism and requires the bash shebang.
text2image --find_fonts --text langdata/eng/eng.rupee.training_text --outputbase ./findfonts --fonts_dir /usr/share/fonts --min_coverage 1 |& grep raw | sed -e 's/ :.*/"/g' | sed -e 's/^/"/' > findfonts.rupee.1.txt
|
<gh_stars>1-10
/*
* Copyright 2017 BlackBerry Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Simulation driver: feeds synthetic inter-arrival times through
// calculateRate and prints a CSV of (elapsed time, estimated rate).
const e = 2.71828; // truncated approximation of Euler's number
var timePassed : number; // time passed between calls
const timeConstant = 3000; // sampling frequency, the goal is to rate limit 10 requests/1 minute
let lastRate = 0; // previously recorded rate, we start with zero
let duration = 0 ; // elapsed time
let frequency = 5000; // initial simulated API invocation frequence, in this case it is one request every 5 seconds
console.log("Time passed" + "," + "Rate");
// Phase 1: nineteen samples with a constant 5 s gap.
for(var i=1; i<20; i++){
timePassed = frequency;
duration += timePassed;
lastRate = calculateRate(timeConstant, lastRate, timePassed);
console.log(duration + "," + lastRate);
}
// Phase 2: reuse the same counter, walking back down with progressively
// longer gaps of frequency * (20 - i).
i--;
for(; i>0;i--){
timePassed = frequency* (20-i);
duration += timePassed;
lastRate = calculateRate(timeConstant, lastRate, timePassed);
console.log(duration + "," + lastRate);
}
function calculateRate(timeConstant: number, lastRate: number, timePassed: number) : number {
timePassed = timePassed>1 ? timePassed : 1;
let alpha : number = 1.0 - e**(-timePassed / timeConstant);
return lastRate + alpha * ((1.0 / timePassed) - lastRate);
} |
package io.opensphere.tracktool;
import java.awt.Insets;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import io.opensphere.core.Toolbox;
import io.opensphere.core.api.DefaultTransformer;
import io.opensphere.core.callout.CalloutDragListener;
import io.opensphere.core.control.action.ContextActionManager;
import io.opensphere.core.control.action.ContextSingleActionProvider;
import io.opensphere.core.control.action.context.ContextIdentifiers;
import io.opensphere.core.control.action.context.GeographicPositionsContextKey;
import io.opensphere.core.control.ui.ToolbarManager.SeparatorLocation;
import io.opensphere.core.control.ui.ToolbarManager.ToolbarLocation;
import io.opensphere.core.quantify.Quantify;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.image.IconUtil;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.core.util.swing.IconButton;
import io.opensphere.myplaces.models.MyPlacesEditListener;
import io.opensphere.myplaces.models.MyPlacesModel;
import io.opensphere.tracktool.model.Track;
import io.opensphere.tracktool.registry.TrackRegistry;
/**
 * Transformer that drives the track tool: it contributes the "Track" toolbar
 * button and registers/deregisters the context action providers that turn
 * drawn arcs into tracks.
 */
public final class TrackToolTransformer extends DefaultTransformer
{
    /**
     * The manager for actions associated with controlling which provider will
     * manage completed tracks.
     */
    private final ContextActionManager myActionManager;

    /**
     * When this button is selected, I will be the action provider for completed
     * arcs.
     */
    private IconButton myActivationButton;

    /** Manager for handling completed tracks. Created in {@link #open()}. */
    private CompletedTrackManager myCompletedTrackManager;

    /** The action provider for completed arcs. */
    private final ContextSingleActionProvider<GeographicPositionsContextKey> myCompProvider = new ContextSingleActionProvider<GeographicPositionsContextKey>()
    {
        @Override
        public void doAction(String contextId, GeographicPositionsContextKey key, int x, int y)
        {
            // A track requires at least two positions; other contexts are ignored.
            if (ContextIdentifiers.ARC_CONTEXT.equals(contextId) && key.getPositions().size() > 1)
            {
                myToolbox.getUIRegistry().getContextActionManager()
                        .clearContextSingleActionProvider(ContextIdentifiers.DEFAULT_MOUSE_CONTEXT, MouseEvent.class);
                myTrackRegistry.createNewTrackFromPositions(key.getPositions(), myToolbox, myModel.getDataGroups(),
                        myEditListener);
            }
        }

        @Override
        public void invalidated()
        {
            myActivationButton.setSelected(false);
        }
    };

    /**
     * The context for using the unmodified mouse actions for drawing on the
     * canvas.
     */
    private final ContextSingleActionProvider<MouseEvent> myDrawProvider = new ContextSingleActionProvider<MouseEvent>()
    {
        @Override
        public void doAction(String contextId, MouseEvent key, int x, int y)
        {
            // TODO This is currently used for exclusive button grouping. The
            // action should be handled here when the controls are re-written.
        }

        @Override
        public void invalidated()
        {
            myActivationButton.setSelected(false);
            myActionManager.deregisterContextSingleActionProvider(ContextIdentifiers.ARC_CONTEXT,
                    GeographicPositionsContextKey.class, myCompProvider);
        }
    };

    /** The Toolbox. */
    private final Toolbox myToolbox;

    /** The track registry. */
    private final TrackRegistry myTrackRegistry;

    /**
     * The my places model.
     */
    private final MyPlacesModel myModel;

    /**
     * The edit listener.
     */
    private final MyPlacesEditListener myEditListener;

    /**
     * The callout drag listener.
     */
    private final CalloutDragListener<Track> myDragListener;

    /**
     * Instantiates a new track tool transformer.
     *
     * @param toolbox the toolbox
     * @param registry the track registry
     * @param model The my places model.
     * @param editListener The edit listener.
     * @param dragListener The callout drag listener.
     */
    public TrackToolTransformer(Toolbox toolbox, TrackRegistry registry, MyPlacesModel model, MyPlacesEditListener editListener,
            CalloutDragListener<Track> dragListener)
    {
        super(null);
        Utilities.checkNull(toolbox, "toolbox");
        Utilities.checkNull(registry, "controller");
        myToolbox = toolbox;
        myTrackRegistry = registry;
        myModel = model;
        myEditListener = editListener;
        myDragListener = dragListener;
        // The UI registry may be absent (e.g. headless); the toolbar button is
        // only registered when it exists.
        if (myToolbox.getUIRegistry() != null)
        {
            myActionManager = myToolbox.getUIRegistry().getContextActionManager();
        }
        else
        {
            myActionManager = null;
        }
        EventQueueUtilities.invokeLater(new Runnable()
        {
            @Override
            public void run()
            {
                registerToolbarButton(myToolbox);
            }
        });
    }

    @Override
    public void close()
    {
        super.close();
        // NOTE(review): assumes open() ran before close(); otherwise
        // myCompletedTrackManager is null here — confirm lifecycle guarantees.
        myCompletedTrackManager.close();
        if (myToolbox.getUIRegistry() != null)
        {
            myActionManager.deregisterContextSingleActionProvider(ContextIdentifiers.ARC_CONTEXT,
                    GeographicPositionsContextKey.class, myCompProvider);
            myActionManager.deregisterContextSingleActionProvider(ContextIdentifiers.DEFAULT_MOUSE_CONTEXT, MouseEvent.class,
                    myDrawProvider);
        }
    }

    @Override
    public void open()
    {
        super.open();
        myCompletedTrackManager = new CompletedTrackManager(myToolbox, this, myTrackRegistry, myDragListener);
    }

    /**
     * Adds the track tool toolbar button.
     *
     * @param toolbox the toolbox
     */
    private void registerToolbarButton(final Toolbox toolbox)
    {
        myActivationButton = new IconButton("Track");
        IconUtil.setIcons(myActivationButton, "/images/path-default.png", IconUtil.DEFAULT_ICON_FOREGROUND,
                IconUtil.ICON_SELECTION_FOREGROUND);
        myActivationButton.setToolTipText("Create a track");
        myActivationButton.addMouseListener(new MouseAdapter()
        {
            @Override
            public void mouseClicked(MouseEvent e)
            {
                if (e.getButton() == MouseEvent.BUTTON1)
                {
                    // Toggle the button, then register or deregister the arc and
                    // draw providers to match the new selection state.
                    myActivationButton.setSelected(!myActivationButton.isSelected());
                    Quantify.collectEnableDisableMetric("mist3d.track-tool.create-a-track", myActivationButton.isSelected());
                    if (myActivationButton.isSelected())
                    {
                        myActionManager.registerContextSingleActionProvider(ContextIdentifiers.ARC_CONTEXT,
                                GeographicPositionsContextKey.class, myCompProvider);
                        myActionManager.registerContextSingleActionProvider(ContextIdentifiers.DEFAULT_MOUSE_CONTEXT,
                                MouseEvent.class, myDrawProvider);
                    }
                    else
                    {
                        myActionManager.deregisterContextSingleActionProvider(ContextIdentifiers.ARC_CONTEXT,
                                GeographicPositionsContextKey.class, myCompProvider);
                        myActionManager.deregisterContextSingleActionProvider(ContextIdentifiers.DEFAULT_MOUSE_CONTEXT,
                                MouseEvent.class, myDrawProvider);
                    }
                }
            }
        });
        toolbox.getUIRegistry().getToolbarComponentRegistry().registerToolbarComponent(ToolbarLocation.NORTH, "Track Tool",
                myActivationButton, 380, SeparatorLocation.NONE, new Insets(0, 2, 0, 2));
    }
}
|
<filename>minecraft/bukkit/src/main/java/io/github/portlek/scoreboard/bukkit/BukkitScoreboard.java<gh_stars>1-10
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package io.github.portlek.scoreboard.bukkit;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.bukkit.event.EventHandler;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.event.server.PluginDisableEvent;
import org.bukkit.plugin.Plugin;
import org.jetbrains.annotations.NotNull;
/**
 * a class that represents initializer for Bukkit's scoreboard system.
 */
@Getter
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
public final class BukkitScoreboard implements Listener, AutoCloseable {

  /**
   * the plugin.
   */
  @NotNull
  @Getter
  private final Plugin plugin;

  /**
   * the thread.
   */
  @NotNull
  private final BukkitScoreboardThread thread;

  /**
   * initiate the scoreboard system.
   *
   * @param plugin the plugin to initiate.
   * @param tick the tick to initiate.
   *
   * @return a bukkit scoreboard sender instance to use.
   */
  @NotNull
  public static BukkitScoreboard create(@NotNull final Plugin plugin, final long tick) {
    return new BukkitScoreboard(plugin, new BukkitScoreboardThread(new BukkitScoreboardSender(plugin), tick));
  }

  @Override
  public void close() {
    this.thread.interrupt();
    this.thread.getSender().close();
    HandlerList.unregisterAll(this);
  }

  /**
   * obtains the sender.
   *
   * @return sender.
   */
  @NotNull
  public BukkitScoreboardSender getSender() {
    return this.thread.getSender();
  }

  /**
   * runs when a player quits.
   *
   * @param event the event to handle.
   */
  @EventHandler
  public void handle(final PlayerQuitEvent event) {
    this.getSender().onQuit(event.getPlayer());
  }

  /**
   * runs when the owning plugin disables.
   *
   * @param event the event to handle.
   */
  @EventHandler
  public void handle(final PluginDisableEvent event) {
    // Fixed: PluginDisableEvent fires for *every* plugin that shuts down;
    // previously this closed the scoreboard whenever any unrelated plugin
    // was disabled. Only tear down when it is our own plugin.
    if (event.getPlugin().equals(this.plugin)) {
      this.close();
    }
  }

  /**
   * registers the listener and starts the thread.
   */
  public void setup() {
    this.plugin.getServer().getPluginManager().registerEvents(this, this.plugin);
    this.thread.start();
  }
}
|
#!/bin/sh
# ETL pipeline for the "vertical scientific research project count" metric:
# stage rows in an external Hive table, then export them to MySQL via sqoop.
# NOTE(review): 'function' and 'source' are bash-isms under #!/bin/sh — works
# where /bin/sh is bash; confirm on the deployment hosts.
cd `dirname $0`
source ./../config.sh
exec_dir major_portrait_scientific_num
HIVE_DB=assurance
HIVE_TABLE=major_portrait_scientific_num
TARGET_TABLE=im_quality_major_data_info
DATA_NO=ZY_ZXKYXMSL

# Drop and recreate the external Hive staging table (and its HDFS location).
# NOTE(review): the fn_log message below says "横向" (horizontal) while the
# table comment says "纵向" (vertical) — confirm which is intended.
function create_table() {
hadoop fs -rm -r ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} || :
hive -e "DROP TABLE IF EXISTS ${HIVE_DB}.${HIVE_TABLE};"
hive -e "CREATE EXTERNAL TABLE IF NOT EXISTS ${HIVE_DB}.${HIVE_TABLE}(
data_no String comment '数据项编号',
data_name String comment '数据项名称',
major_no String comment '专业编号',
major_name String comment '专业名称',
data_cycle String comment '数据统计周期 YEAR 年 MONTH 月 DAY 日 QUARTER 季度 OTHER 其他',
data_type String comment '数据类型 NUMBER 数值类型 ENUM 枚举类型',
data_time String comment '数据日期 年YYYY 月YYYYmm 日YYYYMMDD 季度YYYY-1,yyyy-2,yyyy-3,yyyy-4 学期 yyyy-yyyy 学期 yyyy-yyyy-1,yyyy-yyyy-2',
data_value String comment '数据项值(数字保存数字,如果是数据字典枚举保存key)',
is_new String comment '是否最新 是YES 否NO',
create_time String comment '创建时间'
) COMMENT '纵向科研项目数量'
LOCATION '${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE}'"
fn_log "创建表——横向科研项目数量表:${HIVE_DB}.${HIVE_TABLE}"
}

# Load ALL historical metric rows into the staging table.
function import_table() {
hive -e "
INSERT INTO TABLE ${HIVE_DB}.${HIVE_TABLE}
select
c.data_no as data_no,
c.data_name as data_name,
a.major_code as major_no,
b.name as major_name,
c.data_cycle as data_cycle,
c.data_type as data_type,
a.semester_year as data_time ,
a.portrait_project_num as data_value,
'NO' as is_new,
FROM_UNIXTIME(UNIX_TIMESTAMP()) AS create_time
from
app.major_scientific_info a
left join model.basic_major_info b
on a.major_code=b.code
,assurance.im_quality_data_base_info c
where c.data_no ='${DATA_NO}' and a.major_code !=''
"
fn_log "导入数据 —— 纵向科研项目数量:${HIVE_DB}.${HIVE_TABLE}"
}

# Export every staged row to MySQL via sqoop, then recompute the is_new flags
# (latest data_time gets YES, everything else NO).
function export_table() {
DATE_TIME=`hive -e "select max(data_time) from ${HIVE_DB}.${HIVE_TABLE} " `
clear_mysql_data "delete from im_quality_major_data_info where data_no ='${DATA_NO}';"
sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
--table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
--input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
--null-string '\\N' --null-non-string '\\N' \
--columns 'data_no,data_name,major_no,major_name,data_cycle,data_type,data_time,data_value,is_new,create_time'
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'NO' where data_no ='${DATA_NO}';"
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'YES' where data_no ='${DATA_NO}' and data_time='${DATE_TIME}'"
fn_log "导出数据--纵向科研项目数量:${HIVE_DB}.${TARGET_TABLE}"
}

# Incremental variant: load only the most recent semester's rows.
function import_table_new() {
hive -e "
INSERT INTO TABLE ${HIVE_DB}.${HIVE_TABLE}
select
c.data_no as data_no,
c.data_name as data_name,
a.major_code as major_no,
b.name as major_name,
c.data_cycle as data_cycle,
c.data_type as data_type,
a.semester_year as data_time ,
a.portrait_project_num as data_value,
'NO' as is_new,
FROM_UNIXTIME(UNIX_TIMESTAMP()) AS create_time
from
app.major_scientific_info a
left join model.basic_major_info b
on a.major_code=b.code
,assurance.im_quality_data_base_info c
where c.data_no ='${DATA_NO}' and a.major_code !=''
and a.semester_year in (select max(s.semester_year) from model.basic_major_info s )
"
fn_log "导入数据 —— 纵向科研项目数量:${HIVE_DB}.${HIVE_TABLE}"
}

# Export only the newest data_time slice, replacing any previous rows for it.
function export_table_new() {
DATE_TIME=`hive -e "select max(data_time) from ${HIVE_DB}.${HIVE_TABLE} " `
clear_mysql_data "delete from im_quality_major_data_info
where data_no = '${DATA_NO}' and data_time= '${DATE_TIME}';"
sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
--table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
--input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
--null-string '\\N' --null-non-string '\\N' \
--columns 'data_no,data_name,major_no,major_name,data_cycle,data_type,data_time,data_value,is_new,create_time'
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'NO' where data_no = '${DATA_NO}';"
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'YES' where data_no = '${DATA_NO}' and data_time= '${DATE_TIME}' "
fn_log "导出数据--纵向科研项目数量:${HIVE_DB}.${TARGET_TABLE}"
}

# Entry points are intentionally commented out; uncomment one pipeline
# (full reload or incremental) before scheduling this script.
#create_table
#import_table
#export_table
#create_table
#import_table_new
#export_table_new
#finish
|
class NewClass:
    """Simple container for two data values with placeholder methods."""

    def __init__(self, data1, data2):
        # First and second payload values; no validation is performed.
        self.data1 = data1
        self.data2 = data2

    def method1(self):
        # Fixed: a comment-only body is a syntax error in Python — methods
        # need at least one statement. Raise until implemented.
        raise NotImplementedError

    def method2(self, param1):
        # Placeholder, same as method1.
        raise NotImplementedError
// Declare the variables: meses (months) and quantidadeDeAlunos (number of students).
var meses;
var quantidadeDeAlunos;
package ff.camaro.plugin.gradle_plugin;
import org.gradle.api.Project;
import org.gradle.api.publish.maven.plugins.MavenPublishPlugin;
import ff.camaro.Configurator;
/**
 * Camaro Gradle plugin that enables Maven publishing on the target project.
 */
public class Maven extends GradlePlugin {
	@Override
	public void apply(final Project prj, final Configurator configurator) {
		// Delegates to Gradle's built-in maven-publish plugin; the configurator
		// argument is part of the GradlePlugin contract but unused here.
		prj.getPluginManager().apply(MavenPublishPlugin.class);
	}
}
|
#!/usr/bin/env bash
# Generate GitHub statistics for the kyma-project / kyma-incubator repos.
# pipefail added so a failure anywhere in a pipeline is not masked.
set -euo pipefail

# log::banner is expected to be provided by the CI environment — TODO confirm
# where it is sourced from.
log::banner "Generating GitHub stats..."

# githubstats authenticates via this environment variable.
export APP_GITHUB_ACCESS_TOKEN="${BOT_GITHUB_TOKEN}"
/prow-tools/githubstats -o kyma-project -r kyma
/prow-tools/githubstats -o kyma-project -r helm-broker
/prow-tools/githubstats -o kyma-project -r rafter
/prow-tools/githubstats -o kyma-project -r test-infra
/prow-tools/githubstats -o kyma-incubator -r compass
|
import io
import re
import tempfile
import zipfile

import requests
def extract_version(repo_url: str) -> str:
    """Return the version string declared in a GitHub repository.

    Downloads the repository's ``main`` branch as a zip archive and scans
    every ``.py`` member for a ``__VERSION__ = "..."`` assignment.

    Args:
        repo_url: Base URL of the repository, e.g. ``https://github.com/org/repo``.

    Returns:
        The first matching version string, or ``"Version not found"`` when
        the download fails, the archive is invalid, or nothing matches.
    """
    try:
        # Get the main-branch snapshot of the repository.
        response = requests.get(f"{repo_url}/archive/refs/heads/main.zip")
        response.raise_for_status()

        # Matches e.g. __VERSION__ = "1.2.3" (single or double quotes).
        version_pattern = re.compile(r'__VERSION__\s*=\s*[\'"]([^\'"]+)[\'"]')

        # Read members directly from the in-memory archive. The original
        # code iterated the None returned by the get_python_files_from_zip
        # stub (an uncaught TypeError) and open()ed member names that were
        # never extracted to disk.
        with zipfile.ZipFile(io.BytesIO(response.content)) as archive:
            for member in archive.namelist():
                if not member.endswith(".py"):
                    continue
                content = archive.read(member).decode("utf-8", errors="replace")
                match = version_pattern.search(content)
                if match:
                    return match.group(1)
        return "Version not found"
    except (requests.RequestException, zipfile.BadZipFile):
        # Network failure or corrupt archive — same sentinel as "not found".
        return "Version not found"
def get_python_files_from_zip(zip_file: bytes):
    """Extract the ``.py`` members of a zip archive and yield their paths.

    The original was an unimplemented ``pass`` stub that returned ``None``,
    so callers iterating the result crashed with TypeError.

    Args:
        zip_file: Raw bytes of a zip archive.

    Yields:
        Filesystem paths of the extracted Python files, suitable for
        ``open()`` by the caller.

    Raises:
        zipfile.BadZipFile: If ``zip_file`` is not a valid zip archive.
    """
    # Extract into a fresh temporary directory so the yielded names are
    # real, openable paths (callers read them back with open()).
    extract_dir = tempfile.mkdtemp(prefix="repo_zip_")
    with zipfile.ZipFile(io.BytesIO(zip_file)) as archive:
        for member in archive.namelist():
            # Skip directories and non-Python members.
            if member.endswith(".py"):
                yield archive.extract(member, path=extract_dir)
import pytest
from belvo import resources
@pytest.mark.parametrize(
    # params must be ordered sequences (tuples), not sets: a set has
    # arbitrary iteration order, so *params could pass the arguments
    # swapped (e.g. session before token).
    ("method", "params"),
    [("resume", ("fake-token", "fake-session")), ("delete", ("fake-token",))],
)
def test_institutions_raises_not_implemented(method, params, api_session):
    """Unsupported Institutions operations must raise NotImplementedError."""
    institutions = resources.Institutions(api_session)
    with pytest.raises(NotImplementedError):
        # The call itself must raise; its return value is irrelevant (the
        # original `assert` after the raising call could never matter).
        getattr(institutions, method)(*params)
|
/*
* Copyright (c) 2013-2016, Freescale Semiconductor, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* o Neither the name of Freescale Semiconductor, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _FSL_FLASH_H_
#define _FSL_FLASH_H_
#if (defined(BL_TARGET_FLASH) || defined(BL_TARGET_ROM) || defined(BL_TARGET_RAM))
#include <assert.h>
#include <string.h>
#include "fsl_device_registers.h"
#include "bootloader_common.h"
#else
#include "fsl_common.h"
#endif
/*******************************************************************************
* Definitions
******************************************************************************/
/*!
* @addtogroup flash_driver
* @{
*/
/*!
* @name Flash version
* @{
*/
/*! @brief Construct the version number for drivers. */
#if !defined(MAKE_VERSION)
#define MAKE_VERSION(major, minor, bugfix) (((major) << 16) | ((minor) << 8) | (bugfix))
#endif
/*! @brief FLASH driver version for SDK*/
#define FSL_FLASH_DRIVER_VERSION (MAKE_VERSION(2, 1, 0)) /*!< Version 2.1.0. */
/*! @brief FLASH driver version for ROM*/
enum _flash_driver_version_constants
{
kFLASH_driverVersionName = 'F', /*!< Flash driver version name.*/
kFLASH_driverVersionMajor = 2, /*!< Major flash driver version.*/
kFLASH_driverVersionMinor = 1, /*!< Minor flash driver version.*/
kFLASH_driverVersionBugfix = 0 /*!< Bugfix for flash driver version.*/
};
/*@}*/
/*!
* @name Flash configuration
* @{
*/
/*! @brief Whether to support FlexNVM in flash driver */
#if !defined(FLASH_SSD_CONFIG_ENABLE_FLEXNVM_SUPPORT)
#define FLASH_SSD_CONFIG_ENABLE_FLEXNVM_SUPPORT 1 /*!< Enable FlexNVM support by default. */
#endif
/*! @brief Whether the FlexNVM is enabled in flash driver */
#define FLASH_SSD_IS_FLEXNVM_ENABLED (FLASH_SSD_CONFIG_ENABLE_FLEXNVM_SUPPORT && FSL_FEATURE_FLASH_HAS_FLEX_NVM)
/*! @brief Flash driver location. */
#if !defined(FLASH_DRIVER_IS_FLASH_RESIDENT)
#if (!defined(BL_TARGET_ROM) && !defined(BL_TARGET_RAM))
#define FLASH_DRIVER_IS_FLASH_RESIDENT 1 /*!< Used for flash resident application. */
#else
#define FLASH_DRIVER_IS_FLASH_RESIDENT 0 /*!< Used for non-flash resident application. */
#endif
#endif
/*! @brief Flash Driver Export option */
#if !defined(FLASH_DRIVER_IS_EXPORTED)
#if (defined(BL_TARGET_ROM) || defined(BL_TARGET_FLASH))
#define FLASH_DRIVER_IS_EXPORTED 1 /*!< Used for ROM bootloader. */
#else
#define FLASH_DRIVER_IS_EXPORTED 0 /*!< Used for SDK application. */
#endif
#endif
/*@}*/
/*!
* @name Flash status
* @{
*/
/*! @brief Flash driver status group. */
#if defined(kStatusGroup_FlashDriver)
#define kStatusGroupGeneric kStatusGroup_Generic
#define kStatusGroupFlashDriver kStatusGroup_FlashDriver
#elif defined(kStatusGroup_FLASH)
#define kStatusGroupGeneric kStatusGroup_Generic
#define kStatusGroupFlashDriver kStatusGroup_FLASH
#else
#define kStatusGroupGeneric 0
#define kStatusGroupFlashDriver 1
#endif
/*! @brief Construct a status code value from a group and code number. */
#if !defined(MAKE_STATUS)
#define MAKE_STATUS(group, code) ((((group)*100) + (code)))
#endif
/*!
 * @brief Flash driver status codes.
 */
enum _flash_status
{
    kStatus_FLASH_Success = MAKE_STATUS(kStatusGroupGeneric, 0),         /*!< Api is executed successfully*/
    kStatus_FLASH_InvalidArgument = MAKE_STATUS(kStatusGroupGeneric, 4), /*!< Invalid argument*/
    kStatus_FLASH_SizeError = MAKE_STATUS(kStatusGroupFlashDriver, 0),   /*!< Error size*/
    kStatus_FLASH_AlignmentError =
        MAKE_STATUS(kStatusGroupFlashDriver, 1), /*!< Parameter is not aligned with specified baseline*/
    kStatus_FLASH_AddressError = MAKE_STATUS(kStatusGroupFlashDriver, 2), /*!< Address is out of range */
    kStatus_FLASH_AccessError =
        MAKE_STATUS(kStatusGroupFlashDriver, 3), /*!< Invalid instruction codes and out-of bounds addresses */
    kStatus_FLASH_ProtectionViolation = MAKE_STATUS(
        kStatusGroupFlashDriver, 4), /*!< The program/erase operation is requested to execute on protected areas */
    kStatus_FLASH_CommandFailure =
        MAKE_STATUS(kStatusGroupFlashDriver, 5), /*!< Run-time error during command execution. */
    kStatus_FLASH_UnknownProperty = MAKE_STATUS(kStatusGroupFlashDriver, 6), /*!< Unknown property.*/
    kStatus_FLASH_EraseKeyError = MAKE_STATUS(kStatusGroupFlashDriver, 7), /*!< Api erase key is invalid.*/
    kStatus_FLASH_RegionExecuteOnly = MAKE_STATUS(kStatusGroupFlashDriver, 8), /*!< Current region is execute only.*/
    kStatus_FLASH_ExecuteInRamFunctionNotReady =
        MAKE_STATUS(kStatusGroupFlashDriver, 9), /*!< Execute-in-ram function is not available.*/
    kStatus_FLASH_PartitionStatusUpdateFailure =
        MAKE_STATUS(kStatusGroupFlashDriver, 10), /*!< Failed to update partition status.*/
    kStatus_FLASH_SetFlexramAsEepromError =
        MAKE_STATUS(kStatusGroupFlashDriver, 11), /*!< Failed to set flexram as eeprom.*/
    kStatus_FLASH_RecoverFlexramAsRamError =
        MAKE_STATUS(kStatusGroupFlashDriver, 12), /*!< Failed to recover flexram as ram.*/
    kStatus_FLASH_SetFlexramAsRamError = MAKE_STATUS(kStatusGroupFlashDriver, 13), /*!< Failed to set flexram as ram.*/
    kStatus_FLASH_RecoverFlexramAsEepromError =
        MAKE_STATUS(kStatusGroupFlashDriver, 14), /*!< Failed to recover flexram as eeprom.*/
    kStatus_FLASH_CommandNotSupported = MAKE_STATUS(kStatusGroupFlashDriver, 15), /*!< Flash api is not supported.*/
    kStatus_FLASH_SwapSystemNotInUninitialized =
        MAKE_STATUS(kStatusGroupFlashDriver, 16), /*!< Swap system is not in uninitialized state.*/
    kStatus_FLASH_SwapIndicatorAddressError =
        MAKE_STATUS(kStatusGroupFlashDriver, 17), /*!< Swap indicator address is invalid.*/
};
/*@}*/
/*!
* @name Flash API key
* @{
*/
/*! @brief Construct the four char code for flash driver API key. */
#if !defined(FOUR_CHAR_CODE)
#define FOUR_CHAR_CODE(a, b, c, d) (((d) << 24) | ((c) << 16) | ((b) << 8) | ((a)))
#endif
/*!
* @brief Enumeration for flash driver API keys.
*
* @note The resulting value is built with a byte order such that the string
* being readable in expected order when viewed in a hex editor, if the value
* is treated as a 32-bit little endian value.
*/
enum _flash_driver_api_keys
{
kFLASH_apiEraseKey = FOUR_CHAR_CODE('k', 'f', 'e', 'k') /*!< Key value used to validate all flash erase APIs.*/
};
/*@}*/
/*!
* @brief Enumeration for supported flash margin levels.
*/
typedef enum _flash_margin_value
{
kFLASH_marginValueNormal, /*!< Use the 'normal' read level for 1s.*/
kFLASH_marginValueUser, /*!< Apply the 'User' margin to the normal read-1 level.*/
kFLASH_marginValueFactory, /*!< Apply the 'Factory' margin to the normal read-1 level.*/
kFLASH_marginValueInvalid /*!< Not real margin level, Used to determine the range of valid margin level. */
} flash_margin_value_t;
/*!
* @brief Enumeration for the three possible flash security states.
*/
typedef enum _flash_security_state
{
kFLASH_securityStateNotSecure, /*!< Flash is not secure.*/
kFLASH_securityStateBackdoorEnabled, /*!< Flash backdoor is enabled.*/
kFLASH_securityStateBackdoorDisabled /*!< Flash backdoor is disabled.*/
} flash_security_state_t;
/*!
* @brief Enumeration for the three possible flash protection levels.
*/
typedef enum _flash_protection_state
{
kFLASH_protectionStateUnprotected, /*!< Flash region is not protected.*/
kFLASH_protectionStateProtected, /*!< Flash region is protected.*/
kFLASH_protectionStateMixed /*!< Flash is mixed with protected and unprotected region.*/
} flash_protection_state_t;
/*!
* @brief Enumeration for the three possible flash execute access levels.
*/
typedef enum _flash_execute_only_access_state
{
kFLASH_accessStateUnLimited, /*!< Flash region is unLimited.*/
kFLASH_accessStateExecuteOnly, /*!< Flash region is execute only.*/
kFLASH_accessStateMixed /*!< Flash is mixed with unLimited and execute only region.*/
} flash_execute_only_access_state_t;
/*!
 * @brief Enumeration for various flash properties.
 */
typedef enum _flash_property_tag
{
    kFLASH_propertyPflashSectorSize = 0x00U,         /*!< Pflash sector size property.*/
    kFLASH_propertyPflashTotalSize = 0x01U,          /*!< Pflash total size property.*/
    kFLASH_propertyPflashBlockSize = 0x02U,          /*!< Pflash block size property.*/
    kFLASH_propertyPflashBlockCount = 0x03U,         /*!< Pflash block count property.*/
    kFLASH_propertyPflashBlockBaseAddr = 0x04U,      /*!< Pflash block base address property.*/
    kFLASH_propertyPflashFacSupport = 0x05U,         /*!< Pflash fac support property.*/
    kFLASH_propertyPflashAccessSegmentSize = 0x06U,  /*!< Pflash access segment size property.*/
    kFLASH_propertyPflashAccessSegmentCount = 0x07U, /*!< Pflash access segment count property.*/
    kFLASH_propertyFlexRamBlockBaseAddr = 0x08U,     /*!< FlexRam block base address property.*/
    kFLASH_propertyFlexRamTotalSize = 0x09U,         /*!< FlexRam total size property.*/
    kFLASH_propertyDflashSectorSize = 0x10U,         /*!< Dflash sector size property.*/
    kFLASH_propertyDflashTotalSize = 0x11U,          /*!< Dflash total size property.*/
    kFLASH_propertyDflashBlockSize = 0x12U,          /*!< Dflash block size property.*/
    kFLASH_propertyDflashBlockCount = 0x13U,         /*!< Dflash block count property.*/
    kFLASH_propertyDflashBlockBaseAddr = 0x14U,      /*!< Dflash block base address property.*/
    kFLASH_propertyEepromTotalSize = 0x15U           /*!< Eeprom total size property.*/
} flash_property_tag_t;
/*!
* @brief Constants for execute-in-ram flash function.
*/
enum _flash_execute_in_ram_function_constants
{
kFLASH_executeInRamFunctionMaxSize = 64U, /*!< Max size of execute-in-ram function.*/
kFLASH_executeInRamFunctionTotalNum = 2U /*!< Total number of execute-in-ram functions.*/
};
/*!
* @brief Flash execute-in-ram function information.
*/
typedef struct _flash_execute_in_ram_function_config
{
uint32_t activeFunctionCount; /*!< Number of available execute-in-ram functions.*/
uint8_t *flashRunCommand; /*!< execute-in-ram function: flash_run_command.*/
uint8_t *flashCacheClearCommand; /*!< execute-in-ram function: flash_cache_clear_command.*/
} flash_execute_in_ram_function_config_t;
/*!
* @brief Enumeration for the two possible options of flash read resource command.
*/
typedef enum _flash_read_resource_option
{
kFLASH_resourceOptionFlashIfr =
0x00U, /*!< Select code for Program flash 0 IFR, Program flash swap 0 IFR, Data flash 0 IFR */
kFLASH_resourceOptionVersionId = 0x01U /*!< Select code for Version ID*/
} flash_read_resource_option_t;
/*!
 * @brief Enumeration for the range of special-purpose flash resource
 */
enum _flash_read_resource_range
{
#if (FSL_FEATURE_FLASH_IS_FTFE == 1)
    kFLASH_resourceRangePflashIfrSizeInBytes = 1024U, /*!< Pflash IFR size in byte.*/
    kFLASH_resourceRangeVersionIdSizeInBytes = 8U,    /*!< Version ID IFR size in byte.*/
    kFLASH_resourceRangeVersionIdStart = 0x08U,       /*!< Version ID IFR start address.*/
    kFLASH_resourceRangeVersionIdEnd = 0x0FU,         /*!< Version ID IFR end address.*/
#else /* FSL_FEATURE_FLASH_IS_FTFL == 1 or FSL_FEATURE_FLASH_IS_FTFA == 1 */
    kFLASH_resourceRangePflashIfrSizeInBytes = 256U, /*!< Pflash IFR size in byte.*/
    kFLASH_resourceRangeVersionIdSizeInBytes = 8U,   /*!< Version ID IFR size in byte.*/
    kFLASH_resourceRangeVersionIdStart = 0x00U,      /*!< Version ID IFR start address.*/
    kFLASH_resourceRangeVersionIdEnd = 0x07U,        /*!< Version ID IFR end address.*/
#endif
    kFLASH_resourceRangePflashSwapIfrStart = 0x40000U, /*!< Pflash swap IFR start address.*/
    kFLASH_resourceRangePflashSwapIfrEnd = 0x403FFU,   /*!< Pflash swap IFR end address.*/
    kFLASH_resourceRangeDflashIfrStart = 0x800000U,    /*!< Dflash IFR start address.*/
    kFLASH_resourceRangeDflashIfrEnd = 0x8003FFU,      /*!< Dflash IFR end address.*/
};
/*!
 * @brief Enumeration for the two possible options of set flexram function command.
 */
typedef enum _flash_flexram_function_option
{
    kFLASH_flexramFunctionOptionAvailableAsRam = 0xFFU,    /*!< Option used to make FlexRAM available as RAM */
    kFLASH_flexramFunctionOptionAvailableForEeprom = 0x00U /*!< Option used to make FlexRAM available for EEPROM */
} flash_flexram_function_option_t;
/*!
* @brief Enumeration for the possible options of Swap function
*/
typedef enum _flash_swap_function_option
{
kFLASH_swapFunctionOptionEnable = 0x00U, /*!< Option used to enable Swap function */
kFLASH_swapFunctionOptionDisable = 0x01U /*!< Option used to Disable Swap function */
} flash_swap_function_option_t;
/*!
 * @brief Enumeration for the possible options of Swap Control commands
 */
typedef enum _flash_swap_control_option
{
    kFLASH_swapControlOptionIntializeSystem = 0x01U,    /*!< Option used to Initialize Swap System */
    kFLASH_swapControlOptionSetInUpdateState = 0x02U,   /*!< Option used to Set Swap in Update State */
    kFLASH_swapControlOptionSetInCompleteState = 0x04U, /*!< Option used to Set Swap in Complete State */
    kFLASH_swapControlOptionReportStatus = 0x08U,       /*!< Option used to Report Swap Status */
    kFLASH_swapControlOptionDisableSystem = 0x10U       /*!< Option used to Disable Swap System */
} flash_swap_control_option_t;
/*!
* @brief Enumeration for the possible flash swap status.
*/
typedef enum _flash_swap_state
{
kFLASH_swapStateUninitialized = 0x00U, /*!< Flash swap system is in uninitialized state.*/
kFLASH_swapStateReady = 0x01U, /*!< Flash swap system is in ready state.*/
kFLASH_swapStateUpdate = 0x02U, /*!< Flash swap system is in update state.*/
kFLASH_swapStateUpdateErased = 0x03U, /*!< Flash swap system is in updateErased state.*/
kFLASH_swapStateComplete = 0x04U, /*!< Flash swap system is in complete state.*/
kFLASH_swapStateDisabled = 0x05U /*!< Flash swap system is in disabled state.*/
} flash_swap_state_t;
/*!
 * @brief Enumeration for the possible flash swap block status
 */
typedef enum _flash_swap_block_status
{
    kFLASH_swapBlockStatusLowerHalfProgramBlocksAtZero =
        0x00U, /*!< Swap block status is that lower half program block at zero.*/
    kFLASH_swapBlockStatusUpperHalfProgramBlocksAtZero =
        0x01U, /*!< Swap block status is that upper half program block at zero.*/
} flash_swap_block_status_t;
/*!
* @brief Flash Swap information.
*/
typedef struct _flash_swap_state_config
{
flash_swap_state_t flashSwapState; /*!< Current swap system status.*/
flash_swap_block_status_t currentSwapBlockStatus; /*!< Current swap block status.*/
flash_swap_block_status_t nextSwapBlockStatus; /*!< Next swap block status.*/
} flash_swap_state_config_t;
/*!
 * @brief Flash Swap IFR fields.
 */
typedef struct _flash_swap_ifr_field_config
{
    uint16_t swapIndicatorAddress; /*!< Swap indicator address field.*/
    uint16_t swapEnableWord;       /*!< Swap enable word field.*/
    uint8_t reserved0[6];          /*!< Reserved field.*/
    uint16_t swapDisableWord;      /*!< Swap disable word field.*/
    uint8_t reserved1[4];          /*!< Reserved field.*/
} flash_swap_ifr_field_config_t;
/*!
* @brief Enumeration for FlexRAM load during reset option.
*/
typedef enum _flash_partition_flexram_load_option
{
kFLASH_partitionFlexramLoadOptionLoadedWithValidEepromData =
0x00U, /*!< FlexRAM is loaded with valid EEPROM data during reset sequence.*/
kFLASH_partitionFlexramLoadOptionNotLoaded = 0x01U /*!< FlexRAM is not loaded during reset sequence.*/
} flash_partition_flexram_load_option_t;
/*! @brief callback type used for pflash block*/
typedef void (*flash_callback_t)(void);
/*!
* @brief Active flash information for current operation.
*/
typedef struct _flash_operation_config
{
uint32_t convertedAddress; /*!< Converted address for current flash type.*/
uint32_t activeSectorSize; /*!< Sector size of current flash type.*/
uint32_t activeBlockSize; /*!< Block size of current flash type.*/
uint32_t blockWriteUnitSize; /*!< write unit size.*/
uint32_t sectorCmdAddressAligment; /*!< Erase sector command address alignment.*/
uint32_t sectionCmdAddressAligment; /*!< Program/Verify section command address alignment.*/
uint32_t resourceCmdAddressAligment; /*!< Read resource command address alignment.*/
uint32_t checkCmdAddressAligment; /*!< Program check command address alignment.*/
} flash_operation_config_t;
/*! @brief Flash driver state information.
*
* An instance of this structure is allocated by the user of the flash driver and
* passed into each of the driver APIs.
*/
typedef struct _flash_config
{
uint32_t PFlashBlockBase; /*!< Base address of the first PFlash block */
uint32_t PFlashTotalSize; /*!< Size of all combined PFlash block. */
uint32_t PFlashBlockCount; /*!< Number of PFlash blocks. */
uint32_t PFlashSectorSize; /*!< Size in bytes of a sector of PFlash. */
flash_callback_t PFlashCallback; /*!< Callback function for flash API. */
uint32_t PFlashAccessSegmentSize; /*!< Size in bytes of a access segment of PFlash. */
uint32_t PFlashAccessSegmentCount; /*!< Number of PFlash access segments. */
uint32_t *flashExecuteInRamFunctionInfo; /*!< Info struct of flash execute-in-ram function. */
uint32_t FlexRAMBlockBase; /*!< For FlexNVM device, this is the base address of FlexRAM
For non-FlexNVM device, this is the base address of acceleration RAM memory */
uint32_t FlexRAMTotalSize; /*!< For FlexNVM device, this is the size of FlexRAM
For non-FlexNVM device, this is the size of acceleration RAM memory */
uint32_t DFlashBlockBase; /*!< For FlexNVM device, this is the base address of D-Flash memory (FlexNVM memory);
For non-FlexNVM device, this field is unused */
uint32_t DFlashTotalSize; /*!< For FlexNVM device, this is total size of the FlexNVM memory;
For non-FlexNVM device, this field is unused */
uint32_t EEpromTotalSize; /*!< For FlexNVM device, this is the size in byte of EEPROM area which was partitioned
from FlexRAM;
For non-FlexNVM device, this field is unused */
} flash_config_t;
/*******************************************************************************
* API
******************************************************************************/
#if defined(__cplusplus)
extern "C" {
#endif
/*!
* @name Initialization
* @{
*/
/*!
* @brief Initializes global flash properties structure members
*
* This function checks and initializes Flash module for the other Flash APIs.
*
* @param config Pointer to storage for the driver runtime state.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_PartitionStatusUpdateFailure Failed to update partition status.
*/
status_t FLASH_Init(flash_config_t *config);
/*!
* @brief Set the desired flash callback function
*
* @param config Pointer to storage for the driver runtime state.
* @param callback callback function to be stored in driver
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
*/
status_t FLASH_SetCallback(flash_config_t *config, flash_callback_t callback);
/*!
* @brief Prepare flash execute-in-ram functions
*
* @param config Pointer to storage for the driver runtime state.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
*/
#if FLASH_DRIVER_IS_FLASH_RESIDENT
status_t FLASH_PrepareExecuteInRamFunctions(flash_config_t *config);
#endif
/*@}*/
/*!
* @name Erasing
* @{
*/
/*!
* @brief Erases entire flash
*
* @param config Pointer to storage for the driver runtime state.
* @param key value used to validate all flash erase APIs.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_EraseKeyError Api erase key is invalid.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
* @retval #kStatus_FLASH_PartitionStatusUpdateFailure Failed to update partition status
*/
status_t FLASH_EraseAll(flash_config_t *config, uint32_t key);
/*!
* @brief Erases flash sectors encompassed by parameters passed into function
*
* This function erases the appropriate number of flash sectors based on the
* desired start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be erased.
* The start address does not need to be sector aligned but must be word-aligned.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be erased. Must be word aligned.
* @param key value used to validate all flash erase APIs.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
* @retval #kStatus_FLASH_EraseKeyError Api erase key is invalid.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_Erase(flash_config_t *config, uint32_t start, uint32_t lengthInBytes, uint32_t key);
/*!
* @brief Erases entire flash, including protected sectors.
*
* @param config Pointer to storage for the driver runtime state.
* @param key value used to validate all flash erase APIs.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_EraseKeyError Api erase key is invalid.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
* @retval #kStatus_FLASH_PartitionStatusUpdateFailure Failed to update partition status
*/
#if defined(FSL_FEATURE_FLASH_HAS_ERASE_ALL_BLOCKS_UNSECURE_CMD) && FSL_FEATURE_FLASH_HAS_ERASE_ALL_BLOCKS_UNSECURE_CMD
status_t FLASH_EraseAllUnsecure(flash_config_t *config, uint32_t key);
#endif
/*!
* @brief Erases all program flash execute-only segments defined by the FXACC registers.
*
* @param config Pointer to storage for the driver runtime state.
* @param key value used to validate all flash erase APIs.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_EraseKeyError Api erase key is invalid.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_EraseAllExecuteOnlySegments(flash_config_t *config, uint32_t key);
/*@}*/
/*!
* @name Programming
* @{
*/
/*!
* @brief Programs flash with data at locations passed in through parameters
*
* This function programs the flash memory with desired data for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be programmed. Must be
* word-aligned.
* @param src Pointer to the source buffer of data that is to be programmed
* into the flash.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be programmed. Must be word-aligned.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_Program(flash_config_t *config, uint32_t start, uint32_t *src, uint32_t lengthInBytes);
/*!
* @brief Programs Program Once Field through parameters
*
* This function programs the Program Once Field with desired data for a given
* flash area as determined by the index and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param index The index indicating which area of Program Once Field to be programmed.
* @param src Pointer to the source buffer of data that is to be programmed
* into the Program Once Field.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be programmed. Must be word-aligned.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_ProgramOnce(flash_config_t *config, uint32_t index, uint32_t *src, uint32_t lengthInBytes);
/*!
* @brief Programs flash with data at locations passed in through parameters via Program Section command
*
* This function programs the flash memory with desired data for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be programmed. Must be
* word-aligned.
* @param src Pointer to the source buffer of data that is to be programmed
* into the flash.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be programmed. Must be word-aligned.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
* @retval #kStatus_FLASH_SetFlexramAsRamError Failed to set flexram as ram
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
* @retval #kStatus_FLASH_RecoverFlexramAsEepromError Failed to recover flexram as eeprom
*/
#if defined(FSL_FEATURE_FLASH_HAS_PROGRAM_SECTION_CMD) && FSL_FEATURE_FLASH_HAS_PROGRAM_SECTION_CMD
status_t FLASH_ProgramSection(flash_config_t *config, uint32_t start, uint32_t *src, uint32_t lengthInBytes);
#endif
/*!
* @brief Programs EEPROM with data at locations passed in through parameters
*
* This function programs the Emulated EEPROM with desired data for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be programmed. Must be
* word-aligned.
* @param src Pointer to the source buffer of data that is to be programmed
* into the flash.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be programmed. Must be word-aligned.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AddressError Address is out of range.
* @retval #kStatus_FLASH_SetFlexramAsEepromError Failed to set flexram as eeprom.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_RecoverFlexramAsRamError Failed to recover flexram as ram
*/
#if FLASH_SSD_IS_FLEXNVM_ENABLED
status_t FLASH_EepromWrite(flash_config_t *config, uint32_t start, uint8_t *src, uint32_t lengthInBytes);
#endif
/*@}*/
/*!
* @name Reading
* @{
*/
/*!
* @brief Read resource with data at locations passed in through parameters
*
* This function reads the flash memory with desired location for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be programmed. Must be
* word-aligned.
* @param dst Pointer to the destination buffer of data that is used to store
* data to be read.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be read. Must be word-aligned.
* @param option The resource option which indicates which area should be read back.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
#if defined(FSL_FEATURE_FLASH_HAS_READ_RESOURCE_CMD) && FSL_FEATURE_FLASH_HAS_READ_RESOURCE_CMD
status_t FLASH_ReadResource(
flash_config_t *config, uint32_t start, uint32_t *dst, uint32_t lengthInBytes, flash_read_resource_option_t option);
#endif
/*!
* @brief Read Program Once Field through parameters
*
* This function reads the program once field with the given index and length
*
* @param config Pointer to storage for the driver runtime state.
* @param index The index indicating the area of program once field to be read.
* @param dst Pointer to the destination buffer of data that is used to store
* data to be read.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be programmed. Must be word-aligned.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_ReadOnce(flash_config_t *config, uint32_t index, uint32_t *dst, uint32_t lengthInBytes);
/*@}*/
/*!
* @name Security
* @{
*/
/*!
* @brief Returns the security state via the pointer passed into the function
*
* This function retrieves the current Flash security status, including the
* security enabling state and the backdoor key enabling state.
*
* @param config Pointer to storage for the driver runtime state.
* @param state Pointer to the value returned for the current security status code:
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
*/
status_t FLASH_GetSecurityState(flash_config_t *config, flash_security_state_t *state);
/*!
* @brief Allows user to bypass security with a backdoor key
*
* If the MCU is in secured state, this function will unsecure the MCU by
* comparing the provided backdoor key with ones in the Flash Configuration
* Field.
*
* @param config Pointer to storage for the driver runtime state.
* @param backdoorKey Pointer to the user buffer containing the backdoor key.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_SecurityBypass(flash_config_t *config, const uint8_t *backdoorKey);
/*@}*/
/*!
* @name Verification
* @{
*/
/*!
* @brief Verifies erasure of entire flash at specified margin level
*
* This function will check to see if the flash have been erased to the
* specified read margin level.
*
* @param config Pointer to storage for the driver runtime state.
* @param margin Read margin choice
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_VerifyEraseAll(flash_config_t *config, flash_margin_value_t margin);
/*!
* @brief Verifies erasure of desired flash area at specified margin level
*
* This function will check the appropriate number of flash sectors based on
* the desired start address and length to see if the flash have been erased
* to the specified read margin level.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be verified.
* The start address does not need to be sector aligned but must be word-aligned.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be verified. Must be word-aligned.
* @param margin Read margin choice
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_VerifyErase(flash_config_t *config, uint32_t start, uint32_t lengthInBytes, flash_margin_value_t margin);
/*!
* @brief Verifies programming of desired flash area at specified margin level
*
* This function verifies the data programmed in the flash memory using the
* Flash Program Check Command and compares it with expected data for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be verified. Must be word-aligned.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be verified. Must be word-aligned.
* @param expectedData Pointer to the expected data that is to be
* verified against.
* @param margin Read margin choice
* @param failedAddress Pointer to returned failing address.
* @param failedData Pointer to returned failing data. Some derivatives do
* not include failed data as part of the FCCOBx registers. In this
* case, zeros are returned upon failure.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_VerifyProgram(flash_config_t *config,
uint32_t start,
uint32_t lengthInBytes,
const uint32_t *expectedData,
flash_margin_value_t margin,
uint32_t *failedAddress,
uint32_t *failedData);
/*!
* @brief Verifies if the program flash executeonly segments have been erased to
* the specified read margin level
*
* @param config Pointer to storage for the driver runtime state.
* @param margin Read margin choice
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_VerifyEraseAllExecuteOnlySegments(flash_config_t *config, flash_margin_value_t margin);
/*@}*/
/*!
* @name Protection
* @{
*/
/*!
* @brief Returns the protection state of desired flash area via the pointer passed into the function
*
* This function retrieves the current Flash protect status for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be checked. Must be word-aligned.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be checked. Must be word-aligned.
* @param protection_state Pointer to the value returned for the current
* protection status code for the desired flash area.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
*/
status_t FLASH_IsProtected(flash_config_t *config,
uint32_t start,
uint32_t lengthInBytes,
flash_protection_state_t *protection_state);
/*!
* @brief Returns the access state of desired flash area via the pointer passed into the function
*
* This function retrieves the current Flash access status for a given
* flash area as determined by the start address and length.
*
* @param config Pointer to storage for the driver runtime state.
* @param start The start address of the desired flash memory to be checked. Must be word-aligned.
* @param lengthInBytes The length, given in bytes (not words or long-words)
* to be checked. Must be word-aligned.
* @param access_state Pointer to the value returned for the current
* access status code for the desired flash area.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_AddressError Address is out of range.
*/
status_t FLASH_IsExecuteOnly(flash_config_t *config,
uint32_t start,
uint32_t lengthInBytes,
flash_execute_only_access_state_t *access_state);
/*@}*/
/*!
* @name Properties
* @{
*/
/*!
* @brief Returns the desired flash property.
*
* @param config Pointer to storage for the driver runtime state.
* @param whichProperty The desired property from the list of properties in
* enum flash_property_tag_t
* @param value Pointer to the value returned for the desired flash property
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_UnknownProperty unknown property tag
*/
status_t FLASH_GetProperty(flash_config_t *config, flash_property_tag_t whichProperty, uint32_t *value);
/*@}*/
/*!
* @name FlexRAM
* @{
*/
/*!
* @brief Set FlexRAM Function command
*
* @param config Pointer to storage for the driver runtime state.
* @param option The option used to set work mode of FlexRAM
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
#if defined(FSL_FEATURE_FLASH_HAS_SET_FLEXRAM_FUNCTION_CMD) && FSL_FEATURE_FLASH_HAS_SET_FLEXRAM_FUNCTION_CMD
status_t FLASH_SetFlexramFunction(flash_config_t *config, flash_flexram_function_option_t option);
#endif
/*@}*/
/*!
* @name Swap
* @{
*/
/*!
* @brief Configure Swap function or Check the swap state of Flash Module
*
* @param config Pointer to storage for the driver runtime state.
* @param address Address used to configure the flash swap function
* @param option The possible option used to configure Flash Swap function or check the flash swap status
* @param returnInfo Pointer to the data which is used to return the information of flash swap.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_SwapIndicatorAddressError Swap indicator address is invalid
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
#if defined(FSL_FEATURE_FLASH_HAS_SWAP_CONTROL_CMD) && FSL_FEATURE_FLASH_HAS_SWAP_CONTROL_CMD
status_t FLASH_SwapControl(flash_config_t *config,
uint32_t address,
flash_swap_control_option_t option,
flash_swap_state_config_t *returnInfo);
#endif
/*!
* @brief Swap the lower half flash with the higher half flash block
*
* @param config Pointer to storage for the driver runtime state.
* @param address Address used to configure the flash swap function
* @param option The possible option used to configure Flash Swap function or check the flash swap status
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_AlignmentError Parameter is not aligned with specified baseline.
* @retval #kStatus_FLASH_SwapIndicatorAddressError Swap indicator address is invalid
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
* @retval #kStatus_FLASH_SwapSystemNotInUninitialized Swap system is not in uninitialized state
*/
#if defined(FSL_FEATURE_FLASH_HAS_PFLASH_BLOCK_SWAP) && FSL_FEATURE_FLASH_HAS_PFLASH_BLOCK_SWAP
status_t FLASH_Swap(flash_config_t *config, uint32_t address, flash_swap_function_option_t option);
#endif
/*@}*/
/*!
* @name FlexNVM
* @{
*/
/*!
* @brief Prepares the FlexNVM block for use as data flash, EEPROM backup, or a combination of both and initializes the
* FlexRAM.
*
* @param config Pointer to storage for the driver runtime state.
* @param option The option used to set FlexRAM load behavior during reset.
* @param eepromDataSizeCode Determines the amount of FlexRAM used in each of the available EEPROM subsystems.
* @param flexnvmPartitionCode Specifies how to split the FlexNVM block between data flash memory and EEPROM backup
* memory supporting EEPROM functions.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_ExecuteInRamFunctionNotReady Execute-in-ram function is not available.
* @retval #kStatus_FLASH_AccessError Invalid instruction codes and out-of bounds addresses.
* @retval #kStatus_FLASH_ProtectionViolation The program/erase operation is requested to execute on protected areas.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
#if defined(FSL_FEATURE_FLASH_HAS_PROGRAM_PARTITION_CMD) && FSL_FEATURE_FLASH_HAS_PROGRAM_PARTITION_CMD
status_t FLASH_ProgramPartition(flash_config_t *config,
flash_partition_flexram_load_option_t option,
uint32_t eepromDataSizeCode,
uint32_t flexnvmPartitionCode);
#endif
/*@}*/
/*!
* @name Flash Protection Utilities
* @{
*/
/*!
* @brief Set PFLASH Protection to the intended protection status.
*
* @param config Pointer to storage for the driver runtime state.
* @param protectStatus The expected protect status user wants to set to PFlash protection register. Each bit is
* corresponding to protection of 1/32 of the total PFlash. The least significant bit is corresponding to the lowest
* address area of P-Flash. The most significant bit is corresponding to the highest address area of PFlash. There are
* two possible cases as shown below:
* 0: this area is protected.
* 1: this area is unprotected.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
status_t FLASH_PflashSetProtection(flash_config_t *config, uint32_t protectStatus);
/*!
* @brief Get PFLASH Protection Status.
*
* @param config Pointer to storage for the driver runtime state.
* @param protectStatus Protect status returned by PFlash IP. Each bit is corresponding to protection of 1/32 of the
* total PFlash. The least significant bit is corresponding to the lowest address area of PFlash. The most significant
* bit is corresponding to the highest address area of PFlash. There are two possible cases as below:
* 0: this area is protected.
* 1: this area is unprotected.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
*/
status_t FLASH_PflashGetProtection(flash_config_t *config, uint32_t *protectStatus);
/*!
* @brief Set DFLASH Protection to the intended protection status.
*
* @param config Pointer to storage for the driver runtime state.
* @param protectStatus The expected protect status user wants to set to DFlash protection register. Each bit is
* corresponding to protection of 1/8 of the total DFlash. The least significant bit is corresponding to the lowest
* address area of DFlash. The most significant bit is corresponding to the highest address area of DFlash. There are
* two possible cases as shown below:
* 0: this area is protected.
* 1: this area is unprotected.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_CommandNotSupported Flash api is not supported
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
#if FLASH_SSD_IS_FLEXNVM_ENABLED
status_t FLASH_DflashSetProtection(flash_config_t *config, uint8_t protectStatus);
#endif
/*!
* @brief Get DFLASH Protection Status.
*
* @param config Pointer to storage for the driver runtime state.
* @param protectStatus DFlash Protect status returned by PFlash IP. Each bit is corresponding to protection of 1/8 of
* the total DFlash. The least significant bit is corresponding to the lowest address area of DFlash. The most
* significant bit is corresponding to the highest address area of DFlash and so on. There are two possible cases as
* below:
* 0: this area is protected.
* 1: this area is unprotected.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_CommandNotSupported Flash api is not supported
*/
#if FLASH_SSD_IS_FLEXNVM_ENABLED
status_t FLASH_DflashGetProtection(flash_config_t *config, uint8_t *protectStatus);
#endif
/*!
* @brief Set EEPROM Protection to the intended protection status.
*
* @param config Pointer to storage for the driver runtime state.
* @param protectStatus The expected protect status user wants to set to EEPROM protection register. Each bit is
* corresponding to protection of 1/8 of the total EEPROM. The least significant bit is corresponding to the lowest
* address area of EEPROM. The most significant bit is corresponding to the highest address area of EEPROM, and so on.
* There are two possible cases as shown below:
* 0: this area is protected.
* 1: this area is unprotected.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_CommandNotSupported Flash api is not supported
* @retval #kStatus_FLASH_CommandFailure Run-time error during command execution.
*/
#if FLASH_SSD_IS_FLEXNVM_ENABLED
status_t FLASH_EepromSetProtection(flash_config_t *config, uint8_t protectStatus);
#endif
/*!
* @brief Get EEPROM Protection Status.
*
* @param config Pointer to storage for the driver runtime state.
* @param protectStatus EEPROM Protect status returned by the Flash IP. Each bit is corresponding to protection of 1/8 of
* the total EEPROM. The least significant bit is corresponding to the lowest address area of EEPROM. The most
* significant bit is corresponding to the highest address area of EEPROM. There are two possible cases as below:
* 0: this area is protected.
* 1: this area is unprotected.
*
* @retval #kStatus_FLASH_Success Api was executed successfully.
* @retval #kStatus_FLASH_InvalidArgument Invalid argument is provided.
* @retval #kStatus_FLASH_CommandNotSupported Flash api is not supported.
*/
#if FLASH_SSD_IS_FLEXNVM_ENABLED
status_t FLASH_EepromGetProtection(flash_config_t *config, uint8_t *protectStatus);
#endif
/*@}*/
#if defined(__cplusplus)
}
#endif
/*! @}*/
#endif /* _FSL_FLASH_H_ */
|
<gh_stars>0
# ref: some models in statsmodel lib: https://www.statsmodels.org/stable/api.html
# ref: some models in general: https://stackabuse.com/multiple-linear-regression-with-python/
# ref: https://towardsdatascience.com/linear-regression-in-6-lines-of-python-5e1d0cd05b8d
# ref: https://pbpython.com/notebook-alternative.html
# note: why no stata? "...if you use an unauthorized copy it will give you the wrong results without warning..."
# https://www.econjobrumors.com/topic/there-are-no-stata-14-and-stata-15-torrents
# ref: https://dergipark.org.tr/en/download/article-file/744047
import numpy as np
import pandas as pd
import re
# from sklearn.decomposition import PCA
# from statsmodels.iolib.summary2 import summary_col
def fsReformatColumnNames(sColName):
    """Normalize a raw survey column header into a snake_case identifier.

    Commas and the characters >, +, ?, . are stripped; spaces and hyphens
    become underscores; the result is lower-cased. Finally '_/_' collapses
    to '_' and one non-overlapping pass of '__' -> '_' is applied (so long
    underscore runs are only partially collapsed, matching prior behavior).
    """
    name = sColName
    for dropped in (',', '>', '+', '?', '.'):
        name = name.replace(dropped, '')
    for underscored in (' ', '-'):
        name = name.replace(underscored, '_')
    name = name.lower()
    name = name.replace('_/_', '_')
    return name.replace('__', '_')
def getData(dropFirstDummy=True):
    """Load the alt-ed meta-survey CSV and wrangle it into an analysis-ready frame.

    Steps: rename the raw survey-question headers to short snake_case names,
    map Likert anchor labels onto the numeric 1..10 scale, split the OCEAN
    personality answer into five columns, convert continuous columns to
    numeric, and derive composite columns (familiarity_count,
    is_large_firm_size, is_serious, is_tech, school aggregates, and the
    skill-gap columns added in place by compute_skill_gaps).

    NOTE(review): dropFirstDummy is currently unused -- the get_dummies block
    near the bottom is commented out; the parameter is kept for interface
    stability.
    """
    # Assumes the CSV sits in the current working directory -- TODO confirm.
    df = pd.read_csv('alt-ed-metasurvey-100821.csv')
    # TODO: split OCEAN, maybe extract value wrangling function
    # Map scale-anchor labels onto the 1..10 scale (as strings; converted to
    # numeric further below via pd.to_numeric).
    df = df.replace({
        "Strongly Agree": "10",
        "Strongly Disagree": "1",
        "Very Much": "10",
        "Very Little": "1",
        "Very Impressed": "10",
        "Very Unimpressed": "1",
    })
    # Collapse the per-provider familiarity question stem into familiarity_* columns.
    df = df.rename(columns=lambda x: re.sub(r'Have you heard of any of the following online course providers\?','familiarity_', x))
    # Map each full question text to a short analysis name. Two wordings of
    # the conventionality question (different survey waves) intentionally map
    # to the same target name, "expected_conventionality".
    df = df.rename(columns={
        "Age?": "age",
        "Do you contribute to hiring and firing decisions at your company?": "manager_effects",
        "For many professions, alternative credentials can qualify a person for an entry-level position.": "hirability",
        "Gender?": "gender",
        "Government regulation helps ensure businesses treat individuals more fairly.": "worldview_continuous_pro_regulation",
        "Household Income?": "income",
        "How long do you believe it usually takes to obtain an alternative credential?": "expected_duration",
        "I enjoy taking risks": "favor_seeking_risk",
        "I favor freer trade and migration with other nations": "worldview_continuous_pro_foreign",
        "I have a high level of community engagement, participation, or activism related to my worldview.": "worldview_continuous_activism",
        "I prefer to hire or work with a person that has a college degree rather a person that holds a reputable certification or non-college credential.": "is_prefer_college_peer",
        "I think of a career in programming as enjoyable": "favor_programming_career",
        "It will soon become common for high school graduates to obtain alternative credentials instead of going to college.": "expected_conventionality",
        "It will soon become fairly conventional for high school graduates to obtain alternative credentials instead of going to college.": "expected_conventionality",
        "Roughly how many full-time employees currently work for your organization?": "firm_size",
        "To what degree has coronavirus-induced remote activity improved your favorability to remote learning (either for yourself or for other people)?": "covid_fav_online",
        "To what degree has coronavirus caused you to increase your participation in remote learning, remote working, and other remote activities?": "covid_remote",
        "To what degree has coronavirus negatively impacted your life?": "covid_impact",
        "What is the highest level of education you have completed?": "education",
        "What state do you reside in?": "state",
        "When you add up the pros and cons for online education, it's probably a good thing for society overall.": "favor_online_ed",
        "When you add up the pros and cons for artificial intelligence, it's probably a good thing for society overall.": "worldview_continuous_pro_innovation",
        "Which of these industries most closely matches your profession?": "industry",
        "Which race/ethnicity best describes you?": "ethnicity",
        "Which worldview best describes you?": "worldview_description",
    })
    # Normalize all remaining headers to snake_case.
    df = df.rename(fsReformatColumnNames, axis='columns')
    # Regex-based renames for the long free-text question stems that survive
    # the snake_case pass.
    df = df.rename(columns=lambda x: re.sub(r'people_who_(.)*break_(.)*present_a_risk(.)*','rulebreakers_risky', x))
    df = df.rename(columns=lambda x: re.sub(r'people_who_(.)*break_(.)*benefit_the_culture(.)*','rulebreakers_culture_value', x))
    df = df.rename(columns=lambda x: re.sub(r'people_who_(.)*break_(.)*could_(.)*be_high_performers(.)*','rulebreakers_mixed_bag', x))
    df = df.rename(columns=lambda x: re.sub(r'if_you_do_contribute_to_hiring_and_firing_decisions_please_write_(.)*','job_title', x))
    df = df.rename(columns=lambda x: re.sub(r'thinking_about_the_job_title_provided_(.)*','job_title_credentials', x))
    df = df.rename(columns=lambda x: re.sub(r'the_level_of','skill', x))
    df = df.rename(columns=lambda x: re.sub(r'the_willingness_to','skill', x))
    df = df.rename(columns=lambda x: re.sub(r'_held_by_a(n)?','', x))
    df = df.rename(columns=lambda x: re.sub(r'_required_by_a(n)?','', x))
    df = df.rename(columns=lambda x: re.sub(r'non_college_graduate_with_an_alternative_credential','ngwac', x))
    df = df.rename(columns=lambda x: re.sub(r'willingness_to_break_formal_or_informal_rules_and_norms','break_rules', x))
    df = df.rename(columns=lambda x: re.sub(r'attention_to_detail_work_ethic_timeliness_and_organization_of_work','conscientiousness', x))
    df = df.rename(columns=lambda x: re.sub(r'commute_or_travel_to_a_workplace_or_even_as_a_part_of_the_daily_work_as_in_commercial_trucking','commute', x))
    df = df.rename(columns=lambda x: re.sub(r'for_many_professions_learning_at_this_school_can_qualify_a_person_for_an_entry_level_position',
                                            'school_hirability_', x))
    # Strip letters/whitespace from the OCEAN answer so only digits and commas
    # remain (assumes the OCEAN question header reduced to 'ocean' -- TODO confirm).
    df['ocean'] = df['ocean'].replace(regex=r'[a-zA-Z\s]*', value='')
    # Blank out malformed OCEAN strings: a valid answer has exactly 5
    # comma-separated parts (4 commas).
    df.loc[df['ocean'].str.count(',') != 4, 'ocean'] = ''
    df[['personality_o',
        'personality_c',
        'personality_e',
        'personality_a',
        'personality_n']] = df['ocean'].str.split(',', n=5, expand=True)
    # Column groups used for numerization and derived features below.
    familiarity_columns = [s for s in df.columns if 'familiarity_' in s]
    favorability_columns = [s for s in df.columns if 'favor_' in s]
    other_column_to_numerize = [
        "expected_conventionality",
        "hirability",
        "is_prefer_college_peer"
    ]
    personality_columns = [s for s in df.columns if 'personality_' in s]
    school_columns = [s for s in df.columns if 'school_hirability_' in s]
    skill_columns = [s for s in df.columns if 'skill_' in s]
    worldview_columns = [s for s in df.columns if 'worldview_continuous_' in s]
    column_names_to_numerize = favorability_columns + other_column_to_numerize + personality_columns + school_columns + skill_columns + worldview_columns
    df[column_names_to_numerize] = df[column_names_to_numerize].apply(pd.to_numeric)
    # Derived per-respondent features (helpers defined later in this module).
    df['familiarity_count'] = df.apply(lambda row: compute_familiarity_count(row, familiarity_columns), axis='columns')
    df['is_large_firm_size'] = df.apply(compute_is_large_firm_size, axis='columns')
    df['is_serious'] = df.apply(compute_fraud_flag, axis='columns')
    df['is_tech'] = df.industry == "Information Technology"
    # School composites. The numeric suffixes (school_hirability_1 .. _7) come
    # from read_csv's '.1'/'.2' duplicate-header mangling with the dot stripped
    # by fsReformatColumnNames -- TODO confirm which school each index maps to.
    df['school_unaccredited_hirability'] = df.school_hirability_ + df.school_hirability_1 + df.school_hirability_2 + df.school_hirability_3
    df['school_self_impressed'] = df.school_hirability_2 + df.school_hirability_3 + df.school_hirability_6 + df.school_hirability_7
    df['school_other_impressed'] = df.school_hirability_1 + df.school_hirability_3 + df.school_hirability_5 + df.school_hirability_7
    # Adds the skill_* gap columns to df in place.
    compute_skill_gaps(df)
    # df = pd.get_dummies(df, columns=['industry']).rename(
    #     fsReformatColumnNames, axis='columns')
    # if dropFirstDummy:
    #     df.drop(columns=['industry_agriculture''])
    # help build long model formula
    print("\n+ ".join(list(df.columns)))
    print("\n---")
    print("done getting data")
    print("---\n")
    return df
def compute_familiarity_count(row, familiarity_columns):
    """Count how many familiarity_* answers in this row are non-missing."""
    return sum(1 for col in familiarity_columns if not pd.isnull(row[col]))
def compute_fraud_flag(row=None):
    """Return True when a survey row looks fraudulent (low-effort straight-lining).

    Heuristic: the "suspected fraud value" is the average of the first three
    non-missing skill_* answers; subsequent answers that differ from it each
    consume one unit of allowance, and the row is flagged once the allowance
    runs out. A row with no usable skill answers at all is also flagged.

    NOTE(review): the tuning comment below says threshold = 3, but
    allowance_remaining starts at 2 -- confirm which is intended.
    NOTE(review): the 4th non-missing skill answer only triggers the averaging
    step and is itself never compared against the suspected value.
    """
    # rudimentary fraud detection algo: if they put the same answer every time for skills, consider it fraud
    # compute suspected fraud response value as average of first 3 skill questions
    # give an allowance of varied answers up to arbitrarily threshold, still call it fraud
    # tuned based on manual input data review, threshold = 3 (like, 3 is OK, less is fraud)
    allowance_remaining = 2
    count_done = 0
    sus_skill_value = 0
    for column in row.index:
        if re.match("skill_", column):
            curr_value = row[column]
            # > 0 is a quick NaN check
            if curr_value > 0:
                if count_done < 3:
                    # accumulate the first three answers
                    sus_skill_value += curr_value
                    count_done += 1
                elif count_done == 3:
                    # get average of first three
                    sus_skill_value = sus_skill_value / 3
                    count_done += 1
                elif count_done > 3:
                    # any later answer differing from the average burns allowance
                    if curr_value != sus_skill_value:
                        allowance_remaining -= 1
                        if allowance_remaining < 1:
                            return True
    # no (positive) skill answers at all also counts as fraud
    if sus_skill_value == 0:
        return True
    else:
        return False
def compute_is_large_firm_size(row=None):
    """True when the respondent's firm_size bucket indicates more than 500 employees."""
    large_buckets = ("501-1,000", "1,001-5,000", "5,001-10,000", "10,000+")
    return row.firm_size in large_buckets
def compute_skill_gaps(df):
    """Derive skill-gap columns comparing the respondent's ideal-applicant
    skill levels against alt-ed individuals (ngwac) and recent college grads.

    Column-name legend used below:
      aetiwo   -- alt-ed-to-ideal gap, with overqualification (can be negative)
      aetiwno  -- same gap with negatives clamped to 0 (no overqualification)
      rcgtiwo / rcgtiwno -- the same pair for recent college graduates
      comparative -- recent-grad gap minus alt-ed gap

    Mutates df in place; returns None.
    """
    def compute_with_no_overqualification(df, prefix_from, prefix_to, skill_name):
        # Clamp negative gaps (overqualification) to 0; NaN propagates as-is.
        df[prefix_to + skill_name] = np.where(df[prefix_from + skill_name].isnull(), df[prefix_from + skill_name],
                                              np.where(df[prefix_from + skill_name] > 0, df[prefix_from + skill_name], 0))
    alt_ed_individual_substring = "_ngwac"
    ideal_substring = "_ideal_job_applicant"
    recent_college_grad_substring = "_recent_college_graduate"
    ideal_levels = [s for s in df.columns if ideal_substring in s]
    for ideal_skill_name in ideal_levels:
        # eg skill_commute_ideal_job_applicant -> skill_commute
        skill_name, *ignored = ideal_skill_name.split(ideal_substring)
        # ideal - alt_ed_individual_levels[index]
        df["skill_aetiwo_" + skill_name] = df[ideal_skill_name] - df[skill_name + alt_ed_individual_substring]
        compute_with_no_overqualification(df, "skill_aetiwo_", "skill_aetiwno_", skill_name)
        df["skill_rcgtiwo_" + skill_name] = df[ideal_skill_name] - df[skill_name + recent_college_grad_substring]
        compute_with_no_overqualification(df, "skill_rcgtiwo_", "skill_rcgtiwno_", skill_name)
        # NOTE(review): the next two lines compute identical values; the "wno"
        # variant presumably should difference the clamped rcgtiwno/aetiwno
        # columns instead -- confirm intent before relying on it.
        df["skill_comparative_wo" + skill_name] = df["skill_rcgtiwo_" + skill_name] - df["skill_aetiwo_" + skill_name]
        df["skill_comparative_wno" + skill_name] = df["skill_rcgtiwo_" + skill_name] - df["skill_aetiwo_" + skill_name]
        compute_with_no_overqualification(df, "skill_comparative_wno", "skill_comparative_no_relative_oq", skill_name)
# Drop low-hirability ("out-of-quartile") rows to reduce skew and kurtosis.
# Tradeoff: restricts the analyzable sample (hopefully unimportant).
def getDeskewedData(dropFirstDummy=True):
    """Load the survey data with tiny-subgroup and low-hirability rows removed."""
    frame = getData(dropFirstDummy)
    # Drop "Other" gender because n=2 is too small (maybe revisit here or in a
    # separate analysis if the group grows past, say, a dozen).
    frame = frame[frame.gender != "Other"]
    frame = frame[frame.is_serious == True]
    low_hirability_rows = frame[frame['hirability'] < 5].index
    return frame.drop(low_hirability_rows)
def getLowHirabilityGroup(dropFirstDummy=True):
    """Complement of the deskewed set: only rows with hirability below 5."""
    frame = getData(dropFirstDummy)
    high_hirability_rows = frame[frame['hirability'] >= 5].index
    return frame.drop(high_hirability_rows)
def getTens(dropFirstDummy=True):
    """Only the rows whose hirability score is a perfect 10."""
    frame = getData(dropFirstDummy)
    below_ten_rows = frame[frame['hirability'] < 10].index
    return frame.drop(below_ten_rows)
# if this file executed as script
if __name__ == '__main__':
    raw_data = getData(False)
    raw_data.to_csv('./alt-ed-metasurvey-wrangled.csv')
    # TODO: touch faster if we implement deskewed as f(skewed)
    getDeskewedData(False).to_csv('./alt-ed-metasurvey-wrangled-deskewed.csv')
|
#!/bin/bash
# Launch Keycloak bound to all interfaces (application + management).
# ${VAR:?} aborts with a clear message if KEYCLOAK_HOME is unset/empty; the
# expansion is quoted so install paths containing spaces work, and `exec`
# replaces this wrapper shell so signals reach the server directly.
exec "${KEYCLOAK_HOME:?KEYCLOAK_HOME must be set}/bin/standalone.sh" -b 0.0.0.0 -bmanagement 0.0.0.0
|
import os
from argparse import ArgumentParser
def process_snps_files(snps_filename, genome_filename):
    """Convert dnadiff .snps files to VCF format (not yet implemented).

    Args:
        snps_filename: file listing the .snps files to process.
        genome_filename: genome FASTA; chromosome IDs must match the .snps files.
    """
    # Your implementation to process .snps_files to VCF format goes here
    pass
if __name__ == "__main__":
parser = ArgumentParser("Process one or more .snps_files files from dnadiff to VCF format.")
parser.add_argument("snps_filename", help="Filename containing list of .snps_files to process.")
parser.add_argument("genome_filename", help="Genome fasta. Chromosome IDs must agree with .snps_files files!")
args = parser.parse_args()
snps_filename = args.snps_filename
genome_filename = args.genome_filename
if not os.path.isfile(snps_filename):
print(f"Error: {snps_filename} does not exist.")
elif not os.path.isfile(genome_filename):
print(f"Error: {genome_filename} does not exist.")
else:
process_snps_files(snps_filename, genome_filename) |
import axios from 'axios';
// Action types (string constants used by the reducer below).
const CHANGE_QTYPE = 'CHANGE_QTYPE';
const SELECT_SECOND_TWEET = 'SELECT_SECOND_TWEET';
const FIND_CONTRA_EX = 'FIND_CONTRA_EX';
const IS_CF_LOADING = 'IS_CF_LOADING';
// Thunk: asks the server for contrastive (counterfactual) examples for the
// selected tweet(s) and dispatches the result, tracking a loading flag while
// the request is in flight.
// Fixes: removed the dead, unused `request_str` duplicate of the payload, and
// the loading flag is now cleared even when the request fails (previously a
// failed request left isCFLoading stuck at true).
export const findContrastiveExamples = ({
  qType,
  selectedTweet,
  secondSelectedTweet,
  tweets,
  features,
  currentModelInfo
}) => {
  return async dispatch => {
    dispatch({
      type: 'IS_CF_LOADING',
      payload: true
    });
    try {
      const res = await axios({
        method: 'post',
        url: '/tweets/findContrastiveExamples/',
        data: JSON.stringify({
          qType: qType,
          selectedTweet: selectedTweet,
          secondSelectedTweet: secondSelectedTweet,
          modelId: 'dt_0', // NOTE(review): model id is hard-coded — confirm
          features: features,
          tweets: tweets,
          currentModelInfo: currentModelInfo
        })
      });
      dispatch({ type: 'FIND_CONTRA_EX', payload: res.data });
    } finally {
      // Always clear the loading flag; errors still propagate to the caller.
      dispatch({ type: 'IS_CF_LOADING', payload: false });
    }
  };
};
// initial value for state
const initialState = {
  qType: 'p-mode',
  // Example tweet instance displayed before any selection is made.
  instance: {
    grp: 'con',
    // NOTE(review): this literal exceeds Number.MAX_SAFE_INTEGER and may lose
    // precision — consider storing tweet ids as strings.
    id: 737000000000000000,
    content:
      "A singer killed at a meet & greet, and then 50 people are murdered at a nightclub and yet some people still think we don't need gun control",
    screenName: 'sunnshinecoast',
    userId: 4847697913,
    numFollowers: 100,
    numFriends: 118,
    numTweets: 1542,
    numRetweeted: 4074205,
    botScore: 'N/A',
    valence: 0.215672608,
    dominance: 0.801444548,
    care: 0.988902207,
    fairness: 0.415014834
  },
  contrastiveRules: [],
  selectedTweetRules: [],
  contrastiveEXs: [],
  diffRule: '',
  isCFLoading: false // CF == CounterFactual
};
// Reducers
// Pure reducer for the local-interpreter slice of state.
const localInterpreter = (state = initialState, action) => {
  switch (action.type) {
    case CHANGE_QTYPE:
      return {
        ...state,
        qType: action.payload
      };
    case FIND_CONTRA_EX: {
      // Braces scope these lexical declarations to this case clause —
      // `const` directly inside an unbraced case is a SyntaxError in strict
      // parsers and a common lint error (no-case-declarations).
      const qType = action.payload.qType;
      const updatedState =
        qType === 'p-mode'
          ? {
              ...state,
              selectedTweet: action.payload.selectedTweet,
              selectedTweetRules: action.payload.selectedTweetRules,
              contrastiveRules: action.payload.contRules,
              contrastiveEXs: action.payload.contExamples,
              isCFLoading: false
            }
          : {
              // for 'o-mode'
              ...state,
              selectedTweet: action.payload.selectedTweet,
              secondSelectedTweet: action.payload.secondSelectedTweet,
              diffRule: action.payload.diffRule,
              isCFLoading: false
            };
      return updatedState;
    }
    case IS_CF_LOADING:
      return {
        ...state,
        isCFLoading: action.payload
      };
    default:
      return state;
  }
};
export default localInterpreter;
|
def diagonalSum(arr, n):
    """Sum of the main diagonal of an n x n matrix.

    Args:
        arr: square matrix as a list of lists.
        n: dimension of the matrix.

    Returns:
        Sum of arr[i][i] for i in range(n).
    """
    # Single O(n) pass over the diagonal instead of scanning all n*n cells
    # with a redundant i == j check; also avoids shadowing the builtin `sum`.
    return sum(arr[i][i] for i in range(n))
<filename>tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/components/GenericTypeDisplay.java
// Copyright 2006-2014 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.integration.app1.components;
import java.lang.reflect.Type;
import org.apache.tapestry5.BindingConstants;
import org.apache.tapestry5.ComponentResources;
import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.annotations.Parameter;
import org.apache.tapestry5.ioc.annotations.Inject;
/**
* Outputs the type and genericType of the 'value' binding in a div
*/
public class GenericTypeDisplay {
    @Inject
    private ComponentResources resources;

    @Parameter(required=true, defaultPrefix=BindingConstants.LITERAL)
    private String description;

    @Parameter(required=true)
    private Object value;

    void afterRender(MarkupWriter writer) {
        // Emit a single div describing the bound type info of the 'value' parameter.
        writer.element("div");
        Class<?> boundType = resources.getBoundType("value");
        Type boundGenericType = resources.getBoundGenericType("value");
        writer.write(String.format("description=%s,type=%s,genericType=%s",
                description, boundType.getName(), boundGenericType.toString()));
        writer.end();
    }
}
|
<reponame>kazimanzurrashid/AspNetMvcBackboneJsSpa
(function() {
var $, expect;
expect = this.chai.expect;
$ = jQuery;
describe('Router', function() {
var container, router, stubbedNavigationView, stubbedNotFoundView, stubbedPageView, templete;
templete = null;
container = null;
stubbedNavigationView = null;
stubbedPageView = null;
stubbedNotFoundView = null;
router = null;
before(function() {
  var contentView, navigationView;
  // DOM fixtures the router's views render into.
  templete = $('<div/>', {
    id: 'page-template'
  }).appendTo('body').append('<p/>');
  container = $('<div/>', {
    id: 'container'
  }).appendTo('body');
  // Minimal stand-in implementing the navigation-view API the router calls.
  navigationView = {
    select: function(menu) {},
    deselectAll: function() {}
  };
  // Generic content-view stub shared by the Page and NotFound stubs below.
  contentView = (function() {
    function contentView() {}
    contentView.prototype.el = $('<div/>');
    contentView.prototype.render = function() {
      return this;
    };
    contentView.prototype.activate = function() {};
    contentView.prototype.deactivate = function() {};
    return contentView;
  })();
  stubbedNavigationView = sinon.stub(Application.Views, 'Navigation').returns(navigationView);
  stubbedPageView = sinon.stub(Application.Views, 'Page', function() {
    return new contentView;
  });
  stubbedNotFoundView = sinon.stub(Application.Views, 'NotFound', function() {
    return new contentView;
  });
  return router = new Application.Router;
});
// Constructor specs: the router should build all four of its views.
describe('new', function() {
  it('creates navigation view', function() {
    return expect(router.navigationView).to.exist;
  });
  it('creates home view', function() {
    return expect(router.homeView).to.exist;
  });
  it('creates about view', function() {
    // Bug fix: this spec previously re-checked homeView (copy-paste error),
    // so aboutView creation was never actually verified.
    return expect(router.aboutView).to.exist;
  });
  return it('creates not found view', function() {
    return expect(router.notFoundView).to.exist;
  });
});
// Route-handler specs: each handler should delegate to activate() with the
// matching view (activate itself is stubbed out here).
describe('navigation', function() {
  var stubbedActivate;
  stubbedActivate = null;
  before(function() {
    stubbedActivate = sinon.stub(router, 'activate', function() {});
    return router.currentView = void 0;
  });
  describe('#about', function() {
    before(function() {
      return router.about();
    });
    it('activates about view', function() {
      // Label fix: this spec verifies the about view, not the home view.
      return expect(stubbedActivate).to.have.been.calledWith(router.aboutView, 'about');
    });
    return after(function() {
      router.currentView = void 0;
      return stubbedActivate.reset();
    });
  });
  describe('#home', function() {
    before(function() {
      return router.home();
    });
    it('activates home view', function() {
      return expect(stubbedActivate).to.have.been.calledWith(router.homeView, 'home');
    });
    return after(function() {
      router.currentView = void 0;
      return stubbedActivate.reset();
    });
  });
  describe('#notFound', function() {
    before(function() {
      return router.notFound();
    });
    it('activates not found view', function() {
      return expect(stubbedActivate).to.have.been.calledWith(router.notFoundView);
    });
    return after(function() {
      router.currentView = void 0;
      return stubbedActivate.reset();
    });
  });
  return after(function() {
    return stubbedActivate.restore();
  });
});
// Specs for activate() itself: view switching, no-op on same view, and
// menu deselection for the not-found view.
describe('#activate', function() {
  describe('other view', function() {
    var spiedAboutViewActivate, spiedHomeViewDeactivate, spiedSelectMenu;
    spiedHomeViewDeactivate = null;
    spiedAboutViewActivate = null;
    spiedSelectMenu = null;
    before(function() {
      spiedSelectMenu = sinon.spy(router.navigationView, 'select');
      spiedHomeViewDeactivate = sinon.spy(router.homeView, 'deactivate');
      spiedAboutViewActivate = sinon.spy(router.aboutView, 'activate');
      router.currentView = router.homeView;
      return router.activate(router.aboutView, 'about');
    });
    it('deactivates current view', function() {
      return expect(spiedHomeViewDeactivate).to.have.been.calledOnce;
    });
    it('activates other view', function() {
      return expect(spiedAboutViewActivate).to.have.been.calledOnce;
    });
    it('selects other view menu', function() {
      return expect(spiedSelectMenu).to.have.been.calledWith('about');
    });
    it('sets other view as current view', function() {
      return expect(router.currentView).to.equal(router.aboutView);
    });
    return after(function() {
      spiedSelectMenu.restore();
      spiedHomeViewDeactivate.restore();
      return spiedAboutViewActivate.restore();
    });
  });
  describe('same view', function() {
    var spiedHomeViewDeactivate, spiedSelectMenu;
    spiedHomeViewDeactivate = null;
    spiedSelectMenu = null;
    before(function() {
      spiedSelectMenu = sinon.spy(router.navigationView, 'select');
      spiedHomeViewDeactivate = sinon.spy(router.homeView, 'deactivate');
      router.currentView = router.homeView;
      // Re-activating the already-current view should be a complete no-op.
      return router.activate(router.homeView, 'home');
    });
    it('does nothing', function() {
      expect(spiedHomeViewDeactivate).to.not.have.been.called;
      expect(spiedSelectMenu).to.not.have.been.called;
      return expect(router.currentView).to.equal(router.homeView);
    });
    return after(function() {
      spiedSelectMenu.restore();
      return spiedHomeViewDeactivate.restore();
    });
  });
  return describe('not found view', function() {
    var spiedDeselectMenu;
    spiedDeselectMenu = null;
    before(function() {
      spiedDeselectMenu = sinon.spy(router.navigationView, 'deselectAll');
      router.currentView = router.homeView;
      return router.activate(router.notFoundView);
    });
    it('deselects menu', function() {
      return expect(spiedDeselectMenu).to.have.been.calledOnce;
    });
    return after(function() {
      return spiedDeselectMenu.restore();
    });
  });
});
return after(function() {
  // Restore the view stubs and remove the DOM fixtures added in the
  // top-level before hook.
  stubbedNavigationView.restore();
  stubbedPageView.restore();
  stubbedNotFoundView.restore();
  container.remove();
  return templete.remove();
});
}).call(this);
|
<gh_stars>0
#include "vulkan_context.h"
#include <cassert>
#include <iostream>
#include <array>
#include <glm/gtc/type_ptr.hpp>
#include "vulkan_util.h"
#include "vulkan_helper.h"
#include "vertex_format.h"
#include "material.h"
// Construct the context and bring up the full Vulkan stack for the given
// Win32 window.
VulkanContext::VulkanContext(HINSTANCE hinstance, HWND hwnd, uint32_t width, uint32_t height)
{
    init_vulkan(hinstance, hwnd, width, height);
}

VulkanContext::~VulkanContext()
{
    destroy_vulkan();
}

// Record the new window size only; the swapchain itself is recreated lazily
// when acquire/present reports OUT_OF_DATE (see recreate_swapchain).
void VulkanContext::resize(uint32_t width, uint32_t height)
{
    basic.window_extent.width = width;
    basic.window_extent.height = height;
    //recreate_swapchain();
}
// ---- GPU resource creation wrappers -------------------------------------
// Thin forwards to vkutil helpers; the context supplies its device, queue,
// work command buffer and memory properties so callers only pass the data.

std::pair<VkBuffer, VkDeviceMemory> VulkanContext::create_vertex_buffer(uint8_t* buffer_data, std::size_t buffer_size)
{
    return vkutil::create_vertex_buffer(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        buffer_data,
        buffer_size
    );
}

std::pair<VkBuffer, VkDeviceMemory> VulkanContext::create_index_buffer(uint8_t* buffer_data, std::size_t buffer_size)
{
    return vkutil::create_index_buffer(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        buffer_data,
        buffer_size
    );
}

std::pair<VkImage, VkDeviceMemory> VulkanContext::create_texture(uint32_t width, uint32_t height, uint8_t* buffer_data, std::size_t buffer_size)
{
    return vkutil::create_texture(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        width, height,
        buffer_data, buffer_size
    );
}

std::pair<VkImage, VkDeviceMemory> VulkanContext::create_texture_cubemap(uint32_t width, uint32_t height, uint8_t* buffer_data, std::size_t buffer_size)
{
    return vkutil::create_texture_cubemap(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        width, height,
        buffer_data, buffer_size
    );
}

// Depth attachment image using the depth format selected at init time.
std::pair<VkImage, VkDeviceMemory> VulkanContext::create_texture_depth(uint32_t width, uint32_t height, uint32_t array_layers, bool sampled)
{
    return vkutil::create_texture_depth(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        basic.depth_format,
        width, height,
        array_layers,
        sampled
    );
}

VkImageView VulkanContext::create_image_view_texture(VkImage image)
{
    return vkutil::create_image_view_texture(basic.device, image);
}

VkImageView VulkanContext::create_image_view_texture_cubemap(VkImage image)
{
    return vkutil::create_image_view_texture_cubemap(basic.device, image);
}

VkImageView VulkanContext::create_image_view_texture_depth(VkImage image, bool use_depth, bool use_stencil, uint32_t base_array_layer, uint32_t layer_count)
{
    return vkutil::create_image_view_texture_depth(
        basic.device, image, basic.depth_format, use_depth, use_stencil, base_array_layer, layer_count
    );
}

VkSampler VulkanContext::create_sampler()
{
    return vkutil::create_sampler(basic.device);
}

VkSampler VulkanContext::create_depth_sampler()
{
    return vkutil::create_depth_sampler(basic.device);
}

VkSampler VulkanContext::create_shadow_sampler()
{
    return vkutil::create_shadow_sampler(basic.device);
}
// ---- Resource destruction and descriptor-set wrappers -------------------

void VulkanContext::destroy_buffer(VkBuffer buffer)
{
    vkDestroyBuffer(basic.device, buffer, vkutil::vulkan_allocator);
}

void VulkanContext::free_memory(VkDeviceMemory memory)
{
    vkFreeMemory(basic.device, memory, vkutil::vulkan_allocator);
}

void VulkanContext::destroy_buffer_and_memory(VkBuffer buffer, VkDeviceMemory memory)
{
    destroy_buffer(buffer);
    free_memory(memory);
}

// Deferred destruction: the buffer is queued on the current frame slot and
// actually destroyed when that slot's fence signals in render().
void VulkanContext::destroy_vulkan_buffer(VulkanBuffer vulkan_buffer)
{
    auto& frame = frames[basic.frame_index];
    frame.vulkan_buffers.push_back(vulkan_buffer);
}

// Immediate destruction — caller must guarantee the GPU is done with it.
void VulkanContext::destroy_vulkan_buffer_immediately(VulkanBuffer vulkan_buffer)
{
    destroy_buffer_and_memory(vulkan_buffer.buffer, vulkan_buffer.memory);
}

void VulkanContext::destroy_image_view(VkImageView image_view)
{
    vkDestroyImageView(basic.device, image_view, vkutil::vulkan_allocator);
}

void VulkanContext::destroy_image(VkImage image)
{
    vkDestroyImage(basic.device, image, vkutil::vulkan_allocator);
}

void VulkanContext::destroy_vulkan_image(VulkanImage vulkan_image)
{
    destroy_image(vulkan_image.image);
    free_memory(vulkan_image.memory);
}

void VulkanContext::destroy_sampler(VkSampler sampler)
{
    vkDestroySampler(basic.device, sampler, vkutil::vulkan_allocator);
}

VkFramebuffer VulkanContext::create_framebuffer(uint32_t width, uint32_t height, VkRenderPass render_pass, std::vector<VkImageView>& image_views)
{
    return vkutil::create_framebuffer(
        basic.device,
        width,
        height,
        render_pass,
        image_views
    );
}

void VulkanContext::destroy_framebuffer(VkFramebuffer framebuffer)
{
    return vkutil::destroy_framebuffer(
        basic.device,
        framebuffer
    );
}

std::pair<VkBuffer, VkDeviceMemory> VulkanContext::create_uniform_buffer(uint8_t* buffer_data, std::size_t buffer_size)
{
    return vkutil::create_uniform_buffer(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        buffer_data, buffer_size
    );
}

// Writes a combined image sampler + uniform buffer into the given set.
void VulkanContext::update_descriptor_set(VkDescriptorSet descriptor_set, VkSampler sampler, VkImageView image_view, VkBuffer uniform_buffer, std::size_t uniform_buffer_size)
{
    vkutil::update_descriptor_set(
        basic.device,
        descriptor_set,
        sampler,
        image_view,
        uniform_buffer,
        uniform_buffer_size
    );
}

void VulkanContext::free_descriptor_sets(const std::vector<VkDescriptorSet>& descriptor_sets)
{
    vkutil::free_descriptor_sets(basic.device, basic.descriptor_pool, descriptor_sets);
}

void VulkanContext::reset_descriptor_pool(VkDescriptorPool descriptor_pool)
{
    vkutil::reset_descriptor_pool(basic.device, descriptor_pool);
}

void VulkanContext::destroy_descriptor_pool(VkDescriptorPool descriptor_pool)
{
    vkutil::destroy_descriptor_pool(basic.device, descriptor_pool);
}

// Deferred free: the sets are queued on the current frame slot and released
// once that slot's fence signals in render().
void VulkanContext::destroy_vulkan_descriptor_sets(const std::vector<VkDescriptorSet>& descriptor_sets)
{
    auto& frame = frames[basic.frame_index];
    auto& v = frame.vulkan_descriptor_sets;
    v.insert(v.end(), descriptor_sets.begin(), descriptor_sets.end());
}
// Render one frame: wait on the frame slot's fence, recycle resources whose
// destruction was deferred to this slot, acquire a swapchain image, record
// the depth pass and main pass via the supplied callbacks, then submit and
// present.
void VulkanContext::render(VkClearColorValue clear_color,
    std::function<void(VkCommandBuffer command_buffer)> depth_pass_callback,
    std::function<void(VkCommandBuffer command_buffer)> render_callback
)
{
    VkResult vkres;
    uint64_t timeout = UINT64_MAX;
    uint32_t image_index = 0;
    uint32_t width = basic.extent.width;
    uint32_t height = basic.extent.height;
    // Advance to the next frame-in-flight slot.
    basic.frame_index = (basic.frame_index + 1) % frame_count;
    VulkanFrame& frame = frames[basic.frame_index];
    // NOTE(review): 0xffffff ns is only ~16.7 ms; a fence timeout would make
    // check_vk_result fail — confirm this short timeout is intentional.
    vkres = vkWaitForFences(basic.device, 1, &frame.fence, VK_FALSE, 0xffffff);
    vkutil::check_vk_result(vkres);
    vkResetFences(basic.device, 1, &frame.fence);
    // The fence has signaled, so resources deferred to this slot are safe to
    // destroy now.
    for (auto& vulkan_buffer : frame.vulkan_buffers) {
        destroy_vulkan_buffer_immediately(vulkan_buffer);
    }
    frame.vulkan_buffers.clear();
    free_descriptor_sets(frame.vulkan_descriptor_sets);
    frame.vulkan_descriptor_sets.clear();
    reset_descriptor_pool(frame.descriptor_pool);
    vkres = vkAcquireNextImageKHR(
        basic.device,
        basic.swapchain,
        timeout,
        frame.image_available_semaphore,
        VK_NULL_HANDLE,
        &image_index
    );
    switch (vkres) {
    case VK_SUCCESS:
        break;
    case VK_SUBOPTIMAL_KHR:
        std::cout << "WARN vkAcquireNextImageKHR suboptimal\n";
        break;
    case VK_ERROR_OUT_OF_DATE_KHR:
        std::cout << "WARN vkAcquireNextImageKHR out of date\n";
        // NOTE(review): after recreating the swapchain the function continues
        // with the (possibly stale) image_index instead of re-acquiring —
        // confirm this is safe/intended.
        recreate_swapchain();
        break;
    default:
        assert(false);
        break;
    }
    VulkanSwapImage swap_image = swap_images[image_index];
    // render
    VkCommandBuffer command_buffer = swap_image.command_buffer;
    vkres = vkResetCommandBuffer(command_buffer, 0);
    vkutil::check_vk_result(vkres);
    VkCommandBufferBeginInfo begin_info = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, // sType;
        nullptr, // pNext;
        VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, // flags;
        nullptr // pInheritanceInfo;
    };
    vkBeginCommandBuffer(command_buffer, &begin_info);
    // shadow / depth-only pass recorded first, outside the main render pass
    depth_pass_callback(command_buffer);
    // render
    VkRect2D render_area = {
        {
            0, 0
        },
        {
            width, height
        }
    };
    VkClearDepthStencilValue clear_depth_value{
        1.0f,
        0
    };
    std::array<VkClearValue, 2> clear_values;
    clear_values[0].color = clear_color;
    clear_values[1].depthStencil = clear_depth_value;
    VkFramebuffer framebuffer = swap_image.framebuffer;
    VkRenderPassBeginInfo render_pass_begin_info = {
        VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, // sType;
        nullptr, // pNext;
        basic.render_pass, // renderPass;
        framebuffer, // framebuffer;
        render_area, // renderArea;
        (uint32_t)clear_values.size(), // clearValueCount;
        clear_values.data() // pClearValues;
    };
    vkCmdBeginRenderPass(command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);
    VkViewport viewport = {
        0.0f, 0.0f, (float)width, (float)height, 0.0f, 1.0f
    };
    // Flip the viewport vertically (see vkhelper::flip_viewport).
    vkhelper::flip_viewport(viewport);
    VkRect2D scissor = {
        {
            0, 0
        },
        {
            width, height
        }
    };
    vkCmdSetViewport(command_buffer, 0, 1, &viewport);
    vkCmdSetScissor(command_buffer, 0, 1, &scissor);
    // -----------------------------
    render_callback(command_buffer);
    // -----
    vkCmdEndRenderPass(command_buffer);
    vkres = vkEndCommandBuffer(command_buffer);
    assert(vkres == VK_SUCCESS);
    VkPipelineStageFlags wait_dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
    VkSubmitInfo submit_info = {
        VK_STRUCTURE_TYPE_SUBMIT_INFO, // sType;
        nullptr, // pNext;
        1, // waitSemaphoreCount;
        &frame.image_available_semaphore, // pWaitSemaphores;
        &wait_dst_stage_mask, // pWaitDstStageMask;
        1, // commandBufferCount;
        &command_buffer, // pCommandBuffers;
        1, // signalSemaphoreCount;
        &frame.rendering_finished_semaphore, // pSignalSemaphores;
    };
    // Signals frame.fence when this submission completes, gating the reuse of
    // this frame slot above.
    vkres = vkQueueSubmit(basic.queue, 1, &submit_info, frame.fence);
    assert(vkres == VK_SUCCESS);
    VkPresentInfoKHR present_info = {
        VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, // sType;
        nullptr, // pNext;
        1, // waitSemaphoreCount;
        &frame.rendering_finished_semaphore, // pWaitSemaphores;
        1, // swapchainCount;
        &basic.swapchain, // pSwapchains;
        &image_index, // pImageIndices;
        nullptr // pResults;
    };
    vkres = vkQueuePresentKHR(basic.queue, &present_info);
    switch (vkres) {
    case VK_SUCCESS:
        break;
    case VK_SUBOPTIMAL_KHR:
        std::cout << "QueuePresent suboptimal\n";
        break;
    case VK_ERROR_OUT_OF_DATE_KHR:
        return recreate_swapchain();
    default:
        assert(false);
        break;
    }
}
// Block until the device has finished all submitted work.
void VulkanContext::device_wait_idle()
{
    vkDeviceWaitIdle(basic.device);
}

// Host-coherent variant of create_uniform_buffer (see vkutil for details).
std::pair<VkBuffer, VkDeviceMemory> VulkanContext::create_uniform_buffer_coherent(uint8_t* buffer_data, std::size_t buffer_size)
{
    return vkutil::create_uniform_buffer_coherent(
        feature.memory_properties,
        basic.device,
        basic.queue,
        basic.work_command_buffer,
        buffer_data, buffer_size
    );
}
// Bring up the whole Vulkan stack in dependency order. volk is loaded in
// three stages: loader entry points, instance functions, device functions.
void VulkanContext::init_vulkan(HINSTANCE hinstance, HWND hwnd, uint32_t width, uint32_t height)
{
    VkResult vkres;
    vkres = volkInitialize();
    vkutil::check_vk_result(vkres);
    basic.window_extent.width = width;
    basic.window_extent.height = height;
    init_vulkan_instance();
    volkLoadInstanceOnly(basic.instance);
    init_physical_device();
    init_feature();
    init_surface(hinstance, hwnd);
    init_queue_family_index();
    init_device();
    volkLoadDevice(basic.device);
    init_queue();
    vkutil::dump_available_extensions();
    init_present_mode();
    init_surface_format();
    init_depth_format();
    init_depth();
    init_depth_render_pass();
    init_render_pass();
    init_command_pool();
    init_frames();
    init_swapchain(width, height);
    init_swapchain_images();
    init_descriptor_pool();
}

// Allocate from the long-lived context pool; caller is responsible for
// freeing (see free_descriptor_sets / destroy_vulkan_descriptor_sets).
VkDescriptorSet VulkanContext::create_descriptor_set(VkDescriptorSetLayout descriptor_set_layout)
{
    return vkutil::create_descriptor_set(basic.device, basic.descriptor_pool, descriptor_set_layout);
}

// Allocate from the current frame's pool; reclaimed wholesale by the
// per-frame reset_descriptor_pool call in render().
VkDescriptorSet VulkanContext::create_descriptor_set_transient(VkDescriptorSetLayout descriptor_set_layout)
{
    return vkutil::create_descriptor_set(basic.device, frames[basic.frame_index].descriptor_pool, descriptor_set_layout);
}

void VulkanContext::init_vulkan_instance()
{
    basic.instance = vkutil::create_vulkan_instance();
}
void VulkanContext::init_physical_device()
{
    auto physical_devices = vkutil::get_physical_devices(basic.instance);
    assert(physical_devices.size() > 0); // no vulkan device
    for (auto& physical_device : physical_devices) {
        vkutil::dump_physical_device(physical_device);
    }
    // use first device
    basic.physical_device = physical_devices[0];
}

void VulkanContext::init_feature()
{
    feature.memory_properties = vkutil::get_physical_device_memory_properties(basic.physical_device);
}

void VulkanContext::init_surface(HINSTANCE hinstance, HWND hwnd)
{
    basic.surface = vkutil::create_win32_surface(basic.instance, hinstance, hwnd);
}

// Requires a single queue family that supports both graphics and present;
// exits the process otherwise (separate families are not supported here).
void VulkanContext::init_queue_family_index()
{
    auto graphics_and_present_index = vkutil::select_graphics_present_queue_family(basic.physical_device, basic.surface);
    uint32_t graphics_queue_family_index = graphics_and_present_index.first;
    uint32_t present_queue_family_index = graphics_and_present_index.second;
    if (graphics_queue_family_index == present_queue_family_index) {
        // same queue index, this is supported
        basic.queue_family_index = graphics_queue_family_index;
    }
    else {
        std::cout << "ERROR different graphics and present queue family index not supported\n";
        std::exit(-1);
    }
}

void VulkanContext::init_device() {
    basic.device = vkutil::create_device(basic.physical_device, basic.queue_family_index);
}

void VulkanContext::init_queue()
{
    basic.queue = vkutil::get_device_queue(basic.device, basic.queue_family_index);
}

void VulkanContext::init_present_mode()
{
    basic.present_mode = vkutil::select_present_mode(basic.physical_device, basic.surface);
}

void VulkanContext::init_surface_format()
{
    basic.surface_format = vkutil::select_surface_format(basic.physical_device, basic.surface);
}

void VulkanContext::init_depth_format()
{
    basic.depth_format = vkutil::select_depth_format(basic.physical_device);
}

// (Re)create the window-sized depth target; on resize the previous depth
// image/view/memory are destroyed first.
void VulkanContext::init_depth()
{
    if (basic.depth_image_view) {
        vkDestroyImageView(basic.device, basic.depth_image_view, vkutil::vulkan_allocator);
        vkDestroyImage(basic.device, basic.depth_image, vkutil::vulkan_allocator);
        vkFreeMemory(basic.device, basic.depth_memory, vkutil::vulkan_allocator);
    }
    std::pair<VkImage, VkDeviceMemory> image_and_memory = create_texture_depth(basic.window_extent.width, basic.window_extent.height, 1, false);
    VkImage depth_image = image_and_memory.first;
    VkImageView depth_image_view = create_image_view_texture_depth(depth_image, true, true, 0, 1);
    basic.depth_image = depth_image;
    basic.depth_image_view = depth_image_view;
    basic.depth_memory = image_and_memory.second;
}
void VulkanContext::init_depth_render_pass()
{
    basic.depth_render_pass = vkutil::create_depth_render_pass(basic.device, basic.depth_format);
}

void VulkanContext::init_render_pass()
{
    basic.render_pass = vkutil::create_render_pass(basic.device, basic.surface_format, basic.depth_format);
}

// Create per-frame-in-flight sync objects and a transient descriptor pool
// for each of the frame_count slots.
void VulkanContext::init_frames() {
    // frames
    frames.resize(frame_count);
    for (uint32_t i = 0; i < frame_count; ++i) {
        VulkanFrame& frame = frames[i];
        frame.image_available_semaphore = vkutil::create_semaphore(basic.device);
        frame.rendering_finished_semaphore = vkutil::create_semaphore(basic.device);
        frame.fence = vkutil::create_fence(basic.device);
        frame.descriptor_pool = vkutil::create_descriptor_pool(basic.device, false);
    }
}

// (Re)create the swapchain; the previous swapchain (if any) is passed as
// oldSwapchain so in-flight presents can complete.
// NOTE(review): the old swapchain handle is never destroyed here after
// replacement — confirm vkutil takes ownership, otherwise this leaks on
// every recreation.
void VulkanContext::init_swapchain(uint32_t width, uint32_t height)
{
    VkExtent2D hint_extent{
        width,
        height
    };
    auto old_swapchain = basic.swapchain;
    auto swapchain_and_extent = vkutil::create_swapchain(
        basic.physical_device,
        basic.device,
        basic.surface,
        basic.present_mode,
        basic.surface_format,
        hint_extent,
        old_swapchain
    );
    // Creation can legitimately fail (e.g. zero-sized window); keep the old
    // swapchain in that case.
    if (swapchain_and_extent.first == VK_NULL_HANDLE) {
        return;
    }
    basic.swapchain = swapchain_and_extent.first;
    basic.extent = swapchain_and_extent.second;
}
void VulkanContext::init_command_pool()
{
    basic.command_pool = vkutil::create_command_pool(basic.device, basic.queue_family_index);
    // Shared command buffer used for one-off transfer/upload work.
    basic.work_command_buffer = vkutil::create_command_buffer(basic.device, basic.command_pool);
}

// (Re)build per-swapchain-image resources: image view, framebuffer (color +
// shared depth attachment) and a dedicated command buffer. On recreation the
// previous per-image resources are destroyed first.
void VulkanContext::init_swapchain_images()
{
    auto device = basic.device;
    if (swap_images.size() > 0) {
        for (uint32_t i = 0; i < swap_images.size(); ++i) {
            VulkanSwapImage& swap_image = swap_images[i];
            destroy_framebuffer(swap_image.framebuffer);
            vkFreeCommandBuffers(device, basic.command_pool, 1, &swap_image.command_buffer);
            destroy_image_view(swap_image.image_view);
            //destroy_image(swap_image.image); // image belong to swapchain
        }
        swap_images.clear();
    }
    auto images = vkutil::get_swapchain_images(basic.device, basic.swapchain);
    uint32_t image_count = (uint32_t)images.size();
    swap_images.resize(image_count);
    for (uint32_t i = 0; i < image_count; ++i) {
        VulkanSwapImage& swap_image = swap_images[i];
        VkImage image = images[i];
        VkImageView image_view = vkutil::create_image_view(basic.device, basic.surface_format, image);
        std::vector<VkImageView> image_views{
            image_view, basic.depth_image_view
        };
        VkFramebuffer framebuffer = vkutil::create_framebuffer(basic.device, basic.extent.width, basic.extent.height, basic.render_pass, image_views);
        VkCommandBuffer command_buffer = vkutil::create_command_buffer(basic.device, basic.command_pool);
        swap_image.image = image;
        swap_image.image_view = image_view;
        swap_image.framebuffer = framebuffer;
        swap_image.command_buffer = command_buffer;
    }
}

void VulkanContext::init_descriptor_pool()
{
    basic.descriptor_pool = vkutil::create_descriptor_pool(basic.device, true);
}

// Full swapchain rebuild at the current window size: waits for the device to
// go idle, then recreates swapchain, depth target and per-image resources.
void VulkanContext::recreate_swapchain()
{
    std::cout << "recreate_swapchain\n";
    device_wait_idle();
    init_swapchain(basic.window_extent.width, basic.window_extent.height);
    init_depth();
    init_swapchain_images();
}
// Tear everything down in reverse dependency order after draining the GPU:
// per-frame resources, per-swapchain-image resources, depth target, pools,
// render passes, swapchain, then device/surface/instance.
void VulkanContext::destroy_vulkan()
{
    auto device = basic.device;
    auto vulkan_allocator = vkutil::vulkan_allocator;
    // Ensure nothing is in flight before destroying anything.
    vkDeviceWaitIdle(device);
    vkFreeCommandBuffers(device, basic.command_pool, 1, &basic.work_command_buffer);
    for (uint32_t i = 0; i < frames.size(); ++i) {
        VulkanFrame& frame = frames[i];
        vkDestroySemaphore(device, frame.image_available_semaphore, vulkan_allocator);
        vkDestroySemaphore(device, frame.rendering_finished_semaphore, vulkan_allocator);
        vkDestroyFence(device, frame.fence, vulkan_allocator);
        // Flush any still-pending deferred destructions for this frame slot.
        for (auto& vulkan_buffer : frame.vulkan_buffers) {
            destroy_vulkan_buffer_immediately(vulkan_buffer);
        }
        frame.vulkan_buffers.clear();
        free_descriptor_sets(frame.vulkan_descriptor_sets);
        frame.vulkan_descriptor_sets.clear();
        reset_descriptor_pool(frame.descriptor_pool);
        destroy_descriptor_pool(frame.descriptor_pool);
    }
    for (uint32_t i = 0; i < swap_images.size(); ++i) {
        VulkanSwapImage& swap_image = swap_images[i];
        destroy_framebuffer(swap_image.framebuffer);
        vkFreeCommandBuffers(device, basic.command_pool, 1, &swap_image.command_buffer);
        destroy_image_view(swap_image.image_view);
        //destroy_image(swap_image.image); // image belong to swapchain
    }
    destroy_image_view(basic.depth_image_view);
    destroy_image(basic.depth_image);
    free_memory(basic.depth_memory);
    vkDestroyCommandPool(device, basic.command_pool, vulkan_allocator);
    destroy_descriptor_pool(basic.descriptor_pool);
    vkDestroyRenderPass(device, basic.render_pass, vulkan_allocator);
    vkDestroyRenderPass(device, basic.depth_render_pass, vulkan_allocator);
    vkDestroySwapchainKHR(device, basic.swapchain, vulkan_allocator);
    vkDestroyDevice(device, vulkan_allocator);
    vkDestroySurfaceKHR(basic.instance, basic.surface, vulkan_allocator);
    vkDestroyInstance(basic.instance, vulkan_allocator);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.