text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Downloads a Helm chart tarball and rewrites CHART_FILE as a
# helm.cattle.io/v1 HelmChart resource with the chart embedded base64-encoded.
# Only CHART_FILE is required; every other variable has a default.
set -eux -o pipefail
# Fail fast when the caller did not provide the target manifest path.
: "${CHART_FILE?required}"
# Default chart name: the CHART_FILE basename without its .yaml suffix.
: "${CHART_NAME:="$(basename "${CHART_FILE%%.yaml}")"}"
# Assuming charts that only contain crds are called $CHART_MAIN_NAME-crd;
# their tarball lives under the parent chart's asset directory.
if [[ ${CHART_NAME} = *crd* ]]; then
: "${CHART_URL:="${CHART_REPO:="https://rke2-charts.rancher.io"}/assets/${CHART_NAME%%-crd}/${CHART_NAME}-${CHART_VERSION:="v0.0.0"}.tgz"}"
else
: "${CHART_URL:="${CHART_REPO:="https://rke2-charts.rancher.io"}/assets/${CHART_NAME}/${CHART_NAME}-${CHART_VERSION:="v0.0.0"}.tgz"}"
fi
# Fetch the tarball to a temp file (curl -f makes HTTP errors fatal).
curl -fsSL "${CHART_URL}" -o "${CHART_TMP:=$(mktemp)}"
# Emit the HelmChart manifest; the tarball is inlined as chartContent.
cat <<-EOF > "${CHART_FILE}"
apiVersion: helm.cattle.io/v1
kind: HelmChart
metadata:
name: "${CHART_NAME}"
namespace: "${CHART_NAMESPACE:="kube-system"}"
annotations:
helm.cattle.io/chart-url: "${CHART_URL}"
spec:
bootstrap: ${CHART_BOOTSTRAP:=false}
chartContent: $(base64 -w0 < "${CHART_TMP}")
EOF
|
/** Demonstrates summing all elements of a matrix. */
public class PrintMatrix {

    /**
     * Sums every element of the given matrix.
     * Works for any dimensions, including ragged rows and an empty matrix
     * (the original hard-coded 3x3 bounds).
     *
     * @param matrix the matrix to sum; must not be null
     * @return the sum of all elements (0 for an empty matrix)
     */
    static int sumMatrix(int[][] matrix) {
        int sum = 0;
        for (int[] row : matrix) {
            for (int value : row) {
                sum += value;
            }
        }
        return sum;
    }

    public static void main(String[] args) {
        // 3x3 sample matrix
        int[][] matrix = {{1, 2, 3},
                          {4, 5, 6},
                          {7, 8, 9}};
        System.out.println("The sum of all matrix elements is " + sumMatrix(matrix));
    }
}
<gh_stars>0
package quasar6.main;
import javax.sound.sampled.*;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** Singleton class for making the GUI. */
public class Main {
    /** Amount of flags currently placed. */
    private static int flagsPlaced = 0;
    ////////////////////////////////Swing components start////////////////////////////////
    private static final JFrame app = new JFrame("Minesweeper by Quasar6");
    private static final JMenuBar bar = new JMenuBar();
    // Top strip: flag counter (left), clock (center), play/pause (right).
    private static final JPanel clockPanel = new JPanel();
    // Grid of tile buttons; populated by run(String).
    private static final JPanel buttonPanel = new JPanel();
    private static final JLabel clockLabel = new JLabel("\u23F1 00:00:00 \u23F1");
    private static final JLabel flagsLabel = new JLabel(Integer.toString(flagsPlaced));
    private static final SpringLayout clockPanelLayout = new SpringLayout();
    private static final JRadioButtonMenuItem beginner = new JRadioButtonMenuItem(Field.BEGINNER);
    private static final JRadioButtonMenuItem intermediate = new JRadioButtonMenuItem(Field.INTERMEDIATE);
    private static final JRadioButtonMenuItem expert = new JRadioButtonMenuItem(Field.EXPERT);
    private static final JCheckBoxMenuItem sounds = new JCheckBoxMenuItem("Sounds");
    private static final JButton playPause = new JButton("\u25B6");
    // Tile background colors for the four visual states.
    private static final Color flagTileColor = new Color(79, 130, 66);
    private static final Color hiddenTileColor = Color.DARK_GRAY;
    private static final Color revealedTileColor = Color.GRAY;
    private static final Color qmarkColor = new Color(0, 35, 102);
    // Not final: the constructor swaps in the first installed font that can
    // render the clock/play/pause glyphs.
    private static Font defFont = new Font("Dialog", Font.PLAIN, 18);
    ////////////////////////////////Swing components end//////////////////////////////////
    /**
     * Necessary boolean for the {@link #clockTick()} method.
     * Without this the {@link #onPlayPause(ActionEvent)} method
     * could make an infinite number of threads.
     */
    private static final AtomicBoolean started = new AtomicBoolean(false);
    /**
     * These two maps store the button values and the flag icons.
     * Keys are map entries with the coordinates of the button in the matrix.
     * {@link #buttonFlagOnPause}
     */
    private static final Map<Map.Entry<Integer, Integer>, String> buttonTextOnPause = new HashMap<>();
    private static final Map<Map.Entry<Integer, Integer>, ImageIcon> buttonFlagOnPause = new HashMap<>();
    /**
     * The measured time.
     * {@code time[0]} = hours
     * {@code time[1]} = minutes
     * {@code time[2]} = seconds
     */
    private static int[] time;
    /** Matrix containing the buttons for the field. */
    private static MatrixJButton[][] buttons;
    /** The current difficulty generated. */
    private static String difficulty;
    /**
     * When set to {@code true} the timer Thread can run,
     * when set to false it terminates that Thread.
     */
    private static boolean clockRun = false;
    /**
     * String representing the clock.
     * This is needed for the popup windows.
     */
    private static String timeScore = "00:00:00";
    /**
     * Instance of this class.
     * Vulnerable to reflection.
     * {@link #getInstance()}
     */
    private static final Main INSTANCE = new Main();
    /**
     * @return the single instance of this class
     */
    public static Main getInstance() {
        return INSTANCE;
    }
    /**
     * Construct the GUI window and set it's parameters:
     * picks a glyph-capable font, wires up the menu bar, the clock strip,
     * the help dialog and the play/pause button (plus its Space shortcut).
     * @throws RuntimeException if you try to instantiate it via reflection
     */
    private Main()
    {
        // INSTANCE is still null during the static-initializer call; any
        // later (reflective) call trips this guard.
        if (INSTANCE != null)
            throw new RuntimeException("Singleton! Access this class through the getInstance() method");
        // Find the first installed font that can render the stopwatch /
        // play / pause glyphs used on the clock strip.
        String[] fonts = GraphicsEnvironment.getLocalGraphicsEnvironment().getAvailableFontFamilyNames();
        for (String s : fonts) {
            Font f = new Font(s, Font.PLAIN, 20);
            if (f.canDisplayUpTo("\u23F1\u25B6\u23F8") == -1) {
                defFont = f;
                break;
            }
        }
        app.setIconImage(createIconForWindow());
        app.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // clockPanel sits on top, buttonPanel directly below it.
        final SpringLayout mainLayout = new SpringLayout();
        mainLayout.putConstraint(SpringLayout.NORTH, clockPanel, 0, SpringLayout.NORTH, app);
        mainLayout.putConstraint(SpringLayout.NORTH, buttonPanel, 0, SpringLayout.SOUTH, clockPanel);
        // Flags label left, clock centered, play/pause right.
        clockPanelLayout.putConstraint(SpringLayout.WEST, flagsLabel, 14, SpringLayout.WEST, clockPanel);
        clockPanelLayout.putConstraint(SpringLayout.HORIZONTAL_CENTER, clockLabel, 0, SpringLayout.HORIZONTAL_CENTER, clockPanel);
        clockPanelLayout.putConstraint(SpringLayout.EAST, playPause, -18, SpringLayout.EAST, clockPanel);
        clockPanelLayout.putConstraint(SpringLayout.VERTICAL_CENTER, playPause, 25,SpringLayout.NORTH, buttonPanel);
        app.setLayout(mainLayout);
        clockPanel.setLayout(clockPanelLayout);
        app.setResizable(false);
        final JMenu diffMenu = new JMenu("Difficulty");
        final ButtonGroup radios = new ButtonGroup();
        final JMenu soundMenu = new JMenu("Sounds");
        // "Help" is a borderless button styled to blend into the menu bar.
        final JButton help = new JButton("Help");
        help.setOpaque(true);
        help.setContentAreaFilled(false);
        help.setBorderPainted(false);
        help.setFocusable(false);
        help.setPreferredSize(new Dimension(5, diffMenu.getHeight()));
        // The help dialog uses the OS look and feel; restore Metal afterwards.
        help.addActionListener((ActionEvent e) -> {
            setOsTheme();
            JOptionPane.showMessageDialog(app,
                    "Press left-click to reveal a tile.\n" +
                    "Press right-click to mark a tile as potential bomb.\n" +
                    "In the top left corner you can see how many tiles you have marked.\n" +
                    "At the top you can see the clock. It starts measuring your time after the first reveal.\n" +
                    "In the top right corner there is the play/pause button.\n" +
                    "When you pause the game you will not see any state of the tiles.\n" +
                    "In the \"Difficulty\" menu you can change the difficulty any time.", "Help", JOptionPane.PLAIN_MESSAGE);
            setMetalTheme();
        });
        radios.add(beginner);
        radios.add(intermediate);
        radios.add(expert);
        sounds.setSelected(true);
        diffMenu.add(beginner);
        diffMenu.add(intermediate);
        diffMenu.add(expert);
        soundMenu.add(sounds);
        bar.add(diffMenu);
        bar.add(soundMenu);
        bar.add(help);
        app.setJMenuBar(bar);
        clockPanel.add(flagsLabel);
        clockPanel.add(clockLabel);
        clockPanel.add(playPause);
        app.add(clockPanel);
        app.add(buttonPanel);
        beginner.setSelected(true);
        beginner.addActionListener(this::onDifficultyChange);
        intermediate.addActionListener(this::onDifficultyChange);
        expert.addActionListener(this::onDifficultyChange);
        playPause.addActionListener(this::onPlayPause);
        // Space bar toggles play/pause from anywhere in the window.
        playPause.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), "Space");
        playPause.getActionMap().put("Space", new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e)
            {
                onPlayPause(e);
                playPause.requestFocusInWindow();
            }
        });
        // Disabled until the first tile is revealed (see onPress).
        playPause.setEnabled(false);
        clockLabel.setFont(defFont.deriveFont(Font.BOLD, 34));
        flagsLabel.setFont(defFont.deriveFont(Font.BOLD, 34));
        playPause.setFont(defFont);
        playPause.setBackground(hiddenTileColor);
        playPause.setForeground(Color.RED);
        playPause.setFocusable(false);
        clockLabel.setForeground(Color.RED);
        flagsLabel.setForeground(Color.RED);
        clockPanel.setBackground(Color.BLACK);
        app.setVisible(true);
    }
/**
* This must be called once to generate the GUI.
*
* @param diff The difficulty to set {@link Field#BEGINNER}
*/
private void run(String diff)
{
if (diff == null || !Field.BEGINNER.equals(diff) && !Field.INTERMEDIATE.equals(diff) && !Field.EXPERT.equals(diff))
throw new IllegalArgumentException("Wrong difficulty!");
difficulty = diff;
Field.generate(difficulty);
buttons = new MatrixJButton[Field.getSizeX()][Field.getSizeY()];
buttonPanel.setLayout(new GridLayout(Field.getSizeX(), Field.getSizeY()));
clockPanel.setPreferredSize(new Dimension(Field.getSizeY() * 45, 50));
for (int i = 0; i < Field.getSizeX(); i++) {
for (int j = 0; j < Field.getSizeY(); j++) {
buttons[i][j] = new MatrixJButton(i, j);
buttons[i][j].setBackground(hiddenTileColor);
buttons[i][j].setForeground(Color.BLACK);
buttons[i][j].setFont(defFont);
buttons[i][j].setPreferredSize(new Dimension(45, 45));
buttons[i][j].setFocusable(false);
buttons[i][j].addActionListener(this::onPress);
buttons[i][j].addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e)
{
if (clockRun)
if (e.getButton() == MouseEvent.BUTTON3) {
MatrixJButton btn = (MatrixJButton)e.getSource();
if (Field.getTileAt(btn.getRow(), btn.getCol()).isHidden())
if(btn.isEnabled()) {
if (sounds.isSelected())
playAudio(getClass().getResourceAsStream("/quasar6/main/sound/flag.wav"));
if (btn.getIcon() == null) {
btn.setIcon(createIcon("/quasar6/main/images/flag.gif"));
btn.setBackground(flagTileColor);
++flagsPlaced;
} else if (((ImageIcon)btn.getIcon()).getDescription().equals("flag")){
btn.setIcon(createIcon("/quasar6/main/images/qmark.gif"));
btn.setBackground(qmarkColor);
--flagsPlaced;
} else {
btn.setIcon(null);
btn.setBackground(hiddenTileColor);
}
flagsLabel.setText(Integer.toString(flagsPlaced));
}
}
}
});
buttonPanel.add(buttons[i][j]);
}
}
clockPanel.validate();
buttonPanel.validate();
app.setPreferredSize(new Dimension(Field.getSizeY() * 45 + app.getInsets().left + app.getInsets().right,
Field.getSizeX() * 45 + clockPanel.getHeight() + bar.getHeight() + app.getInsets().top + app.getInsets().bottom));
app.revalidate();
app.pack();
centerWindow();
}
/**
* Called when a button is pressed on the field.
*
* @param e ActionEvent received on button press
*/
private void onPress(ActionEvent e)
{
if (!clockRun) {
clockRun = true;
playPause.setEnabled(true);
playPause.setText("\u23F8");
clockTick();
}
MatrixJButton btn = (MatrixJButton)e.getSource();
if (btn.getIcon() != null)
return;
int x = btn.getRow();
int y = btn.getCol();
if (!Field.getTileAt(x, y).isHidden())
return;
if (!Field.getTileAt(x, y).isMine()) {
if (sounds.isSelected())
playAudio(getClass().getResourceAsStream("/quasar6/main/sound/click.wav"));
if (Field.getTileAt(x, y).getRank() != 0) {
Field.getTileAt(x, y).setHidden(false);
btn.setBackground(revealedTileColor);
btn.setForeground(Field.getTileAt(x, y).getColor());
btn.setText(Integer.toString(Field.getTileAt(x, y).getRank()));
} else {
Field.revealTiles(x, y);
for (int i = 0; i < Field.getSizeX(); i++)
for (int j = 0; j < Field.getSizeY(); j++)
if (!Field.getTileAt(i, j).isHidden()) {
if (buttons[i][j].getIcon() != null)
if ("flag".equals(((ImageIcon)buttons[i][j].getIcon()).getDescription()))
buttons[i][j].setIcon(null);
buttons[i][j].setBackground(revealedTileColor);
if (Field.getTileAt(i, j).getRank() != 0) {
buttons[i][j].setForeground(Field.getTileAt(i, j).getColor());
buttons[i][j].setText(Integer.toString(Field.getTileAt(i, j).getRank()));
}
}
}
} else {
if (sounds.isSelected())
playAudio(getClass().getResourceAsStream("/quasar6/main/sound/loose.wav"));
clockRun = false;
String correctFlags = Integer.toString(correctFlags());
revealMines();
setOsTheme();
int restart = JOptionPane.showConfirmDialog(app, "You have successfully blown yourself up under " + timeScore
+ "\nCorrect flags: " + correctFlags + " out of " + flagsPlaced + "\nAnother game?", "Game Over", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE);
setMetalTheme();
if (restart != 0) {
System.exit(0);
} else {
resetWidgets();
Field.generate(difficulty);
}
}
if (Field.isWinningState()) {
if (sounds.isSelected())
playAudio(getClass().getResourceAsStream("/quasar6/main/sound/win.wav"));
clockRun = false;
String correctFlags = Integer.toString(correctFlags());
revealMines();
setOsTheme();
int restart = JOptionPane.showConfirmDialog(app, "You win!\n" + "You have solved the " + difficulty
+ " difficulty under " + timeScore + "\nCorrect flags: " + correctFlags + " out of " + flagsPlaced + "\nAnother game?", "Winner", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE);
setMetalTheme();
if (restart != 0) {
System.exit(0);
} else {
resetWidgets();
Field.generate(difficulty);
}
}
}
/**
* Called when the difficulty changed from the dropdown menu.
*
* @param e ActionEvent received on button press
*/
private void onDifficultyChange(ActionEvent e)
{
JRadioButtonMenuItem btn = (JRadioButtonMenuItem)e.getSource();
switch (btn.getText()) {
case Field.BEGINNER -> {
clockRun = false;
resetWidgets();
removeButtons();
run(Field.BEGINNER);
}
case Field.INTERMEDIATE -> {
clockRun = false;
resetWidgets();
removeButtons();
run(Field.INTERMEDIATE);
}
case Field.EXPERT -> {
clockRun = false;
resetWidgets();
removeButtons();
run(Field.EXPERT);
}
}
}
/**
* This is called when the play/pause button is pressed.
* This method saves the state of the field into {@link #buttonFlagOnPause}
* and {@link #buttonTextOnPause}, and it also stops the clock.
*
* @param e ActionEvent received on button press
*/
private void onPlayPause(ActionEvent e)
{
JButton btn = (JButton)e.getSource();
if (!clockRun) {
for (Component c : buttonPanel.getComponents()) {
MatrixJButton matrixBtn = (MatrixJButton)c;
if (!Field.getTileAt(matrixBtn.getRow(), matrixBtn.getCol()).isHidden()) {
matrixBtn.setText(buttonTextOnPause.get(new AbstractMap.SimpleImmutableEntry<>(matrixBtn.getRow(), matrixBtn.getCol())));
matrixBtn.setBackground(revealedTileColor);
}
matrixBtn.setIcon(buttonFlagOnPause.get(new AbstractMap.SimpleImmutableEntry<>(matrixBtn.getRow(), matrixBtn.getCol())));
for (var entry : buttonFlagOnPause.entrySet()) {
if ("flag".equals(entry.getValue().getDescription()))
buttons[entry.getKey().getKey()][entry.getKey().getValue()].setBackground(flagTileColor);
else
buttons[entry.getKey().getKey()][entry.getKey().getValue()].setBackground(qmarkColor);
}
matrixBtn.setEnabled(true);
}
btn.setText("\u23F8");
clockRun = true;
clockTick();
} else {
for (Component c : buttonPanel.getComponents()) {
MatrixJButton matrixBtn = (MatrixJButton)c;
if (!Field.getTileAt(matrixBtn.getRow(), matrixBtn.getCol()).isHidden()) {
var key = new AbstractMap.SimpleImmutableEntry<>(matrixBtn.getRow(), matrixBtn.getCol());
buttonTextOnPause.put(key, matrixBtn.getText());
matrixBtn.setText("");
matrixBtn.setBackground(hiddenTileColor);
} else if (matrixBtn.getIcon() != null) {
if ("flag".equals(((ImageIcon)matrixBtn.getIcon()).getDescription()) || "qmark".equals(((ImageIcon)matrixBtn.getIcon()).getDescription())) {
var key = new AbstractMap.SimpleImmutableEntry<>(matrixBtn.getRow(), matrixBtn.getCol());
buttonFlagOnPause.put(key, (ImageIcon)matrixBtn.getIcon());
matrixBtn.setBackground(hiddenTileColor);
matrixBtn.setIcon(null);
}
}
matrixBtn.setEnabled(false);
}
btn.setText("\u25B6");
clockRun = false;
}
}
/**
* This method runs the timer. It creates a new Thread and updates the JLabel every second.
* A ScheduledExecutorService is used because Thread.sleep(1000) in a loop is not consistent.
*/
private static void clockTick()
{
if(!started.get()) {
started.set(true);
final Thread clockThread = new Thread(() -> {
final ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
service.scheduleAtFixedRate(() -> {
if (time == null)
time = new int[]{0, 0, 1};
if (!clockRun) {
started.set(false);
service.shutdown();
return;
}
timeScore = String.format("%02d", time[0]) + ":" + String.format("%02d", time[1]) + ":" + String.format("%02d", time[2]);
clockLabel.setText("\u23F1 " + timeScore + " \u23F1");
time[2] += 1;
if (time[2] == 60) {
time[2] = 0;
time[1] += 1;
}
if (time[1] == 60) {
time[1] = 0;
time[0] += 1;
}
if (time[0] == 24) {
time[0] = 0;
time[1] = 0;
time[2] = 0;
}
}, 0, 1, TimeUnit.SECONDS);
});
clockThread.start();
}
}
/**
* Plays audio from the specified InputStream.
* The InputStream must be decorated as a BufferedInputStream,
* because the AudioInputStream requires mark/reset support.
*
* @param audioSrc the incoming stream
*/
private static void playAudio(InputStream audioSrc)
{
InputStream bufferedIn = new BufferedInputStream(audioSrc);
try (AudioInputStream audioStream = AudioSystem.getAudioInputStream(bufferedIn) ) {
Clip clip = AudioSystem.getClip();
clip.open(audioStream);
clip.addLineListener(event -> {
if (LineEvent.Type.STOP.equals(event.getType()))
clip.close();
});
clip.start();
} catch (LineUnavailableException | IOException | IllegalArgumentException | UnsupportedAudioFileException exc) {
System.err.println(exc.getMessage());
exc.printStackTrace();
}
}
    /** Removes all buttons from the field and drops the backing matrix. */
    private static void removeButtons()
    {
        buttons = null; // the matrix is rebuilt by the next run(...) call
        buttonPanel.removeAll();
        app.revalidate();
    }
/**
* Resets everything to default except the difficulty.
* This method clears all caches.
*/
private static void resetWidgets()
{
buttonTextOnPause.clear();
buttonFlagOnPause.clear();
clockRun = false;
time = null;
playPause.setText("\u25B6");
playPause.setEnabled(false);
flagsPlaced = 0;
flagsLabel.setText(Integer.toString(flagsPlaced));
timeScore = "00:00:00";
for (int i = 0; i < Field.getSizeX(); i++)
for (int j = 0; j < Field.getSizeY(); j++) {
buttons[i][j].setText("");
buttons[i][j].setBackground(hiddenTileColor);
buttons[i][j].setForeground(Color.BLACK);
if (buttons[i][j].getIcon() != null)
buttons[i][j].setIcon(null);
}
clockLabel.setText("\u23F1 " + timeScore + " \u23F1");
}
/**
* Counts how many flags have been placed correctly.
* The incorrectly placed ones will be highlighted with red.
* @return The numbered of correctly placed flags.
*/
private static int correctFlags()
{
int correctFlags = 0;
for (int i = 0; i < Field.getSizeX(); i++) {
for (int j = 0; j < Field.getSizeY(); j++) {
if (buttons[i][j].getIcon() != null) {
if (Field.getTileAt(i, j).isMine()) {
ImageIcon icon = (ImageIcon) buttons[i][j].getIcon();
if ("flag".equals(icon.getDescription()))
++correctFlags;
} else {
buttons[i][j].setBackground(Color.RED);
buttons[i][j].setIcon(null);
}
}
}
}
return correctFlags;
}
    /** Reveals every mine by painting its tile with the mine icon. */
    private void revealMines()
    {
        for (int i = 0; i < Field.getSizeX(); i++)
            for (int j = 0; j < Field.getSizeY(); j++)
                if (Field.getTileAt(i, j).isMine()) {
                    // Clear any number/flag styling and show the mine image.
                    buttons[i][j].setBackground(hiddenTileColor);
                    buttons[i][j].setText("");
                    buttons[i][j].setIcon(createIcon("/quasar6/main/images/mine.gif"));
                }
    }
/** Sets the Look and Feel to the Swing default. */
private static void setMetalTheme()
{
try {
UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
} catch (UnsupportedLookAndFeelException | ClassNotFoundException | InstantiationException | IllegalAccessException e) {
System.err.println(e.getMessage());
}
}
/** Sets the Look and Feel to the OS default. */
private static void setOsTheme()
{
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (UnsupportedLookAndFeelException | ClassNotFoundException | InstantiationException | IllegalAccessException e) {
System.err.println(e.getMessage());
}
}
/**
* Creates an ImageIcon with the specified path
* if the file exists, otherwise returns null.
* The description is the file name without the extension.
* @param path path to the icon
* @return the ImageIcon or null
*/
private ImageIcon createIcon(String path)
{
Matcher matcher = Pattern.compile("(?<=/)[^/]+(?=\\.)").matcher(path);
if (matcher.find()) {
URL imgURL = getClass().getResource(path);
if (imgURL != null)
return new ImageIcon(imgURL, matcher.group());
System.err.println("Couldn't find file: " + path);
}
return null;
}
/** @return image for the window icon */
private Image createIconForWindow() {
ImageIcon imageIcon = createIcon("/quasar6/main/images/mine.gif");
if (imageIcon != null)
return imageIcon.getImage();
return null;
}
/** Places the windows horizontally and vertically in the center. */
private static void centerWindow()
{
Dimension dim = Toolkit.getDefaultToolkit().getScreenSize();
int width = (int) ((dim.getWidth() - app.getWidth()) / 2);
int height = (int) ((dim.getHeight() - app.getHeight()) / 2);
app.setLocation(width, height);
}
    /** Entry point: builds the singleton window and generates a beginner field. */
    public static void main(String[] args)
    {
        Main ms = Main.getInstance();
        ms.run(Field.BEGINNER);
    }
} |
<gh_stars>1-10
package com.yingnuo.web.filter;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/*
* 利用 CharacterFilter 实现全站统一编码
* */
/**
 * Servlet filter applying a site-wide UTF-8 encoding policy:
 * request bodies are decoded as UTF-8 and non-static responses are served
 * as {@code text/html;charset=utf-8}.
 */
public class CharacterFilter implements Filter {
    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No configuration needed.
    }
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        HttpServletResponse response = (HttpServletResponse) servletResponse;
        String url = request.getRequestURI();
        System.out.println("[CharacterFilter]-Request URL:" + url);
        // Static assets (css/js/png/jpg) keep their own content type; every
        // other response is HTML. The original expressed this with an empty
        // then-branch; the condition is inverted here for readability.
        boolean isStaticAsset = url.indexOf(".css") > 0 || url.indexOf(".js") > 0
                || url.indexOf(".png") > 0 || url.indexOf(".jpg") > 0;
        if (!isStaticAsset) {
            response.setContentType("text/html;charset=utf-8");
        }
        // Decode the request as UTF-8 to avoid mojibake. Note this only
        // affects the message body, not the URL query string.
        request.setCharacterEncoding("utf-8");
        // Wrap the request so parameter access goes through the project's
        // encoding-aware wrapper.
        CharacterRequest characterRequest = new CharacterRequest(request);
        filterChain.doFilter(characterRequest, response);
    }
    @Override
    public void destroy() {
        // Nothing to release.
    }
}
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# Add a feed source
#echo 'src-git helloworld https://github.com/fw876/helloworld' >>feeds.conf.default
#echo 'src-git kenzo https://github.com/kenzok8/openwrt-packages' >>feeds.conf.default
#echo 'src-git small https://github.com/kenzok8/small' >>feeds.conf.default
#echo 'src-git wifiportal https://github.com/wiwizcom/WiFiPortal.git' >>feeds.conf.default
#sed -i '$a src-git NueXini_Packages https://github.com/NueXini/NueXini_Packages.git' feeds.conf.default
#sed -i '$a src-git WiFiPortal https://github.com/wiwizcom/WiFiPortal.git' feeds.conf.default
|
<reponame>wujia28762/Tmate<gh_stars>0
package com.honyum.elevatorMan.activity.worker;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.ListView;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.activity.maintenance.MaintenancePlanAddActivity;
import com.honyum.elevatorMan.adapter.FixTaskListAdapter;
import com.honyum.elevatorMan.adapter.MSTaskListAdapter;
import com.honyum.elevatorMan.base.BaseFragmentActivity;
import com.honyum.elevatorMan.base.ListItemCallback;
import com.honyum.elevatorMan.data.FixInfo;
import com.honyum.elevatorMan.data.MaintenanceTaskInfo;
import com.honyum.elevatorMan.net.FixRequest;
import com.honyum.elevatorMan.net.FixResponse;
import com.honyum.elevatorMan.net.MaintenanceServiceResponse;
import com.honyum.elevatorMan.net.base.NetConstant;
import com.honyum.elevatorMan.net.base.NetTask;
import com.honyum.elevatorMan.net.base.NewRequestHead;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.RequestHead;
import java.util.List;
/**
* Created by Star on 2017/6/12.
*/
/**
 * Lists the current worker's repair ("fix") orders and opens
 * {@link FixDetailActivity} when an item is tapped.
 * The list is re-fetched from the server every time the screen resumes.
 *
 * Created by Star on 2017/6/12.
 */
public class FixOrderListActivity extends BaseFragmentActivity implements ListItemCallback<FixInfo> {
    // Adapter backing the ListView below.
    private FixTaskListAdapter mFixTaskListAdapter;
    private ListView fix_list;
    // Fix orders returned by the server; null/empty shows the hint label instead.
    private List<FixInfo> mFixInfo;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_fix_list);
        initTitle();
        initView();
    }
    private void initView() {
        fix_list = (ListView)findViewById(R.id.fix_list);
    }
    @Override
    protected void onResume() {
        super.onResume();
        // Refresh the order list whenever the activity becomes visible again.
        requestFixListInfo();
    }
    /**
     * Builds the paged fix-list request carrying the caller's credentials.
     *
     * @param userId id of the logged-in user
     * @param token  access token for the API
     * @return the populated request bean
     */
    private RequestBean getRequestBean(String userId, String token) {
        FixRequest request = new FixRequest();
        request.setHead(new NewRequestHead().setuserId(userId).setaccessToken(token));
        request.setBody(request.new FixRequestBody().setPage(NetConstant.PAGE).setRows(NetConstant.ROWS));
        return request;
    }
    /** Fires the async fix-list request and fills the list (or the empty hint) on response. */
    private void requestFixListInfo() {
        NetTask task = new NetTask(getConfig().getServer() + NetConstant.URL_FIX_LIST,
                getRequestBean(getConfig().getUserId(), getConfig().getToken())) {
            @Override
            protected void onResponse(NetTask task, String result) {
                FixResponse response = FixResponse.getFixResponse(result);
                mFixInfo = response.getBody();
                // Got the response payload; with no orders, show the hint
                // label and hide the list.
                if (mFixInfo == null||mFixInfo.size()==0) {
                    findViewById(R.id.tv_tips).setVisibility(View.VISIBLE);
                    findViewById(R.id.fix_list).setVisibility(View.GONE);
                    return;
                }
                fillList();
            }
        };
        addTask(task);
    }
    // Binds the fetched orders to the ListView via a fresh adapter.
    private void fillList() {
        mFixTaskListAdapter = new FixTaskListAdapter(mFixInfo, this);
        fix_list.setAdapter(mFixTaskListAdapter);
    }
    /**
     * Initializes the title bar (title text plus a back button).
     */
    private void initTitle() {
        initTitleBar("怡墅维修", R.id.title,
                R.drawable.back_normal, backClickListener);
    }
    // Opens the detail screen for the tapped order, passing it via the extras bundle.
    @Override
    public void performItemCallback(FixInfo data) {
        Intent intent = new Intent(this, FixDetailActivity.class);
        Bundle bundle = new Bundle();
        bundle.putSerializable("Info", data);
        intent.putExtras(bundle);
        startActivity(intent);
    }
}
|
// Sample record describing a person.
let person = {
  name: "John Doe",
  age: 25,
  occupation: "Engineer"
};
/* * -* *- *- *- *- *- *- * * ** -* -* -* - *- *- *-* - ** - *- - * *- */
/* * _ _ +\ */
/* - | |_ ___ ___ ___ ___ ___ ___ ___ _| |___ ___ ___ ___ + */
/* + | _| _| .'| |_ -| _| -_| | . | -_| | _| -_| /* */
/* * |_| |_| |__,|_|_|___|___|___|_|_|___|___|_|_|___|___| + */
/* - ~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~ * */
/* * <NAME> | okruitho | Alpha_1337k *- */
/* -* <NAME> | rvan-hou | robijnvh -+ */
/* * / <NAME> | jbennink | JonasDBB /- */
/* / <NAME> | tvan-cit | Tjobo-Hero * */
/* + <NAME> | rbraaksm | rbraaksm - */
/* *. ._ */
/* *. match.service.ts | Created: 2021-10-06 17:48:04 ._ */
/* - Edited on 2021-10-06 17:48:04 by alpha .- */
/* -* *- *- * -* -* -* ** - *-* -* * / -* -*- * /- - -* --*-*++ * -* * */
import { Injectable, Inject } from '@nestjs/common';
import { Repository } from 'typeorm';
import { Match } from './match.entity';
import { UserService } from '../user/user.service';
@Injectable()
export class MatchService {
  constructor(
    @Inject('MATCH_REPOSITORY')
    private matchRepo: Repository<Match>,
    private userService: UserService
  ) {}

  /**
   * Records a finished match between two players, updates both players'
   * Elo ratings according to the score, and persists the match with both
   * players attached.
   *
   * NOTE(review): an equal score (p1Score === p2Score) falls into the
   * else-branch and is rated as a win for player 2 — confirm that draws
   * are impossible in the game rules, or handle them explicitly.
   * NOTE(review): Elo is updated before the match row is saved; if the
   * save fails the rating change is not rolled back — consider a transaction.
   *
   * @param idP1 id of player 1
   * @param idP2 id of player 2
   * @param p1Score final score of player 1
   * @param p2Score final score of player 2
   * @returns the persisted match entity
   */
  async addMatch(
    idP1: number,
    idP2: number,
    p1Score: number,
    p2Score: number
  ): Promise<Match> {
    const p1 = await this.userService.getUserById(idP1);
    const p2 = await this.userService.getUserById(idP2);
    const newMatch = this.matchRepo.create({
      p1Score: p1Score,
      p2Score: p2Score,
    });
    if (p1Score > p2Score) {
      await this.userService.updateElo(p1.userid, true, p2.userElo);
      await this.userService.updateElo(p2.userid, false, p1.userElo);
    } else {
      await this.userService.updateElo(p1.userid, false, p2.userElo);
      await this.userService.updateElo(p2.userid, true, p1.userElo);
    }
    newMatch.players = [p1, p2];
    return this.matchRepo.save(newMatch);
  }

  /**
   * Loads one match by id with its players relation populated.
   *
   * @param matchId id of the match to fetch
   * @returns the match, including the `players` relation
   */
  async getMatchDetails(matchId: number): Promise<Match> {
    return this.matchRepo.findOne(matchId, {
      relations: ['players'],
    });
  }
}
|
<filename>src/app/services/api/model/ia-hal.ts
/**
 * HAL link-relation names used by the API responses.
 * Each constant is a `rel` key expected under `_links` in a HAL document.
 */
export namespace IAHal {
  export type IAHalName = string;
  export const SELF: IAHalName = 'self';
  export const AIC: IAHalName = 'http://identifiers.emc.com/aic';
  export const SEARCH: IAHalName = 'http://identifiers.emc.com/search';
  // Annotated with IAHalName for consistency with the sibling constants
  // (it was the only one left untyped).
  export const SEARCH_COMPOSITIONS: IAHalName = 'http://identifiers.emc.com/search-compositions';
  export const XFORM: IAHalName = 'http://identifiers.emc.com/xform';
  export const RESULT_MASTER: IAHalName = 'http://identifiers.emc.com/result-master';
  export const CI: IAHalName = 'http://identifiers.emc.com/ci';
}
|
export SPARK_WORKER_MEMORY=10g
|
/*-
* BSD LICENSE
*
* Copyright (c) 2015 - 2016 CESNET
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* * Neither the name of CESNET nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef RTE_PMD_SZEDATA2_H_
#define RTE_PMD_SZEDATA2_H_
#include <stdbool.h>
#include <rte_byteorder.h>
/* PCI Vendor ID */
#define PCI_VENDOR_ID_NETCOPE 0x1b26
/* PCI Device IDs */
#define PCI_DEVICE_ID_NETCOPE_COMBO80G 0xcb80
#define PCI_DEVICE_ID_NETCOPE_COMBO100G 0xc1c1
#define PCI_DEVICE_ID_NETCOPE_COMBO100G2 0xc2c1
/* number of PCI resource used by COMBO card */
#define PCI_RESOURCE_NUMBER 0
/* szedata2_packet header length == 4 bytes == 2B segment size + 2B hw size */
#define RTE_SZE2_PACKET_HEADER_SIZE 4
#define RTE_SZE2_MMIO_MAX 10
/*!
* Round 'what' to the nearest larger (or equal) multiple of '8'
* (szedata2 packet is aligned to 8 bytes)
*/
#define RTE_SZE2_ALIGN8(what) (((what) + ((8) - 1)) & (~((8) - 1)))
/*! Main handle structure: one open szedata2 device session plus the
 *  read/write context kept between calls. */
struct szedata {
	int fd;                              /*!< descriptor of the opened device */
	struct sze2_instance_info *info;
	uint32_t *write_size;
	void *space[RTE_SZE2_MMIO_MAX];      /*!< mapped MMIO spaces */
	struct szedata_lock lock[2][2];
	__u32 *rx_asize, *tx_asize;
	/* szedata_read_next variables - to keep context (ct) */
	/*
	 * rx
	 */
	/** initial sze lock ptr */
	const struct szedata_lock *ct_rx_lck_orig;
	/** current sze lock ptr (initial or next) */
	const struct szedata_lock *ct_rx_lck;
	/** remaining bytes (not read) within current lock */
	unsigned int ct_rx_rem_bytes;
	/** current pointer to locked memory */
	unsigned char *ct_rx_cur_ptr;
	/**
	 * allocated buffer to store RX packet if it was split
	 * into 2 buffers
	 */
	unsigned char *ct_rx_buffer;
	/** registered function to provide filtering based on hwdata */
	int (*ct_rx_filter)(u_int16_t hwdata_len, u_char *hwdata);
	/*
	 * tx
	 */
	/**
	 * buffer for tx - packet is prepared here
	 * (in future for burst write)
	 */
	unsigned char *ct_tx_buffer;
	/** initial sze TX lock ptrs - number according to TX interfaces */
	const struct szedata_lock **ct_tx_lck_orig;
	/** current sze TX lock ptrs - number according to TX interfaces */
	const struct szedata_lock **ct_tx_lck;
	/** already written bytes in both locks */
	unsigned int *ct_tx_written_bytes;
	/** remaining bytes (not written) within current lock */
	unsigned int *ct_tx_rem_bytes;
	/** current pointers to locked memory */
	unsigned char **ct_tx_cur_ptr;
	/** NUMA node closest to PCIe device, or -1 */
	int numa_node;
};
/*
 * @return Byte from PCI resource at offset "offset".
 */
static inline uint8_t
pci_resource_read8(struct rte_mem_resource *rsc, uint32_t offset)
{
	const uint8_t *base = (const uint8_t *)rsc->addr;

	return base[offset];
}
/*
 * @return Two bytes (little-endian in the resource, host order on return)
 * from PCI resource starting at offset "offset".
 */
static inline uint16_t
pci_resource_read16(struct rte_mem_resource *rsc, uint32_t offset)
{
	const uint16_t *reg = (const uint16_t *)((uint8_t *)rsc->addr + offset);

	return rte_le_to_cpu_16(*reg);
}
/*
 * @return Four bytes (little-endian in the resource, host order on return)
 * from PCI resource starting at offset "offset".
 */
static inline uint32_t
pci_resource_read32(struct rte_mem_resource *rsc, uint32_t offset)
{
	const uint32_t *reg = (const uint32_t *)((uint8_t *)rsc->addr + offset);

	return rte_le_to_cpu_32(*reg);
}
/*
 * @return Eight bytes (little-endian in the resource, host order on return)
 * from PCI resource starting at offset "offset".
 */
static inline uint64_t
pci_resource_read64(struct rte_mem_resource *rsc, uint32_t offset)
{
	const uint64_t *reg = (const uint64_t *)((uint8_t *)rsc->addr + offset);

	return rte_le_to_cpu_64(*reg);
}
/*
 * Write one byte to PCI resource address space at offset "offset".
 */
static inline void
pci_resource_write8(struct rte_mem_resource *rsc, uint32_t offset, uint8_t val)
{
	uint8_t *reg = (uint8_t *)rsc->addr + offset;

	*reg = val;
}
/*
 * Write two bytes (converted to little-endian) to PCI resource address
 * space at offset "offset".
 */
static inline void
pci_resource_write16(struct rte_mem_resource *rsc, uint32_t offset,
		uint16_t val)
{
	uint16_t *reg = (uint16_t *)((uint8_t *)rsc->addr + offset);

	*reg = rte_cpu_to_le_16(val);
}
/*
 * Write four bytes (converted to little-endian) to PCI resource address
 * space at offset "offset".
 */
static inline void
pci_resource_write32(struct rte_mem_resource *rsc, uint32_t offset,
		uint32_t val)
{
	uint32_t *reg = (uint32_t *)((uint8_t *)rsc->addr + offset);

	*reg = rte_cpu_to_le_32(val);
}
/*
 * Write eight bytes (converted to little-endian) to PCI resource address
 * space at offset "offset".
 */
static inline void
pci_resource_write64(struct rte_mem_resource *rsc, uint32_t offset,
		uint64_t val)
{
	uint64_t *reg = (uint64_t *)((uint8_t *)rsc->addr + offset);

	*reg = rte_cpu_to_le_64(val);
}
#define SZEDATA2_PCI_RESOURCE_PTR(rsc, offset, type) \
((type)(((uint8_t *)(rsc)->addr) + (offset)))
/* Link speeds decoded from the IBUF status register (see cgmii_link_speed()). */
enum szedata2_link_speed {
	SZEDATA2_LINK_SPEED_DEFAULT = 0,
	SZEDATA2_LINK_SPEED_10G,
	SZEDATA2_LINK_SPEED_40G,
	SZEDATA2_LINK_SPEED_100G,
};
/*
 * MAC address check modes; numeric values match the low two bits of the
 * IBUF mac_chmode register (see cgmii_ibuf_mac_mode_read/write).
 */
enum szedata2_mac_check_mode {
	SZEDATA2_MAC_CHMODE_PROMISC = 0x0,
	SZEDATA2_MAC_CHMODE_ONLY_VALID = 0x1,
	SZEDATA2_MAC_CHMODE_ALL_BROADCAST = 0x2,
	SZEDATA2_MAC_CHMODE_ALL_MULTICAST = 0x3,
};
/*
 * Structure describes CGMII IBUF address space.
 * Field order mirrors the hardware register layout (struct is __rte_packed
 * and registers are read as little-endian 32-bit words by the accessors
 * below) — do not reorder or resize fields.
 */
struct szedata2_cgmii_ibuf {
	/** Total Received Frames Counter low part */
	uint32_t trfcl;
	/** Correct Frames Counter low part */
	uint32_t cfcl;
	/** Discarded Frames Counter low part */
	uint32_t dfcl;
	/** Counter of frames discarded due to buffer overflow low part */
	uint32_t bodfcl;
	/** Total Received Frames Counter high part */
	uint32_t trfch;
	/** Correct Frames Counter high part */
	uint32_t cfch;
	/** Discarded Frames Counter high part */
	uint32_t dfch;
	/** Counter of frames discarded due to buffer overflow high part */
	uint32_t bodfch;
	/** IBUF enable register */
	uint32_t ibuf_en;
	/** Error mask register */
	uint32_t err_mask;
	/** IBUF status register */
	uint32_t ibuf_st;
	/** IBUF command register */
	uint32_t ibuf_cmd;
	/** Minimum frame length allowed */
	uint32_t mfla;
	/** Frame MTU */
	uint32_t mtu;
	/** MAC address check mode */
	uint32_t mac_chmode;
	/** Octets Received OK Counter low part */
	uint32_t orocl;
	/** Octets Received OK Counter high part */
	uint32_t oroch;
} __rte_packed;
/* Offset of CGMII IBUF memory for MAC addresses */
#define SZEDATA2_CGMII_IBUF_MAC_MEM_OFF 0x80
/*
 * @return
 *   true if IBUF is enabled
 *   false if IBUF is disabled
 */
static inline bool
cgmii_ibuf_is_enabled(volatile struct szedata2_cgmii_ibuf *ibuf)
{
	/* Bit 0 of the enable register is the enable flag; the comparison
	 * already yields a bool, so the former `? true : false` was redundant. */
	return (rte_le_to_cpu_32(ibuf->ibuf_en) & 0x1) != 0;
}
/*
 * Enables IBUF by setting bit 0 of the enable register.
 */
static inline void
cgmii_ibuf_enable(volatile struct szedata2_cgmii_ibuf *ibuf)
{
	uint32_t en = rte_le_to_cpu_32(ibuf->ibuf_en);

	ibuf->ibuf_en = rte_cpu_to_le_32(en | 0x1);
}
/*
 * Disables IBUF by clearing bit 0 of the enable register.
 */
static inline void
cgmii_ibuf_disable(volatile struct szedata2_cgmii_ibuf *ibuf)
{
	uint32_t en = rte_le_to_cpu_32(ibuf->ibuf_en);

	ibuf->ibuf_en = rte_cpu_to_le_32(en & ~0x1);
}
/*
 * @return
 *   true if ibuf link is up
 *   false if ibuf link is down
 */
static inline bool
cgmii_ibuf_is_link_up(volatile struct szedata2_cgmii_ibuf *ibuf)
{
	/* Bit 7 of the status register is the link flag; the comparison already
	 * yields a bool, so the former `? true : false` was redundant. */
	return (rte_le_to_cpu_32(ibuf->ibuf_st) & 0x80) != 0;
}
/*
 * @return
 *   MAC address check mode decoded from the low two bits of mac_chmode.
 */
static inline enum szedata2_mac_check_mode
cgmii_ibuf_mac_mode_read(volatile struct szedata2_cgmii_ibuf *ibuf)
{
	uint32_t mode_bits = rte_le_to_cpu_32(ibuf->mac_chmode) & 0x3;

	if (mode_bits == 0x1)
		return SZEDATA2_MAC_CHMODE_ONLY_VALID;
	if (mode_bits == 0x2)
		return SZEDATA2_MAC_CHMODE_ALL_BROADCAST;
	if (mode_bits == 0x3)
		return SZEDATA2_MAC_CHMODE_ALL_MULTICAST;
	/* 0x0 (and, defensively, anything else) maps to promiscuous mode. */
	return SZEDATA2_MAC_CHMODE_PROMISC;
}
/*
 * Writes "mode" into the low two bits of the MAC address check mode register,
 * preserving the remaining bits.
 */
static inline void
cgmii_ibuf_mac_mode_write(volatile struct szedata2_cgmii_ibuf *ibuf,
		enum szedata2_mac_check_mode mode)
{
	uint32_t chmode = rte_le_to_cpu_32(ibuf->mac_chmode);

	chmode = (chmode & ~0x3) | mode;
	ibuf->mac_chmode = rte_cpu_to_le_32(chmode);
}
/*
 * Structure describes CGMII OBUF address space.
 * Field order mirrors the hardware register layout (struct is __rte_packed) —
 * do not reorder or resize fields.
 */
struct szedata2_cgmii_obuf {
	/** Total Sent Frames Counter low part */
	uint32_t tsfcl;
	/** Octets Sent Counter low part */
	uint32_t oscl;
	/** Total Discarded Frames Counter low part */
	uint32_t tdfcl;
	/** reserved */
	uint32_t reserved1;
	/** Total Sent Frames Counter high part */
	uint32_t tsfch;
	/** Octets Sent Counter high part */
	uint32_t osch;
	/** Total Discarded Frames Counter high part */
	uint32_t tdfch;
	/** reserved */
	uint32_t reserved2;
	/** OBUF enable register */
	uint32_t obuf_en;
	/** reserved */
	uint64_t reserved3;
	/** OBUF control register */
	uint32_t ctrl;
	/** OBUF status register */
	uint32_t obuf_st;
} __rte_packed;
/*
 * @return
 *   true if OBUF is enabled
 *   false if OBUF is disabled
 */
static inline bool
cgmii_obuf_is_enabled(volatile struct szedata2_cgmii_obuf *obuf)
{
	/* Bit 0 of the enable register is the enable flag; the comparison
	 * already yields a bool, so the former `? true : false` was redundant. */
	return (rte_le_to_cpu_32(obuf->obuf_en) & 0x1) != 0;
}
/*
 * Enables OBUF by setting bit 0 of the enable register.
 */
static inline void
cgmii_obuf_enable(volatile struct szedata2_cgmii_obuf *obuf)
{
	uint32_t en = rte_le_to_cpu_32(obuf->obuf_en);

	obuf->obuf_en = rte_cpu_to_le_32(en | 0x1);
}
/*
 * Disables OBUF by clearing bit 0 of the enable register.
 */
static inline void
cgmii_obuf_disable(volatile struct szedata2_cgmii_obuf *obuf)
{
	uint32_t en = rte_le_to_cpu_32(obuf->obuf_en);

	obuf->obuf_en = rte_cpu_to_le_32(en & ~0x1);
}
/*
 * Function takes value from IBUF status register (bits 4-6). Values in IBUF
 * and OBUF should be same.
 *
 * @return Link speed constant.
 */
static inline enum szedata2_link_speed
cgmii_link_speed(volatile struct szedata2_cgmii_ibuf *ibuf)
{
	switch ((rte_le_to_cpu_32(ibuf->ibuf_st) & 0x70) >> 4) {
	case 0x03:
		return SZEDATA2_LINK_SPEED_10G;
	case 0x04:
		return SZEDATA2_LINK_SPEED_40G;
	case 0x05:
		return SZEDATA2_LINK_SPEED_100G;
	default:
		return SZEDATA2_LINK_SPEED_DEFAULT;
	}
}
/*
 * IBUFs and OBUFs can generally be located at different offsets in different
 * firmwares.
 * This part defines base offsets of IBUFs and OBUFs through various firmwares.
 * Currently one firmware type is supported.
 * Type of firmware is set through configuration option
 * CONFIG_RTE_LIBRTE_PMD_SZEDATA_AS.
 * Possible values are:
 * 0 - for firmwares:
 *     NIC_100G1_LR4
 *     HANIC_100G1_LR4
 *     HANIC_100G1_SR10
 */
#if !defined(RTE_LIBRTE_PMD_SZEDATA2_AS)
#error "RTE_LIBRTE_PMD_SZEDATA2_AS has to be defined"
#elif RTE_LIBRTE_PMD_SZEDATA2_AS == 0
/*
 * CGMII IBUF offset from the beginning of PCI resource address space.
 */
#define SZEDATA2_CGMII_IBUF_BASE_OFF 0x8000
/*
 * Size of CGMII IBUF.
 */
#define SZEDATA2_CGMII_IBUF_SIZE 0x200
/*
 * CGMII OBUF offset from the beginning of PCI resource address space.
 */
#define SZEDATA2_CGMII_OBUF_BASE_OFF 0x9000
/*
 * Size of CGMII OBUF.
 */
#define SZEDATA2_CGMII_OBUF_SIZE 0x100
#else
#error "RTE_LIBRTE_PMD_SZEDATA2_AS has wrong value, see comments in config file"
#endif
#endif
|
# Build and print the first seven powers of two: [1, 2, 4, 8, 16, 32, 64].
seq = [2 ** k for k in range(7)]
print(seq)
#!/bin/bash

# Terminal colour escape codes, resolved once up front via tput.
red=$(tput setaf 1)
green=$(tput setaf 2)
cyan=$(tput setaf 6)
magenta=$(tput setaf 5)
yellow=$(tput setaf 3)
reset=$(tput sgr0)

# Print a success message in green.
function Success()
{
    echo "${green}$1${reset}"
}

# Print an informational message in cyan.
function Info()
{
    echo "${cyan}$1${reset}"
}

# Print a warning message in yellow.
function Warning()
{
    echo "${yellow}$1${reset}"
}

# Print an error message in red.
function Error()
{
    echo "${red}$1${reset}"
}

# Print a magenta divider. NOTE(review): echo runs through sudo — presumably
# to prime/refresh cached sudo credentials; confirm before removing.
function Divide()
{
    sudo echo ""
    sudo echo "${magenta}----------${reset}"
    sudo echo ""
}
const express = require("express");
const app = express();
const bodyParser = require("body-parser");

// In-memory message store; each entry is { id: number, username, content }.
const messages = [];

app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));

// List all the messages
app.get("/messages", (req, res) => {
  res.json(messages);
});

// Get the message with ID
app.get("/messages/:id", (req, res) => {
  // Bug fix: route params are strings while ids are numbers (Date.now()),
  // so the strict comparison could never match. Coerce before comparing.
  const id = Number(req.params.id);
  const message = messages.find((m) => m.id === id);
  if (!message) {
    res.status(404).json({});
  } else {
    res.json(message);
  }
});

// Post a new message
app.post("/messages", (req, res) => {
  const message = {
    id: Date.now(),
    username: req.body.username,
    content: req.body.content,
  };
  messages.push(message);
  res.status(201).json(message);
});

// Delete a message
app.delete("/messages/:id", (req, res) => {
  // Same string-vs-number coercion fix as the GET-by-id route above.
  const id = Number(req.params.id);
  const messageIndex = messages.findIndex((m) => m.id === id);
  if (messageIndex === -1) {
    res.status(404).json({});
  } else {
    messages.splice(messageIndex, 1);
    // A 204 response must not carry a body; just terminate the response.
    res.status(204).end();
  }
});

app.listen(3000, () => {
  console.log("Server is running on port 3000");
});
// Define the TypedVector struct: a Vec paired with a runtime type tag.
struct TypedVector<T> {
    // Human-readable tag for the element type (e.g. "i32"). NOTE(review):
    // informational only — it is never validated against T and never read
    // after construction in this file.
    type_id: String,
    vec: Vec<T>,
}
impl<T> TypedVector<T> {
    // Constructor method to create a new TypedVector instance with an
    // empty backing vector and the given type tag.
    fn new(type_id: String) -> TypedVector<T> {
        TypedVector {
            type_id,
            vec: Vec::new(),
        }
    }
    // Method to add a new element to the end of the vector.
    fn push(&mut self, element: T) {
        self.vec.push(element);
    }
    // Method to return the length (number of elements) of the vector.
    fn len(&self) -> usize {
        self.vec.len()
    }
    // Method to return the element at the specified index, or None if the
    // index is out of bounds (delegates to Vec::get).
    fn get(&self, index: usize) -> Option<&T> {
        self.vec.get(index)
    }
}
fn main() {
// Create a new TypedVector instance to store integers
let mut int_vector = TypedVector::new("i32".to_string());
// Add elements to the vector
int_vector.push(10);
int_vector.push(20);
int_vector.push(30);
// Print the length of the vector
println!("Length of the vector: {}", int_vector.len());
// Access and print elements from the vector
if let Some(element) = int_vector.get(1) {
println!("Element at index 1: {}", element);
}
} |
#!/bin/bash
# Copyright 2013 The Shenzhen Key Laboratory of Intelligent Media and Speech,
# PKU-HKUST Shenzhen Hong Kong Institution (Author: Wei Shi)
# 2016 Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0
# Combine PLP and pitch features together
# Note: This file is based on make_plp.sh and make_pitch_kaldi.sh

# Begin configuration section.
nj=4
cmd=run.pl
plp_config=conf/plp.conf
pitch_config=conf/pitch.conf
pitch_postprocess_config=
paste_length_tolerance=2
compress=true
# End configuration section.

echo "$0 $@" # Print the command line for logging

if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;

# Usage: <data-dir> is required; <log-dir> and <plp-dir> are optional and
# default to <data-dir>/log and <data-dir>/data respectively (see below).
if [ $# -lt 1 ] || [ $# -gt 3 ]; then
  echo "Usage: $0 [options] <data-dir> [<log-dir> [<plp-dir>] ]";
  echo "e.g.: $0 data/train exp/make_plp/train mfcc"
  echo "Note: <log-dir> defaults to <data-dir>/log, and <plp-dir> defaults to <data-dir>/data"
  echo "Options: "
  echo "  --plp-config <config-file>                      # config passed to compute-plp-feats "
  echo "  --pitch-config <pitch-config-file>              # config passed to compute-kaldi-pitch-feats "
  echo "  --pitch-postprocess-config <postprocess-config-file> # config passed to process-kaldi-pitch-feats "
  echo "  --paste-length-tolerance <tolerance>            # length tolerance passed to paste-feats"
  echo "  --nj <nj>                                       # number of parallel jobs"
  echo "  --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
  exit 1;
fi

data=$1
if [ $# -ge 2 ]; then
  logdir=$2
else
  logdir=$data/log
fi
if [ $# -ge 3 ]; then
  plp_pitch_dir=$3
else
  plp_pitch_dir=$data/data
fi

# make $plp_pitch_dir an absolute pathname.
plp_pitch_dir=`perl -e '($dir,$pwd)= @ARGV; if($dir!~m:^/:) { $dir = "$pwd/$dir"; } print $dir; ' $plp_pitch_dir ${PWD}`

# use "name" as part of name of the archive.
name=`basename $data`

mkdir -p $plp_pitch_dir || exit 1;
mkdir -p $logdir || exit 1;

# An existing feats.scp would be overwritten below; keep a backup copy.
if [ -f $data/feats.scp ]; then
  mkdir -p $data/.backup
  echo "$0: moving $data/feats.scp to $data/.backup"
  mv $data/feats.scp $data/.backup
fi

scp=$data/wav.scp

required="$scp $plp_config $pitch_config"

for f in $required; do
  if [ ! -f $f ]; then
    echo "make_plp_pitch.sh: no such file $f"
    exit 1;
  fi
done
utils/validate_data_dir.sh --no-text --no-feats $data || exit 1;

if [ ! -z "$pitch_postprocess_config" ]; then
  postprocess_config_opt="--config=$pitch_postprocess_config";
else
  postprocess_config_opt=
fi

# Optional VTLN warp factors: per-speaker (spk2warp) takes precedence over
# per-utterance (utt2warp).
if [ -f $data/spk2warp ]; then
  echo "$0 [info]: using VTLN warp factors from $data/spk2warp"
  vtln_opts="--vtln-map=ark:$data/spk2warp --utt2spk=ark:$data/utt2spk"
elif [ -f $data/utt2warp ]; then
  echo "$0 [info]: using VTLN warp factors from $data/utt2warp"
  vtln_opts="--vtln-map=ark:$data/utt2warp"
fi

for n in $(seq $nj); do
  # the next command does nothing unless $plp_pitch_dir/storage/ exists, see
  # utils/create_data_link.pl for more info.
  utils/create_data_link.pl $plp_pitch_dir/raw_plp_pitch_$name.$n.ark
done

# Two extraction paths: with a segments file, features are computed on
# extracted segments; otherwise wav.scp is assumed to be indexed by utterance.
if [ -f $data/segments ]; then
  echo "$0 [info]: segments file exists: using that."
  split_segments=""
  for n in $(seq $nj); do
    split_segments="$split_segments $logdir/segments.$n"
  done

  utils/split_scp.pl $data/segments $split_segments || exit 1;
  rm $logdir/.error 2>/dev/null

  plp_feats="ark:extract-segments scp,p:$scp $logdir/segments.JOB ark:- | compute-plp-feats $vtln_opts --verbose=2 --config=$plp_config ark:- ark:- |"
  pitch_feats="ark,s,cs:extract-segments scp,p:$scp $logdir/segments.JOB ark:- | compute-kaldi-pitch-feats --verbose=2 --config=$pitch_config ark:- ark:- | process-kaldi-pitch-feats $postprocess_config_opt ark:- ark:- |"

  $cmd JOB=1:$nj $logdir/make_plp_pitch_${name}.JOB.log \
    paste-feats --length-tolerance=$paste_length_tolerance "$plp_feats" "$pitch_feats" ark:- \| \
    copy-feats --compress=$compress ark:- \
    ark,scp:$plp_pitch_dir/raw_plp_pitch_$name.JOB.ark,$plp_pitch_dir/raw_plp_pitch_$name.JOB.scp \
    || exit 1;
else
  echo "$0: [info]: no segments file exists: assuming wav.scp indexed by utterance."
  split_scps=""
  for n in $(seq $nj); do
    split_scps="$split_scps $logdir/wav_${name}.$n.scp"
  done

  utils/split_scp.pl $scp $split_scps || exit 1;

  plp_feats="ark:compute-plp-feats $vtln_opts --verbose=2 --config=$plp_config scp,p:$logdir/wav_${name}.JOB.scp ark:- |"
  pitch_feats="ark,s,cs:compute-kaldi-pitch-feats --verbose=2 --config=$pitch_config scp,p:$logdir/wav_${name}.JOB.scp ark:- | process-kaldi-pitch-feats $postprocess_config_opt ark:- ark:- |"

  $cmd JOB=1:$nj $logdir/make_plp_pitch_${name}.JOB.log \
    paste-feats --length-tolerance=$paste_length_tolerance "$plp_feats" "$pitch_feats" ark:- \| \
    copy-feats --compress=$compress ark:- \
    ark,scp:$plp_pitch_dir/raw_plp_pitch_$name.JOB.ark,$plp_pitch_dir/raw_plp_pitch_$name.JOB.scp \
    || exit 1;
fi

if [ -f $logdir/.error.$name ]; then
  echo "Error producing plp & pitch features for $name:"
  tail $logdir/make_plp_pitch_${name}.1.log
  exit 1;
fi

# concatenate the .scp files together.
for n in $(seq $nj); do
  cat $plp_pitch_dir/raw_plp_pitch_$name.$n.scp || exit 1;
done > $data/feats.scp

rm $logdir/wav_${name}.*.scp $logdir/segments.* 2>/dev/null

# Sanity checks: every utterance in utt2spk should have produced a feature row.
nf=`cat $data/feats.scp | wc -l`
nu=`cat $data/utt2spk | wc -l`
if [ $nf -ne $nu ]; then
  echo "It seems not all of the feature files were successfully processed ($nf != $nu);"
  echo "consider using utils/fix_data_dir.sh $data"
fi

if [ $nf -lt $[$nu - ($nu/20)] ]; then
  echo "Less than 95% the features were successfully generated. Probably a serious error."
  exit 1;
fi

echo "Succeeded creating PLP & Pitch features for $name"
|
'use strict';

// NOTE(review): this file reads as transpiled (Babel-style CommonJS) output of
// an ES-module source — prefer editing the original source, not this artifact.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = peerStoreEnhancer;

var _redux = require('redux');

var _reduxActionEnhancerMiddleware = require('redux-action-enhancer-middleware');

var _reduxActionEnhancerMiddleware2 = _interopRequireDefault(_reduxActionEnhancerMiddleware);

var _peerReducerEnhancer = require('./enhancers/peerReducerEnhancer');

var _peerReducerEnhancer2 = _interopRequireDefault(_peerReducerEnhancer);

var _peerPreloadedStateEnhancer = require('./enhancers/peerPreloadedStateEnhancer');

var _peerPreloadedStateEnhancer2 = _interopRequireDefault(_peerPreloadedStateEnhancer);

var _peerActionEnhancers = require('./enhancers/peerActionEnhancers');

// Babel interop helper: ensures `.default` exists on plain CommonJS exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Redux store enhancer that wires in peer support:
// - wraps the reducer and the preloaded state with the peer enhancers, and
// - applies two action-enhancer middlewares (peer metadata + replication),
//   both filtered through ignorePeerActions.
function peerStoreEnhancer() {
  return function (createStore) {
    return function (reducer, preloadedState) {
      var enhancedReducer = (0, _peerReducerEnhancer2.default)(reducer);
      var enhancedPreloadedState = (0, _peerPreloadedStateEnhancer2.default)(preloadedState);
      var peerEnhancer = (0, _redux.applyMiddleware)((0, _reduxActionEnhancerMiddleware2.default)({
        filter: _peerActionEnhancers.ignorePeerActions,
        enhancer: _peerActionEnhancers.peerMetadataEnhancer
      }), (0, _reduxActionEnhancerMiddleware2.default)({
        filter: _peerActionEnhancers.ignorePeerActions,
        enhancer: _peerActionEnhancers.peerReplicateActionEnhancer
      }));
      return peerEnhancer(createStore)(enhancedReducer, enhancedPreloadedState);
    };
  };
}
#!/usr/bin/env bash

# Adds a new Raft consenter (orderer) MSP by invoking the
# raft-add-orderer-msp.sh container script through the orderer's CLI.
BASEDIR=$(dirname "$0")
source $BASEDIR/../lib.sh
source ../lib.sh 2>/dev/null # for IDE code completion

# Positional args: name and domain are required (printUsage aborts otherwise);
# ports default to 7050 (orderer) and 80 (www).
NEWCONSENTER_NAME=${1:?`printUsage "$usageMsg" "$exampleMsg"`}
NEWCONSENTER_DOMAIN=${2:?`printUsage "$usageMsg" "$exampleMsg"`}
# NOTE(review): NEWCONSENTER_PORT is accepted but never forwarded to runCLI
# below — confirm whether that is intentional.
NEWCONSENTER_PORT=${3:-7050}
NEWCONSENTER_WWW_PORT=${4:-80}

# Deployment identity defaults; each may be overridden via the environment.
: ${DOMAIN:=bazaar.com}
: ${ORDERER_DOMAIN:=${DOMAIN}}
: ${ORDERER_NAME:=orderer}

export DOMAIN ORDERER_NAME ORDERER_DOMAIN

COMPOSE_PROJECT_NAME=${ORDERER_NAME}.${ORDERER_DOMAIN} EXECUTE_BY_ORDERER=1 runCLI "container-scripts/orderer/raft-add-orderer-msp.sh $NEWCONSENTER_NAME $NEWCONSENTER_DOMAIN ${NEWCONSENTER_WWW_PORT}"
|
// Copyright (c) 2021 FRC Team 2881 - The Lady Cans
//
// Open Source Software; you can modify and/or share it under the terms of BSD
// license file in the root directory of this project.
package frc.robot.commands;
import java.util.function.DoubleSupplier;
import edu.wpi.first.wpilibj2.command.CommandBase;
import frc.robot.subsystems.Elevator;
/**
 * Default command for the {@link Elevator} subsystem: it continuously feeds
 * the value of a DoubleSupplier (typically a stick on a controller) straight
 * into the elevator's motor output.
 */
public class RunElevator extends CommandBase {
  /** Elevator subsystem driven by this command. */
  private final Elevator m_elevator;

  /** Source of the signed speed to apply to the elevator. */
  private final DoubleSupplier m_speed;

  /**
   * Creates the command.
   *
   * @param elevator the {@link Elevator} subsystem to drive.
   * @param speed supplier of the speed (and direction) for the elevator.
   */
  public RunElevator(Elevator elevator, DoubleSupplier speed) {
    m_elevator = elevator;
    m_speed = speed;
    addRequirements(elevator);
  }

  /** Nothing to set up; all work happens in {@link #execute()}. */
  @Override
  public void initialize() {
  }

  /** Forwards the current supplier value to the elevator each scheduler run. */
  @Override
  public void execute() {
    m_elevator.run(m_speed.getAsDouble());
  }

  /** Stops the elevator when the command ends or is interrupted. */
  @Override
  public void end(boolean interrupted) {
    m_elevator.stop();
  }

  /** Never finishes on its own — it runs until interrupted, as a default command should. */
  @Override
  public boolean isFinished() {
    return false;
  }
}
|
<filename>java/src/main/java/com/me/java/outer.java
package com.me.java;
/**
 * @author zs
 * @date 2021/10/8.
 * Demonstrates jumping out of nested loops with a labeled break.
 */
public class outer {
    public static void main(String[] args) {
        outer:
        for (int row = 0; row < 10; row++) {
            for (int col = 0; col < 10; col++) {
                System.out.println(row * 10 + col);
                // After printing the sixth value, leave BOTH loops at once.
                if (col == 5) {
                    break outer;
                }
            }
        }
    }
}
|
/*
* Copyright 2019 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.trixon.toolbox.io.file;
/**
*
* @author <NAME>
*/
public class CoordinatePoint {

    protected double mX;
    protected double mY;
    protected double mZ;

    /** @return the x coordinate. */
    public double getX() {
        return mX;
    }

    /** Sets the x coordinate. */
    public void setX(double x) {
        mX = x;
    }

    /** @return the y coordinate. */
    public double getY() {
        return mY;
    }

    /** Sets the y coordinate. */
    public void setY(double y) {
        mY = y;
    }

    /** @return the z coordinate. */
    public double getZ() {
        return mZ;
    }

    /** Sets the z coordinate. */
    public void setZ(double z) {
        mZ = z;
    }
}
|
<filename>src/natives.cpp
/*
* Copyright (C) 2014 Mellnik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define CRYPTOPP_ENABLE_NAMESPACE_WEAK 1 // for MD5 checksum
#include <fstream>
#include "cryptopp/base64.h"
#include "cryptopp/osrng.h"
#include "cryptopp/integer.h"
#include "cryptopp/files.h"
#include "cryptopp/hex.h"
#include "cryptopp/filters.h"
#include "cryptopp/md5.h" // for MD5 checksum
#include "cryptopp/sha.h"
#include "cryptopp/whrlpool.h"
#include "utility.h"
#include "pbkdf2.h"
#include "callback.h"
#include "natives.h"
// Queues an asynchronous PBKDF2 hash generation.
// params: [1]=plain-text key, [2]=iteration count (>= 1000),
//         [3]=public callback name, [4]=format string for the extra args.
// The result is delivered later through the named callback.
cell AMX_NATIVE_CALL Native::hash_generate(AMX *amx, cell *params)
{
	static const unsigned ParameterCount = 4;
	// Manual count check (variadic native, so PARAM_CHECK is not used here).
	if(params[0] < ParameterCount * sizeof(cell))
	{
		logprintf("[HASH] Invalid parameter count in hash_generate.");
		return 0;
	}
	if(params[2] < 1000)
	{
		logprintf("[HASH] Invalid iteration count. Expected at least 1000.");
		return 0;
	}
	char *key = NULL, *callback = NULL, *format = NULL;
	amx_StrParam(amx, params[1], key);
	amx_StrParam(amx, params[3], callback);
	amx_StrParam(amx, params[4], format);
	if(key == NULL || callback == NULL || format == NULL)
	{
		logprintf("[HASH] Failed to get hash_generate parameter.");
		return 0;
	}
	// Ownership of cData passes to the queued worker (presumably released by
	// the callback machinery once fired — confirm in Callback/Pbkdf2).
	CallbackData *cData = new CallbackData;
	cData->Name = callback;
	Callback::Get()->Parameters(cData->Params, format, amx, params, ParameterCount);
	Callback::Get()->QueueWorker(new Pbkdf2(key, static_cast<unsigned int>(params[2]), cData));
	return 1;
}
// Copies the generated hash and salt of the active PBKDF2 result into the
// caller-supplied AMX string buffers. Only valid inside the result callback
// of a hash_generate request.
// params: [1]=hash dest, [2]=salt dest, [3]=hash buf size, [4]=salt buf size.
cell AMX_NATIVE_CALL Native::hash_retrieve(AMX *amx, cell *params)
{
	PARAM_CHECK(4, "hash_retrieve");
	if(Callback::Get()->GetActiveResult() != NULL)
	{
		// A VALIDATE result has no retrievable hash/salt pair.
		if(Callback::Get()->GetActiveResult()->h_Worker != PBKDF2_GENERATE)
		{
			logprintf("[HASH] Invalid function call for hash validation.");
			return 0;
		}
		else
		{
			cell *amx_Addr = NULL;
			amx_GetAddr(amx, params[1], &amx_Addr);
			amx_SetString(amx_Addr, Callback::Get()->GetActiveResult()->h_Hash.c_str(), 0, 0, params[3]);
			amx_GetAddr(amx, params[2], &amx_Addr);
			amx_SetString(amx_Addr, Callback::Get()->GetActiveResult()->h_Salt.c_str(), 0, 0, params[4]);
			return 1;
		}
	}
	else
	{
		logprintf("[HASH] No active result.");
		return 0;
	}
}
// Queues an asynchronous PBKDF2 validation of "key" against a stored hash+salt.
// params: [1]=key, [2]=hash, [3]=salt, [4]=iteration count (>= 1000),
//         [5]=public callback name, [6]=format string for the extra args.
cell AMX_NATIVE_CALL Native::hash_validate(AMX *amx, cell *params)
{
	static const unsigned ParameterCount = 6;
	if(params[0] < ParameterCount * sizeof(cell))
	{
		logprintf("[HASH] Invalid parameter count in hash_validate.");
		return 0;
	}
	if(params[4] < 1000)
	{
		logprintf("[HASH] Invalid iteration count. Expected at least 1000.");
		return 0;
	}
	char *key = NULL, *callback = NULL, *format = NULL, *hash = NULL, *salt = NULL;
	amx_StrParam(amx, params[1], key);
	amx_StrParam(amx, params[2], hash);
	amx_StrParam(amx, params[3], salt);
	amx_StrParam(amx, params[5], callback);
	amx_StrParam(amx, params[6], format);
	if(key == NULL || callback == NULL || format == NULL || hash == NULL || salt == NULL)
	{
		// Bug fix: this message previously named hash_generate (copy-paste error).
		logprintf("[HASH] Failed to get hash_validate parameter.");
		return 0;
	}
	// Ownership of cData passes to the queued worker.
	CallbackData *cData = new CallbackData;
	cData->Name = callback;
	Callback::Get()->Parameters(cData->Params, format, amx, params, ParameterCount);
	Callback::Get()->QueueWorker(new Pbkdf2(key, hash, salt, static_cast<unsigned int>(params[4]), cData));
	return 1;
}
// Returns whether the active PBKDF2 validation result matched. Only valid
// inside the result callback of a hash_validate request.
cell AMX_NATIVE_CALL Native::hash_is_equal(AMX *amx, cell *params)
{
	// Guard clauses replace the original nested if/else; behavior is identical.
	if(Callback::Get()->GetActiveResult() == NULL)
	{
		logprintf("[HASH] No active result.");
		return 0;
	}
	if(Callback::Get()->GetActiveResult()->h_Worker != PBKDF2_VALIDATE)
	{
		logprintf("[HASH] Invalid function call for hash generation.");
		return 0;
	}
	return static_cast<cell>(Callback::Get()->GetActiveResult()->h_Equal);
}
// Returns the number of queued hash workers that have not been processed yet.
cell AMX_NATIVE_CALL Native::hash_unprocessed(AMX *amx, cell *params)
{
	return static_cast<cell>(Callback::Get()->UnprocessedWorkerCount());
}
// Returns the execution time recorded for the active result. Only valid
// inside a result callback.
cell AMX_NATIVE_CALL Native::hash_exec_time(AMX *amx, cell *params)
{
	// Guard clause replaces the original if/else; behavior is identical.
	if(Callback::Get()->GetActiveResult() == NULL)
	{
		logprintf("[HASH] No active result.");
		return 0;
	}
	return static_cast<cell>(Callback::Get()->GetActiveResult()->h_ExecTime);
}
// Sets the maximum number of concurrent hashing worker threads (>= 1).
cell AMX_NATIVE_CALL Native::hash_thread_limit(AMX *amx, cell *params)
{
	PARAM_CHECK(1, "hash_thread_limit");
	if(params[1] < 1)
	{
		logprintf("[HASH] Invalid thread limit. Expected at least 1.");
		return 0;
	}
	Callback::Get()->SetThreadLimit(static_cast<unsigned int>(params[1]));
	return 1;
}
// Constant-time string comparison: the accumulated-diff loop deliberately
// does not exit early, so the runtime does not leak the mismatch position.
// Returns 1 when the strings are equal, 0 otherwise (or on bad parameters).
cell AMX_NATIVE_CALL Native::slow_equals(AMX *amx, cell *params)
{
	PARAM_CHECK(2, "slow_equals");
	char *a = NULL, *b = NULL;
	amx_StrParam(amx, params[1], a);
	amx_StrParam(amx, params[2], b);
	if(a == NULL || b == NULL)
	{
		logprintf("[HASH] Failed to get slow_equals parameter.");
		return 0;
	}
	// Hoist the lengths: the original re-evaluated strlen() on every loop
	// iteration, making the comparison accidentally O(n^2).
	const size_t len_a = strlen(a), len_b = strlen(b);
	// Fold the length difference in so different-length inputs never compare equal.
	unsigned diff = static_cast<unsigned>(len_a ^ len_b);
	for(size_t i = 0; i < len_a && i < len_b; ++i)
		diff |= a[i] ^ b[i];
	return static_cast<cell>(diff == 0);
}
/*
 * String digest natives. Shared AMX convention:
 *   params[1] = input string, params[2] = output buffer, params[3] = buffer size.
 * The actual hashing is delegated to the Utility wrappers around CryptoPP.
 */
cell AMX_NATIVE_CALL Native::sha256(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "sha256");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::sha256(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::sha384(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "sha384");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::sha384(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::sha512(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "sha512");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::sha512(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::sha3(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "sha3");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::sha3(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::whirlpool(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "whirlpool");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::whirlpool(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::ripemd160(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "ripemd160");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::ripemd160(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::ripemd256(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "ripemd256");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::ripemd256(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::ripemd320(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "ripemd320");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hash;
	Utility::ripemd320(str, hash);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hash.c_str(), 0, 0, params[3]);
	return 1;
}
/*
 * Base64/hex codec natives. Shared AMX convention:
 *   params[1] = input string, params[2] = output buffer, params[3] = buffer size.
 * Encoding/decoding is delegated to the Utility wrappers.
 */
cell AMX_NATIVE_CALL Native::base64_encode(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "base64_encode");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string base64;
	Utility::base64_encode(str, base64);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, base64.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::base64_decode(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "base64_decode");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string decoded;
	Utility::base64_decode(str, decoded);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, decoded.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::hex_encode(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "hex_encode");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string hex;
	Utility::hex_encode(str, hex);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, hex.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::hex_decode(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "hex_decode");
	char *str = NULL;
	amx_StrParam(amx, params[1], str);
	string decoded;
	Utility::hex_decode(str, decoded);
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, decoded.c_str(), 0, 0, params[3]);
	return 1;
}
// Returns a cryptographically random integer between params[1] and params[2]
// (bounds presumably inclusive per CryptoPP Integer::Randomize — confirm).
cell AMX_NATIVE_CALL Native::random_int(AMX *amx, cell *params)
{
	PARAM_CHECK(2, "random_int");
	if(params[2] < params[1]) // Prevent crash
	{
		logprintf("[HASH] Invalid input in random_int.");
		return 0;
	}
	CryptoPP::AutoSeededRandomPool RNG;
	CryptoPP::Integer num(RNG, 32);
	num.Randomize(RNG, params[1], params[2]);
	return static_cast<cell>(num.ConvertToLong());
}
// Fills the destination AMX buffer with a random string of the requested length.
// params: [1]=length (>= 1), [2]=dest buffer, [3]=dest buffer size.
cell AMX_NATIVE_CALL Native::random_string(AMX *amx, cell *params)
{
	PARAM_CHECK(3, "random_string");
	if(params[1] < 1)
	{
		logprintf("[HASH] Invalid length specified.");
		return 0;
	}
	string random;
	Utility::random_string(random, static_cast<unsigned int>(params[1]));
	cell *amx_Addr = NULL;
	amx_GetAddr(amx, params[2], &amx_Addr);
	amx_SetString(amx_Addr, random.c_str(), 0, 0, params[3]);
	return 1;
}
cell AMX_NATIVE_CALL Native::md5sum(AMX *amx, cell *params)
{
PARAM_CHECK(3, "md5sum");
char *file = NULL;
amx_StrParam(amx, params[1], file);
if(file == NULL)
return 0;
if(!(std::ifstream(file)))
{
logprintf("[HASH] File does not exist.");
return 0;
}
string sum;
CryptoPP::Weak::MD5 h_md5;
CryptoPP::FileSource(file, true, new CryptoPP::HashFilter(h_md5, new CryptoPP::HexEncoder(new CryptoPP::StringSink(sum))));
cell *amx_Addr = NULL;
amx_GetAddr(amx, params[2], &amx_Addr);
amx_SetString(amx_Addr, sum.c_str(), 0, 0, params[3]);
return 1;
}
// native sha1sum(const file[], dest[], dest_size)
// Writes the hex SHA-1 digest of the given file into dest.
cell AMX_NATIVE_CALL Native::sha1sum(AMX *amx, cell *params)
{
    PARAM_CHECK(3, "sha1sum");
    char *path = NULL;
    amx_StrParam(amx, params[1], path);
    if(path == NULL)
        return 0;
    // Bail out early when the file cannot be opened.
    if(!(std::ifstream(path)))
    {
        logprintf("[HASH] File does not exist.");
        return 0;
    }
    // Stream the file through SHA-1 and hex-encode the digest.
    CryptoPP::SHA1 hasher;
    string digest;
    CryptoPP::FileSource(path, true, new CryptoPP::HashFilter(hasher, new CryptoPP::HexEncoder(new CryptoPP::StringSink(digest))));
    // Copy the digest back into the output buffer (params[2], size params[3]).
    cell *out_addr = NULL;
    amx_GetAddr(amx, params[2], &out_addr);
    amx_SetString(out_addr, digest.c_str(), 0, 0, params[3]);
    return 1;
}
// native sha256sum(const file[], dest[], dest_size)
// Writes the hex SHA-256 digest of the given file into dest.
cell AMX_NATIVE_CALL Native::sha256sum(AMX *amx, cell *params)
{
    PARAM_CHECK(3, "sha256sum");
    char *path = NULL;
    amx_StrParam(amx, params[1], path);
    if(path == NULL)
        return 0;
    // Bail out early when the file cannot be opened.
    if(!(std::ifstream(path)))
    {
        logprintf("[HASH] File does not exist.");
        return 0;
    }
    // Stream the file through SHA-256 and hex-encode the digest.
    CryptoPP::SHA256 hasher;
    string digest;
    CryptoPP::FileSource(path, true, new CryptoPP::HashFilter(hasher, new CryptoPP::HexEncoder(new CryptoPP::StringSink(digest))));
    // Copy the digest back into the output buffer (params[2], size params[3]).
    cell *out_addr = NULL;
    amx_GetAddr(amx, params[2], &out_addr);
    amx_SetString(out_addr, digest.c_str(), 0, 0, params[3]);
    return 1;
}
// native sha384sum(const file[], dest[], dest_size)
// Writes the hex SHA-384 digest of the given file into dest.
cell AMX_NATIVE_CALL Native::sha384sum(AMX *amx, cell *params)
{
    PARAM_CHECK(3, "sha384sum");
    char *path = NULL;
    amx_StrParam(amx, params[1], path);
    if(path == NULL)
        return 0;
    // Bail out early when the file cannot be opened.
    if(!(std::ifstream(path)))
    {
        logprintf("[HASH] File does not exist.");
        return 0;
    }
    // Stream the file through SHA-384 and hex-encode the digest.
    CryptoPP::SHA384 hasher;
    string digest;
    CryptoPP::FileSource(path, true, new CryptoPP::HashFilter(hasher, new CryptoPP::HexEncoder(new CryptoPP::StringSink(digest))));
    // Copy the digest back into the output buffer (params[2], size params[3]).
    cell *out_addr = NULL;
    amx_GetAddr(amx, params[2], &out_addr);
    amx_SetString(out_addr, digest.c_str(), 0, 0, params[3]);
    return 1;
}
// native sha512sum(const file[], dest[], dest_size)
// Writes the hex SHA-512 digest of the given file into dest.
cell AMX_NATIVE_CALL Native::sha512sum(AMX *amx, cell *params)
{
    PARAM_CHECK(3, "sha512sum");
    char *path = NULL;
    amx_StrParam(amx, params[1], path);
    if(path == NULL)
        return 0;
    // Bail out early when the file cannot be opened.
    if(!(std::ifstream(path)))
    {
        logprintf("[HASH] File does not exist.");
        return 0;
    }
    // Stream the file through SHA-512 and hex-encode the digest.
    CryptoPP::SHA512 hasher;
    string digest;
    CryptoPP::FileSource(path, true, new CryptoPP::HashFilter(hasher, new CryptoPP::HexEncoder(new CryptoPP::StringSink(digest))));
    // Copy the digest back into the output buffer (params[2], size params[3]).
    cell *out_addr = NULL;
    amx_GetAddr(amx, params[2], &out_addr);
    amx_SetString(out_addr, digest.c_str(), 0, 0, params[3]);
    return 1;
}
// native wpsum(const file[], dest[], dest_size)
// Writes the hex Whirlpool digest of the given file into dest.
cell AMX_NATIVE_CALL Native::wpsum(AMX *amx, cell *params)
{
    PARAM_CHECK(3, "wpsum");
    char *path = NULL;
    amx_StrParam(amx, params[1], path);
    if(path == NULL)
        return 0;
    // Bail out early when the file cannot be opened.
    if(!(std::ifstream(path)))
    {
        logprintf("[HASH] File does not exist.");
        return 0;
    }
    // Stream the file through Whirlpool and hex-encode the digest.
    CryptoPP::Whirlpool hasher;
    string digest;
    CryptoPP::FileSource(path, true, new CryptoPP::HashFilter(hasher, new CryptoPP::HexEncoder(new CryptoPP::StringSink(digest))));
    // Copy the digest back into the output buffer (params[2], size params[3]).
    cell *out_addr = NULL;
    amx_GetAddr(amx, params[2], &out_addr);
    amx_SetString(out_addr, digest.c_str(), 0, 0, params[3]);
    return 1;
}
|
#!/bin/bash
# Launch a BigDL LeNet training job via spark-submit inside an SGX enclave
# (Graphene pal_loader), with RPC/IO encryption and SSL enabled.
set -x
spark_master=$SPARK_MASTER
driver_port=$SPARK_DRIVER_PORT
block_manager_port=$SPARK_BLOCK_MANAGER_PORT
driver_host=$SPARK_DRIVER_IP
driver_block_manager_port=$SPARK_DRIVER_BLOCK_MANAGER_PORT
# Fixed the misspelled internal name 'secure_passowrd'.
secure_password=$SPARK_SECURE_PASSWORD
export SPARK_HOME=/ppml/trusted-big-data-ml/work/spark-2.4.3
SGX=1 ./pal_loader /opt/jdk8/bin/java \
    -cp "/ppml/trusted-big-data-ml/work/bigdl-jar-with-dependencies.jar:/ppml/trusted-big-data-ml/work/spark-2.4.3/conf/:/ppml/trusted-big-data-ml/work/spark-2.4.3/jars/*" \
    -Dspark.authenticate=true \
    -Dspark.authenticate.secret=$secure_password \
    -Dspark.network.crypto.enabled=true \
    -Dspark.network.crypto.keyLength=128 \
    -Dspark.network.crypto.keyFactoryAlgorithm=PBKDF2WithHmacSHA1 \
    -Dspark.io.encryption.enabled=true \
    -Dspark.io.encryption.keySizeBits=128 \
    -Dspark.io.encryption.keygen.algorithm=HmacSHA1 \
    -Dspark.ssl.enabled=true \
    -Dspark.ssl.port=8043 \
    -Dspark.ssl.keyPassword=$secure_password \
    -Dspark.ssl.keyStore=/ppml/trusted-big-data-ml/work/keys/keystore.jks \
    -Dspark.ssl.keyStorePassword=$secure_password \
    -Dspark.ssl.keyStoreType=JKS \
    -Dspark.ssl.trustStore=/ppml/trusted-big-data-ml/work/keys/keystore.jks \
    -Dspark.ssl.trustStorePassword=$secure_password \
    -Dspark.ssl.trustStoreType=JKS \
    -Xmx10g \
    org.apache.spark.deploy.SparkSubmit \
    --master $spark_master \
    --conf spark.driver.port=$driver_port \
    --conf spark.scheduler.maxRegisteredResourcesWaitingTime=5000000 \
    --conf spark.worker.timeout=600 \
    --conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-jar-with-dependencies.jar \
    --conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-jar-with-dependencies.jar \
    --conf spark.starvation.timeout=250000 \
    --conf spark.blockManager.port=$block_manager_port \
    --conf spark.driver.host=$driver_host \
    --conf spark.driver.blockManager.port=$driver_block_manager_port \
    --conf spark.network.timeout=1900s \
    --conf spark.executor.heartbeatInterval=1800s \
    --class com.intel.analytics.bigdl.models.lenet.Train \
    --executor-cores 4 \
    --total-executor-cores 4 \
    --executor-memory 12G \
    /ppml/trusted-big-data-ml/work/bigdl-jar-with-dependencies.jar \
    -f /ppml/trusted-big-data-ml/work/data \
    -b 64 -e 1 | tee ./spark-driver-sgx.log
|
#include <SPI.h>
#include <Wire.h>
#include <Adafruit_BME280.h>
#include <LoRa.h>
// BME280 wiring (SPI pins).
#define BME_SCK 18
#define BME_MISO 19
#define BME_MOSI 23
#define BME_CS 5
Adafruit_BME280 bme;
// Setup LoRa transceiver
// NOTE(review): LoRa.setPins() expects (ss, reset, dio0); in setup() it is
// called with BAND (434 MHz) as the third argument, which cannot be a DIO0
// pin number — confirm the intended DIO0 wiring.
const int CS = 22, RST = 4;
const int BAND = 434E6;  // carrier frequency in Hz
// Initialise serial output, the LoRa radio, and the BME280 sensor.
void setup() {
    Serial.begin(9600);
    // NOTE(review): the third argument of setPins() should be the DIO0 pin,
    // not the frequency — this passes BAND (434000000); confirm the wiring
    // and replace with the actual DIO0 pin number.
    LoRa.setPins(CS, RST, BAND);
    if (!LoRa.begin(BAND)) {
        Serial.println("Starting LoRa failed!");
        while (true);  // halt: the sketch is useless without the radio
    }
    // Setup BME280
    if (!bme.begin(BME_CS))
        Serial.println("Failed to initialize BME280 sensor!");
}
// Read the BME280 and broadcast one reading over LoRa every ~5 seconds.
void loop() {
    float temp = bme.readTemperature();
    float hum = bme.readHumidity();
    float pressure = bme.readPressure() / 100;  // Pa -> hPa
    // Send data via LoRa
    if (LoRa.beginPacket()) {
        LoRa.print("Temperature: ");
        LoRa.println(temp);
        LoRa.print("Humidity: ");
        LoRa.println(hum);
        LoRa.print("Pressure: ");
        LoRa.println(pressure);
        LoRa.endPacket();
    }
    // Pace the loop even when beginPacket() fails; previously the delay was
    // inside the if, so a radio error turned loop() into a tight busy-spin.
    delay(5000);
}
#!/bin/bash
# Merge the MG_GTDB and OM-RGC v2 ORF FASTA files and build an MMseqs2
# sequence database from the result.
# Usage: <script> MG_GTDB.fasta.gz OMRGC2.fasta.gz OUTPUT_DIR
# Abort on any failed command, unset variable, or broken pipe segment.
set -euo pipefail
# Example inputs:
# MG_GTDB=/bioinf/projects/megx/UNKNOWNS/2017_11/MG_GTDB_DB/mg_gtdb_orfs.fasta.gz
# OMRGC2=/bioinf/projects/megx/UNKNOWNS/2017_11/chiara/OM-RGC-v2/OM-RGC_v2.aa.OG.fasta.gz
# DIR=/bioinf/projects/megx/UNKNOWNS/2017_11/GTDB
MG_GTDB="${1}"
OMRGC2="${2}"
DIR="${3}"
# RELEASE=$(date +%Y%m%d)
cd "${DIR}"
mkdir -p mg_g_omrgc_db_update
# Concatenate both gzip members and recompress into one archive.
zcat "${MG_GTDB}" "${OMRGC2}" | gzip > mg_g_omrgc2_orfs.fasta.gz
mmseqs createdb mg_g_omrgc2_orfs.fasta.gz mg_g_omrgc2_orfs_db
|
// Requires root
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <signal.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <syslog.h>
#include <math.h>
#include <wiringPi.h>
/* Motor indices. */
static const int MOTOR0 = 0;
static const int MOTOR1 = 1;
/* Maximum PWM duty (also used as the pwmSetRange value). */
static const int MAX_SPEED = 480;
/* BCM GPIO pins: hardware-PWM pins for speed, plain outputs for direction. */
static const int PIN_MOTOR_PWM[] = {12, 13};
static const int PIN_MOTOR_DIR[] = {5, 6};
/* Requested speed, currently ramped speed, and per-tick ramp increment
 * (maintained by calc_steps(), consumed by the main loop). */
static int target_speeds[] = {0, 0};
static int current_speeds[] = {0, 0};
static float step_speeds[] = {0.0f, 0.0f};
/* Initialise wiringPi (Broadcom pin numbering) and configure the motor
 * pins: hardware PWM for speed, digital outputs for direction.
 * Returns 0 on success, the wiringPi setup error code otherwise. */
static int GPIO_start()
{
    int init = wiringPiSetupGpio();
    if (init != 0)
        return init;
    pinMode(PIN_MOTOR_PWM[MOTOR0], PWM_OUTPUT);
    pinMode(PIN_MOTOR_PWM[MOTOR1], PWM_OUTPUT);
    /* Mark-space PWM with range MAX_SPEED, so pwmWrite(pin, MAX_SPEED)
     * is 100% duty. */
    pwmSetMode(PWM_MODE_MS);
    pwmSetRange(MAX_SPEED);
    pwmSetClock(2);
    pinMode(PIN_MOTOR_DIR[MOTOR0], OUTPUT);
    pinMode(PIN_MOTOR_DIR[MOTOR1], OUTPUT);
    syslog(LOG_NOTICE, "GPIO initialised.");
    return 0;
}
/* Recompute the per-tick ramp increment for a motor so the current speed
 * reaches the target in roughly 15 main-loop iterations. */
static void calc_steps(int motor)
{
    if (motor < 0 || motor > 1)
        return;
    step_speeds[motor] = (target_speeds[motor] - current_speeds[motor]) / 15.0f;
}
/* Set a motor's speed immediately (no ramping), clamped to
 * [-MAX_SPEED, MAX_SPEED].  Negative speeds reverse via the DIR pin. */
static void set_speed_direct(int motor, int speed)
{
    if (motor < 0 || motor > 1)
        return;
    if (speed > MAX_SPEED)
        speed = MAX_SPEED;
    if (speed < -MAX_SPEED)
        speed = -MAX_SPEED;
    int dir = 0;
    if (speed < 0)
        dir = 1;
    digitalWrite(PIN_MOTOR_DIR[motor], dir);
    /* The PWM duty must be the magnitude; the sign is carried by the DIR
     * pin.  Previously the raw (possibly negative) speed was written. */
    pwmWrite(PIN_MOTOR_PWM[motor], abs(speed));
    target_speeds[motor] = speed;
    current_speeds[motor] = speed;
    calc_steps(motor);
}
/* Request a new target speed for a motor; the main loop then ramps the
 * actual PWM output towards it.  speed is clamped to
 * [-MAX_SPEED, MAX_SPEED]. */
void set_speed(int motor, int speed)
{
    if (motor < 0 || motor > 1)
        return;
    if (speed > MAX_SPEED)
        speed = MAX_SPEED;
    if (speed < -MAX_SPEED)
        speed = -MAX_SPEED;
    /* On a sign change the motor is stopped immediately; the ramp then
     * accelerates in the new direction starting from zero. */
    if ((speed > 0 && current_speeds[motor] < 0) ||
        (speed < 0 && current_speeds[motor] > 0))
    {
        // Change direction so we stop first
        current_speeds[motor] = 0;
        pwmWrite(PIN_MOTOR_PWM[motor], 0);
    }
    target_speeds[motor] = speed;
    calc_steps(motor);
    // digitalWrite(PIN_MOTOR_DIR[motor], dir);
    // pwmWrite(PIN_MOTOR_PWM[motor], current_speeds[motor]);
}
/* The daemon drives GPIO hardware, which needs root; abort otherwise. */
static void check_root()
{
    if (geteuid() == 0)
        return;
    fprintf(stderr, "This daemon requires root.\n");
    exit(EXIT_FAILURE);
}
/* Classic double-fork daemonisation: detach from the controlling terminal,
 * become a session leader, drop it again, reset umask, chdir to /, close
 * every file descriptor and open syslog.  After this returns, stdout and
 * stderr no longer exist — log via syslog only. */
static void skeleton_daemon()
{
    pid_t pid;
    /* Fork off the parent process */
    pid = fork();
    /* An error occurred */
    if (pid < 0)
        exit(EXIT_FAILURE);
    /* Success: Let the parent terminate */
    if (pid > 0)
        exit(EXIT_SUCCESS);
    /* On success: The child process becomes session leader */
    if (setsid() < 0)
        exit(EXIT_FAILURE);
    /* Catch, ignore and handle signals */
    //TODO: Implement a working signal handler */
    signal(SIGCHLD, SIG_IGN);
    signal(SIGHUP, SIG_IGN);
    /* Fork off for the second time*/
    pid = fork();
    /* An error occurred */
    if (pid < 0)
        exit(EXIT_FAILURE);
    /* Success: Let the parent terminate */
    if (pid > 0)
        exit(EXIT_SUCCESS);
    /* Set new file permissions */
    umask(0);
    /* Change the working directory to the root directory */
    /* or another appropriated directory */
    /* NOTE(review): chdir's return value is ignored; harmless for "/" but
     * worth checking if the target directory ever changes. */
    chdir("/");
    /* Close all open file descriptors */
    int x;
    for (x = sysconf(_SC_OPEN_MAX); x>=0; x--)
    {
        close (x);
    }
    /* Open the log file */
    openlog ("drv8835_daemon", LOG_PID, LOG_DAEMON);
}
int main()
{
check_root();
skeleton_daemon();
if (GPIO_start() != 0)
{
fprintf(stderr, "GPIO initialisation failure, try sudo?");
return EXIT_FAILURE;
}
// test run
syslog(LOG_NOTICE, "drv8835_daemon self test started.");
set_speed_direct(MOTOR0, MAX_SPEED/2);
set_speed_direct(MOTOR1, -MAX_SPEED/2);
usleep(200000);
set_speed_direct(MOTOR0, -MAX_SPEED/2);
set_speed_direct(MOTOR1, MAX_SPEED/2);
usleep(200000);
set_speed_direct(MOTOR0, 0);
set_speed_direct(MOTOR1, 0);
syslog(LOG_NOTICE, "drv8835_daemon self test ended.");
syslog(LOG_NOTICE, "drv8835_daemon started.");
while (1)
{
for(int i = 0; i < 2; ++i)
{
if (target_speeds[i] != current_speeds[i])
{
if (fabs(target_speeds[i] - current_speeds[i]) <= fabs(step_speeds[i]))
{
current_speeds[i] = target_speeds[i];
}
else
{
current_speeds[i] += step_speeds[i];
}
int dir = 0;
if (current_speeds[i] < 0)
dir = 1;
digitalWrite(PIN_MOTOR_DIR[i], dir);
pwmWrite(PIN_MOTOR_PWM[i], current_speeds[i]);
}
}
// 32ms sleep ~= 30 fps
usleep (32000);
}
syslog (LOG_NOTICE, "drv8835_daemon terminated.");
closelog();
return EXIT_SUCCESS;
}
|
#!/bin/sh
# Install the Wi-Fi configuration files and restart the wlan0 interface.
set -e
# The copies write into /etc and need the same privileges as the
# ifdown/ifup calls below, so run them with sudo too.
sudo cp ./wifi/dhcpcd.conf /etc/dhcpcd.conf
sudo cp ./wifi/interfaces /etc/network/interfaces
sudo cp ./wifi/hostapd /etc/default/hostapd
sudo ifdown wlan0
sudo ifup wlan0
#!/bin/bash
# How to add as custom extension to existing vm, need to do az login prior to run the following command
# Update fileUris with appropriate release tag
group=''
vmName=''
# Fail fast with a clear message instead of a confusing az error when the
# placeholders above were not filled in (unquoted empty vars would simply
# vanish from the command line).
if [ -z "$group" ] || [ -z "$vmName" ]; then
    echo "Please set 'group' and 'vmName' at the top of this script." >&2
    exit 1
fi
az vm extension set \
    --resource-group "$group" \
    --vm-name "$vmName" \
    --name customScript \
    --publisher Microsoft.Azure.Extensions \
    --protected-settings '{"fileUris": ["https://github.com/caligaris/patchlogger/releases/download/v0.0.1-alpha.3/patchlogger.tar.gz"],"commandToExecute": "sudo tar zxvf patchlogger.tar.gz && sh ./patchloggerSetup.sh"}'
<gh_stars>0
import _ from "lodash";
import React from "react";
import PropTypes from "prop-types";
import { useTable } from "react-table";
import { DndProvider, useDrag, useDrop } from "react-dnd";
import { HTML5Backend } from "react-dnd-html5-backend";
import { filterExcludeKeys, objectIsEmpty } from "../utils/dictionary-utils";
import { restRead, restUpdateJson } from "../api/rest-api-consumation";
import update from "immutability-helper";
import {
ButtonResponsivenessWrapper,
Button,
FormSubmitButton,
} from "../components/button-components";
export { PostTable as default };
// Uniform column width (also used as min/max) for the post table.
const MAX_STRING_LENGTH = 25;
// Thrown by createPostData while a post referenced by the newsfeed order
// has not finished loading; PostTable catches it and skips that render.
class RenderingIncompleteError extends Error {
  constructor(message) {
    super(message);
    this.name = "RenderingIncompleteError";
  }
}
export const PostTableResponsivenessWrapper = ({ children }) => {
return (
<React.StrictMode>
<div className="table-responsive table-wrapper-scroll-y m-auto">
{children}
</div>
</React.StrictMode>
);
};
PostTableResponsivenessWrapper.propTypes = {
children: PropTypes.node,
};
// Every key a post row is expected to carry; used by createPostData to
// synthesise placeholder ("empty") rows with the same shape as real posts.
const emptyRowKeys = [
  "id",
  "user",
  "resource_url",
  "manage_url",
  "type",
  "article_url",
  "image_url",
  "image",
  "splash_image_url",
  "splash_image",
  "header",
  "title",
  "content",
  "first_reaction",
  "second_reaction",
  "third_reaction",
  "newsfeed_base",
  "time_created",
  "time_updated",
];
// Editable, drag-and-drop-orderable table of the posts in a newsfeed.
// Renders null until all data states have loaded; supports inline cell
// editing (submitted as one form) and row reordering persisted via REST.
// NOTE(review): the hooks below (useState/useMemo/useCallback) run AFTER
// conditional early returns, which violates the Rules of Hooks — hook
// order can change between renders.  Confirm and restructure.
export const PostTable = React.memo(
  ({ style, dataStates, updateCondition, excludeKeys }) => {
    console.log("rendering");
    // Bail out until every data state holds something renderable.
    for (let dataState in dataStates) {
      const data = dataStates[dataState].data;
      if (
        typeof data == "undefined" ||
        data.length === 0 ||
        objectIsEmpty(data)
      ) {
        return null;
      }
    }
    const resourceUrl = dataStates["resource"].url;
    const postStateData = dataStates["resource"].data;
    const setPostStateData = dataStates["resource"].callback;
    const newsfeedBase = dataStates["newsfeedBase"].data;
    const setNewsfeedBase = dataStates["newsfeedBase"].callback;
    const newsfeedSize = newsfeedBase.newsfeed_size;
    const newsfeedBaseId = newsfeedBase.id;
    const newsfeedBaseUrl = newsfeedBase.resource_url;
    const postOrder = newsfeedBase.post_order;
    // Build the row array; skip this render while posts are still loading.
    let postData = [];
    try {
      postData = createPostData(newsfeedSize, postOrder, postStateData);
    } catch (e) {
      if (e instanceof RenderingIncompleteError) {
        return null;
      }
    }
    // Snapshot of the first built data, used by the reset button below.
    const [originalPostData] = React.useState(postData);
    const resetToOriginalPostData = () => setPostStateData(originalPostData);
    const columnsData = createColumnsData(postData, excludeKeys);
    const columns = React.useMemo(() => columnsData, []);
    const defaultColumn = {
      // Set our editable cell renderer as the default Cell renderer
      Cell: EditableCell,
    };
    const getRowId = React.useCallback((row) => {
      return row.id;
    }, []);
    // Write one edited cell value back into the post state.
    const updateData = (rowIndex, columnId, value) => {
      console.log("*** updataData ***", postStateData);
      console.log("*** updateData ***", rowIndex, columnId, value);
      setPostStateData((old) =>
        old.map((row, index) => {
          console.log("   *** updateData ***", index, row);
          if (index === rowIndex) {
            const ret = {
              ...old[rowIndex],
              [columnId]: value,
            };
            console.log(
              "   *** updateData index equal***",
              index,
              rowIndex,
              ret
            );
            return ret;
          }
          console.log("   *** updateData return row***", index, rowIndex, row);
          return row;
        })
      );
    };
    const { getTableProps, getTableBodyProps, headerGroups, rows, prepareRow } =
      useTable({
        data: postData,
        columns,
        defaultColumn,
        getRowId,
        updateData,
        initialState: {
          hiddenColumns: ["id"],
        },
      });
    // Form submit handler: collect all hidden/editable inputs, PUT the
    // non-empty rows, then refetch the newsfeed base and post data.
    // NOTE(review): useCallback is called without a dependency array, so
    // the callback is re-created on every render — confirm intent.
    const updatePostStateData = React.useCallback(async (event) => {
      event.preventDefault(); // prevent default behavior of form submit
      const formDataRaw = new FormData(event.target);
      let data = [];
      // Input names look like "<rowIndex>--<fieldName>".
      formDataRaw.forEach((value, key) => {
        const inputIdRegxp = /^(\w+)--\w+$/;
        const inputNameRegExp = /^\w+--(\w+)$/;
        const inputId = parseInt(key.replace(inputIdRegxp, "$1"));
        const inputName = key.replace(inputNameRegExp, "$1");
        if (data[inputId] == null) {
          data.push({});
        }
        data[inputId][`${inputName}`] = value;
      });
      data = data
        .filter((row) => !row.id.startsWith("empty"))
        .map((row) => {
          row.order_in_newsfeed_base = {
            newsfeed_base_id: newsfeedBaseId,
            index: row.order_in_newsfeed_base,
          };
          return row;
        });
      console.log("  updatePostStateData date without empty entries", data);
      const [
        ,
        // status
        jsonData,
      ] = await restUpdateJson({
        url: resourceUrl,
        data: JSON.stringify(data),
      });
      console.log("  updatePostStateData updated json Data", jsonData);
      const [, jsonNewsfeedBase] = await restRead({
        url: newsfeedBaseUrl,
      });
      console.log(
        "  updatePostStateData get newsfeedBase with new order",
        jsonNewsfeedBase
      );
      setNewsfeedBase(() => jsonNewsfeedBase);
      setPostStateData(() => jsonData);
      console.log("  updatePostStateData updating condition");
      // updateCondition += 1;
    });
    // Drag-and-drop handler: reorder locally, persist the new post_order,
    // then sync both states with the server response.
    const moveRow = async (dragIndex, hoverIndex) => {
      const dragRecord = postData[dragIndex];
      const newData = update(postData, {
        $splice: [
          [dragIndex, 1],
          [hoverIndex, 0, dragRecord],
        ],
      });
      const newPostOrder = {
        order: [],
        value_to_key: {},
      };
      newData.map((dataEntry, index) => {
        if (
          typeof dataEntry.id == "string" &&
          dataEntry.id.startsWith("empty")
        ) {
          newPostOrder.order.push(null);
        } else {
          newPostOrder.order.push(dataEntry.id);
          newPostOrder.value_to_key[dataEntry.id] = index;
        }
      });
      const [, json] = await restUpdateJson({
        url: newsfeedBaseUrl,
        data: JSON.stringify({ post_order: newPostOrder }),
      });
      setNewsfeedBase(() => json);
      setPostStateData(() => newData);
    };
    const formId = "updateForm";
    return (
      <React.StrictMode>
        <ButtonResponsivenessWrapper>
          {/* ToDo use label variable */}
          <Button
            style={style}
            label={"Reset manual posts"}
            onClick={resetToOriginalPostData}
          />
          <FormSubmitButton
            formId={formId}
            style={style}
            label={"Update manual posts"}
          />
        </ButtonResponsivenessWrapper>
        <DndProvider backend={HTML5Backend}>
          <form onSubmit={updatePostStateData} id={formId}>
            <table
              className={`table table-${style.color} table-striped`}
              {...getTableProps()}
            >
              <thead>
                {headerGroups.map((headerGroup, index) => {
                  return (
                    <tr key={index} {...headerGroup.getHeaderGroupProps()}>
                      {/* Extra narrow column for the drag handle. */}
                      <th
                        {...headerGroups[0].headers[0].getHeaderProps({
                          style: {
                            minWidth: 4,
                            maxWidth: 4,
                            width: 4,
                          },
                        })}
                      ></th>
                      {headerGroup.headers.map((column, index) => {
                        return (
                          <th
                            key={index}
                            {...column.getHeaderProps({
                              style: {
                                minWidth: column.minWidth,
                                maxWidth: column.maxWidth,
                                width: column.width,
                              },
                            })}
                          >
                            {column.render("Header")}
                          </th>
                        );
                      })}
                    </tr>
                  );
                })}
              </thead>
              <tbody {...getTableBodyProps()}>
                {rows.map((row, index) => {
                  return (
                    prepareRow(row) || (
                      <Row
                        id={row.id}
                        index={index}
                        row={row}
                        moveRow={moveRow}
                        manageUrl={row.original.manage_url}
                        {...row.getRowProps()}
                      ></Row>
                    )
                  );
                })}
              </tbody>
            </table>
          </form>
        </DndProvider>
      </React.StrictMode>
    );
  },
  // Custom memo comparator: logs the comparison, then falls back to a deep
  // equality check over all props.
  (prevProps, nextProps) => {
    const prevPostStateData = prevProps.dataStates["resource"].data;
    const prevNewsfeedBase = prevProps.dataStates["newsfeedBase"].data;
    const nextPostStateData = nextProps.dataStates["resource"].data;
    const nextNewsfeedBase = nextProps.dataStates["newsfeedBase"].data;
    // if (nextNewsfeedBase === []) {
    //   return true
    // }
    console.log("******************");
    console.log("prevPostStateData", prevPostStateData);
    console.log("nextPostStateData", nextPostStateData);
    console.log(
      "postStateDataEqual",
      _.isEqual(prevPostStateData, nextPostStateData)
    );
    console.log("prevNewsfeedBase", prevNewsfeedBase);
    console.log("nextNewsfeedBase", nextNewsfeedBase);
    console.log(
      "newsfeedBaseEqual",
      _.isEqual(prevNewsfeedBase, nextNewsfeedBase)
    );
    return _.isEqual(prevProps, nextProps);
  }
);
PostTable.propTypes = {
  style: PropTypes.object,
  dataStates: PropTypes.object,
  resourceUrl: PropTypes.string,
  postStateData: PropTypes.array,
  setPostStateData: PropTypes.func,
  updateCondition: PropTypes.number,
  excludeKeys: PropTypes.array,
};
// Build the table's row array from the newsfeed order.
// Slots that are unoccupied (null in post_order) become placeholder
// "empty" rows; a referenced post that has not loaded yet aborts this
// render pass by throwing RenderingIncompleteError.
const createPostData = (newsfeedSize, postOrder, postStateData) => {
  // Index the loaded posts by id for O(1) lookup.
  const rawPosts = {};
  postStateData.forEach((dataEntry) => {
    rawPosts[dataEntry.id] = dataEntry;
  });
  const postData = [];
  for (let i = 0; i < newsfeedSize; i++) {
    const postOrderId = postOrder["order"][i];
    if (postOrderId != null) {
      const rawPost = rawPosts[postOrderId];
      if (rawPost == undefined) {
        throw new RenderingIncompleteError(
          `post ${postOrderId} has not loaded yet`
        );
      }
      postData.push(rawPost);
    } else {
      // Placeholder row: unique string id ("empty<i>"); every other known
      // key is filled with the numeric slot index.
      const emptyRow = {};
      emptyRowKeys.forEach((key) => {
        emptyRow[key] = key === "id" ? "empty" + i : i;
      });
      postData.push({ ...emptyRow });
    }
  }
  return postData;
};
const createColumnsData = (postData, excludeKeys) => {
const tableFirstRowDataFiltered = filterExcludeKeys(postData[0], excludeKeys);
const tableHeads = Object.keys(tableFirstRowDataFiltered);
const columnsData = [];
tableHeads.map((tableHead) => {
const tableHeadColumn = {
Header: tableHead,
accessor: tableHead,
minWidth: MAX_STRING_LENGTH,
maxWidth: MAX_STRING_LENGTH,
width: MAX_STRING_LENGTH,
};
if (tableHead.includes("image")) {
tableHeadColumn.Cell = function tableCellImage(tableProps) {
const prop = tableProps.row.original[tableHead];
if (typeof prop == "number") {
return <></>;
}
return (
<img
src={tableProps.row.original[tableHead]}
width="100%"
alt="image"
/>
);
};
}
columnsData.push(tableHeadColumn);
});
return columnsData;
};
// Drag-and-drop item type shared between the drag source and drop target.
const DND_ITEM_TYPE = "row";
// One draggable table row.  The first cell is the drag handle (plus hidden
// inputs carrying row metadata for the enclosing form); clicking anywhere
// else on a non-empty row navigates to the post's manage page.
export const Row = ({ id, row, index, moveRow, manageUrl }) => {
  const goToManagePage = () => {
    // Placeholder rows have string ids of the form "empty<i>" and no page.
    if (typeof id == "string" && id.startsWith("empty")) {
      return;
    }
    window.location.href = manageUrl;
  };
  const dropRef = React.useRef(null);
  const dragRef = React.useRef(null);
  const [, drop] = useDrop({
    accept: DND_ITEM_TYPE,
    hover(item, monitor) {
      if (!dropRef.current) {
        return;
      }
      const dragIndex = item.index;
      const hoverIndex = index;
      // Don't replace items with themselves
      if (dragIndex === hoverIndex) {
        return;
      }
      // Determine rectangle on screen
      const hoverBoundingRect = dropRef.current.getBoundingClientRect();
      // Get vertical middle
      const hoverMiddleY =
        (hoverBoundingRect.bottom - hoverBoundingRect.top) / 2;
      // Determine mouse position
      const clientOffset = monitor.getClientOffset();
      // Get pixels to the top
      const hoverClientY = clientOffset.y - hoverBoundingRect.top;
      // Only perform the move when the mouse has crossed half of the items height
      // When dragging downwards, only move when the cursor is below 50%
      // When dragging upwards, only move when the cursor is above 50%
      // Dragging downwards
      if (dragIndex < hoverIndex && hoverClientY < hoverMiddleY) {
        return;
      }
      // Dragging upwards
      if (dragIndex > hoverIndex && hoverClientY > hoverMiddleY) {
        return;
      }
      // Time to actually perform the action
      moveRow(dragIndex, hoverIndex);
      // Note: we're mutating the monitor item here!
      // Generally it's better to avoid mutations,
      // but it's good here for the sake of performance
      // to avoid expensive index searches.
      item.index = hoverIndex;
    },
  });
  const [, drag, preview] = useDrag({
    type: DND_ITEM_TYPE,
    item: { type: DND_ITEM_TYPE, index },
    collect: (monitor) => ({
      isDragging: monitor.isDragging(),
    }),
  });
  // The whole row is the drag preview / drop target; only the first cell
  // initiates a drag.
  preview(drop(dropRef));
  drag(dragRef);
  const headCell = row.cells[0];
  return (
    <React.StrictMode>
      <tr className="opacity-50" ref={dropRef} onClick={goToManagePage}>
        <td
          className="cursor-grab"
          ref={dragRef}
          {...headCell.getCellProps({
            style: {
              minWidth: 4,
              maxWidth: 4,
              width: 4,
            },
          })}
        >
          <input type="hidden" name={`${index}--id`} value={id} />
          {/* for ordering and fixed position */}
          <input
            type="hidden"
            name={`${index}--order_in_newsfeed_base`}
            value={index}
          />
          <input
            type="hidden"
            name={`${index}--type`}
            value={row.original.type}
          />
          <input
            type="hidden"
            name={`${index}--article_url`}
            value={row.original.article_url}
          />
          <input
            type="hidden"
            name={`${index}--image_url`}
            value={row.original.image_url}
          />
          <input
            type="hidden"
            name={`${index}--splash_url`}
            value={row.original.splash_url}
          />
          move
        </td>
        {row.cells.map((cell, index) => {
          {/* Placeholder rows render empty, non-clickable cells. */}
          if (typeof id == "string" && id.startsWith("empty")) {
            return (
              <td
                key={index}
                {...cell.getCellProps({
                  style: {
                    minWidth: cell.column.minWidth,
                    maxWidth: cell.column.maxWidth,
                    width: cell.column.width,
                    cursor: "unset",
                  },
                })}
              ></td>
            );
          }
          return (
            <td
              key={index}
              {...cell.getCellProps({
                style: {
                  minWidth: cell.column.minWidth,
                  maxWidth: cell.column.maxWidth,
                  width: cell.column.width,
                  cursor: "pointer",
                },
              })}
            >
              {cell.render("Cell")}
            </td>
          );
        })}
      </tr>
    </React.StrictMode>
  );
};
Row.propTypes = {
  id: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
  row: PropTypes.object,
  index: PropTypes.number,
  moveRow: PropTypes.func,
  manageUrl: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
};
// Create an editable cell renderer
const EditableCell = ({
value: initialValue,
row: { index },
column: { id },
updateData, // This is a custom function that we supplied to our table instance
}) => {
// We need to keep and update the state of the cell normally
const [value, setValue] = React.useState(initialValue);
const onChange = (e) => {
setValue(e.target.value);
};
// We'll only update the external data when the input is blurred
const onBlur = () => {
updateData(index, id, value);
};
const onClick = (e) => {
e.stopPropagation();
};
// If the initialValue is changed external, sync it up with our state
React.useEffect(() => {
setValue(initialValue);
}, [initialValue]);
return (
<input
value={value}
name={`${index}--${id}`}
onChange={onChange}
onBlur={onBlur}
onClick={onClick}
/>
);
};
EditableCell.propTypes = {
value: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
row: PropTypes.object,
column: PropTypes.object,
updateData: PropTypes.func,
};
|
<reponame>michaelmosmann/games
package de.flapdoodle.phaser.config;
import org.teavm.jso.JSProperty;
import org.teavm.jso.dom.html.HTMLElement;
public interface GameConfig extends PropertyObject {
    /**
     * The width of the game, in game pixels.
     */
    @JSProperty
    void width(int value);

    /**
     * The height of the game, in game pixels.
     */
    @JSProperty
    void height(int value);

    /**
     * Which renderer to use. Phaser.AUTO, Phaser.CANVAS, Phaser.HEADLESS, or Phaser.WEBGL. AUTO picks WEBGL if available, otherwise CANVAS.
     */
    @JSProperty
    void type(int value);

    /**
     * Renderer choices, mirroring Phaser's numeric renderer constants
     * (AUTO=0, CANVAS=1, WEBGL=2, HEADLESS=3).
     * NOTE(review): CANVAR / HEADLEASS are misspellings of CANVAS / HEADLESS,
     * kept as-is to preserve source compatibility for existing callers.
     */
    enum Renderer { AUTO, CANVAR, HEADLEASS, WEBGL }

    /**
     * Type-safe renderer setter. Previously only AUTO was mapped and every
     * other constant threw IllegalArgumentException; all four now map to
     * their Phaser numeric values.
     */
    default void type(Renderer value) {
        switch (value) {
            case AUTO:
                type(0);
                break;
            case CANVAR:
                type(1);
                break;
            case WEBGL:
                type(2);
                break;
            case HEADLEASS:
                type(3);
                break;
            default:
                throw new IllegalArgumentException("unknown type " + value);
        }
    }

    /**
     * The DOM element that will contain the game canvas, or its `id`. If undefined or if the named element doesn't exist, the game canvas is inserted directly into the document body. If `null` no parent will be used and you are responsible for adding the canvas to your environment.
     */
    @JSProperty
    void parent(HTMLElement parent);

    /**
     * Automatically call window.focus() when the game boots. Usually necessary to capture input events if the game is in a separate frame.
     */
    @JSProperty
    void autoFocus(boolean value);

    /**
     * Physics configuration.
     */
    @JSProperty
    void physics(PhysicsConfig config);

    /**
     * A scene or scenes to add to the game. If several are given, the first is started; the remainder are started only if they have { active: true }.
     * NOTE(review): this setter takes no argument, so nothing can actually
     * be assigned — presumably it should accept a scene object; confirm
     * against call sites before changing the signature.
     */
    @JSProperty
    void scene();
}
//declare type GameConfig = {
// /**
// * The width of the game, in game pixels.
// */
// width?: integer | string;
// /**
// * The height of the game, in game pixels.
// */
// height?: integer | string;
// /**
// * Simple scale applied to the game canvas. 2 is double size, 0.5 is half size, etc.
// */
// zoom?: number;
// /**
// * The size of each game pixel, in canvas pixels. Values larger than 1 are "high" resolution.
// */
// resolution?: number;
// /**
// * Which renderer to use. Phaser.AUTO, Phaser.CANVAS, Phaser.HEADLESS, or Phaser.WEBGL. AUTO picks WEBGL if available, otherwise CANVAS.
// */
// type?: number;
// /**
// * The DOM element that will contain the game canvas, or its `id`. If undefined or if the named element doesn't exist, the game canvas is inserted directly into the document body. If `null` no parent will be used and you are responsible for adding the canvas to your environment.
// */
// parent?: HTMLElement | string;
// /**
// * Provide your own Canvas element for Phaser to use instead of creating one.
// */
// canvas?: HTMLCanvasElement;
// /**
// * CSS styles to apply to the game canvas instead of Phaser's default styles.
// */
// canvasStyle?: string;
// /**
// * Provide your own Canvas Context for Phaser to use, instead of creating one.
// */
// context?: CanvasRenderingContext2D;
// /**
// * A scene or scenes to add to the game. If several are given, the first is started; the remainder are started only if they have { active: true }.
// */
// scene?: object;
// /**
// * Seed for the random number generator.
// */
// seed?: string[];
// /**
// * The title of the game. Shown in the browser console.
// */
// title?: string;
// /**
// * The URL of the game. Shown in the browser console.
// */
// url?: string;
// /**
// * The version of the game. Shown in the browser console.
// */
// version?: string;
// /**
// * Automatically call window.focus() when the game boots. Usually necessary to capture input events if the game is in a separate frame.
// */
// autoFocus?: boolean;
// /**
// * Input configuration, or `false` to disable all game input.
// */
// input?: boolean | InputConfig;
// /**
// * Disable the browser's default 'contextmenu' event (usually triggered by a right-button mouse click).
// */
// disableContextMenu?: boolean;
// /**
// * Configuration for the banner printed in the browser console when the game starts.
// */
// banner?: boolean | BannerConfig;
// /**
// * The DOM Container configuration object.
// */
// dom?: DOMContainerConfig;
// /**
// * Game loop configuration.
// */
// fps?: FPSConfig;
// /**
// * Game renderer configuration.
// */
// render?: RenderConfig;
// /**
// * The background color of the game canvas. The default is black.
// */
// backgroundColor?: string | number;
// /**
// * Optional callbacks to run before or after game boot.
// */
// callbacks?: CallbacksConfig;
// /**
// * Loader configuration.
// */
// loader?: LoaderConfig;
// /**
// * Images configuration.
// */
// images?: ImagesConfig;
// /**
// * Physics configuration.
// */
// physics?: object;
// /**
// * Plugins to install.
// */
// plugins?: PluginObject | PluginObjectItem[];
// /**
// * The Scale Manager configuration.
// */
// scale?: ScaleConfig;
// };
|
#!/bin/bash
# This script gets current config files and updates the ones in the repo with them
echo -e "\e[1;34mGetting current repo files...\e[0m"
# Constants
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
SCRIPT_DIR="$SCRIPT_DIR/../../"
# Use the plain cp command even if the user's shell aliases it (e.g.
# cp='cp -i').  The previous 'unalias cp' errors out when no alias exists
# (aliases are not expanded in non-interactive scripts anyway); 'command cp'
# achieves the same without that failure mode.
copy() { command cp -r "$@"; }
# Neovim
copy ~/.config/nvim/init.vim "$SCRIPT_DIR/configs/nvim/"
# bash files
copy /home/"$USER"/.bashrc "$SCRIPT_DIR/configs/bash/"
copy /home/"$USER"/.profile "$SCRIPT_DIR/configs/bash/"
copy /home/"$USER"/.bash_aliases "$SCRIPT_DIR/configs/bash/"
# VS-Code
copy /home/"$USER"/.config/Code/User/settings.json "$SCRIPT_DIR/configs/code/"
copy /home/"$USER"/.config/Code/User/keybindings.json "$SCRIPT_DIR/configs/code/"
copy /home/"$USER"/.config/.prettierrc.json "$SCRIPT_DIR/configs/code/"
# Thunderbird
copy /home/"$USER"/.thunderbird/. "$SCRIPT_DIR/configs/thunderbird/"
# qBittorrent
copy /home/"$USER"/.config/qBittorrent/qBittorrent.conf "$SCRIPT_DIR/configs/qbittorrent/"
# Git
copy /home/"$USER"/.gitconfig "$SCRIPT_DIR/configs/git/"
#!/bin/bash
# Publish the repository's docs/ folder to the project's GitHub wiki.
# Required env: ACTION_MAIL, ACTION_NAME, OWNER, REPO_NAME, GH_PAT.
# Optional: WIKI_PUSH_MESSAGE (commit message).
TEMP_CLONE_FOLDER="temp_wiki2"
if [ -z "$ACTION_MAIL" ]; then
    echo "ACTION_MAIL ENV is missing"
    exit 1
fi
if [ -z "$ACTION_NAME" ]; then
    echo "ACTION_NAME ENV is missing"
    exit 1
fi
if [ -z "$OWNER" ]; then
    echo "OWNER ENV is missing. Cannot proceed"
    exit 1
fi
if [ -z "$REPO_NAME" ]; then
    echo "REPO_NAME ENV is missing. Cannot proceed"
    exit 1
fi
if [ -z "$WIKI_PUSH_MESSAGE" ]; then
    echo "WIKI_PUSH_MESSAGE ENV is missing, using the default one"
    WIKI_PUSH_MESSAGE='Auto Publish new pages'
fi
# get source code of the wiki
mkdir $TEMP_CLONE_FOLDER
cd $TEMP_CLONE_FOLDER
git init
git config user.name $ACTION_NAME
git config user.email $ACTION_MAIL
git pull https://${GH_PAT}@github.com/$OWNER/$REPO_NAME.wiki.git
cd ..
rm -rf $TEMP_CLONE_FOLDER/* # make sure we remove everyting first. In case files were deleted/renamed
# processFile <path>: copy one documentation file into the wiki clone.
# Markdown files get their image URLs rewritten and are renamed to wiki
# page style ('block-reordering.md' -> 'Block Reordering.md'); any other
# file is copied verbatim under the transformed name.
function processFile {
    echo "    Processing $1"
    # Bug fix: this function previously tested the CALLER'S loop variable
    # "$i" instead of its own argument "$1" (it only worked by accident),
    # and a duplicated inner *.md test made the copy branch unreachable, so
    # non-markdown files were never copied at all.
    realFileName=$(basename "$1") # filename 'block-reordering' will end up with the name 'Block Reordering'
    newFileName=$(echo "$realFileName" | tr - " ") # replace - with spaces
    newFileName=$(echo "$newFileName" | sed -e "s/\b\(.\)/\u\1/g") #capitalize
    if [[ $1 == *.md ]]; then
        echo "    Changing markdown file $1, saving to $TEMP_CLONE_FOLDER/$newFileName"
        # Replacing image urls, to relative paths in the wiki.
        # This may give some problems if text /documentation/ is present in markdown...
        # We can create a more restrictive rule
        sed 's/\/documentation\///g' "$1" > "$TEMP_CLONE_FOLDER/$newFileName"
    else
        # If its not markdown just copy the file. No need to change it
        echo "    copying $1 to $TEMP_CLONE_FOLDER/$newFileName"
        cp "$1" "$TEMP_CLONE_FOLDER/$newFileName"
    fi
}
#Deprecated when moved app documentation to /docs
#echo "Copying challenge docs"
## Challenge documentation
#for i in $MD_FOLDER*; do
#    processFile $i
#done
echo "Copying docs"
# Project documentation (top-level docs/)
for i in docs/*; do
    processFile $i
done
echo "Copying app docs"
# App documentation (docs/app/)
for i in docs/app/*; do
    processFile $i
done
echo "Copying images folder"
cp -r "docs/app/img" "$TEMP_CLONE_FOLDER"
# Commit and push everything to the wiki repository.
echo "Pushing new pages"
cd $TEMP_CLONE_FOLDER
git add .
git commit -m "$WIKI_PUSH_MESSAGE"
git push --set-upstream https://${GH_PAT}@github.com/$OWNER/$REPO_NAME.wiki.git master
#!/usr/bin/env bash
set -e
# Build the compile image first, then the two variant images in parallel.
docker build . -t halyard.compile -f Dockerfile.compile
docker build . -t halyard.local -f Dockerfile.local &
local_pid=$!
docker build . -t halyard.ubuntu -f Dockerfile.ubuntu &
ubuntu_pid=$!
# A bare `wait` always exits 0, so `set -e` never noticed a failed
# background build. Waiting on each PID propagates build failures.
wait "$local_pid"
wait "$ubuntu_pid"
|
<reponame>fjrandwqq/oleccoffee-adimn
import request from '@/utils/request'
// Fetch orders matching the given query parameters.
const getOrder = params => request({
  url: '/api/orders',
  method: 'get',
  params
})
// Fetch the shop list for the current role (per the endpoint name).
const getShopsByRole = () => request({
  url: '/api/shop/list',
  method: 'get'
})
// Mark an order as finished.
// Fix: the URL previously lacked a leading slash ('api/orders/finish'),
// so unlike the sibling endpoints it resolved relative to the current
// browser path; normalized to an absolute path.
const finishOrder = params => request({
  url: '/api/orders/finish',
  method: 'post',
  params
})
// Public API of the order-service module.
export {
  getOrder,
  getShopsByRole,
  finishOrder
}
|
from django.http import HttpRequest
def get_tenant_config(request: HttpRequest) -> dict:
    """Return the per-tenant configuration for the tenant named in the request.

    The tenant is identified by the ``X-Tenant-ID`` request header
    (e.g. ``'TENANT_A'``). An empty dict is returned when the header is
    missing or names an unknown tenant.
    """
    # Known tenant configurations, keyed by tenant identifier.
    tenant_configs = {
        'TENANT_A': {'key1': 'value1', 'key2': 'value2'},
        'TENANT_B': {'key1': 'value3', 'key2': 'value4'},
        # Add configurations for other tenants as needed
    }
    tenant_id = request.headers.get('X-Tenant-ID')
    return tenant_configs.get(tenant_id, {})
<reponame>dominiek/bulkhead<gh_stars>1-10
// Re-export the full state module API plus the selection-rect helper so
// consumers can import everything from this package root.
export * from './state';
export { default as getVisibleSelectionRect } from './getVisibleSelectionRect';
|
<reponame>paloverde-grupo/jakkerp<filename>cometchat/modules/translate2/extra.js<gh_stars>0
var languageAdded = 0;
// Dispatch a DOM event of the given type on the element, supporting both
// the legacy IE fireEvent API and the standard createEvent/dispatchEvent
// API. Dispatch failures are deliberately swallowed (best effort).
function fireEventCC(element,event){
    try {
        if (document.createEventObject){
            // Legacy Internet Explorer path.
            element.fireEvent('on'+event, document.createEventObject());
        } else {
            // Standards-compliant path.
            var htmlEvent = document.createEvent("HTMLEvents");
            htmlEvent.initEvent(event, true, true);
            element.dispatchEvent(htmlEvent);
        }
    } catch (ignored) {
        // Intentionally ignored: event dispatch is best effort.
    }
}
// Select the given language in the (hidden) Google Translate combo box and
// fire its change handler. Polls every 500ms until the widget has rendered.
function changeLanguage(lang) {
    var container = jqcc('#google_translate_element');
    var combo = jqcc('.goog-te-combo');
    var widgetReady = container.length > 0 && container.html() != '' &&
                      combo.length > 0 && combo.html() != '';
    if (!widgetReady) {
        // Widget not rendered yet - retry shortly.
        setTimeout(function() { changeLanguage(lang); }, 500);
        return;
    }
    combo.val(lang);
    combo.attr('id','cclanguagebutton');
    fireEventCC(document.getElementById('cclanguagebutton'),'change');
}
// Inject the Google Translate loader script and its hidden container
// element into the page. Safe to call repeatedly; only injects once.
function addLanguageCode() {
    if (languageAdded) {
        return;
    }
    jqcc.getScript('//translate.google.com/translate_a/element.js?cb=googleTranslateElementInit', function(data) {
        jqcc("body").append('<div id="google_translate_element"></div><style>#google_translate_element {display:none!important;}</style>');
        languageAdded++;
    });
}
// If Google Translate previously stored its language cookie, re-inject the
// translate widget on page load so the saved language applies again.
jqcc(document).ready(function() {
    if (jqcc.cookie('googtrans')) {
        addLanguageCode();
    }
});
// Callback invoked by the Google Translate loader once it has downloaded
// (named in the loader URL's "cb" parameter). Instantiates the translate
// widget against the hidden container element.
function googleTranslateElementInit() {
    var options = {
        pageLanguage: 'en',
        autoDisplay: false
    };
    new google.translate.TranslateElement(options, 'google_translate_element');
}
import { gql } from '@apollo/client';
// Query: list all users with their profile fields.
export const GET_USERS = gql`
  query getUsers {
    getUsers {
      id
      nombre
      apellido
      identificacion
      email
      rol
      createdAt
    }
  }
`;
// Mutation: create a user from a UserInput and return the created record.
export const NEW_USER = gql`
  mutation newUser($input: UserInput) {
    newUser(input: $input) {
      id
      nombre
      apellido
      identificacion
      email
      rol
      createdAt
    }
  }
`;
// Mutation: authenticate and return a session token.
export const LOGIN = gql`
  mutation authUser($input: AuthInput) {
    authUser(input: $input) {
      token
    }
  }
`;
// @flow
import THREE from '../../../universal/THREE';
import GameObject from '../core/GameObject';
// Scene lighting rig: a soft hemisphere fill plus a directional light.
class Lighting extends GameObject {
  // Ambient sky/ground fill so unlit faces are never fully black.
  get hemisphere() {
    return new THREE.HemisphereLight(0xaaaaaa, 0x000000, 0.9);
  }

  // Directional light positioned above and behind the origin.
  get shadow() {
    const directional = new THREE.DirectionalLight(0xffffff, 0.9);
    directional.position.set(0, 350, 350);
    return directional;
  }

  loadAsync = async (scene) => {
    // Attach both light sources, then defer to the base loader.
    this.add(this.hemisphere);
    this.add(this.shadow);
    await super.loadAsync(scene);
  };
}
export default Lighting;
|
<reponame>AntonYermilov/progue
from dataclasses import dataclass
from enum import Enum
class DamageType(Enum):
    """Kinds of effect a :class:`Damage` instance can carry."""
    PHYSICAL = 0
    MAGIC = 1
    HEALING = 2
@dataclass
class Damage:
    """
    Damage class: the kind and amount of an effect, plus an optional
    confusion duration.
    """
    # Kind of effect dealt (physical / magic / healing).
    damage_type: DamageType
    # Magnitude of the effect.
    damage_amount: int
    # Turns of confusion to apply; 0 (the default) means none.
    confuse_turns: int = 0
|
<reponame>FreDP47/WashBuddiez
import { Component, OnInit } from '@angular/core';
import { OrderService } from 'app/services/order.service';
import { IProducts } from 'app/products';
@Component({
  selector: 'app-pricing-home',
  templateUrl: './pricing-home.component.html',
  styleUrls: ['./pricing-home.component.css']
})
// NOTE(review): OnInit is imported at the top of the file but the class
// neither implements it nor defines ngOnInit — confirm whether the import
// is vestigial.
export class PricingHomeComponent {
  // Product list rendered by the pricing template.
  products: IProducts[];
  // Eagerly load the product catalogue from the order service.
  constructor( private orderService: OrderService) {
    this.products = orderService.getProducts();
  }
}
|
<filename>lib/backend/k8s/conversion/conversion.go
// Copyright (c) 2016-2019 Tigera, Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package conversion
import (
"crypto/sha1"
"encoding/hex"
"encoding/json"
"fmt"
"os"
"sort"
"strings"
log "github.com/sirupsen/logrus"
kapiv1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
apiv3 "github.com/unai-ttxu/libcalico-go/lib/apis/v3"
"github.com/unai-ttxu/libcalico-go/lib/backend/model"
"github.com/unai-ttxu/libcalico-go/lib/names"
cnet "github.com/unai-ttxu/libcalico-go/lib/net"
"github.com/unai-ttxu/libcalico-go/lib/numorstring"
networkingv1 "k8s.io/api/networking/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
// protoTCP is the Kubernetes TCP protocol value; it is used as the
// implicit default when a NetworkPolicy port match omits a protocol.
var (
	protoTCP = kapiv1.ProtocolTCP
)
// selectorType distinguishes pod selectors from namespace selectors when
// converting Kubernetes label selectors into Calico selector expressions.
type selectorType int8
const (
	SelectorNamespace selectorType = iota
	SelectorPod
)
// Converter implements conversions from Kubernetes resources into the
// equivalent Calico resources.
// TODO: make this private and expose a public conversion interface instead
type Converter struct{}
// VethNameForWorkload returns a deterministic veth name
// for the given Kubernetes workload (WEP) name and namespace.
func VethNameForWorkload(namespace, podname string) string {
// A SHA1 is always 20 bytes long, and so is sufficient for generating the
// veth name and mac addr.
h := sha1.New()
h.Write([]byte(fmt.Sprintf("%s.%s", namespace, podname)))
prefix := os.Getenv("FELIX_INTERFACEPREFIX")
if prefix == "" {
// Prefix is not set. Default to "cali"
prefix = "cali"
} else {
// Prefix is set - use the first value in the list.
splits := strings.Split(prefix, ",")
prefix = splits[0]
}
log.WithField("prefix", prefix).Debugf("Using prefix to create a WorkloadEndpoint veth name")
return fmt.Sprintf("%s%s", prefix, hex.EncodeToString(h.Sum(nil))[:11])
}
// ParseWorkloadEndpointName extracts the Node name, Orchestrator, Pod name and endpoint from the
// given WorkloadEndpoint name.
// The expected format for k8s is <node>-k8s-<pod>-<endpoint>
func (c Converter) ParseWorkloadEndpointName(workloadName string) (names.WorkloadEndpointIdentifiers, error) {
	return names.ParseWorkloadEndpointName(workloadName)
}
// NamespaceToProfile converts a Namespace to a Calico Profile. The Profile stores
// labels from the Namespace which are inherited by the WorkloadEndpoints within
// the Profile. This Profile also has the default ingress and egress rules, which are both 'allow'.
func (c Converter) NamespaceToProfile(ns *kapiv1.Namespace) (*model.KVPair, error) {
	// Generate the labels to apply to the profile, using a special prefix
	// to indicate that these are the labels from the parent Kubernetes Namespace.
	labels := map[string]string{}
	for k, v := range ns.Labels {
		labels[NamespaceLabelPrefix+k] = v
	}
	// Create the profile object.
	name := NamespaceProfileNamePrefix + ns.Name
	profile := apiv3.NewProfile()
	profile.ObjectMeta = metav1.ObjectMeta{
		Name: name,
		CreationTimestamp: ns.CreationTimestamp,
		UID: ns.UID,
	}
	profile.Spec = apiv3.ProfileSpec{
		Ingress: []apiv3.Rule{{Action: apiv3.Allow}},
		Egress: []apiv3.Rule{{Action: apiv3.Allow}},
	}
	// Only set labels to apply when there are actually labels. This makes the
	// result of this function consistent with the struct as loaded directly
	// from etcd, which uses nil for the empty map.
	if len(labels) != 0 {
		profile.Spec.LabelsToApply = labels
	}
	// Embed the profile in a KVPair.
	kvp := model.KVPair{
		Key: model.ResourceKey{
			Name: name,
			Kind: apiv3.KindProfile,
		},
		Value: profile,
		// The namespace revision fills the first slot of the combined
		// profile revision; there is no service-account component here.
		Revision: c.JoinProfileRevisions(ns.ResourceVersion, ""),
	}
	return &kvp, nil
}
// IsValidCalicoWorkloadEndpoint returns true if the pod should be shown as a workloadEndpoint
// in the Calico API and false otherwise. Note: since we completely ignore notifications for
// invalid Pods, it is important that pods can only transition from not-valid to valid and not
// the other way. If they transition from valid to invalid, we'll fail to emit a deletion
// event in the watcher.
func (c Converter) IsValidCalicoWorkloadEndpoint(pod *kapiv1.Pod) bool {
	if c.IsHostNetworked(pod) {
		log.WithField("pod", pod.Name).Debug("Pod is host networked.")
		return false
	}
	if !c.IsScheduled(pod) {
		log.WithField("pod", pod.Name).Debug("Pod is not scheduled.")
		return false
	}
	return true
}
// IsReadyCalicoPod returns true if the pod is a valid Calico WorkloadEndpoint and has
// an IP address assigned (i.e. it's ready for Calico networking).
func (c Converter) IsReadyCalicoPod(pod *kapiv1.Pod) bool {
	if !c.IsValidCalicoWorkloadEndpoint(pod) {
		return false
	}
	if !c.HasIPAddress(pod) {
		log.WithField("pod", pod.Name).Debug("Pod does not have an IP address.")
		return false
	}
	return true
}
const (
	// Completed is documented but doesn't seem to be in the API, it should be safe to include.
	// Maybe it's in an older version of the API?
	// podCompleted is treated as a terminal pod phase by IsFinished below.
	podCompleted kapiv1.PodPhase = "Completed"
)
// IsFinished reports whether the pod has reached a terminal phase
// (Failed, Succeeded, or the undocumented "Completed").
func (c Converter) IsFinished(pod *kapiv1.Pod) bool {
	phase := pod.Status.Phase
	return phase == kapiv1.PodFailed || phase == kapiv1.PodSucceeded || phase == podCompleted
}
// IsScheduled reports whether the pod has been assigned to a node.
func (c Converter) IsScheduled(pod *kapiv1.Pod) bool {
	return pod.Spec.NodeName != ""
}
// IsHostNetworked reports whether the pod uses the host's network namespace.
func (c Converter) IsHostNetworked(pod *kapiv1.Pod) bool {
	return pod.Spec.HostNetwork
}
// HasIPAddress reports whether an IP is recorded for the pod, either in the
// pod status or in the Calico pod-IP annotation.
func (c Converter) HasIPAddress(pod *kapiv1.Pod) bool {
	return pod.Status.PodIP != "" || pod.Annotations[AnnotationPodIP] != ""
}
// GetPodIPs extracts the IP addresses from a Kubernetes Pod. At present, only a single IP
// is expected/supported. GetPodIPs loads the IP either from the PodIP field, if present, or
// the calico podIP annotation.
// A (nil, nil) return means the pod simply has no IP yet; a non-nil error
// means an IP was present but could not be parsed.
func (c Converter) GetPodIPs(pod *kapiv1.Pod) ([]string, error) {
	var podIP string
	if podIP = pod.Status.PodIP; podIP != "" {
		log.WithField("ip", podIP).Debug("PodIP field filled in.")
	} else if podIP = pod.Annotations[AnnotationPodIP]; podIP != "" {
		log.WithField("ip", podIP).Debug("PodIP missing, falling back on Calico annotation.")
	} else {
		log.WithField("ip", podIP).Debug("Pod has no IP.")
		return nil, nil
	}
	// Normalise the value to CIDR form before returning it.
	_, ipNet, err := cnet.ParseCIDROrIP(podIP)
	if err != nil {
		log.WithFields(log.Fields{"ip": podIP, "pod": pod.Name}).WithError(err).Error("Failed to parse pod IP")
		return nil, err
	}
	return []string{ipNet.String()}, nil
}
// PodToWorkloadEndpoint converts a Pod to a WorkloadEndpoint. It assumes the calling code
// has verified that the provided Pod is valid to convert to a WorkloadEndpoint.
// PodToWorkloadEndpoint requires a Pods Name and Node Name to be populated. It will
// fail to convert from a Pod to WorkloadEndpoint otherwise.
// Note that for a finished pod the returned endpoint has no IP networks.
func (c Converter) PodToWorkloadEndpoint(pod *kapiv1.Pod) (*model.KVPair, error) {
	log.WithField("pod", pod).Debug("Converting pod to WorkloadEndpoint")
	// Get all the profiles that apply
	var profiles []string
	// Pull out the Namespace based profile off the pod name and Namespace.
	profiles = append(profiles, NamespaceProfileNamePrefix+pod.Namespace)
	// Pull out the Serviceaccount based profile off the pod SA and namespace
	if pod.Spec.ServiceAccountName != "" {
		profiles = append(profiles, serviceAccountNameToProfileName(pod.Spec.ServiceAccountName, pod.Namespace))
	}
	wepids := names.WorkloadEndpointIdentifiers{
		Node: pod.Spec.NodeName,
		Orchestrator: apiv3.OrchestratorKubernetes,
		Endpoint: "eth0",
		Pod: pod.Name,
	}
	wepName, err := wepids.CalculateWorkloadEndpointName(false)
	if err != nil {
		return nil, err
	}
	ipNets, err := c.GetPodIPs(pod)
	if err != nil {
		// IP address was present but malformed in some way, handle as an explicit failure.
		return nil, err
	}
	if c.IsFinished(pod) {
		// Pod is finished but not yet deleted. In this state the IP will have been freed and returned to the pool
		// so we need to make sure we don't let the caller believe it still belongs to this endpoint.
		// Pods with no IPs will get filtered out before they get to Felix in the watcher syncer cache layer.
		// We can't pretend the workload endpoint is deleted _here_ because that would confuse users of the
		// native v3 Watch() API.
		ipNets = nil
	}
	// Generate the interface name based on workload. This must match
	// the host-side veth configured by the CNI plugin.
	interfaceName := VethNameForWorkload(pod.Namespace, pod.Name)
	// Build the labels map. Start with the pod labels, and append two additional labels for
	// namespace and orchestrator matches.
	labels := pod.Labels
	if labels == nil {
		labels = make(map[string]string, 2)
	}
	labels[apiv3.LabelNamespace] = pod.Namespace
	labels[apiv3.LabelOrchestrator] = apiv3.OrchestratorKubernetes
	if pod.Spec.ServiceAccountName != "" {
		labels[apiv3.LabelServiceAccount] = pod.Spec.ServiceAccountName
	}
	// Pull out floating IP annotation
	var floatingIPs []apiv3.IPNAT
	if annotation, ok := pod.Annotations["cni.projectcalico.org/floatingIPs"]; ok && len(ipNets) > 0 {
		// Parse Annotation data
		var ips []string
		err := json.Unmarshal([]byte(annotation), &ips)
		if err != nil {
			return nil, fmt.Errorf("failed to parse '%s' as JSON: %s", annotation, err)
		}
		// Get target for NAT
		podip, podnet, err := cnet.ParseCIDROrIP(ipNets[0])
		if err != nil {
			return nil, fmt.Errorf("Failed to parse pod IP: %s", err)
		}
		// Only single-host (/32 or /128) pod addresses are valid NAT targets.
		netmask, _ := podnet.Mask.Size()
		if netmask != 32 && netmask != 128 {
			return nil, fmt.Errorf("PodIP is not a valid IP: Mask size is %d, not 32 or 128", netmask)
		}
		for _, ip := range ips {
			floatingIPs = append(floatingIPs, apiv3.IPNAT{
				InternalIP: podip.String(),
				ExternalIP: ip,
			})
		}
	}
	// Map any named ports through.
	var endpointPorts []apiv3.EndpointPort
	for _, container := range pod.Spec.Containers {
		for _, containerPort := range container.Ports {
			if containerPort.Name != "" && containerPort.ContainerPort != 0 {
				var modelProto numorstring.Protocol
				switch containerPort.Protocol {
				case kapiv1.ProtocolUDP:
					modelProto = numorstring.ProtocolFromString("udp")
				case kapiv1.ProtocolTCP, kapiv1.Protocol("") /* K8s default is TCP. */ :
					modelProto = numorstring.ProtocolFromString("tcp")
				default:
					log.WithFields(log.Fields{
						"protocol": containerPort.Protocol,
						"pod": pod,
						"port": containerPort,
					}).Debug("Ignoring named port with unknown protocol")
					continue
				}
				endpointPorts = append(endpointPorts, apiv3.EndpointPort{
					Name: containerPort.Name,
					Protocol: modelProto,
					Port: uint16(containerPort.ContainerPort),
				})
			}
		}
	}
	// Create the workload endpoint.
	wep := apiv3.NewWorkloadEndpoint()
	wep.ObjectMeta = metav1.ObjectMeta{
		Name: wepName,
		Namespace: pod.Namespace,
		CreationTimestamp: pod.CreationTimestamp,
		UID: pod.UID,
		Labels: labels,
		GenerateName: pod.GenerateName,
	}
	wep.Spec = apiv3.WorkloadEndpointSpec{
		Orchestrator: "k8s",
		Node: pod.Spec.NodeName,
		Pod: pod.Name,
		Endpoint: "eth0",
		InterfaceName: interfaceName,
		Profiles: profiles,
		IPNetworks: ipNets,
		Ports: endpointPorts,
		IPNATs: floatingIPs,
	}
	// Embed the workload endpoint into a KVPair.
	kvp := model.KVPair{
		Key: model.ResourceKey{
			Name: wepName,
			Namespace: pod.Namespace,
			Kind: apiv3.KindWorkloadEndpoint,
		},
		Value: wep,
		Revision: pod.ResourceVersion,
	}
	return &kvp, nil
}
// K8sNetworkPolicyToCalico converts a k8s NetworkPolicy to a model.KVPair.
func (c Converter) K8sNetworkPolicyToCalico(np *networkingv1.NetworkPolicy) (*model.KVPair, error) {
	// Pull out important fields. Plain concatenation rather than
	// fmt.Sprintf: the policy name is not a format string, and passing it
	// as one would mangle any '%' characters it contains (go vet printf).
	policyName := K8sNetworkPolicyNamePrefix + np.Name
	// We insert all the NetworkPolicy Policies at order 1000.0 after conversion.
	// This order might change in future.
	order := float64(1000.0)
	// Generate the ingress rules list.
	var ingressRules []apiv3.Rule
	for _, r := range np.Spec.Ingress {
		rules, err := c.k8sRuleToCalico(r.From, r.Ports, np.Namespace, true)
		if err != nil {
			log.WithError(err).Warn("dropping k8s rule that couldn't be converted.")
		} else {
			ingressRules = append(ingressRules, rules...)
		}
	}
	// Generate the egress rules list.
	var egressRules []apiv3.Rule
	for _, r := range np.Spec.Egress {
		rules, err := c.k8sRuleToCalico(r.To, r.Ports, np.Namespace, false)
		if err != nil {
			log.WithError(err).Warn("dropping k8s rule that couldn't be converted")
		} else {
			egressRules = append(egressRules, rules...)
		}
	}
	// Calculate Types setting.
	ingress := false
	egress := false
	for _, policyType := range np.Spec.PolicyTypes {
		switch policyType {
		case networkingv1.PolicyTypeIngress:
			ingress = true
		case networkingv1.PolicyTypeEgress:
			egress = true
		}
	}
	types := []apiv3.PolicyType{}
	if ingress {
		types = append(types, apiv3.PolicyTypeIngress)
	}
	if egress {
		types = append(types, apiv3.PolicyTypeEgress)
	} else if len(egressRules) > 0 {
		// Egress was introduced at the same time as policyTypes. It shouldn't be possible to
		// receive a NetworkPolicy with an egress rule but without "Egress" specified in its types,
		// but we'll warn about it anyway.
		log.Warn("K8s PolicyTypes don't include 'egress', but NetworkPolicy has egress rules.")
	}
	// If no types were specified in the policy, then we're running on a cluster that doesn't
	// include support for that field in the API. In that case, the correct behavior is for the policy
	// to apply to only ingress traffic.
	if len(types) == 0 {
		types = append(types, apiv3.PolicyTypeIngress)
	}
	// Create the NetworkPolicy.
	policy := apiv3.NewNetworkPolicy()
	policy.ObjectMeta = metav1.ObjectMeta{
		Name: policyName,
		Namespace: np.Namespace,
		CreationTimestamp: np.CreationTimestamp,
		UID: np.UID,
	}
	policy.Spec = apiv3.NetworkPolicySpec{
		Order: &order,
		Selector: c.k8sSelectorToCalico(&np.Spec.PodSelector, SelectorPod),
		Ingress: ingressRules,
		Egress: egressRules,
		Types: types,
	}
	// Build and return the KVPair.
	return &model.KVPair{
		Key: model.ResourceKey{
			Name: policyName,
			Namespace: np.Namespace,
			Kind: apiv3.KindNetworkPolicy,
		},
		Value: policy,
		Revision: np.ResourceVersion,
	}, nil
}
// k8sSelectorToCalico takes a namespaced k8s label selector and returns the Calico
// equivalent. A nil selector yields just the orchestrator clause (pod
// selectors) or the empty string (namespace selectors).
func (c Converter) k8sSelectorToCalico(s *metav1.LabelSelector, selectorType selectorType) string {
	// Only prefix pod selectors - this won't work for namespace selectors.
	selectors := []string{}
	if selectorType == SelectorPod {
		selectors = append(selectors, fmt.Sprintf("%s == 'k8s'", apiv3.LabelOrchestrator))
	}
	if s == nil {
		return strings.Join(selectors, " && ")
	}
	// For namespace selectors, if they are present but have no terms, it means "select all
	// namespaces". We use empty string to represent the nil namespace selector, so use all() to
	// represent all namespaces.
	if selectorType == SelectorNamespace && len(s.MatchLabels) == 0 && len(s.MatchExpressions) == 0 {
		return "all()"
	}
	// matchLabels is a map key => value, it means match if (label[key] ==
	// value) for all keys.
	// Keys are sorted so the generated selector string is deterministic.
	keys := make([]string, 0, len(s.MatchLabels))
	for k := range s.MatchLabels {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		v := s.MatchLabels[k]
		selectors = append(selectors, fmt.Sprintf("%s == '%s'", k, v))
	}
	// matchExpressions is a list of in/notin/exists/doesnotexist tests.
	for _, e := range s.MatchExpressions {
		valueList := strings.Join(e.Values, "', '")
		// Each selector is formatted differently based on the operator.
		switch e.Operator {
		case metav1.LabelSelectorOpIn:
			selectors = append(selectors, fmt.Sprintf("%s in { '%s' }", e.Key, valueList))
		case metav1.LabelSelectorOpNotIn:
			selectors = append(selectors, fmt.Sprintf("%s not in { '%s' }", e.Key, valueList))
		case metav1.LabelSelectorOpExists:
			selectors = append(selectors, fmt.Sprintf("has(%s)", e.Key))
		case metav1.LabelSelectorOpDoesNotExist:
			selectors = append(selectors, fmt.Sprintf("! has(%s)", e.Key))
		}
	}
	return strings.Join(selectors, " && ")
}
// k8sRuleToCalico expands one Kubernetes NetworkPolicy rule (a peer list
// plus a port list) into the equivalent set of Calico Allow rules - one
// rule per (port, peer) combination. `ingress` selects whether the peer
// match is placed on the rule's Source or Destination.
func (c Converter) k8sRuleToCalico(rPeers []networkingv1.NetworkPolicyPeer, rPorts []networkingv1.NetworkPolicyPort, ns string, ingress bool) ([]apiv3.Rule, error) {
	rules := []apiv3.Rule{}
	peers := []*networkingv1.NetworkPolicyPeer{}
	ports := []*networkingv1.NetworkPolicyPort{}
	// Built up a list of the sources and a list of the destinations.
	for _, f := range rPeers {
		// We need to add a copy of the peer so all the rules don't
		// point to the same location.
		peers = append(peers, &networkingv1.NetworkPolicyPeer{
			NamespaceSelector: f.NamespaceSelector,
			PodSelector: f.PodSelector,
			IPBlock: f.IPBlock,
		})
	}
	for _, p := range rPorts {
		// We need to add a copy of the port so all the rules don't
		// point to the same location.
		port := networkingv1.NetworkPolicyPort{}
		if p.Port != nil {
			portval := intstr.FromString(p.Port.String())
			port.Port = &portval
			// TCP is the implicit default (as per the definition of NetworkPolicyPort).
			// Make the default explicit here because our data-model always requires
			// the protocol to be specified if we're doing a port match.
			port.Protocol = &protoTCP
		}
		if p.Protocol != nil {
			protval := kapiv1.Protocol(fmt.Sprintf("%s", *p.Protocol))
			port.Protocol = &protval
		}
		ports = append(ports, &port)
	}
	// If there no peers, or no ports, represent that as nil.
	// (A single nil entry keeps the combination loops below simple.)
	if len(peers) == 0 {
		peers = []*networkingv1.NetworkPolicyPeer{nil}
	}
	if len(ports) == 0 {
		ports = []*networkingv1.NetworkPolicyPort{nil}
	}
	// Combine destinations with sources to generate rules.
	// TODO: This currently creates a lot of rules by making every combination of from / ports
	// into a rule. We can combine these so that we don't need as many rules!
	for _, port := range ports {
		protocol, calicoPorts, err := c.k8sPortToCalicoFields(port)
		if err != nil {
			return nil, fmt.Errorf("failed to parse k8s port: %s", err)
		}
		for _, peer := range peers {
			selector, nsSelector, nets, notNets := c.k8sPeerToCalicoFields(peer, ns)
			if ingress {
				// Build inbound rule and append to list.
				rules = append(rules, apiv3.Rule{
					Action: "Allow",
					Protocol: protocol,
					Source: apiv3.EntityRule{
						Selector: selector,
						NamespaceSelector: nsSelector,
						Nets: nets,
						NotNets: notNets,
					},
					Destination: apiv3.EntityRule{
						Ports: calicoPorts,
					},
				})
			} else {
				// Build outbound rule and append to list.
				rules = append(rules, apiv3.Rule{
					Action: "Allow",
					Protocol: protocol,
					Destination: apiv3.EntityRule{
						Ports: calicoPorts,
						Selector: selector,
						NamespaceSelector: nsSelector,
						Nets: nets,
						NotNets: notNets,
					},
				})
			}
		}
	}
	return rules, nil
}
// k8sPortToCalicoFields converts an optional NetworkPolicyPort into the
// Calico protocol and destination-port fields. Uses named returns so the
// nil-port case falls through with zero values.
func (c Converter) k8sPortToCalicoFields(port *networkingv1.NetworkPolicyPort) (protocol *numorstring.Protocol, dstPorts []numorstring.Port, err error) {
	// If no port info, return zero values for all fields (protocol, dstPorts).
	if port == nil {
		return
	}
	// Port information available.
	dstPorts, err = c.k8sPortToCalico(*port)
	if err != nil {
		return
	}
	protocol = c.k8sProtocolToCalico(port.Protocol)
	return
}
// k8sProtocolToCalico converts an optional Kubernetes protocol value into
// the equivalent Calico protocol, or nil when no protocol was supplied.
func (c Converter) k8sProtocolToCalico(protocol *kapiv1.Protocol) *numorstring.Protocol {
	if protocol == nil {
		return nil
	}
	converted := numorstring.ProtocolFromString(string(*protocol))
	return &converted
}
// k8sPeerToCalicoFields converts an optional NetworkPolicyPeer into the
// Calico selector / namespace-selector / nets / notNets fields. Named
// returns let the nil-peer and error cases fall through with zero values.
func (c Converter) k8sPeerToCalicoFields(peer *networkingv1.NetworkPolicyPeer, ns string) (selector, nsSelector string, nets []string, notNets []string) {
	// If no peer, return zero values for all fields (selector, nets and !nets).
	if peer == nil {
		return
	}
	// Peer information available.
	// Determine the source selector for the rule.
	if peer.IPBlock != nil {
		// Convert the CIDR to include.
		_, ipNet, err := cnet.ParseCIDR(peer.IPBlock.CIDR)
		if err != nil {
			log.WithField("cidr", peer.IPBlock.CIDR).WithError(err).Error("Failed to parse CIDR")
			return
		}
		nets = []string{ipNet.String()}
		// Convert the CIDRs to exclude.
		notNets = []string{}
		for _, exception := range peer.IPBlock.Except {
			_, ipNet, err = cnet.ParseCIDR(exception)
			if err != nil {
				log.WithField("cidr", exception).WithError(err).Error("Failed to parse CIDR")
				return
			}
			notNets = append(notNets, ipNet.String())
		}
		// If IPBlock is set, then PodSelector and NamespaceSelector cannot be.
		return
	}
	// IPBlock is not set to get here.
	// Note that k8sSelectorToCalico() accepts nil values of the selector.
	selector = c.k8sSelectorToCalico(peer.PodSelector, SelectorPod)
	nsSelector = c.k8sSelectorToCalico(peer.NamespaceSelector, SelectorNamespace)
	return
}
// k8sPortToCalico converts the port (if any) in a NetworkPolicyPort into a
// list of Calico ports. An absent port yields an empty (nil) list.
func (c Converter) k8sPortToCalico(port networkingv1.NetworkPolicyPort) ([]numorstring.Port, error) {
	if port.Port == nil {
		// No ports - return empty list.
		return nil, nil
	}
	p, err := numorstring.PortFromString(port.Port.String())
	if err != nil {
		return nil, fmt.Errorf("invalid port %+v: %s", port.Port, err)
	}
	return []numorstring.Port{p}, nil
}
// ProfileNameToNamespace extracts the Namespace name from the given Profile name.
func (c Converter) ProfileNameToNamespace(profileName string) (string, error) {
	// Profile objects backed by Namespaces have form "kns.<ns_name>"
	if strings.HasPrefix(profileName, NamespaceProfileNamePrefix) {
		return strings.TrimPrefix(profileName, NamespaceProfileNamePrefix), nil
	}
	// This is not backed by a Kubernetes Namespace.
	return "", fmt.Errorf("Profile %s not backed by a Namespace", profileName)
}
// JoinNetworkPolicyRevisions constructs the revision from the individual CRD and K8s NetworkPolicy
// revisions.
func (c Converter) JoinNetworkPolicyRevisions(crdNPRev, k8sNPRev string) string {
	return fmt.Sprintf("%s/%s", crdNPRev, k8sNPRev)
}
// SplitNetworkPolicyRevision extracts the CRD and K8s NetworkPolicy revisions from the combined
// revision returned on the KDD NetworkPolicy client.
func (c Converter) SplitNetworkPolicyRevision(rev string) (crdNPRev string, k8sNPRev string, err error) {
	// An empty combined revision splits into two empty revisions.
	if rev == "" {
		return
	}
	parts := strings.Split(rev, "/")
	if len(parts) == 2 {
		crdNPRev, k8sNPRev = parts[0], parts[1]
		return
	}
	err = fmt.Errorf("ResourceVersion is not valid: %s", rev)
	return
}
// serviceAccountNameToProfileName creates a profile name that is a join
// of 'ksa.' + namespace + "." + serviceaccount name.
func serviceAccountNameToProfileName(sa, namespace string) string {
	// The namespace is part of the profile name so that profiles for
	// identically-named service accounts in different namespaces stay
	// globally unique. An empty namespace means "default".
	ns := namespace
	if ns == "" {
		ns = "default"
	}
	return ServiceAccountProfileNamePrefix + ns + "." + sa
}
// ServiceAccountToProfile converts a ServiceAccount to a Calico Profile. The Profile stores
// labels from the ServiceAccount which are inherited by the WorkloadEndpoints within
// the Profile.
func (c Converter) ServiceAccountToProfile(sa *kapiv1.ServiceAccount) (*model.KVPair, error) {
	// Generate the labels to apply to the profile, using a special prefix
	// to indicate that these are the labels from the parent Kubernetes ServiceAccount.
	labels := map[string]string{}
	for k, v := range sa.ObjectMeta.Labels {
		labels[ServiceAccountLabelPrefix+k] = v
	}
	name := serviceAccountNameToProfileName(sa.Name, sa.Namespace)
	profile := apiv3.NewProfile()
	profile.ObjectMeta = metav1.ObjectMeta{
		Name: name,
		CreationTimestamp: sa.CreationTimestamp,
		UID: sa.UID,
	}
	// Only set labels to apply when there are actually labels. This makes the
	// result of this function consistent with the struct as loaded directly
	// from etcd, which uses nil for the empty map.
	if len(labels) != 0 {
		profile.Spec.LabelsToApply = labels
	} else {
		profile.Spec.LabelsToApply = nil
	}
	// Embed the profile in a KVPair.
	kvp := model.KVPair{
		Key: model.ResourceKey{
			Name: name,
			Kind: apiv3.KindProfile,
		},
		Value: profile,
		// The service-account revision fills the second slot of the
		// combined profile revision; there is no namespace component here.
		Revision: c.JoinProfileRevisions("", sa.ResourceVersion),
	}
	return &kvp, nil
}
// ProfileNameToServiceAccount extracts the ServiceAccount name from the given Profile name.
// Returns the namespace and service account components, or an error if the
// name is not a well-formed service-account profile name.
func (c Converter) ProfileNameToServiceAccount(profileName string) (ns, sa string, err error) {
	// Profile objects backed by ServiceAccounts have form "ksa.<namespace>.<sa_name>"
	if !strings.HasPrefix(profileName, ServiceAccountProfileNamePrefix) {
		// This is not backed by a Kubernetes ServiceAccount.
		err = fmt.Errorf("Profile %s not backed by a ServiceAccount", profileName)
		return
	}
	// SplitN(…, 3) keeps any dots inside the service account name intact.
	names := strings.SplitN(profileName, ".", 3)
	if len(names) != 3 {
		err = fmt.Errorf("Profile %s is not formatted correctly", profileName)
		return
	}
	ns = names[1]
	sa = names[2]
	return
}
// JoinProfileRevisions constructs the revision from the individual namespace and serviceaccount
// revisions.
// This is conditional on the feature flag for serviceaccount set or not.
func (c Converter) JoinProfileRevisions(nsRev, saRev string) string {
	return fmt.Sprintf("%s/%s", nsRev, saRev)
}
// SplitProfileRevision extracts the namespace and serviceaccount revisions from the combined
// revision returned on the KDD service account based profile.
// This is conditional on the feature flag for serviceaccount set or not.
func (c Converter) SplitProfileRevision(rev string) (nsRev string, saRev string, err error) {
	// An empty combined revision splits into two empty revisions.
	if rev == "" {
		return
	}
	parts := strings.Split(rev, "/")
	if len(parts) == 2 {
		nsRev, saRev = parts[0], parts[1]
		return
	}
	err = fmt.Errorf("ResourceVersion is not valid: %s", rev)
	return
}
|
<reponame>FANHATCHA/Algorithms
package javatests.com.williamfiset.algorithms.search;
import static org.junit.Assert.*;

import com.williamfiset.algorithms.search.InterpolationSearch;

import org.junit.BeforeClass;
import org.junit.Test;
public class InterpolationSearchTest {

  private static InterpolationSearch search;

  /**
   * Creates the shared search instance before any test runs.
   *
   * <p>Fix: without a JUnit lifecycle annotation this method was never
   * invoked, so {@code search} stayed {@code null} when the tests executed.
   */
  @BeforeClass
  public static void prepareTest() {
    search = new InterpolationSearch();
  }

  /** Finds a value from the middle of the array. */
  @Test
  public void testCoverage1() {
    int[] arr = {0, 1, 2, 3, 4, 5};
    assertEquals(2, search.interpolationSearch(arr, 2));
  }

  /** Finds the last value of the array. */
  @Test
  public void testCoverage2() {
    int[] arr = {0, 1, 2, 3, 4, 5};
    assertEquals(5, search.interpolationSearch(arr, 5));
  }

  /** A value below the array's range is reported as not found (-1). */
  @Test
  public void testCoverage3() {
    int[] arr = {0, 1, 2, 3, 4, 5};
    assertEquals(-1, search.interpolationSearch(arr, -1));
  }

  /** A value above the array's range is reported as not found (-1). */
  @Test
  public void testCoverage4() {
    int[] arr = {0, 1, 2, 3, 4, 5};
    assertEquals(-1, search.interpolationSearch(arr, 8));
  }
}
|
<reponame>NickFranz3/Nick-Clay-Test
/*
* Copyright 2002 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
* OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
* PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
* LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of
* any nuclear facility.
*/
package com.sun.j2ee.blueprints.petstore.controller.web;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionListener;
import javax.servlet.http.HttpSessionEvent;
import javax.naming.InitialContext;
// J2EE imports
import java.rmi.RemoteException;
import javax.ejb.CreateException;
import javax.ejb.FinderException;
// WAF imports
import com.sun.j2ee.blueprints.waf.controller.web.util.WebKeys;
import com.sun.j2ee.blueprints.waf.exceptions.GeneralFailureException;
import com.sun.j2ee.blueprints.waf.exceptions.AppException;
import com.sun.j2ee.blueprints.waf.controller.web.DefaultComponentManager;
import com.sun.j2ee.blueprints.waf.controller.web.WebController;
import com.sun.j2ee.blueprints.util.tracer.Debug;
// petstore imports
import com.sun.j2ee.blueprints.petstore.util.PetstoreKeys;
import com.sun.j2ee.blueprints.petstore.controller.ejb.ShoppingControllerLocal;
import com.sun.j2ee.blueprints.petstore.controller.ejb.ShoppingControllerLocalHome;
import com.sun.j2ee.blueprints.petstore.controller.ejb.ShoppingClientFacadeLocal;
import com.sun.j2ee.blueprints.petstore.util.JNDINames;
// cart component imports
import com.sun.j2ee.blueprints.cart.ejb.ShoppingCartLocal;
import com.sun.j2ee.blueprints.cart.ejb.ShoppingCartLocalHome;
// customer component imports
import com.sun.j2ee.blueprints.customer.ejb.CustomerLocal;
// service locator imports
import com.sun.j2ee.blueprints.servicelocator.web.ServiceLocator;
import com.sun.j2ee.blueprints.servicelocator.ServiceLocatorException;
/**
 * Web-tier component manager for the Petstore application.  Extends the WAF
 * {@code DefaultComponentManager} and additionally exposes the shopping
 * controller EJB, the shopping cart, and the customer bound to the current
 * HTTP session.
 */
public class PetstoreComponentManager extends DefaultComponentManager implements HttpSessionListener {

    private ServiceLocator serviceLocator;

    public PetstoreComponentManager() {
        serviceLocator = ServiceLocator.getInstance();
    }

    /**
     * Binds this component manager and the session's shopping cart into the
     * given HTTP session under the well-known Petstore keys.
     */
    public void init(HttpSession session) {
        session.setAttribute(PetstoreKeys.COMPONENT_MANAGER, this);
        session.setAttribute(PetstoreKeys.CART, getShoppingCart(session));
    }

    /**
     * Session-lifecycle callback: lets the superclass create the web
     * controller, then caches the shopping cart in the new session.
     */
    public void sessionCreated(HttpSessionEvent se) {
        super.sessionCreated(se);
        HttpSession session = se.getSession();
        session.setAttribute(PetstoreKeys.CART, getShoppingCart(session));
    }

    /**
     * Returns the customer attached to this session's shopping client facade,
     * or {@code null} if the lookup fails (failures are logged to stderr,
     * matching the surrounding legacy error-handling style).
     */
    public CustomerLocal getCustomer(HttpSession session) {
        ShoppingControllerLocal controller = getShoppingController(session);
        try {
            ShoppingClientFacadeLocal facade = controller.getShoppingClientFacade();
            return facade.getCustomer();
        } catch (FinderException fe) {
            System.err.println("PetstoreComponentManager finder error: " + fe);
        } catch (Exception e) {
            System.err.println("PetstoreComponentManager error: " + e);
        }
        return null;
    }

    /**
     * Returns the session's ShoppingController EJB.  On first use the EJB is
     * created through the service locator and cached as a session attribute;
     * later calls return the cached instance.
     *
     * @throws GeneralFailureException if the home lookup or create fails
     */
    public ShoppingControllerLocal getShoppingController(HttpSession session) {
        ShoppingControllerLocal controller =
            (ShoppingControllerLocal) session.getAttribute(PetstoreKeys.EJB_CONTROLLER);
        if (controller != null) {
            return controller;
        }
        try {
            ShoppingControllerLocalHome home = (ShoppingControllerLocalHome)
                serviceLocator.getLocalHome(JNDINames.SHOPPING_CONTROLLER_EJBHOME);
            controller = home.create();
            session.setAttribute(PetstoreKeys.EJB_CONTROLLER, controller);
        } catch (CreateException ce) {
            throw new GeneralFailureException(ce.getMessage());
        } catch (ServiceLocatorException sle) {
            throw new GeneralFailureException(sle.getMessage());
        }
        return controller;
    }

    /** Returns the shopping cart belonging to this session's client facade. */
    public ShoppingCartLocal getShoppingCart(HttpSession session) {
        return getShoppingController(session).getShoppingClientFacade().getShoppingCart();
    }
}
|
import React from 'react';
import Image from 'next/image';
function Logo() {
return (
<Image width={300} height={80} alt="Logo todoo.xyz" src="/images/Logo.svg" />
);
}
export default Logo;
|
# Generated by Django 3.1.5 on 2021-01-31 22:29
from django.db import migrations, models


class Migration(migrations.Migration):
    # Alters Rpc.created_at to a plain DateTimeField with no field options.
    # NOTE(review): the previous field definition is not visible here --
    # presumably this drops auto_now_add/default behaviour; confirm against
    # migration 0015 before relying on that.  Auto-generated: do not edit.

    dependencies = [
        ('dashboard', '0015_rpc_signature'),
    ]

    operations = [
        migrations.AlterField(
            model_name='rpc',
            name='created_at',
            field=models.DateTimeField(),
        ),
    ]
|
#!/bin/sh
# Generates and submits one SGE job per chromosome/group pair that runs
# derfinder-analysis.R against a per-chromosome SQLite database.
## Based on
# ../dertable/dertable.sh
#
## Most recently, based on:
# /dcs01/stanley/work/brain_rna/deranalysis/deranalysis.sh

# Directories
MAINDIR=/dcl01/lieber/ajaffe/derRuns/derSoftware/simulation
WDIR=${MAINDIR}/deranalysis-original
DATADIR=${MAINDIR}/dertable

# Define variables
SHORT='derA-ori-sim'

# Construct shell files
# Full chromosome list kept for reference; currently only chr22 is run.
#for chrnum in 22 21 Y 20 19 18 17 16 15 14 13 12 11 10 9 8 X 7 6 5 4 3 2 1
for chrnum in 22
do
    for group in AB AC BC
    do
        echo "Creating script for chromosome ${chrnum} comparing groups $group"
        chr="chr${chrnum}"
        # The heredoc is unquoted on purpose: ${WDIR}, ${chr}, ${group} etc.
        # expand NOW, while \${TMPDIR} is left for the job to expand at runtime.
        cat > ${WDIR}/.${SHORT}.${group}.${chr}.sh <<EOF
#!/bin/bash
#$ -cwd
#$ -m e
#$ -l mem_free=100G,h_vmem=150G,h_fsize=50G
#$ -N ${SHORT}.${group}.${chr}
echo "**** Job starts ****"
date
# Create output directory
mkdir -p ${WDIR}/${chr}
# Make logs directory
mkdir -p ${WDIR}/${chr}/logs
# Copy database to scratch disk
cp ${DATADIR}/${chr}/${chr}.db \${TMPDIR}/
# run derfinder-analysis.R
cd ${WDIR}/${chr}
module load R/2.15.x
Rscript ${WDIR}/derfinder-analysis.R -o "/dcl01/lieber/ajaffe/derRuns/derSoftware/simulation/dercount" -s "sample" -d "\${TMPDIR}/${chr}.db" -t ${chr} -c "${group}" -v TRUE
# Move log files into the logs directory
mv ${WDIR}/${SHORT}.${group}.${chr}.* ${WDIR}/${chr}/logs/
echo "**** Job ends ****"
date
EOF
        # Submit the freshly generated job script.
        call="qsub ${WDIR}/.${SHORT}.${group}.${chr}.sh"
        echo $call
        $call
    done
done
|
import React from 'react';
import { Grid, Segment } from 'semantic-ui-react';
import styled from "styled-components";
import backgroundImage from "../images/farm_background.png"
const GameScreenDivStyle = styled.div`
border: 1px solid rgba(34,36,38,.15);
border-radius: .28571429rem;
background-color: #9fc5e8ff;
`;
const ImageStyle = styled.img`
max-width: 100%;
max-height: 100%;
height: auto;
display: block;
`;
function AnimalScreen(props) {
return (
<GameScreenDivStyle>
<ImageStyle src={backgroundImage} alt="Farm Background"/>
</GameScreenDivStyle>
);
}
export default AnimalScreen; |
package tr.com.minicrm.web.platform.configuration;
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClients;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexResolver;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver;
import tr.com.minicrm.productgroup.data.mongo.ProductGroupDataServiceImpl;
import tr.com.minicrm.productgroup.data.mongo.ProductGroupImpl;
import tr.com.minicrm.productgroup.data.mongo.service.SequenceGeneratorService;
import tr.com.minicrm.productgroup.data.service.ProductGroupDataService;
/**
 * Spring configuration wiring the Mongo-backed persistence layer.
 * Only active when {@code platform.datasource.databaseType=mongo}.
 */
@Configuration
@ConditionalOnProperty(value = "platform.datasource.databaseType", havingValue = "mongo")
public class MongoConfiguration {

    /**
     * Builds the {@link MongoTemplate} from the configured connection string
     * and database name.
     * NOTE(review): the property defaults are the literal two-character
     * string '' -- confirm that quoted-empty defaults are intentional.
     */
    @Bean
    MongoTemplate mongoTemplate(@Value("${platform.datasource.database:''}") String database,
                                @Value("${platform.datasource.jdbcUrl:''}") String jdbcUrl) {
        MongoClientSettings settings = MongoClientSettings.builder()
                .applyConnectionString(new ConnectionString(jdbcUrl))
                .build();
        return new MongoTemplate(MongoClients.create(settings), database);
    }

    /**
     * Ensures the {@code ProductGroupImpl} indexes exist, then exposes the
     * sequence generator used for id allocation.
     */
    @Bean
    SequenceGeneratorService sequenceGeneratorService(MongoTemplate mongoTemplate) {
        IndexResolver indexResolver =
                new MongoPersistentEntityIndexResolver(mongoTemplate.getConverter().getMappingContext());
        IndexOperations productGroupIndexOps = mongoTemplate.indexOps(ProductGroupImpl.class);
        indexResolver.resolveIndexFor(ProductGroupImpl.class)
                .forEach(productGroupIndexOps::ensureIndex);
        return new SequenceGeneratorService(mongoTemplate);
    }

    /** Mongo implementation of the product-group data service. */
    @Bean
    ProductGroupDataService productGroupDataService(MongoTemplate mongoTemplate,
                                                    SequenceGeneratorService sequenceGeneratorService) {
        return new ProductGroupDataServiceImpl(mongoTemplate, sequenceGeneratorService);
    }
}
|
<reponame>danielleolgin/to-fix-backend<gh_stars>10-100
var check = function(item, isLocked) {
var now = new Date();
var lockedTill = new Date(item.lockedTill);
if (isLocked) {
return lockedTill > now;
} else {
return lockedTill < now;
}
};
var api = (module.exports = {});
api.unlocked = function(item) {
return check(item, false);
};
api.locked = function(item) {
return check(item, true);
};
|
#!/bin/bash
# Populates the aarch64 conda cross-compilation sysroot from pre-extracted
# binaries and patches its GLX library to point at the Mesa implementation.
set -o errexit -o pipefail

SYSROOT_DIR="${PREFIX}"/aarch64-conda-linux-gnu/sysroot
mkdir -p "${SYSROOT_DIR}"

# Provide top-level lib/lib64 symlinks mirroring usr/lib{,64} when missing.
# NOTE(review): these links are created relative to the current working
# directory, not ${SYSROOT_DIR} -- confirm that is the intended target.
if [[ -d usr/lib ]]; then
    if [[ ! -d lib ]]; then
        ln -s usr/lib lib
    fi
fi
if [[ -d usr/lib64 ]]; then
    if [[ ! -d lib64 ]]; then
        ln -s usr/lib64 lib64
    fi
fi

# Copy the extracted binary tree into the sysroot, preserving attributes and
# keeping any existing symlinks (-K) intact.
pushd ${SRC_DIR}/binary > /dev/null 2>&1
rsync -K -a . "${SYSROOT_DIR}"
popd

# Replace the vendor GLX dispatch library with a link to Mesa's.
pushd ${SYSROOT_DIR}/usr/lib64
rm libGLX_system.so.0
ln -s libGLX_mesa.so.0 libGLX_system.so.0
popd
|
<gh_stars>0
package cn.st.domain;
/**
* @description:
* @author: st
* @create: 2021-02-04 14:06
**/
public class Account {
private String name;
private Integer balance;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getBalance() {
return balance;
}
public void setBalance(Integer balance) {
this.balance = balance;
}
@Override
public String toString() {
return "Account{" +
"name='" + name + '\'' +
", balance=" + balance +
'}';
}
}
|
def celsius_to_fahrenheit(celsius):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit."""
    fahrenheit = celsius * 9 / 5 + 32
    return fahrenheit
#!/bin/sh -eux
# Build the django.docker image with BuildKit enabled, streaming plain-text
# progress and mirroring the full build output (stdout+stderr) to build.log.
DOCKER_BUILDKIT=1 docker build --progress=plain --tag ghcr.io/rekgrpth/django.docker . 2>&1 | tee build.log
|
#!/bin/bash
# Cloud9 bootstrap for the 1click-hpc stack: joins the AWS Managed AD,
# renders the ParallelCluster config, creates the cluster, and mounts its
# FSx for Lustre filesystem on this instance.
# Guard: if the bootstrap log already exists, this instance was bootstrapped.
if [[ -f /home/ec2-user/environment/bootstrap.log ]]; then
    exit 1
fi
set -x
# Mirror all further output (stdout+stderr) into the bootstrap log.
exec >/home/ec2-user/environment/bootstrap.log; exec 2>&1
sudo yum -y -q install jq sssd realmd oddjob oddjob-mkhomedir adcli samba-common samba-common-tools krb5-workstation openldap-clients policycoreutils-python
sudo chown -R ec2-user:ec2-user /home/ec2-user/
#source cluster profile and move to the home dir
# cluster_env provides AD_ID, CLUSTER_NAME, AWS_REGION_NAME, S3_BUCKET,
# FSX_ID, subnet ids, KEY_PAIR, WAIT_HANDLE, ... (defined elsewhere).
cd /home/ec2-user/environment
. cluster_env
#needed to join the domain
# Keep a pristine resolv.conf; it is restored right after the AD join below.
sudo cp /etc/resolv.conf /etc/resolv.conf.OK
IPS=$(aws ds describe-directories --directory-id "${AD_ID}" --query 'DirectoryDescriptions[*].DnsIpAddrs' --output text)
export IP_AD1=$(echo "${IPS}" | awk '{print $1}')
export IP_AD2=$(echo "${IPS}" | awk '{print $2}')
ADMIN_PW=$(aws secretsmanager get-secret-value --secret-id "hpc-1click-${CLUSTER_NAME}" --query SecretString --output text --region "${AWS_REGION_NAME}")
export SECRET_ARN=$(aws secretsmanager describe-secret --secret-id "hpc-1click-${CLUSTER_NAME}" --query ARN --output text --region "${AWS_REGION_NAME}")
# Temporarily point DNS at the directory's resolvers so the realm join works.
echo ";Generated by Cloud9-Bootstrap.sh" | sudo tee /etc/resolv.conf
echo ";search corp.pcluster.com" | sudo tee -a /etc/resolv.conf
for IP in ${IPS}
do
    echo "${IP} corp.pcluster.com" | sudo tee -a /etc/hosts
    echo "nameserver ${IP}" | sudo tee -a /etc/resolv.conf
done
echo "${ADMIN_PW}" | sudo realm join -U Admin corp.pcluster.com
echo "${ADMIN_PW}" | adcli create-user -x -U Admin --domain=corp.pcluster.com --display-name=ReadOnlyUser ReadOnlyUser
echo "${ADMIN_PW}" | adcli create-user -x -U Admin --domain=corp.pcluster.com --display-name=user000 user000
sudo cp /etc/resolv.conf.OK /etc/resolv.conf
#install Lustre client
sudo amazon-linux-extras install -y lustre2.10 > /dev/null 2>&1
python3 -m pip install "aws-parallelcluster" --upgrade --user --quiet
# Install nvm + node for the ParallelCluster CLI's aws-cdk dependency.
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.38.0/install.sh | bash
chmod ug+x ~/.nvm/nvm.sh
source ~/.nvm/nvm.sh > /dev/null 2>&1
nvm install node > /dev/null 2>&1
# Build the SharedStorage YAML fragment: either create a new FSx filesystem
# backed by the S3 bucket (AUTO) or attach an existing one by id.  The
# fragment is substituted into the config template via envsubst below.
if [[ $FSX_ID == "AUTO" ]];then
FSX=$(cat <<EOF
  - MountDir: /fsx
    Name: new
    StorageType: FsxLustre
    FsxLustreSettings:
      StorageCapacity: 1200
      DeploymentType: SCRATCH_2
      ImportedFileChunkSize: 1024
      DataCompressionType: LZ4
      ExportPath: s3://${S3_BUCKET}
      ImportPath: s3://${S3_BUCKET}
      AutoImportPolicy: NEW_CHANGED
EOF
)
else
FSX=$(cat <<EOF
  - MountDir: /fsx
    Name: existing
    StorageType: FsxLustre
    FsxLustreSettings:
      FileSystemId: ${FSX_ID}
EOF
)
fi
export FSX
# Prefer the private subnet when one exists; remember the choice for later
# shells by appending it to cluster_env.
if [[ $PRIVATE_SUBNET_ID == "NONE" ]];then
    export SUBNET_ID="${PUBLIC_SUBNET_ID}"
    export USE_PUBLIC_IPS='true'
    echo "export SUBNET_ID=\"${PUBLIC_SUBNET_ID}\"" >> cluster_env
    echo "export USE_PUBLIC_IPS='true'" >> cluster_env
else
    export SUBNET_ID="${PRIVATE_SUBNET_ID}"
    export USE_PUBLIC_IPS='false'
    echo "export SUBNET_ID=\"${PRIVATE_SUBNET_ID}\"" >> cluster_env
    echo "export USE_PUBLIC_IPS='false'" >> cluster_env
fi
# Render the cluster config and the sacct/EnginFrame support files, then
# stage the latter in S3 for the cluster nodes to fetch.
/usr/bin/envsubst < "1click-hpc/parallelcluster/config.${AWS_REGION_NAME}.sample.yaml" > config.${AWS_REGION_NAME}.yaml
/usr/bin/envsubst '${SLURM_DB_ENDPOINT}' < "1click-hpc/sacct/mysql/db.config" > db.config
/usr/bin/envsubst '${SLURM_DB_ENDPOINT}' < "1click-hpc/sacct/slurm/slurmdbd.conf" > slurmdbd.conf
/usr/bin/envsubst '${S3_BUCKET}' < "1click-hpc/enginframe/fm.browse.ui" > fm.browse.ui
aws s3 cp --quiet db.config "s3://${S3_BUCKET}/1click-hpc/sacct/mysql/db.config" --region "${AWS_REGION_NAME}"
aws s3 cp --quiet slurmdbd.conf "s3://${S3_BUCKET}/1click-hpc/sacct/slurm/slurmdbd.conf" --region "${AWS_REGION_NAME}"
aws s3 cp --quiet fm.browse.ui "s3://${S3_BUCKET}/1click-hpc/enginframe/fm.browse.ui" --region "${AWS_REGION_NAME}"
rm -f db.config slurmdbd.conf fm.browse.ui
#Create the key pair (remove the existing one if it has the same name)
# NOTE(review): the $? check relies on the script NOT using `set -e`, which
# holds here (only `set -x` above) -- keep it that way.
aws ec2 create-key-pair --key-name ${KEY_PAIR} --query KeyMaterial --output text > /home/ec2-user/.ssh/id_rsa
if [ $? -ne 0 ]; then
    aws ec2 delete-key-pair --key-name ${KEY_PAIR}
    aws ec2 create-key-pair --key-name ${KEY_PAIR} --query KeyMaterial --output text > /home/ec2-user/.ssh/id_rsa
fi
sudo chmod 400 /home/ec2-user/.ssh/id_rsa
#Create the cluster and wait
/home/ec2-user/.local/bin/pcluster create-cluster --cluster-name "hpc-1click-${CLUSTER_NAME}" --cluster-configuration config.${AWS_REGION_NAME}.yaml --rollback-on-failure false --wait
HEADNODE_PRIVATE_IP=$(/home/ec2-user/.local/bin/pcluster describe-cluster --cluster-name "hpc-1click-${CLUSTER_NAME}" | jq -r '.headNode.privateIpAddress')
echo "export HEADNODE_PRIVATE_IP='${HEADNODE_PRIVATE_IP}'" >> cluster_env
# Modify the Message Of The Day
sudo rm -f /etc/update-motd.d/*
sudo aws s3 cp --quiet "s3://${S3_BUCKET}/1click-hpc/scripts/motd" /etc/update-motd.d/10-HPC --region "${AWS_REGION_NAME}" || exit 1
sudo chmod +x /etc/update-motd.d/10-HPC
echo 'run-parts /etc/update-motd.d' >> /home/ec2-user/.bash_profile
#attach the ParallelCluster SG to the Cloud9 instance (for FSx or NFS)
INSTANCE_ID=$(curl http://169.254.169.254/latest/meta-data/instance-id)
SG_CLOUD9=$(aws ec2 describe-instances --instance-ids $INSTANCE_ID --query Reservations[*].Instances[*].SecurityGroups[*].GroupId --output text)
SG_HEADNODE=$(aws cloudformation describe-stack-resources --stack-name "hpc-1click-${CLUSTER_NAME}" --logical-resource-id ComputeSecurityGroup --query "StackResources[*].PhysicalResourceId" --output text)
aws ec2 modify-instance-attribute --instance-id $INSTANCE_ID --groups $SG_CLOUD9 $SG_HEADNODE
#increase the maximum number of files that can be handled by file watcher,
sudo bash -c 'echo "fs.inotify.max_user_watches=524288" >> /etc/sysctl.conf' && sudo sysctl -p
# Resolve the FSx id created by the nested stack when AUTO was requested.
if [[ $FSX_ID == "AUTO" ]];then
    FSX_STACK_NAME=$(aws cloudformation describe-stack-resources --stack-name "hpc-1click-${CLUSTER_NAME}" --logical-resource-id FSXSubstack --query "StackResources[*].PhysicalResourceId" --output text)
    FSX_ID=$(aws cloudformation describe-stacks --stack-name $FSX_STACK_NAME --query "Stacks[*].Outputs[*].OutputValue" --output text)
fi
FSX_DNS_NAME=$(aws fsx describe-file-systems --file-system-ids $FSX_ID --query "FileSystems[*].DNSName" --output text)
FSX_MOUNT_NAME=$(aws fsx describe-file-systems --file-system-ids $FSX_ID --query "FileSystems[*].LustreConfiguration.MountName" --output text)
#mount the same FSx created for the HPC Cluster
mkdir fsx
sudo mount -t lustre -o noatime,flock $FSX_DNS_NAME@tcp:/$FSX_MOUNT_NAME fsx
sudo bash -c "echo \"$FSX_DNS_NAME@tcp:/$FSX_MOUNT_NAME /home/ec2-user/environment/fsx lustre defaults,noatime,flock,_netdev 0 0\" >> /etc/fstab"
sudo chmod 755 fsx
sudo chown ec2-user:ec2-user fsx
# Give the demo AD users the same password as the Admin secret.
aws ds reset-user-password --directory-id "${AD_ID}" --user-name "ReadOnlyUser" --new-password "${ADMIN_PW}" --region "${AWS_REGION_NAME}"
aws ds reset-user-password --directory-id "${AD_ID}" --user-name "user000" --new-password "${ADMIN_PW}" --region "${AWS_REGION_NAME}"
# send SUCCESFUL to the wait handle
curl -X PUT -H 'Content-Type:' \
    --data-binary "{\"Status\" : \"SUCCESS\",\"Reason\" : \"Configuration Complete\",\"UniqueId\" : \"$HEADNODE_PRIVATE_IP\",\"Data\" : \"$HEADNODE_PRIVATE_IP\"}" \
    "${WAIT_HANDLE}"
#!/usr/bin/env bash
# Re-creates the docker-volume symlinks (and their .gitignore guards) for each
# lesson directory 01..09.  The nine hand-written stanzas of the original are
# folded into one helper + loop that executes the exact same command sequence.
SELF=$(basename $0)
_script="$(readlink -f ${BASH_SOURCE[0]})"
_base="$(dirname $_script)"
_root="$(dirname $_base)"
. "$_base/.helpers.sh"
cd "$_root" || exit

# Set up the volumes/ directory of one lesson.
#   $1   lesson directory (e.g. 02_installation)
#   $2.. docker volume names to link (var_log, var_simplesamlphp, ...)
# For each volume: remove any stale link, symlink the docker-managed _data
# directory into the lesson, and grant the current user rwx via ACL.
setup_volumes() {
    local dir="$1"
    shift
    doandlog "mkdir -p ${dir}/volumes"
    doandlog "cp bin/voltemplate.gitignore ${dir}/volumes/.gitignore"
    local vol
    for vol in "$@"; do
        doandlog "rm -f ${dir}/volumes/${vol}"
        doandlog "ln -f -s /var/lib/docker/volumes/${dir}_${vol}/_data ${dir}/volumes/${vol}"
        sudoandlog "setfacl -R -m u:${USER}:rwx ${dir}/volumes/${vol}/"
    done
}

# 01 only mounts the log volume; 02-09 additionally mount simplesamlphp state.
setup_volumes 01_voraussetzungen var_log
setup_volumes 02_installation    var_log var_simplesamlphp
setup_volumes 03_konfiguration   var_log var_simplesamlphp
setup_volumes 04_serviceprovider var_log var_simplesamlphp
setup_volumes 05_integration     var_log var_simplesamlphp
setup_volumes 06_metarefresh     var_log var_simplesamlphp
setup_volumes 07_authproc        var_log var_simplesamlphp
setup_volumes 08_production      var_log var_simplesamlphp
setup_volumes 09_extras          var_log var_simplesamlphp
|
"""
User interface utilities
"""
from . import interface_utils
__all__ = ['interface_utils']
|
# AOSP vendorsetup.sh hook: register the dosp_falcon build variants so they
# show up in the `lunch` menu (userdebug for daily builds, eng for debugging).
add_lunch_combo dosp_falcon-userdebug
add_lunch_combo dosp_falcon-eng
|
#!/bin/bash
# Shell script that starts the Jupyter Python Notebook
# in the docker container.
#
# If JUPYTER_NOTEBOOK_PASSWORD is unset the notebook starts with NO auth at
# all (empty password and token) -- only safe on an isolated network.
if [ -z "$JUPYTER_NOTEBOOK_PASSWORD" ]
then
    echo "No Jupyter Notebook password provided - starting in unsafe mode"
    echo "Set password using -e JUPYTER_NOTEBOOK_PASSWORD={sha of password}"
    jupyter notebook \
        --port=8888 --no-browser \
        --ip=0.0.0.0 --allow-root \
        --NotebookApp.password='' --NotebookApp.token=''
else
    echo "Jupyter Notebook password provided by user"
    # Fix: quote the expansion -- the hashed password may contain characters
    # that the shell would otherwise word-split or glob-expand.
    jupyter notebook \
        --port=8888 --no-browser \
        --ip=0.0.0.0 --allow-root \
        --NotebookApp.password="$JUPYTER_NOTEBOOK_PASSWORD"
fi
|
# Places Query Autocomplete request that weights results toward the given
# lat/lng with a 10 km radius.  Replace YOUR_API_KEY before running.
# [START maps_http_places_queryautocomplete_location_weighted]
curl -L -X GET 'https://maps.googleapis.com/maps/api/place/autocomplete/json?input=Market%20in%20Barcelona&location=42.3675294%2C-71.186966&radius=10000&key=YOUR_API_KEY'
# [END maps_http_places_queryautocomplete_location_weighted]
/* Copyright 2021 Google LLC.
SPDX-License-Identifier: Apache-2.0 */

/* global chrome */

// From https://html.spec.whatwg.org/multipage/forms.html. Added 'role'.
// Also aria-* and data-*
// Lookup table of attributes considered valid per audited tag; the 'global'
// entry applies to every element and is checked in addition to the tag list.
const ATTRIBUTES = {
  'global': ['accesskey', 'autocapitalize', 'autofocus', 'class', 'contenteditable', 'dir', 'draggable',
    'enterkeyhint', 'hidden', 'inputmode', 'is', 'id', 'itemid', 'itemprop', 'itemref', 'itemscope',
    'itemtype', 'lang', 'nonce', 'role', 'spellcheck', 'style', 'tabindex', 'title', 'translate'],
  'button': ['disabled', 'form', 'formaction', 'formenctype', 'formmethod', 'formnovalidate',
    'formtarget', 'name', 'type', 'value'],
  'form': ['accept-charset', 'action', 'autocomplete', 'enctype', 'method', 'name', 'novalidate',
    'target', 'rel'],
  // autocorrect for Safari
  'input': ['accept', 'alt', 'autocomplete', 'autocorrect', 'checked', 'dirname', 'disabled',
    'form', 'formaction', 'formenctype', 'formmethod', 'formnovalidate', 'formtarget', 'height',
    'list', 'max', 'maxlength', 'min', 'minlength', 'multiple', 'name', 'pattern', 'placeholder',
    'readonly', 'required', 'size', 'src', 'step', 'type', 'value', 'width', 'title'],
  'label': ['for'],
  'select': ['autocomplete', 'disabled', 'form', 'multiple', 'name', 'required', 'size'],
  'textarea': ['autocomplete', 'cols', 'dirname', 'disabled', 'form', 'maxlength', 'minlength',
    'name', 'placeholder', 'readonly', 'required', 'rows', 'wrap']
};

// Listen for a message from the popup that it has been opened.
// Need to re-run the audits here every time the popup is opened.
chrome.runtime.onMessage.addListener(
  (request, sender, sendResponse) => {
    // console.log('message received in content-script:', request.message);
    if (request.message === 'popup opened') {
      getAndStoreElementData();
    }
  }
);
// Get data for form and form field elements, then store the data using chrome.storage.
// Need to do this every time the extension popup is opened,
// in case something in the page has been changed dynamically.
// Once complete, send a response to the popup.
function getAndStoreElementData() {
  // Clear previously stored audit data first so stale results never survive
  // a page mutation.
  chrome.storage.local.clear(() => {
    const error = chrome.runtime.lastError;
    if (error) {
      console.error('chrome.storage.local.clear() error in content-script.js:', error);
    } else {
      // Run this every time the popup is opened, in case page elements are updated dynamically.
      // Per-tag property lists: only these attributes/properties are captured.
      const elementData = {
        form: getElementInfo('form', ['id', 'name', 'action', 'method', 'containsFormFields']),
        input: getElementInfo('input', ['id', 'name', 'autocomplete', 'placeholder', 'required', 'type']),
        select: getElementInfo('select', ['id', 'name', 'autocomplete', 'required']),
        textarea: getElementInfo('textarea', ['id', 'name', 'autocomplete', 'required']),
        button: getElementInfo('button', ['id', 'name', 'textContent', 'type']),
        label: getElementInfo('label', ['for', 'textContent', 'invalidContent']),
      };
      // Persist, then notify the popup that fresh data is available.
      chrome.storage.local.set({elementData: elementData}, () => {
        console.log('elementData', elementData);
        chrome.runtime.sendMessage({message: 'stored element data'});
      });
    }
  });
}
// Collect the requested properties for every element of the given tag name
// in the page, e.g. all input or label elements.
function getElementInfo(tagName, properties) {
  const elements = Array.from(document.getElementsByTagName(tagName));
  return elements.map((element) => getElementProperties(element, properties));
}
// Get attribute values and other properties for a form or form field element.
// TODO: better way to add properties that are only used for one element, e.g. label.invalidContent.
function getElementProperties(element, properties) {
  let elementProperties = {
    // For form elements, formAncestorID will be used to check for forms in forms (which is an error).
    // NB: closest() returns the element it's called on if that matches the selector.
    // Null when there is no form ancestor OR the ancestor form has no id.
    formAncestorID: element.parentNode.closest('form') ?
      element.parentNode.closest('form').getAttribute('id') : null,
    tagName: element.tagName.toLowerCase(),
  };
  // Only attach invalidAttributes when the list is non-empty ('' is falsy).
  const invalidAttributes = getInvalidAttributes(element);
  if (invalidAttributes) {
    elementProperties.invalidAttributes = invalidAttributes;
  }
  for (const property of properties) {
    if (property === 'textContent') {
      elementProperties.textContent = element.textContent.trim();
    } else if (property === 'containsFormFields') {
      // Used for forms.
      elementProperties.containsFormFields =
        element.querySelector('button, input, select, textarea') !== null;
    } else if (property === 'required') {
      // Presence-only attribute: record 'required' or null, not its value.
      elementProperties.required = element.hasAttribute('required') ? 'required' : null;
    } else if (property === 'invalidContent') {
      // Used for labels.
      // Labels must not contain interactive or heading elements.
      const invalidNodes = [...element.querySelectorAll('a, button, h1, h2, h3, h4, h5, h6')];
      elementProperties.invalidContent = invalidNodes.map(node => node.nodeName.toLowerCase()).join(', ');
    } else {
      // Default: copy the attribute value verbatim, or null when absent.
      elementProperties[property] = element.hasAttribute(property) ?
        element.getAttribute(property) : null;
    }
  }
  return elementProperties;
}
// Return a comma-separated list of the attributes on `element` that are not
// valid for its tag: not in the tag's allow-list, not globally valid, not
// aria-*/data-*, and not an inline on* event handler.
function getInvalidAttributes(element) {
  const tagAllowed = ATTRIBUTES[element.tagName.toLowerCase()];
  const isValid = (name) =>
    tagAllowed.includes(name) ||
    ATTRIBUTES.global.includes(name) ||
    name.startsWith('aria-') ||
    name.startsWith('data-') ||
    // Inline event handlers are tolerated.
    name.startsWith('on');
  const invalid = [];
  for (const attribute of element.attributes) {
    if (!isValid(attribute.name)) {
      invalid.push(attribute.name);
    }
  }
  return invalid.join(', ');
}
|
package com.oandmdigital.radioplayer.event;
/**
 * Event-bus message broadcast whenever playback starts or stops.
 * Immutable: the playing flag is fixed at construction time (the field is
 * now {@code final}, which the original omitted despite having no setter).
 */
public class IsPlayingEvent {

    /** Whether the player is currently playing. */
    private final boolean isPlaying;

    /**
     * @param isPlaying true if playback is active, false if stopped or paused
     */
    public IsPlayingEvent(boolean isPlaying) {
        this.isPlaying = isPlaying;
    }

    /** @return true if playback is active */
    public boolean isPlaying() {
        return isPlaying;
    }
}
|
// 1100. 하얀 칸
// 2019.05.14
#include<iostream>
using namespace std;
int main()
{
    // 8x8 board rows; the ninth column leaves room for the terminating '\0'.
    char arr[9][9];
    for (int i = 0; i < 8; i++)
    {
        cin >> arr[i];
    }

    // A square is white exactly when (row + column) is even, so the original
    // separate even-row and odd-row passes collapse into one scan counting
    // the pieces ('F') that stand on white squares.
    int ans = 0;
    for (int i = 0; i < 8; i++)
    {
        for (int j = 0; j < 8; j++)
        {
            if ((i + j) % 2 == 0 && arr[i][j] == 'F')
            {
                ans++;
            }
        }
    }

    cout << ans << endl;
    return 0;
}
|
import shlex
import subprocess
def get_taxonomy_info(sequence_id):
    """Fetch the INSDSeq_taxonomy lineage for a nucleotide record via the
    NCBI Entrez Direct tools (``efetch`` piped into ``xtract``).

    Args:
        sequence_id: Accession or GI number passed to efetch's ``-id`` option.

    Returns:
        The taxonomy string on success, or ``"Error: <stderr>"`` when the
        pipeline exits non-zero (e.g. efetch missing or the id unknown).
    """
    # Fix: shlex.quote prevents shell injection through a crafted sequence id;
    # shell=True is still required for the efetch | xtract pipe itself.
    quoted_id = shlex.quote(sequence_id)
    command = (
        f'efetch -db nucleotide -id {quoted_id} -format gpc'
        ' | xtract -element "INSDSeq_taxonomy"'
    )
    process = subprocess.Popen(command, shell=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, error = process.communicate()
    if process.returncode == 0:
        return output.decode('utf-8').strip()
    return f"Error: {error.decode('utf-8').strip()}"
// Postman code generator that turns an SDK Request into a shell httpie
// command.  Relies on project-local Helpers for URL/header/body rendering.
var _ = require('./lodash'),
  Helpers = require('./util/helpers'),
  sanitizeOptions = require('./util/sanitize').sanitizeOptions,
  self;

// Separator and Postman body-mode constants used while assembling snippets.
const GAP = ' ',
  URLENCODED = 'urlencoded',
  FORM_DATA = 'formdata',
  RAW = 'raw',
  FILE = 'file';

self = module.exports = {
  /**
   * Used to return options which are specific to a particular plugin
   *
   * @returns {Array}
   */
  getOptions: function () {
    return [
      {
        name: 'Set request timeout',
        id: 'requestTimeout',
        type: 'positiveInteger',
        default: 0,
        description: 'Set number of milliseconds the request should wait for a response' +
          ' before timing out (use 0 for infinity)'
      },
      {
        name: 'Follow redirects',
        id: 'followRedirect',
        type: 'boolean',
        default: true,
        description: 'Automatically follow HTTP redirects'
      }
    ];
  },

  /**
   * Used to convert the postman sdk-request object into a shell-httpie request snippet
   *
   * @param {Object} request - postman SDK-request object
   * @param {Object} options
   * @param {Number} options.requestTimeout : time in milli-seconds after which request will bail out
                                              (default: 0 -> never bail out)
   * @param {Boolean} options.followRedirect : whether to allow redirects of a request
   * @param {Function} callback - function with parameters (error, snippet)
   */
  convert: function (request, options, callback) {
    var snippet = '',
      parsedBody,
      parsedHeaders,
      bodyMode,
      timeout,
      url = '',
      // Emits ' --follow ' when redirects are enabled, otherwise just a gap.
      handleRedirect = (enableRedirect) => { if (enableRedirect) { return GAP + '--follow' + GAP; } return GAP; },
      // httpie takes seconds; 0 ("infinity") is mapped to a one-hour cap.
      handleRequestTimeout = (time) => {
        if (time) {
          return '--timeout ' + (time / 1000) + GAP;
        }
        return '--timeout 3600' + GAP;
      };

    // check whether options was passed or not
    if (_.isFunction(options)) {
      callback = options;
      options = null;
    }
    else if (!_.isFunction(callback)) { // check whether callback is a function
      throw new Error('Shell-Httpie~convert: Callback not a function');
    }
    options = sanitizeOptions(options, self.getOptions());

    Helpers.parseURLVariable(request);
    url = Helpers.addHost(request) + Helpers.addPort(request) + Helpers.addPathandQuery(request);
    timeout = options.requestTimeout;
    parsedHeaders = Helpers.addHeaders(request);

    // snippet construction based on the request body
    if (request.hasOwnProperty('body')) {
      if (request.body.hasOwnProperty('mode')) {
        bodyMode = request.body.mode;
        parsedBody = Helpers.getRequestBody(request.body[bodyMode], bodyMode);

        // handling every type of content-disposition
        switch (bodyMode) {
          case URLENCODED:
            // --form sends application/x-www-form-urlencoded key=value pairs.
            snippet += 'http --ignore-stdin --form' + handleRedirect(options.followRedirect);
            snippet += handleRequestTimeout(timeout);
            snippet += request.method + GAP + url + ' \\\n';
            snippet += parsedBody + (parsedHeaders ? (' \\\n' + parsedHeaders) : '');
            break;
          case FORM_DATA:
            // multipart/form-data uses the same --form flag in httpie.
            snippet += 'http --ignore-stdin --form' + handleRedirect(options.followRedirect);
            snippet += handleRequestTimeout(timeout);
            snippet += request.method + GAP + url + ' \\\n';
            snippet += parsedBody + (parsedHeaders ? (' \\\n' + parsedHeaders) : '');
            break;
          case RAW:
            // Raw bodies are piped into httpie via printf.
            if (parsedBody) {
              snippet += 'printf ' + parsedBody + '| ';
            }
            snippet += 'http ' + handleRedirect(options.followRedirect) + handleRequestTimeout(timeout);
            snippet += request.method + GAP + url + (parsedHeaders ? (' \\\n' + parsedHeaders) : '');
            break;
          case FILE:
            // File bodies are streamed in with cat.
            snippet += `cat ${parsedBody} | `;
            snippet += 'http ' + handleRedirect(options.followRedirect) + handleRequestTimeout(timeout);
            snippet += request.method + GAP + url + (parsedHeaders ? (' \\\n' + parsedHeaders) : '');
            break;
          default:
            return callback('Shell-Httpie~convert: Not a valid Content-Type in request body', null);
        }
      }
      else {
        // Body object present but no mode: emit a body-less request.
        snippet += 'http' + handleRedirect(options.followRedirect) + handleRequestTimeout(timeout);
        snippet += request.method + GAP + url + (parsedHeaders ? (' \\\n' + parsedHeaders) : '');
      }
    }
    else { // forming a request without a body
      snippet += 'http' + handleRedirect(options.followRedirect) + handleRequestTimeout(timeout);
      snippet += request.method + GAP + url + (parsedHeaders ? (' \\\n' + parsedHeaders) : '');
    }
    callback(null, snippet);
  }
};
|
#!/bin/bash
# SLURM batch job: trains Double DDPG (hard target-network copy, action-space
# noise exploration) on RoboschoolHopper-v1 with random seed 1, run 7.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHopper-v1_ddpg_hardcopy_action_noise_seed1_run7_%N-%j.out # %N for node name, %j for jobID
# Toolchain needed by Roboschool/TensorFlow on this cluster.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
# CPU TensorFlow virtualenv.
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env RoboschoolHopper-v1 --random-seed 1 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHopper-v1/ddpg_hardcopy_action_noise_seed1_run7 --continuous-act-space-flag --double-ddpg-flag --target-hard-copy-flag
|
#!/bin/bash
# Runs the ispc test suite: each .ispc test is compiled to LLVM bitcode and run
# through ispc_test. With filename arguments only those tests run; without,
# everything under tests/ runs, plus failing_tests/ (which must NOT pass).
# Exit status is non-zero iff any unexpected result ("surprise") occurred.
surprises=0
verbose=false
number=$(ls -1 tests/*.ispc|wc -l)
counter=1
target=sse4
while getopts ":vt:h" opt;do
case $opt in
v) verbose=true
;;
t) target=$OPTARG
;;
h) cat <<EOF
usage: run_tests.sh [-v] [-t target] [filenames]
-v # verbose output
-t # specify compilation target (SSE4 is the default).
[filenames] # (optional) files to run through testing infrastructure
# if none are provided, all in tests/ will be run.
EOF
exit 1
esac
done
shift $(( $OPTIND - 1 ))
# Fix: use the positional-parameter count to detect explicit test files.
# The previous test, [[ "$1" > 0 ]], was a *lexical* string comparison against
# the literal "0", not an argument-count check.
if [[ $# -gt 0 ]]; then
    while [[ $# -gt 0 ]]; do
        i=$1
        shift
        echo Running test $i
        bc=${i%%ispc}bc
        ispc -O2 $i -woff -o $bc --emit-llvm --target=$target
        if [[ $? != 0 ]]; then
            surprises=1
            echo Test $i FAILED ispc compile
            echo
        else
            ispc_test $bc
            if [[ $? != 0 ]]; then
                surprises=1
                echo Test $i FAILED ispc_test
                echo
            fi
        fi
        /bin/rm -f $bc
    done
else
    echo Running all correctness tests
    for i in tests/*.ispc; do
        if $verbose; then
            echo -en "Running test $counter of $number.\r"
        fi
        (( counter++ ))
        bc=${i%%ispc}bc
        ispc -O2 $i -woff -o $bc --emit-llvm --target=$target
        if [[ $? != 0 ]]; then
            surprises=1
            echo Test $i FAILED ispc compile
            echo
        else
            ispc_test $bc
            if [[ $? != 0 ]]; then
                surprises=1
                echo Test $i FAILED ispc_test
                echo
            fi
        fi
        /bin/rm -f $bc
    done
    echo -e "\nRunning failing tests"
    # These tests are expected to fail; one that passes is a surprise.
    for i in failing_tests/*.ispc; do
        (ispc -O2 $i -woff -o - --emit-llvm | ispc_test -) 2>/dev/null 1>/dev/null
        if [[ $? == 0 ]]; then
            surprises=1
            echo Test $i UNEXPECTEDLY PASSED
            echo
        fi
    done
fi
if [[ $surprises == 0 ]]; then
    echo No surprises.
fi
exit $surprises
#!/bin/bash
# Generates a JavaScript client for the KAS Anchor v1 service from its OpenAPI
# spec into ./anchor (created here). Codegen options come from ./config.json.
mkdir anchor
swagger-codegen generate \
-i ../../kas-ref-docs/openapi/en/services/anchor/v1.yaml \
-l javascript \
-o ./anchor \
-c ./config.json;
<filename>Chapter 07/handle_widget_resize.py
'''
Chapter 7
Handling Widget Resize
'''
from tkinter import Tk, Label, Pack
# NOTE(review): 'Pack' appears unused here -- pack() below comes from the
# widget instance. Confirm before removing the import.
root= Tk()
# A label that stretches with the window in both directions.
label = Label(root, text = 'I am a Frame', bg='red')
label.pack(fill='both', expand=True)
def on_label_resized(event):
    # Called on every <Configure> event, i.e. whenever the label is resized;
    # winfo_width/winfo_height report the new geometry in pixels.
    print('New Width', label.winfo_width())
    print('New Height', label.winfo_height())
# Bind the label's resize/configure event to the handler above.
label.bind("<Configure>", on_label_resized)
root.mainloop()
|
# encoding: UTF-8
module Vines
  module Agent
    module Command
      # CLI command that boots the vines agent, optionally daemonizing first.
      class Start
        # Runs the command. Expects no positional args (raises a usage message
        # otherwise). opts must contain :config (file to require); :daemonize,
        # :pid and :log are honored when present.
        def run(opts)
          raise 'vines-agent [--pid FILE] start' unless opts[:args].size == 0
          require opts[:config]
          agent = Vines::Agent::Agent.new(Config.instance)
          daemonize(opts) if opts[:daemonize]
          agent.start
        end

        private

        # Forks the process into the background via Vines::Daemon, redirecting
        # stdout/stderr to the log file. Refuses to start a second instance:
        # raises if the pid file points at a running process.
        def daemonize(opts)
          daemon = Vines::Daemon.new(:pid => opts[:pid], :stdout => opts[:log],
            :stderr => opts[:log])
          if daemon.running?
            raise "The vines agent is running as process #{daemon.pid}"
          else
            puts "The vines agent has started"
            daemon.start
          end
        end
      end
    end
  end
end
(function() {
    'use strict';

    angular.module('sslv2App')
        .controller('UserStudentCtrl', UserStudentCtrl);

    UserStudentCtrl.$inject = ['$state', 'UserService', '$timeout', 'RESOURCES', '$stateParams'];

    /**
     * Controller for managing the students attached to a user account.
     * Loads the students not yet added for the user ($stateParams.id) and lets
     * the view either create a new student or link an existing one.
     */
    function UserStudentCtrl($state, UserService, $timeout, RESOURCES, $stateParams) {
        var vm = this;
        vm.students = {};
        vm.user_id = $stateParams.id;
        vm.submit = submit;
        vm.full_name = localStorage.getItem("full_name");

        // Fetch candidate students; keep only those not already added.
        // Errors are silently ignored (empty error handler).
        UserService.getListStudent($stateParams.id)
            .then(function(response) {
                var data = _.get(response, 'data.data', "");
                if (data !== "") {
                    vm.students = _.filter(data, function(v) {
                        return !v.added;
                    });
                }
            }, function(error) {
            });

        // Creates a new student or links an existing one, depending on the
        // vm.new_student toggle set by the view.
        function submit(data) {
            if (vm.new_student) {
                // NOTE(review): 'var data' re-declares the parameter (hoisted),
                // so this just overwrites it -- works, but confusing.
                var data = _.omit(data, ['student_id']);
                UserService.addNewStudent(vm.user_id, data)
                    .then(function(response) {
                        if (response.data.success === true) {
                            vm.message = response.data.message;
                            closeMessage();
                        }
                    }, function(error) {
                    });
            } else {
                var data = _.omit(data, ['district_student_id', 'first_name', 'last_name', 'school_district', '']);
                // NOTE(review): only data.student_id is sent; the omitted
                // payload is discarded -- confirm updateStudent needs no body.
                UserService.updateStudent(vm.user_id, data.student_id)
                    .then(function(response) {
                        if (response.data.success === true) {
                            vm.message = response.data.message;
                            closeMessage();
                        }
                    }, function(error) {
                    });
            }
        }

        // Shows vm.message for 2s, then navigates back to the user's group page.
        function closeMessage() {
            $timeout(function() {
                vm.message = "";
                $state.go('dashboard.user_group', { id: vm.user_id });
            }, 2000);
        }
    }
})();
import getters from './getters';
import actions from './actions';
import mutations from './mutations';
import RGBA from '../../models/rgba-model';
import FloodFillTool from '../../tools/flood-fill-tool';
import DrawTool from '../../tools/draw-tool';
// Vuex-style module state: tracks the canvas currently being edited.
// NOTE(review): RGBA, FloodFillTool and DrawTool are imported above but not
// referenced in this module -- confirm before pruning the imports.
var state = {
    activeCanvas: {
    }
}

// Assemble the store module from its separately-defined pieces.
export default {
    state,
    getters,
    actions,
    mutations
}
|
// Persist the sidebar's open/closed state server-side whenever it is toggled;
// the endpoint URL is read from the toggle's data-url attribute.
$(function () {
    $('#sidebarToggle').click(function() {
        $.ajax({
            type: "GET",
            url: $(this).data('url')
        });
    });
});

// Select the whole text when the print textarea receives focus.
$(function () {
    $("#printText").focus(function(){
        $(this).select();
    });
});

// Copy the print textarea's contents to the clipboard.
// NOTE(review): document.execCommand is deprecated; consider
// navigator.clipboard.writeText once browser support allows.
function copy() {
    let textarea = document.getElementById("printText");
    textarea.select();
    document.execCommand("copy");
}
|
#!/bin/bash
################################################################################
#
# Find tags in a Docker Registry that reference a given image ID.
#
# Usage: Simply call program without arguments to get help.
#
# Source: https://github.com/kwk/docker-find-image-users
#
################################################################################
set -e
PROG_BASENAME=$(basename $0)
DEFAULT_REGISTRY_HOST=www.YOUR_DEFAULT_REGISTRY_HOST.com
# No image ID given -> print usage and bail out.
if [ $# -lt 1 ]; then
    echo "Usage: $PROG_BASENAME IMAGE_ID [REGISTRY_HOST_WITH_PORT [REPO [REPO ...]]]]"
    echo ""
    echo "This program searches the given Docker registry in either all"
    echo "repos or the ones that are given that to find the tags that"
    echo "reference the given IMAGE_ID."
    echo ""
    echo "If no registry host is given \"$DEFAULT_REGISTRY_HOST\" is used."
    echo ""
    echo "By default informational output is output to stderr. If you don't"
    echo "want to see it, simply append 2>/dev/null to your call and only the"
    echo "tags that reference the given IMAGE_ID will be printed."
    echo ""
    echo "NOTE: Currently this program only works with Docker Registry API v1."
    exit 1
fi
IMAGE_ID=$1
REGISTRY_HOST=${2:-$DEFAULT_REGISTRY_HOST}
REGISTRY_URL=http://${REGISTRY_HOST}/v1
echo "- Using registry URL: $REGISTRY_URL" 1>&2
# How it is done:
#
# Search for all available repositories and find all tags for each repo that
# reference the given image ID.
if [ $# -lt 3 ]; then
    # No repo is explicitly given, so we search in all repos
    REPOS=$(curl -s $REGISTRY_URL/search \
        | jq '.results | .[].name' \
        | tr -d '"')
else
    REPOS="${@:3}"
fi
echo "- Searching in repositories: $(echo $REPOS | tr -d "\n")" 1>&2;
for repo in $REPOS; do
    echo "- Searching in $repo ..." 1>&2
    # jq: keep only the tag keys whose value equals the wanted image ID.
    tags=$(curl -s $REGISTRY_URL/repositories/$repo/tags \
        | jq ". as \$object | keys[] | select(\$object[.]==\"$IMAGE_ID\")" \
        | tr -d '"')
    for tag in $tags; do
        echo "$repo:$tag"
    done
done
|
# Shortcuts
alias copyssh="pbcopy < $HOME/.ssh/id_rsa.pub"
alias reloadcli="source $HOME/.zshrc"
alias reloaddns="dscacheutil -flushcache && sudo killall -HUP mDNSResponder"
alias ll="/usr/local/opt/coreutils/libexec/gnubin/ls -ahlF --color --group-directories-first"
# wttr.in weather for the given city (default: antwerp); -4 forces IPv4.
weather() { curl -4 wttr.in/${1:-antwerp} }
alias phpstorm='open -a /Applications/PhpStorm.app "`pwd`"'
alias shrug="echo '¯\_(ツ)_/¯' | pbcopy"
alias c="clear"
alias zbundle="antibody bundle < $DOTFILES/zsh_plugins.txt > $DOTFILES/zsh_plugins.sh"
# Directories
alias dotfiles="cd $DOTFILES"
alias library="cd $HOME/Library"
alias sites="cd $HOME/Sites"
alias lara="sites && cd laravel/"
# Laravel
alias a="php artisan"
alias ams="php artisan migrate:fresh --seed"
# PHP
alias cfresh="rm -rf vendor/ composer.lock && composer i"
# JS
alias nfresh="rm -rf node_modules/ package-lock.json && npm install"
alias watch="npm run watch"
# Vagrant
alias v="vagrant global-status"
alias vup="vagrant up"
alias vhalt="vagrant halt"
alias vssh="vagrant ssh"
alias vreload="vagrant reload"
alias vrebuild="vagrant destroy --force && vagrant up"
# Docker
alias docker-composer="docker-compose"
# NOTE(review): the aliases below are commented out -- inside double quotes the
# $(docker ps ...) would expand at definition time, not at use time; they would
# need single quotes (or functions) to work as intended.
#alias dstop="docker stop $(docker ps -a -q)"
#alias dpurgecontainers="dstop && docker rm $(docker ps -a -q)"
#alias dpurgeimages="docker rmi $(docker images -q)"
#dbuild() { docker build -t=$1 .; }
#dbash() { docker exec -it $(docker ps -aqf "name=$1") bash; }
# Git
alias commit="git add . && git commit -m"
alias gcommit="git add . && git commit"
alias amend="git commit --amend --no-edit"
alias amendall="git add . && amend"
alias wip="commit wip"
alias gst="git status"
alias gb="git branch"
alias gc="git checkout"
alias gd="git diff"
alias resolve="git add . && git commit --no-edit"
alias gl="git log --oneline --decorate --color"
alias gnuke="git clean -df && git reset --hard"
<reponame>congleetea/fuse
/*
* Software License Agreement (BSD License)
*
* Copyright (c) 2019, Locus Robotics
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef FUSE_CONSTRAINTS_UUID_ORDERING_H
#define FUSE_CONSTRAINTS_UUID_ORDERING_H
#include <fuse_core/uuid.h>
#include <boost/bimap/bimap.hpp>
#include <boost/bimap/unordered_set_of.hpp>
#include <boost/bimap/vector_of.hpp>
namespace fuse_constraints
{
/**
* @brief A class that represents a sequential ordering of UUIDs
*
* This is designed for use when marginalizing out variables, but it may have other uses.
*
* Specifically, this class maps a UUID to a sequential index. Bidirectional access is possible.
* If you have a UUID, the index can be retrieved in constant time. And if you have the index, the UUID
* may be retrieved in constant (and fast) time. Also, iterating through the UUIDs in sequence is an
* efficient operation.
*
* The UuidOrdering is not designed to be highly dynamic. UUIDs can be added, but not removed. UUIDs are
* assigned an index based on the order of insertion and cannot be modified.
*/
class UuidOrdering
{
public:
  /**
   * @brief Default constructor
   */
  UuidOrdering() = default;

  /**
   * @brief Construct a UuidOrdering with the provided UUIDs
   *
   * Accepts an arbitrary number of UUIDs directly. It can be called like:
   * @code{.cpp}
   * UuidOrdering{uuid1, uuid2, uuid3};
   * @endcode
   *
   * @param[in] uuid_list The list of involved UUIDs
   */
  UuidOrdering(std::initializer_list<fuse_core::UUID> uuid_list);

  /**
   * @brief Construct a UuidOrdering with the UUIDs from the provided collection
   *
   * The \p UuidConstIterator class must meet the ForwardIterator requirements, and when dereferenced must
   * be compatible with a \p const fuse_core::UUID&.
   *
   * @param[in] first Iterator pointing to the first UUID to add to the ordering
   * @param[in] last Iterator pointing to one past the last UUID to add to the ordering
   */
  template <typename UuidConstIterator>
  UuidOrdering(UuidConstIterator first, UuidConstIterator last);

  /**
   * @brief Returns true if there are no UUIDs in this ordering
   */
  bool empty() const;

  /**
   * @brief Returns the number of UUIDs in this ordering
   *
   * This is always equal to "last index + 1"
   */
  size_t size() const;

  /**
   * @brief Return true if the index exists in the ordering
   */
  bool exists(const unsigned int index) const;

  /**
   * @brief Return true if the UUID exists in the ordering
   */
  bool exists(const fuse_core::UUID& uuid) const;

  /**
   * @brief Add a new UUID to the back of the ordering
   *
   * If the UUID already exists, no change to the ordering will occur.
   *
   * @param[in] uuid The UUID to insert
   * @return True if the UUID was inserted, false if the UUID already existed
   */
  bool push_back(const fuse_core::UUID& uuid);

  /**
   * @brief Access the UUID stored at the provided index
   *
   * Accessing an index that does not exist results in undefined behavior
   */
  const fuse_core::UUID& operator[](const unsigned int index) const;

  /**
   * @brief Access the index associated with the provided UUID
   *
   * Accessing a UUID that does not exist results in the provided UUID being added to the ordering
   * (which is why this overload is non-const).
   */
  unsigned int operator[](const fuse_core::UUID& uuid);

  /**
   * @brief Access the UUID stored at the provided index
   *
   * If the requested index does not exist, an out_of_range exception will be thrown.
   */
  const fuse_core::UUID& at(const unsigned int index) const;

  /**
   * @brief Access the index associated with the provided UUID
   *
   * If the requested UUID does not exist, an out_of_range exception will be thrown.
   */
  unsigned int at(const fuse_core::UUID& uuid) const;

private:
  // boost::bimap with a vector_of left view (index -> UUID, fast sequential
  // iteration) and an unordered_set_of right view (UUID -> index, constant-time
  // lookup and uniqueness enforcement).
  using UuidOrderMapping = boost::bimaps::bimap<boost::bimaps::vector_of<unsigned int>,
                                                boost::bimaps::unordered_set_of<fuse_core::UUID>>;

  UuidOrderMapping order_;  //!< Collection that contains the Index<-->UUID mapping
};

template <typename UuidConstIterator>
UuidOrdering::UuidOrdering(UuidConstIterator first, UuidConstIterator last)
{
  // Append each UUID with the next free index (current size). Duplicate UUIDs
  // are presumably rejected by the unordered_set_of uniqueness constraint, so
  // the insert becomes a no-op and indices stay dense -- confirm against the
  // boost::bimap insert semantics.
  for (; first != last; ++first)
  {
    order_.insert(order_.end(), UuidOrderMapping::value_type(order_.size(), *first));
  }
}
} // namespace fuse_constraints
#endif // FUSE_CONSTRAINTS_UUID_ORDERING_H
|
#!/bin/bash
# ESPnet-style recipe wrapper: runs speech-enhancement training/evaluation via
# enh_ti_ali.sh on the CHiME 6-channel simulated tracks (Conv-TasNet informed
# by phone alignments). Extra args are forwarded verbatim via "$@".
# Set bash to 'debug' mode, it will exit on :
# -e 'error', -u 'undefined variable', -o ... 'error in pipeline', -x 'print commands',
set -e
set -u
set -o pipefail
min_or_max=min # "min" or "max". This is to determine how the mixtures are generated in local/data.sh.
sample_rate=16k
train_set="tr05_simu_isolated_6ch_track_ali"
valid_set="dt05_simu_isolated_6ch_track_ali"
# test_sets="tr05_real_isolated_1ch_track"
# test_sets="et05_simu_isolated_1ch_track tr05_multi_noisy_si284"
test_sets="et05_simu_isolated_6ch_track_ali"
./enh_ti_ali.sh \
    --ngpu 1 \
    --num_nodes 1 \
    --train_set "${train_set}" \
    --valid_set "${valid_set}" \
    --test_sets "${test_sets}" \
    --enh_tag "conv_tasnet_informed_text_phn_6ch_tactron_encoder_ali_mtl10_h2_n2_last2" \
    --channels 6 \
    --speed_perturb_factors "0.9 1.0 1.1" \
    --fs ${sample_rate} \
    --audio_format wav \
    --feats_type raw \
    --ref_channel 3 \
    --spk_num 1 \
    --enh_config ./conf/tuning/train_enh_conv_tasnet_phn_informed_tactron_encoder_6ch_ali_mtl10_h2_n2_last2.yaml \
    --token_list data/token_list/phn/tokens.kaldi.txt \
    --g2p g2p_en_no_space \
    --token_type raw \
    --lm_train_text data/${train_set}/text \
    --inference_model "valid.loss.best.pth" \
    "$@"
|
#!/bin/bash
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build a statically-linked debug JSC instrumented with SanitizerCoverage
# (trace-pc-guard) for fuzzing; build artifacts land in FuzzBuild.
export WEBKIT_OUTPUTDIR=FuzzBuild
if [ "$(uname)" == "Darwin" ]; then
    ./Tools/Scripts/build-jsc --jsc-only --debug --cmakeargs="-DENABLE_STATIC_JSC=ON -DCMAKE_CXX_FLAGS='-fsanitize-coverage=trace-pc-guard -O3'"
elif [ "$(uname)" == "Linux" ]; then
    ./Tools/Scripts/build-jsc --jsc-only --debug --cmakeargs="-DENABLE_STATIC_JSC=ON -DCMAKE_C_COMPILER='/usr/bin/clang-10' -DCMAKE_CXX_COMPILER='/usr/bin/clang++-10' -DCMAKE_CXX_FLAGS='-fsanitize-coverage=trace-pc-guard -O3 -lrt'"
else
    # Fail with a non-zero status so callers/CI notice the build never ran
    # (previously this fell through with exit code 0).
    echo "Unsupported operating system" >&2
    exit 1
fi
|
#!/bin/bash
# Build **and run** all the benchmarks
# Compilation (generating the PDG using Noelle) and emulation (using ICEmu)
# are both CPU and Memory intensive. For 16GB of system memory we recommend not
# using more than 1 thread.
# This limitation is mainly due to the larger 'picojpeg' benchmark.
# NOTE(review): the comment above recommends a single thread, but -j 3 is used
# here -- confirm which is intended on memory-constrained machines.
make -j 3
|
const fromEvent = require('graphcool-lib').fromEvent;
const JWT = require('jsonwebtoken');
const lti = require('ims-lti');
// Serializable snapshot of an ims-lti OutcomeService, stored with an LTI
// session so grades can be posted back later. Unserializable members (the
// signer and the capability probes) are stored as null.
interface OutcomeServiceJSON {
    cert_authority: string;
    consumer_key: string;
    consumer_secret: string;
    language: string;
    result_data_types: string;
    service_url: string;
    service_url_oauth: string;
    service_url_parts: string;
    signer: null;
    source_did: string;
    send_replace_result: null;
    supports_result_data: null;
    send_read_result: null;
    send_delete_result: null;
    send_replace_result_with_text: null;
    send_replace_result_with_url: null;
}

// Marker stored when the LTI launch carried no outcome service (no
// service_url), i.e. grades cannot be reported back for this session.
interface NullOutcomeServiceJSON {
    outcomeService: null;
}
// Entry point for an LTI launch of an assignment.
// Validates the OAuth-signed LTI request, records an LTI session, and returns
// cookies plus a redirect URL: straight to the assignment for known users, or
// to the signup/authenticate flow (carrying an ltiJWT) for unknown ones.
// Any failure is logged and collapsed into a generic error response.
export default async (event: any) => {
    if (!event.context.graphcool.rootToken) {
        console.log('Please provide a valid root token!')
        return { error: 'assignment-lti-launch not configured correctly.' };
    }

    try {
        // The LTI launch arrives form-encoded; decode it into an object.
        const body = parseUrlEncodedBody(event.data.requestBody);
        const graphcool = fromEvent(event);
        const api = graphcool.api('simple/v1');
        const ltiUserId = body.user_id;
        const assignmentId = event.data.assignmentId;
        const courseId = await getCourseId(api, assignmentId);
        const assignmentType = event.data.assignmentType;
        const lisPersonContactEmailPrimary = body.lis_person_contact_email_primary;
        const key = body.oauth_consumer_key;
        const secret = getLTISecret(key);
        // Verify the OAuth 1.0 signature of the launch request; rejects throw
        // and fall through to the catch below.
        const ltiProvider = await validateLTIRequest(key, secret, {
            body,
            protocol: 'https',
            url: `/lti${event.data.path.replace('{assignmentid}', assignmentId).replace('{assignmenttype}', assignmentType)}`,
            headers: {
                host: 'api.prendus.com'
            },
            method: event.data.method
        });
        // Persist the session (incl. the serialized outcome service) and hand
        // its id to the client as a signed cookie.
        const ltiSessionId = await createLTISession(api, ltiProvider, ltiUserId);
        const ltiSessionIdJWT = JWT.sign({
            ltiSessionId
        }, process.env.PRENDUS_JWT_SECRET);
        const ltiSessionIdJWTCookie = `ltiSessionIdJWT=${ltiSessionIdJWT}; Domain=${process.env.PRENDUS_CLIENT_DOMAIN}; Path=/`;
        const ltiUser = await getLTIUser(api, ltiUserId);
        const clientRedirectUrl = `assignment/${assignmentId}/${assignmentType.toLowerCase()}`;
        const clientRedirectUrlCookie = `redirectUrl=${clientRedirectUrl}; Domain=${process.env.PRENDUS_CLIENT_DOMAIN}; Path=/`;
        return {
            data: await generateReturnValues(api, ltiUser, courseId, ltiSessionIdJWTCookie, clientRedirectUrlCookie, assignmentId, assignmentType, ltiUserId, lisPersonContactEmailPrimary, process.env.PRENDUS_JWT_SECRET)
        };
    }
    catch(error) {
        console.log(error);
        return {
            error: 'An error occurred'
        };
    }
};
// Persists a new LTISession record linking the LTI user to the (serialized)
// outcome service of this launch. Returns the new session's id.
async function createLTISession(api: any, ltiProvider: any, ltiUserId: string): Promise<string> {
    const outcomeServiceJSON = jsonifyOutcomeService(ltiProvider.outcome_service);
    const data = await api.request(`
        mutation($ltiUserId: String!, $serializedOutcomeService: Json!) {
            createLTISession(
                ltiUserId: $ltiUserId
                serializedOutcomeService: $serializedOutcomeService
            ) {
                id
            }
        }
    `, {
        ltiUserId,
        serializedOutcomeService: outcomeServiceJSON
    });
    return data.createLTISession.id;
}
// Reduces an ims-lti outcome service to a JSON-storable object.
// A service without a service_url (no grade passback possible) is encoded as
// the explicit { outcomeService: null } marker; otherwise the primitive fields
// are copied and the unserializable members (signer, capability probes) are
// deliberately written out as null.
function jsonifyOutcomeService(outcomeService: any): OutcomeServiceJSON | NullOutcomeServiceJSON {
    if (!outcomeService.service_url) {
        return {
            outcomeService: null
        };
    }

    const serialized: OutcomeServiceJSON = {
        cert_authority: outcomeService.cert_authority,
        consumer_key: outcomeService.consumer_key,
        consumer_secret: outcomeService.consumer_secret,
        language: outcomeService.language,
        result_data_types: outcomeService.result_data_types,
        service_url: outcomeService.service_url,
        service_url_oauth: outcomeService.service_url_oauth,
        service_url_parts: outcomeService.service_url_parts,
        signer: null,
        source_did: outcomeService.source_did,
        send_replace_result: null,
        supports_result_data: null,
        send_read_result: null,
        send_delete_result: null,
        send_replace_result_with_text: null,
        send_replace_result_with_url: null
    };
    return serialized;
}
// Looks up the id of the course that owns the given assignment.
// NOTE(review): assignmentId is interpolated straight into the GraphQL
// document -- acceptable for internal ids, but GraphQL variables would rule
// out injection entirely; confirm where assignmentId originates.
async function getCourseId(api: any, assignmentId: string): Promise<string> {
    const data = await api.request(`
        query {
            Assignment(
                id: "${assignmentId}"
            ) {
                course {
                    id
                }
            }
        }
    `);
    return data.Assignment.course.id;
}
// Decodes an application/x-www-form-urlencoded request body into a plain
// key -> value object.
// Fixes over the previous version:
//  * only the FIRST '=' separates key from value, so values that themselves
//    contain '=' (base64 padding, OAuth signatures) are no longer truncated;
//  * a bare key with no '=' (e.g. "a&b=1") maps to '' instead of crashing on
//    an undefined value.
// '+' is translated to a space before percent-decoding, per form encoding.
function parseUrlEncodedBody(rawBody: string): any {
    return rawBody
        .split('&')
        .map(pair => {
            const separatorIndex = pair.indexOf('=');
            return separatorIndex === -1
                ? [pair, '']
                : [pair.slice(0, separatorIndex), pair.slice(separatorIndex + 1)];
        })
        .reduce((result, x) => {
            const key = decodeURIComponent(x[0]);
            const value = decodeURIComponent(x[1].replace(/\+/g, '%20'));
            return Object.assign({}, result, {
                [key]: value
            });
        }, {});
}
// Builds the cookies + redirect target for a validated LTI launch.
// Known LTI user: enroll them on the course (paying for it if it is free) and
// send them straight to the assignment. Unknown user: mint an ltiJWT (signed
// with the root token) carrying the launch context and send them to the
// authenticate flow, where a cloud function will link the new account.
async function generateReturnValues(api: any, ltiUser: any, courseId: string, ltiSessionIdJWTCookie: string, clientRedirectUrlCookie: string, assignmentId: string, assignmentType: string, ltiUserId: string, lisPersonContactEmailPrimary: string, rootToken: string) {
    if (ltiUser) {
        await enrollUserOnCourse(api, ltiUser.user.id, courseId);
        await payForCourseIfFree(api, ltiUser.user.id, courseId);
        //TODO we are only adding the cookie syntax in here until a more elegant solution is provided by AWS API Gateway or graph.cool (we'll be dropping AWS API Gateway as soon as graph.cool supports setting headers and gives full access to the response body)
        return {
            ltiSessionIdJWTCookie,
            clientRedirectUrlCookie,
            serverRedirectUrl: `${process.env.PRENDUS_CLIENT_ORIGIN}/assignment/${assignmentId}/${assignmentType.toLowerCase()}`
        };
    }
    else {
        // if the user does not exist yet in our system, we'll need to redirect them to the signup page and provide the url to go back to once they are signed up
        // They also need the ltiJWT to have the cloud function connect their newly created User to a newly created LTIUser, and to use the same cloud function to authorize the User for the Assignment
        const ltiJWT = JWT.sign({
            assignmentId,
            ltiUserId,
            lisPersonContactEmailPrimary
        }, rootToken);
        //TODO we are only adding the cookie syntax in here until a more elegant solution is provided by AWS API Gateway or graph.cool (we'll be dropping AWS API Gateway as soon as graph.cool supports setting headers and gives full access to the response body)
        return {
            ltiJWTCookie: `ltiJWT=${ltiJWT}; Domain=${process.env.PRENDUS_CLIENT_DOMAIN}; Path=/`,
            ltiSessionIdJWTCookie,
            clientRedirectUrlCookie,
            serverRedirectUrl: `${process.env.PRENDUS_CLIENT_ORIGIN}/authenticate`
        };
    }
}
// Resolves the OAuth shared secret for an LTI consumer key.
// TODO: eventually this will look up the secret tied to the assignment/course
// in the database; until then every consumer shares the single secret
// configured in the environment, and the key argument is unused.
function getLTISecret(key: string): string {
    const { PRENDUS_LTI_SECRET } = process.env;
    return PRENDUS_LTI_SECRET;
}
// Wraps ims-lti's callback-style valid_request in a Promise.
// Resolves with the configured Provider when the launch's OAuth signature is
// valid; rejects with the library's error, or a generic message when the
// request is invalid without a specific error.
async function validateLTIRequest(key: string, secret: string, req: any) {
    return new Promise((resolve, reject) => {
        const ltiProvider = new lti.Provider(key, secret);
        ltiProvider.valid_request(req, (error: any, isValid: boolean) => {
            if (isValid) {
                resolve(ltiProvider);
            }
            else if (error) {
                reject(error);
            }
            else {
                reject('LTI request not valid');
            }
        });
    });
}
// Fetches the LTIUser record (with its linked user id) for an LTI user id.
// Returns null/undefined when no such record exists (caller branches on this).
async function getLTIUser(api: any, ltiUserId: string) {
    const data = await api.request(`
        query {
            ltiUser: LTIUser(ltiUserId: "${ltiUserId}") {
                user {
                    id
                }
            }
        }
    `);
    return data.ltiUser;
}
// If the course is free (price === 0), records a zero-amount purchase for the
// user (unless one already exists). Returns -1 for paid courses.
async function payForCourseIfFree(api: any, userId: string, courseId: string) {
    const data = await api.request(`
        query {
            Course(
                id: "${courseId}"
            ) {
                price
            }
        }
    `);
    const price = data.Course.price;
    return price === 0 ? await payForCourseIfNoPurchaseExists(api, userId, courseId) : -1;
}
// Records a purchase only when the user has no existing purchase for the
// course (keeps the free enrollment idempotent). Returns -1 when one exists.
async function payForCourseIfNoPurchaseExists(api: any, userId: string, courseId: string) {
    const data = await api.request(`
        query {
            allPurchases(filter: {
                user: {
                    id: "${userId}"
                }
                course: {
                    id: "${courseId}"
                }
            }) {
                id
            }
        }
    `);
    return data.allPurchases.length === 0 ? payForCourse(api, userId, courseId) : -1;
}
// Creates a zero-amount Purchase record for the user/course pair (used for
// free courses; the placeholder stripeTokenId marks that no charge happened).
// Returns the purchased course's price.
async function payForCourse(api: any, userId: string, courseId: string) {
    const data = await api.request(`
        mutation {
            createPurchase(
                userId: "${userId}"
                amount: 0
                courseId: "${courseId}"
                stripeTokenId: "there is no stripeTokenId for a free course"
            ) {
                course {
                    price
                }
            }
        }
    `);
    return data.createPurchase.course.price;
}
// Adds the user to the course's enrolled students (many-to-many relation).
// Returns the enrolled user's id.
async function enrollUserOnCourse(api: any, userId: string, courseId: string) {
    const data = await api.request(`
        mutation {
            addToStudentsAndCourses(
                enrolledCoursesCourseId: "${courseId}"
                enrolledStudentsUserId: "${userId}"
            ) {
                enrolledStudentsUser {
                    id
                }
                enrolledCoursesCourse {
                    id
                }
            }
        }
    `);
    return data.addToStudentsAndCourses.enrolledStudentsUser.id;
}
|
<filename>src/main/java/com/atlassian/maven/plugin/clover/CloverOptimizerMojo.java
package com.atlassian.maven.plugin.clover;
import com.atlassian.maven.plugin.clover.internal.AbstractCloverMojo;
import com.atlassian.maven.plugin.clover.internal.ConfigUtil;
import com.atlassian.clover.CloverNames;
import com.atlassian.clover.ant.types.CloverOptimizedTestSet;
import com.atlassian.clover.ant.types.CloverAlwaysRunTestSet;
import com.atlassian.clover.util.FileUtils;
import com.google.common.collect.Iterables;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.Resource;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import java.io.File;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
/**
* Sets the 'test' property on the project which is used by the maven-surefire-plugin to determine which tests are run.
* If a snapshot file from a previous build, is found, that will be used to determine what tests should be run.
*/
@Mojo(name = "optimize", defaultPhase = LifecyclePhase.PROCESS_TEST_CLASSES)
public class CloverOptimizerMojo extends AbstractCloverMojo {
/**
* <p>The number of builds to run, before the snapshot file gets deleted.</p>
* <p>The snapshot stores the mapping between your test cases and source code. Over time, this becomes stale,
* so it is recommended to regenerate this file, by running all tests, on a regular basis.</p>
*/
@Parameter(property = "maven.clover.fullRunEvery", defaultValue = "10")
private int fullRunEvery;
/**
* A list of Tests to include for build optimization.
* If neither <i>optimizeIncludes</i> nor <i>optimizeExcludes</i> are supplied, then the
* <i>includes</i> specified in the maven-surefire-plugin's configuration will be used.
*/
@Parameter
private List<String> optimizeIncludes;
/**
* A list of Tests to exclude from build optimization.
* If neither <i>optimizeIncludes</i> nor <i>optimizeExcludes</i> are supplied, then the
* <i>excludes</i> specified in the maven-surefire-plugin's configuration will be used.
*/
@Parameter
private List<String> optimizeExcludes;
/**
* A list of Tests which should always be run. ie they will never be optimized away.
*/
@Parameter
private List<String> alwaysRunTests;
/**
* <b>NOTE:</b> This currently has no effect, because the maven-surefire-plugin re-orders the tests alphabetically.
*
* This controls how Clover optimizes your tests.
*
* By default - clover excludes any test case it deems as irrelevant to any changes made to your source code.
*
* "failfast" - (default) ensures your build FAILs fast ie: tests relevant to code changes are run first
*
* "random" - tests will be shuffled before run. Can be used to determine inter-test-dependencies.
*/
@Parameter(property = "maven.clover.ordering")
private String ordering;
/**
* Toggles whether or not build optimization is to be done or not.
*/
@Parameter(property = "maven.clover.optimize.enabled", defaultValue = "true")
private boolean enabled;
/**
* Controls whether or not to exclude tests that do not cover any modified files.
*
* If false, (and ordering is not random or original), Clover will not exclude any of the tests. Instead, they
* will be run in an optimal order to ensure the build fails as fast as possible. ie - tests that cover modify code
* first, then ascending by test time.
*/
@Parameter(property = "maven.clover.optimize.minimize", defaultValue = "true")
private boolean minimize;
/**
* The default test patterns to include.
*/
private static final List<String> DEFAULT_INCLUDES = Arrays.asList("**/Test*.java", "**/*Test.java", "**/*TestCase.java");
    // Delimiters of surefire's "%regex[...]" test-pattern syntax; presumably
    // used below to wrap/strip regex-style patterns (usage is outside this view).
    private static final String REGEX_START = "%regex[";
    private static final String REGEX_END = "]";
/**
 * Computes the optimized test set and hands it to Surefire by setting the
 * {@code test} and {@code failIfNoTests} project properties. Does nothing when
 * optimization is skipped or the module has no main/test sources at all.
 *
 * @throws MojoExecutionException declared for the mojo contract; not thrown directly here
 */
public void execute() throws MojoExecutionException {
    if (skip) {
        getLog().info("Skipping build optimization.");
        return;
    }
    // if there are no source files, then skip this mojo
    final String sourceDirectory = getProject().getBuild().getSourceDirectory();
    final String testSourceDirectory = getProject().getBuild().getTestSourceDirectory();
    if (!new File(sourceDirectory).exists() && !new File(testSourceDirectory).exists()) {
        getLog().info(sourceDirectory + " and " + testSourceDirectory + " do not exist. No optimization will be done for: "
                + getProject().getGroupId() + ":" + getProject().getArtifactId());
        return;
    }
    final Project antProj = new Project();
    antProj.init();
    antProj.addBuildListener(new MvnLogBuildListener(getLog()));
    final List<Resource> optimizedTests = configureOptimisedTestSet(antProj);
    // StringBuilder instead of StringBuffer: single-threaded use, no need for synchronization.
    final StringBuilder testPattern = new StringBuilder();
    for (final Resource test : optimizedTests) {
        getLog().debug("Running TEST: " + test.getName());
        testPattern.append(test.getName()).append(',');
    }
    getLog().debug("Setting test property to: '" + testPattern + "'");
    //Always set this to true because we can't be sure if the filtered list we have will result in no tests being run
    //because we matched classes under src/test/ which aren't unit tests
    getProject().getProperties().put("failIfNoTests", "false");
    if (optimizedTests.isEmpty()) {
        // empty -Dtest values cause all tests to be run so let's put a dummy value
        // ensure surefire wont fail if we run no tests
        getProject().getProperties().put("test", "clover/optimized/test/PlaceHolder.java");
    } else {
        getProject().getProperties().put("test", testPattern.toString());
    }
}
/**
 * Builds the Clover optimized test set for this module. Include/exclude patterns
 * come from the plugin configuration, falling back to the Surefire plugin's
 * configuration, and finally to {@code DEFAULT_INCLUDES}. Each test-source root
 * that exists is added as an Ant fileset.
 *
 * @param antProj initialized Ant project hosting the filesets and the Clover initstring property
 * @return resources for the tests to run, in optimized order
 */
protected List<Resource> configureOptimisedTestSet(final Project antProj) {
    List<String> includes = optimizeIncludes;
    List<String> excludes = optimizeExcludes;

    if (includes == null && excludes == null) {
        getLog().debug("No clover excludes or includes specified. Falling back to Surefire configuration.");
        final Plugin surefirePlugin = lookupSurefirePlugin();
        if (surefirePlugin != null) {
            includes = extractNestedStrings("includes", surefirePlugin);
            excludes = extractNestedStrings("excludes", surefirePlugin);
        }
        // If there are still no includes use the default ones
        if (includes == null) {
            includes = DEFAULT_INCLUDES;
        }
    }
    getLog().debug("Effective filtering: includes=" + includes + ", excludes=" + excludes);

    final CloverOptimizedTestSet testsToRun = new CloverOptimizedTestSet();
    testsToRun.setProject(antProj);
    testsToRun.setLogger(new MvnLogger(getLog()));
    testsToRun.setFullRunEvery(fullRunEvery);
    testsToRun.setDebug(debug);
    testsToRun.setSnapshotFile(new ConfigUtil(this).resolveSnapshotFile(snapshot));
    // Ordering is optional; when unset Clover uses its default ordering.
    if (ordering != null) {
        final CloverOptimizedTestSet.TestOrdering order = new CloverOptimizedTestSet.TestOrdering();
        order.setValue(ordering);
        testsToRun.setOrdering(order);
    }
    testsToRun.setMinimize(minimize);
    testsToRun.setEnabled(enabled);

    antProj.setProperty(CloverNames.PROP_INITSTRING, resolveCloverDatabase());
    antProj.setName(getProject().getName());

    final List<String> testSources = getProject().getTestCompileSourceRoots();
    for (String testSource : testSources) {
        addTestRoot(antProj, includes, excludes, testsToRun, testSource);
    }
    return testsToRun.getOptimizedTestResource();
}
/**
 * Reads the named list element (e.g. "includes"/"excludes") from the Surefire
 * plugin's XML configuration.
 *
 * @param elementName name of the list element to extract
 * @param surefirePlugin the maven-surefire-plugin model
 * @return extracted values, or {@code null} when the plugin has no configuration block
 */
protected List<String> extractNestedStrings(final String elementName, final Plugin surefirePlugin) {
    final Xpp3Dom config = (Xpp3Dom) surefirePlugin.getConfiguration();
    return config == null ? null : extractNestedStrings(elementName, config);
}
/**
* Extracts nested values from the given config object into a List.
*
* @param childname the name of the first subelement that contains the list
* @param config the actual config object
*/
/**
 * Extracts nested values from the given config object into a List.
 *
 * @param childname the name of the first subelement that contains the list
 * @param config    the actual config object
 * @return the child values in document order, or {@code null} when the element is absent
 */
static List<String> extractNestedStrings(final String childname, final Xpp3Dom config) {
    final Xpp3Dom subelement = config.getChild(childname);
    // Guard clause keeps the happy path unindented.
    if (subelement == null) {
        return null;
    }
    final List<String> result = new LinkedList<>();
    for (final Xpp3Dom child : subelement.getChildren()) {
        result.add(child.getValue());
    }
    return result;
}
/**
 * Adds one test-source root to the optimized test set as an Ant fileset, plus a
 * separate "always run" fileset when {@code alwaysRunTests} patterns are configured.
 *
 * @param antProj    Ant project that owns the filesets
 * @param includes   include patterns (Ant/Surefire style)
 * @param excludes   exclude patterns, may be null
 * @param testsToRun the optimized test set being assembled
 * @param testRoot   path of the test-source root; silently skipped when it does not exist
 */
private void addTestRoot(final Project antProj, final List<String> includes, final List<String> excludes,
                         final CloverOptimizedTestSet testsToRun, final String testRoot) {
    final File testRootDir = new File(testRoot);
    if (!testRootDir.exists()) {
        // if the test dir does not exist, do not add this as a fileset.
        return;
    }

    getLog().info("Adding fileset: directory=" + testRootDir + ", includes=" + includes + ", excludes=" + excludes);
    testsToRun.add(createFileSet(antProj, testRootDir, includes, excludes));

    if (alwaysRunTests != null) {
        // create fileset
        final FileSet alwaysRunFileSet = createFileSet(antProj, testRootDir, alwaysRunTests, null);
        // add it to an AlwaysRunTestSet
        final CloverAlwaysRunTestSet alwaysRunTestSet = new CloverAlwaysRunTestSet();
        alwaysRunTestSet.setProject(antProj);
        alwaysRunTestSet.add(alwaysRunFileSet);
        // then add that to the OptimizedTestSet
        testsToRun.add(alwaysRunTestSet);
    }
}
/**
* Creates a FileSet for <code>antProject</code> and base <code>directory</code> having a list of files
* to be included and excluded, according to <code>includes / excludes</code> wildcard patterns.
*
* @param antProject
* @param directory
* @param includes
* @param excludes
* @return FileSet
*/
/**
 * Creates a FileSet for <code>antProject</code> and base <code>directory</code> having a list of files
 * to be included and excluded, according to <code>includes / excludes</code> wildcard patterns.
 * Patterns are first expanded via {@link #explodePaths} (comma/space splitting, %regex[] scanning).
 *
 * @param antProject Ant project owning the fileset
 * @param directory  base directory of the fileset
 * @param includes   include patterns (never null here)
 * @param excludes   exclude patterns, may be null or empty
 * @return FileSet
 */
FileSet createFileSet(final Project antProject, final File directory, final List<String> includes, final List<String> excludes) {
    final FileSet testFileSet = new FileSet();
    testFileSet.setProject(antProject);
    testFileSet.setDir(directory);
    // Plain List.toArray avoids the Guava Iterables dependency for a trivial conversion.
    final List<String> includesExpanded = explodePaths(directory, includes);
    testFileSet.appendIncludes(includesExpanded.toArray(new String[0]));
    if (excludes != null && !excludes.isEmpty()) {
        final List<String> excludesExpanded = explodePaths(directory, excludes);
        testFileSet.appendExcludes(excludesExpanded.toArray(new String[0]));
    }
    return testFileSet;
}
/**
* Search for maven-surefire-plugin in the list of build plugins. Returns a plugin instance or
* <code>null</code> if not found.
* @return Plugin maven-surefire-plugin or <code>null</code>
*/
/**
 * Search for maven-surefire-plugin in the list of build plugins. Returns a plugin instance or
 * <code>null</code> if not found.
 * @return Plugin maven-surefire-plugin or <code>null</code>
 */
private Plugin lookupSurefirePlugin() {
    final String key = "org.apache.maven.plugins:maven-surefire-plugin";
    final MavenProject mavenProject = getProject();
    if (mavenProject == null) {
        getLog().warn("Maven execution project is null. Surefire configuration will be ignored.");
        return null;
    }
    final List<Plugin> plugins = mavenProject.getBuildPlugins();
    for (final Plugin plugin : plugins) {
        // Case-insensitive match on the groupId:artifactId key.
        if (key.equalsIgnoreCase(plugin.getKey())) {
            return plugin;
        }
    }
    return null;
}
/**
* Resolves exact list of paths using the input <code>paths</code> list, because:
* 1) we can have multiple 'includes' tags in Ant FileSet and
* 2a) we can have multiple comma- or space-separated patterns in one 'includes'
* 2b) we can have regular expression entered (surefire specific feature)
*
* For 2a) String.split() is used, for 2b) a directory scan is performed
*
* See:
* <li>http://ant.apache.org/manual/Types/fileset.html</li>
* <li>http://maven.apache.org/plugins/maven-surefire-plugin/examples/inclusion-exclusion.html</li>
*
* @param paths list of paths (single or separated by space or comma)
* @return List<String>
*/
/**
 * Resolves exact list of paths using the input <code>paths</code> list, because:
 * 1) we can have multiple 'includes' tags in Ant FileSet and
 * 2a) we can have multiple comma- or space-separated patterns in one 'includes'
 * 2b) we can have regular expression entered (surefire specific feature)
 *
 * For 2a) String.split() is used, for 2b) a directory scan is performed
 *
 * See:
 * <li>http://ant.apache.org/manual/Types/fileset.html</li>
 * <li>http://maven.apache.org/plugins/maven-surefire-plugin/examples/inclusion-exclusion.html</li>
 *
 * @param directory base directory scanned for %regex[] entries
 * @param paths list of paths (single or separated by space or comma)
 * @return List<String>
 */
static List<String> explodePaths(final File directory, final List<String> paths) {
    final List<String> explodedPaths = new LinkedList<String>();
    for (final String path : paths) {
        // NOTE(review): this literal duplicates the REGEX_START constant declared
        // above -- consider reusing the constant to keep the two in sync.
        if (path.trim().startsWith("%regex[")) {
            splitPathByRegexp(directory, explodedPaths, path);
        } else {
            splitPathBySeparators(explodedPaths, path);
        }
    }
    return explodedPaths;
}
/**
 * Recursively collects all regular files under {@code dir} whose path matches
 * {@code pattern}. A non-directory argument is itself tested against the pattern.
 *
 * @param dir     directory (or file) to scan
 * @param pattern compiled regular expression matched against {@code File#getPath()}
 * @return matching files; empty list when nothing matches
 */
private static List<File> dirTreeMatchingPattern(final File dir, final Pattern pattern) {
    final List<File> matchedFiles = new LinkedList<File>();
    if (dir.isDirectory()) {
        // File.list() returns null on an I/O error or when the directory is not
        // readable; treat that as "no children" instead of throwing an NPE.
        final String[] children = dir.list();
        if (children != null) {
            // recursive search
            for (final String fileName : children) {
                matchedFiles.addAll(dirTreeMatchingPattern(new File(dir, fileName), pattern));
            }
        }
    } else if (pattern.matcher(dir.getPath()).matches()) {
        // add a matching file
        matchedFiles.add(dir);
    }
    return matchedFiles;
}
/**
* Takes <code>pathRegex</code> regular expression in a form like "%regex[.*[Cat|Dog].*Test.*]" (as supported by
* surefire plugin) and searches for all files in <code>directory</code> whose path name matches this expression.
* Adds relative paths of found files to <code>outputList</code>.
*
* @param directory directory to be scanned
* @param outputList output list to which names of found files will be added
* @param pathRegex regular expression for file name
*/
/**
 * Takes <code>pathRegex</code> regular expression in a form like "%regex[.*[Cat|Dog].*Test.*]" (as supported by
 * surefire plugin) and searches for all files in <code>directory</code> whose path name matches this expression.
 * Adds relative paths of found files to <code>outputList</code>.
 *
 * @param directory directory to be scanned
 * @param outputList output list to which names of found files will be added
 * @param pathRegex regular expression for file name
 */
private static void splitPathByRegexp(final File directory, final List<String> outputList, final String pathRegex) {
    // extract regular expression from a path entry (we assume that there can be only one regexp)
    final String regex = pathRegex.substring(
            pathRegex.indexOf(REGEX_START) + REGEX_START.length(),
            pathRegex.lastIndexOf(REGEX_END));
    // create pattern for this regexp and find all files in directory matching it
    final Pattern pattern = Pattern.compile(regex);
    final List<File> matchedFiles = dirTreeMatchingPattern(directory, pattern);
    // convert File->String and add to output list
    // (paths are made relative to the fileset base dir, as Ant patterns expect)
    for (final File file : matchedFiles) {
        outputList.add(FileUtils.getRelativePath(directory, file));
    }
}
/**
 * Splits a single Ant-style pattern entry on commas and spaces and appends each
 * non-empty piece to {@code outputList}.
 *
 * @param outputList output list receiving the individual patterns
 * @param path       one entry, possibly containing several comma/space-separated patterns
 */
private static void splitPathBySeparators(final List<String> outputList, final String path) {
    // Ant allows several patterns in one 'includes' value, separated by comma or space.
    final String ANT_PATTERN_SEPARATOR = "[, ]";
    for (final String pattern : path.split(ANT_PATTERN_SEPARATOR)) {
        if (!pattern.isEmpty()) {
            outputList.add(pattern);
        }
    }
}
}
|
#!/bin/sh
# Build the devhome swh-vim-env image from the current directory, tagging it both
# with the distro-specific tag (xenial) and the floating "latest" tag.
docker build \
-t shane/devhome.swh-vim-env:xenial \
-t shane/devhome.swh-vim-env:latest \
.
|
<reponame>yandld/lpc_uart_server<filename>mcu_source/Libraries/utilities/chusb/src/vsc/usbd_cp210x.c
#include <string.h>
#include "usb_common.h"
#include "usbd.h"
#include "usbd_cp210x.h"
#include "uart_bridge.h"
#include "uart.h"
/* https://www.silabs.com/documents/public/application-notes/AN571.pdf */
/* USB VID/PID identity entry for the CP210x family (see AN571). */
typedef struct
{
    uint16_t vid;
    uint16_t pid;
    const char* name;
}cp210x_id_t;

/* Known CP210x parts; entry 0 (CP2102) is the identity presented at init. */
cp210x_id_t cp210x_id_table[] =
{
    {0x10C4, 0xEA60, "CP2102"}, /* USB-1UART */
    {0x10C4, 0xEA70, "CP2105"}, /* USB-2UART */
    {0x10C4, 0xEA71, "CP2108"}, /* USB-4UART */
};

/* Per-device state: owning USB stack handle, chosen identity, user callbacks. */
typedef struct
{
    struct usbd_t *h;
    cp210x_id_t id;
    struct usbd_cp210x_callback_t *cb;
}cp210x_t;

static cp210x_t cp210x;

/* Endpoint numbers and bulk transfer size for the single emulated channel. */
#define USBD_CP210X_CH0_BULKIN (1)
#define USBD_CP210X_CH0_BULKOUT (2)
#define USBD_CP210X_BUCK_SIZE (512)

static cp210x_cpr_t cpr;                           /* communication-properties response buffer */
static cp210x_ssr_t ssr;                           /* serial-status response buffer */
static uint8_t cdc_out_buf[USBD_CP210X_BUCK_SIZE]; /* staging buffer for host OUT data */
static rt_sem_t cp_out_sem;                        /* signalled when OUT data arrives */
extern rt_mutex_t usb_lock;

/* Interface descriptor for channel 0: vendor-specific class with two bulk endpoints. */
const uint8_t cp210x_descriptor[] =
{
    /* CH0 */
    USB_DESC_LENGTH_INTERFACE,
    USB_DESC_TYPE_INTERFACE,
    0, /* interface index */
    0x00,
    0x02, /* 2 eps */
    USB_CLASS_VEND_SPECIFIC,
    0x00,
    0x00,
    USBD_IF_STR_IDX(USBD_MSC_IF_IDX),
    /* eps descriptor */
    0x07,
    USB_DESC_TYPE_ENDPOINT,
    (0x00 | USBD_CP210X_CH0_BULKOUT),
    0x02,
    WBVAL(USBD_CP210X_BUCK_SIZE),
    0,
    0x07,
    USB_DESC_TYPE_ENDPOINT,
    (0x80 | USBD_CP210X_CH0_BULKIN),
    0x02, /* bulk endpoint */
    WBVAL(USBD_CP210X_BUCK_SIZE), /* size */
    0, /* interval, 0 for bulk */
};
/* data handler */
/*
 * Bulk-endpoint completion handler for the emulated channel.
 * dir == 1 means an IN (device-to-host) transfer finished; otherwise OUT data
 * arrived from the host. Events for other endpoints are ignored.
 */
static uint32_t cp210x_data_ep_handler(uint8_t ep, uint8_t dir)
{
    const int is_in = (dir == 1);

    if (is_in && (ep == USBD_CP210X_CH0_BULKIN))
    {
        /* IN transfer complete: the UART bridge may queue more data to the host. */
        bridge_uart_usb_data_in_ready();
    }
    else if (!is_in && (ep == USBD_CP210X_CH0_BULKOUT))
    {
        /* OUT data arrived: wake the drain thread to forward it to the UART. */
        rt_sem_release(cp_out_sem);
    }
    return 0;
}
/* handle vender specfic class request */
/*
 * EP0 handler for CP210x vendor-specific class requests (see AN571).
 * Most "set" requests are acknowledged without being acted on; "get" requests
 * answer with canned data so the host driver accepts the device.
 */
static uint32_t cp210x_vender_request_handler(struct usbd_t *h)
{
    uint8_t in_resp[8];
    switch(h->setup.request)
    {
        case CP210X_VENDOR_SPECIFIC:
            USBD_TRACE("CP210X_VENDOR_SPECIFIC\r\n");
            switch(h->setup.value)
            {
                /* 0x370B: partnum query observed from Silabs drivers -- answer with 0. */
                case 0x370B:
                    in_resp[0] = 0;
                    begin_data_in_stage(in_resp, 1);
                    break;
                default:
                    USBD_TRACE("unknown vender specific request value:%X\r\n", h->setup.value);
                    break;
            }
            break;
        /* Line/flow/baud "set" requests are accepted but not applied here. */
        case CP210X_SET_LINE_CTL:
            USBD_TRACE("CP210X_SET_LINE_CTL\r\n");
            break;
        case CP210X_SET_CHAR:
            USBD_TRACE("CP210X_SET_CHAR\r\n");
            break;
        case CP210X_IFC_ENABLE:
            USBD_TRACE("CP210X_IFC_ENABLE\r\n");
            break;
        case CP210X_SET_BAUDDIV:
            USBD_TRACE("CP210X_SET_BAUDDIV\r\n");
            break;
        case CP210X_GET_MDMSTS:
            /* Modem status: report all lines inactive. */
            USBD_TRACE("CP210X_GET_MDMSTS\r\n");
            in_resp[0] = 0;
            begin_data_in_stage(in_resp, 1);
            break;
        case CP210X_SET_FLOW:
            USBD_TRACE("CP210X_SET_FLOW\r\n");
            break;
        case CP210X_SET_CHARS:
            USBD_TRACE("CP210X_SET_CHARS\r\n");
            break;
        case CP210X_SET_BAUDRATE:
            USBD_TRACE("CP210X_SET_BAUDRATE\r\n");
            break;
        case CP210X_GET_COMM_STATUS:
            /* Serial status: report an empty output queue. */
            USBD_TRACE("CP210X_GET_COMM_STATUS\r\n");
            ssr.ulAmountInOutQueue = 0;
            begin_data_in_stage((uint8_t*)&ssr, sizeof(ssr));
            break;
        case CP210X_GET_PROPS:
            /* Communication properties: fixed capabilities matching the bulk size. */
            USBD_TRACE("CP210X_GET_PROPS\r\n");
            cpr.wLength = sizeof(cp210x_cpr_t);
            cpr.bcdVersion = 0x0100;
            cpr.ulServiceMask = 0x00000001;
            cpr.ulMaxTxQueue = USBD_CP210X_BUCK_SIZE;
            cpr.ulCurrentRxQueue = USBD_CP210X_BUCK_SIZE;
            cpr.ulMaxBaud = 1*1000*1000;
            cpr.ulProvSubType = 0;
            cpr.ulProvCapabilities = 0x00;
            cpr.ulSettableParams = 0x3F;
            cpr.ulSettableBaud = 0xFFFFFFFF;
            cpr.wSettableData = 0xFF;
            cpr.ulCurrentTxQueue = USBD_CP210X_BUCK_SIZE;
            cpr.ulCurrentRxQueue = USBD_CP210X_BUCK_SIZE;
            memcpy(cpr.uniProvName, L"SILABS USB V1.0", sizeof(cpr.uniProvName));
            begin_data_in_stage((uint8_t*)&cpr, cpr.wLength);
            break;
        default:
            USBD_TRACE("Unknown cp210x vender specfic reqeust\r\n");
            break;
    }
    /* Zero-length host-to-device requests still require a status stage. */
    if(h->setup.length == 0 && (h->setup.request_type & 0x80) == 0x00)
    {
        usbd_status_in_stage();
    }
    return 0;
}
/*
 * Drain thread: blocks on the OUT-endpoint semaphore, reads the received USB
 * packet under the shared USB lock, then forwards it to the bridged UART.
 */
void cp_out_thread_entry(void* parameter)
{
    int i, free, size;
    while(1)
    {
        rt_sem_take(cp_out_sem, RT_WAITING_FOREVER);
        /* read data from USB */
        rt_mutex_take(usb_lock, RT_WAITING_FOREVER);
        size = usbd_ep_read(USBD_CP210X_CH0_BULKOUT, cdc_out_buf);
        rt_mutex_release(usb_lock);
        /* NOTE(review): only channel 4 is serviced (i=4; i<5). Presumably the one
           UART wired to this USB channel -- confirm against uart_bridge config. */
        for(i=4; i<5; i++)
        {
            /* get free buffer */
            free = bridge_uart_tx_get_free(i);
            /* poll (with 1-tick sleeps) until the UART TX buffer can hold the packet */
            while(free < size)
            {
                free = bridge_uart_tx_get_free(i);
                rt_thread_delay(1);
            }
            bridge_uart_send(i, cdc_out_buf, size);
        }
    }
}
/*
 * Initializes the CP210x emulation: patches the device descriptor with the
 * CP2102 VID/PID, appends the vendor-specific interface to the configuration
 * descriptor, installs the EP0/data handlers and starts the OUT drain thread.
 */
void usbd_vsc_cp210x_init(struct usbd_t *h)
{
    cp210x.id = cp210x_id_table[0];
    uint8_t *p;
    struct uconfig_descriptor *uconfiguration_descriptor;
    desc_t d;

    /* make descriptor: present as a vendor-specific CP2102 device */
    get_descriptor_data("device_descriptor", &d);
    struct udevice_descriptor* device_desc = (struct udevice_descriptor*)d.buf;
    device_desc->bDeviceClass = 0x00;
    device_desc->bDeviceSubClass = 0x00;
    device_desc->bDeviceProtocol = 0x00;
    device_desc->idVendor = cp210x.id.vid;
    device_desc->idProduct = cp210x.id.pid;

    /* make configuration descriptor */
    get_descriptor_data("configuration_descriptor", &d);
    uconfiguration_descriptor = (struct uconfig_descriptor *)d.buf;
    uconfiguration_descriptor->bLength = USB_DESC_LENGTH_CONFIG;
    uconfiguration_descriptor->type = USB_DESC_TYPE_CONFIGURATION;
    uconfiguration_descriptor->wTotalLength = USB_DESC_LENGTH_CONFIG;
    uconfiguration_descriptor->bNumInterfaces = 1; /* interface count */
    uconfiguration_descriptor->bConfigurationValue = 1;
    uconfiguration_descriptor->iConfiguration = 0;
    uconfiguration_descriptor->bmAttributes = 0x80;    /* bus powered */
    uconfiguration_descriptor->MaxPower = 0x32;        /* 100 mA (units of 2 mA) */

    /* make intf and add to configuation data (wTotalLength grows accordingly) */
    p = uconfiguration_descriptor->data;
    d.buf = cp210x_descriptor;
    d.len = sizeof(cp210x_descriptor);
    memcpy(p, d.buf, d.len);
    p += d.len;
    uconfiguration_descriptor->wTotalLength += d.len;

    /* hook EP0 vendor requests and bulk endpoint events */
    h->vender_request_handler = cp210x_vender_request_handler;
    h->data_ep_handler = cp210x_data_ep_handler;

    /* start the OUT drain thread (stack 512, prio 19, tick 20) */
    rt_thread_t tid;
    cp_out_sem = rt_sem_create("cpout", 0, RT_IPC_FLAG_FIFO);
    tid = rt_thread_create("cpout", cp_out_thread_entry, RT_NULL, 512, 19, 20);
    rt_thread_startup(tid);
}
|
const mongoose = require('mongoose');
const { Schema } = mongoose;
// Login token issued to a user session; `token` is the unique lookup key and
// `timestamp` records when it was created (set by the caller, not defaulted).
const loginTokenSchema = new Schema({
  token: { type: String, required: true, unique: true },
  timestamp: Date
});

const LoginToken = mongoose.model('LoginToken', loginTokenSchema);

module.exports = LoginToken;
|
#!/bin/bash
# Clone one MongoDB database into another (possibly renamed) database by piping
# mongodump straight into mongorestore. The database names are taken from the
# last path segment of each URI.
#
# Usage: $0 <source_mongo_uri> <target_mongo_uri>
set -euo pipefail

if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <source_mongo_uri> <target_mongo_uri>" >&2
    exit 1
fi

MONGO_URI_SOURCE=$1
MONGO_URI_TARGET=$2

# Strip everything up to the last '/' to get the bare database names.
DB_NAME_SOURCE="${MONGO_URI_SOURCE##*/}"
DB_NAME_TARGET="${MONGO_URI_TARGET##*/}"

# --drop replaces existing target collections; --forceTableScan avoids snapshot
# reads so the dump works on cursors that do not support them.
mongodump --uri="${MONGO_URI_SOURCE}" --forceTableScan --archive | mongorestore --uri="${MONGO_URI_TARGET}" --archive --nsInclude="${DB_NAME_SOURCE}.*" --nsFrom="${DB_NAME_SOURCE}.*" --nsTo="${DB_NAME_TARGET}.*" --drop
#!/bin/bash
# Emit (to stdout) a unified-diff patch that clones the stock kube-proxy
# DaemonSet into an ARM-specific variant: renames it, swaps in the arm image and
# pins it to arm nodes via a nodeSelector. KUBERNETES_VERSION is expanded from
# the sourced rdbox environment, since the heredoc delimiter is unquoted.
source ${HOME}/.bashrc.rdbox-hq
cat << EoPatch
--- kube-proxy.yml.orig 2019-01-08 01:48:16.336980589 +0000
+++ kube-proxy-arm.yml 2019-01-08 01:52:14.696980589 +0000
@@ -3,7 +3,7 @@ kind: DaemonSet
metadata:
labels:
k8s-app: kube-proxy
- name: kube-proxy
+ name: kube-proxy-arm
namespace: kube-system
spec:
selector:
@@ -27,7 +27,7 @@ spec:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- image: k8s.gcr.io/kube-proxy:v${KUBERNETES_VERSION}
+ image: k8s.gcr.io/kube-proxy-arm:v${KUBERNETES_VERSION}
imagePullPolicy: IfNotPresent
name: kube-proxy
resources: {}
@@ -46,6 +46,8 @@ spec:
readOnly: true
dnsPolicy: ClusterFirst
hostNetwork: true
+ nodeSelector:
+ beta.kubernetes.io/arch: arm
priorityClassName: system-node-critical
restartPolicy: Always
schedulerName: default-scheduler
EoPatch
|
def parse_configuration(config: list) -> dict:
    """Extract model/dataset settings from a positional configuration list.

    Args:
        config: list whose first element is the model architecture and whose
            second element is the dataset configuration.

    Returns:
        dict with keys ``model_architecture``, ``dataset_config`` and
        ``checkpoint_url`` (the module-global ``checkpoint`` if defined,
        otherwise ``None``).

    Raises:
        ValueError: if ``config`` has fewer than two elements.
    """
    # Fail fast with a clear message instead of an opaque IndexError.
    if len(config) < 2:
        raise ValueError(
            "config must contain at least [model_architecture, dataset_config], "
            f"got {len(config)} element(s)"
        )
    return {
        'model_architecture': config[0],
        'dataset_config': config[1],
        # NOTE(review): relies on a module-global `checkpoint` being defined by
        # the caller's environment -- fragile; consider passing it explicitly.
        'checkpoint_url': globals().get('checkpoint', None),
    }
package it.isislab.swiftlang.abm.netlogo;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import org.nlogo.api.CompilerException;
import org.nlogo.api.LogoException;
import org.nlogo.headless.HeadlessWorkspace;
import scala.util.Random;
/**
@Author(
name = "<NAME>",
date = "05/02/2016"
)
*/
/**
 * Headless NetLogo runner. Parses command-line options, executes a model for a
 * number of trials with random seeds, accumulates the requested reporter values
 * across trials (numbers summed, strings concatenated), and writes the averaged
 * results as a one-row CSV to the output file.
 */
public class NetLogoWrapper
{
    HeadlessWorkspace workspace;

    @Option(name="-m",usage="netlogo model path")
    private String model_path;

    @Option(name="-outfile",usage="output to this file",metaVar="OUTPUT")
    private File out = new File(".");

    @Option(name="-trial",usage="number of runs")
    private Integer trial;

    @Option(name="-runid",usage="run identify")
    private String id;

    @Option(name="-s",usage="number of steps")
    private Integer steps;

    @Option(name="-i",usage="input list: var1,value1,var2,value2")
    private String input;

    @Option(name="-o",usage="output list: var1,value1,var2,value2")
    private String output;

    // Accumulated reporter values across runs, keyed by reporter name.
    HashMap<String, Object> outputs=new HashMap<String, Object>();

    // Spinner flag: true while a run is in progress (read by PrintWait).
    private boolean toPrint=true;

    /**
     * Console spinner thread. Animates while a run executes; when {@code toPrint}
     * goes false it also folds the run's reporter values into {@code outputs}
     * (String values concatenated with '-', Integer/Double values summed).
     */
    class PrintWait extends Thread{
        @Override
        public void run() {
            String[] phases = {"|", "/", "-", "\\"};
            while (toPrint)
            {
                for (String phase : phases)
                {
                    System.out.print(("\r"+phase));
                    try {
                        Thread.sleep(50);
                    } catch (InterruptedException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
            try {
                String[] arguments_output=output.split(",");
                for (int i = 0; i < arguments_output.length; i+=1) {
                    Object outvalue=outputs.get(arguments_output[i]);
                    if(outvalue==null)
                        outputs.put(arguments_output[i], workspace.report(arguments_output[i]));
                    else{
                        // Type probing via casts: try String, then Integer, then Double.
                        try{
                            String sout=(String)outvalue;
                            sout+="-"+workspace.report(arguments_output[i]);
                            outputs.put(arguments_output[i], sout);
                        }catch(Exception e1)
                        {
                            try{
                                Integer sout=(Integer)outvalue;
                                sout+=(Integer)workspace.report(arguments_output[i]);
                                outputs.put(arguments_output[i], sout);
                            }catch(Exception e2)
                            {
                                try{
                                    Double sout=(Double)outvalue;
                                    sout+=(Double)workspace.report(arguments_output[i]);
                                    outputs.put(arguments_output[i], sout);
                                }catch(Exception e3)
                                {
                                    e3.getStackTrace();
                                }
                            }
                        }
                    }
                }
            } catch (CompilerException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (LogoException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }

    public static void main(String[] args) throws IOException {
        new NetLogoWrapper().doMain(args);
    }

    /**
     * Parses options, runs the model for {@code trial} runs of {@code steps}
     * ticks each, then writes the averaged reporter values as CSV.
     */
    public void doMain(String[] args) throws IOException {
        CmdLineParser parser = new CmdLineParser(this);
        try {
            parser.parseArgument(args);
            if( input.isEmpty() )
                throw new CmdLineException(parser,"No model paramters in input is given");
        } catch( CmdLineException e ) {
            System.err.println(e.getMessage());
            System.err.println("java NetLogoWrapper [options...] arguments...");
            parser.printUsage(System.err);
            System.err.println();
            return;
        }
        System.out.println("NetLogo model: "+model_path);
        System.out.println("Output file: "+ out);
        Random r=new Random(System.currentTimeMillis());
        // Parse "-i var1,value1,var2,value2" into name->value pairs.
        String[] arguments=input.split(",");
        HashMap<String, String> parameter=new HashMap<String, String>();
        System.out.println("Model parameters:");
        for (int i = 0; i < arguments.length; i+=2) {
            try{
                System.out.println(arguments[i]+" "+arguments[i+1]);
                parameter.put(arguments[i], arguments[i+1]);
            }catch (Exception e) {
                System.out.println("java NetLogoWrapper [options...] arguments...");
                System.out.println("You must pass parameters setting as couple: var_name1 value1 var_name2 vaue2 ...");
                // print the list of available options
                parser.printUsage(System.err);
                System.exit(-1);
            }
        }
        System.out.println("Start simulation: ");
        workspace = HeadlessWorkspace.newInstance() ;
        try {
            workspace.open(model_path);
            for (int i = 0; i < trial; i++) {
                toPrint=true;
                int seed=r.nextInt();
                System.out.println("Run "+i+" with seed: "+seed);
                // Push each parameter into the model, seed it, and run.
                for(String variable_name: parameter.keySet())
                {
                    workspace.command("set "+variable_name+" "+parameter.get(variable_name));
                }
                workspace.command("random-seed "+seed);
                workspace.command("setup");
                PrintWait waiter= new PrintWait();
                waiter.start();
                workspace.command("repeat "+steps+" [ go ]") ;
                // Stopping the spinner makes PrintWait collect this run's outputs.
                toPrint=false;
                waiter.join();
                System.out.println("End run "+i);
            }
            workspace.dispose();
            System.out.println("\nOutput parameters:");
            PrintWriter print_output;
            try {
                print_output = new PrintWriter(out);
                String[] arguments_output=output.split(",");
                // CSV header: run id, tick count, then one column per reporter.
                print_output.print("\"run\",\"tick\"");
                for (int i = 0; i < arguments_output.length; i+=1) {
                    print_output.print(",\""+arguments_output[i]+"\"");
                }
                print_output.print("\n");
                print_output.print(id+","+steps);
                // Numeric outputs are averaged over the trials; strings written as-is.
                for (int i = 0; i < arguments_output.length; i+=1) {
                    if(outputs.get(arguments_output[i]) instanceof String)
                    {
                        print_output.print(","+outputs.get(arguments_output[i]));
                    }
                    else{
                        if(outputs.get(arguments_output[i]) instanceof Integer)
                        {
                            System.out.println(arguments_output[i]+" "+((Integer)outputs.get(arguments_output[i])/trial));
                            print_output.print(","+((Integer)outputs.get(arguments_output[i])/trial));
                        }
                        else{
                            System.out.println(arguments_output[i]+" "+((Double)outputs.get(arguments_output[i])/trial));
                            print_output.print(","+((Double)outputs.get(arguments_output[i])/trial));
                        }
                    }
                }
                print_output.print("\n");
                print_output.close();
            } catch (FileNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        catch(Exception ex) {
            ex.printStackTrace();
        }
    }
}
package Partition_Labels;
import sun.nio.cs.ext.MacHebrew;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.PriorityQueue;
/**
 * LeetCode 763 "Partition Labels": split S into as many parts as possible so
 * that each letter appears in at most one part; return the part sizes in order.
 */
public class Solution {

    /**
     * Greedy single-pass solution (replaces an earlier interval-merge variant):
     * record the last index of every letter, then extend the current partition's
     * end {@code j} to the furthest last-occurrence seen so far; when the scan
     * index reaches {@code j}, the partition is complete.
     *
     * Runs in O(n) time and O(1) extra space.
     *
     * @param S input string of lowercase letters 'a'..'z'
     * @return list of partition sizes, or {@code null} when S is null or empty
     */
    public List<Integer> partitionLabels(String S) {
        if (S == null || S.length() == 0) {
            return null;
        }
        List<Integer> res = new ArrayList<>();

        // last[c] = index of the final occurrence of letter c in S.
        int[] last = new int[26];
        for (int i = 0; i < S.length(); i++) {
            last[S.charAt(i) - 'a'] = i;
        }

        // j: end of the current partition; anchor: its start.
        int j = 0, anchor = 0;
        for (int i = 0; i < S.length(); i++) {
            j = Math.max(j, last[S.charAt(i) - 'a']);
            if (i == j) {
                res.add(i - anchor + 1);
                anchor = i + 1;
            }
        }
        return res;
    }

    /** Simple closed interval [start, end]; retained from the interval-merge variant. */
    class Interval {
        int start;
        int end;

        public Interval(int start, int end) {
            this.start = start;
            this.end = end;
        }

        @Override
        public String toString() {
            return String.format("(%d, %d)", start, end);
        }
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.partitionLabels("ababcbacadefegdehijhklij")); // [9,7,8]
    }
}
echo "Compiling Neutralinojs..."

# Remove any stale binary so the existence check below reflects this build only.
if [ -e bin/neutralino-linux ]; then
    rm bin/neutralino-linux
fi

# Single-step compile+link of all sources against GTK/WebKit; logging defaults
# disabled (ELPP_NO_DEFAULT_LOG_FILE) and webview forced onto the GTK backend.
g++ resources.cpp \
    helpers.cpp \
    main.cpp \
    server/router.cpp \
    server/neuserver.cpp \
    server/ping.cpp \
    settings.cpp \
    auth/authbasic.cpp \
    auth/permission.cpp \
    lib/boxer/boxer_linux.cpp \
    lib/easylogging/easylogging++.cc \
    platform/linux/platform.cpp \
    api/filesystem/filesystem.cpp \
    api/os/os.cpp \
    api/computer/computer.cpp \
    api/debug/debug.cpp \
    api/storage/storage.cpp \
    api/app/app.cpp \
    api/window/window.cpp \
    -pthread \
    -std=c++17 \
    -DELPP_NO_DEFAULT_LOG_FILE=1 \
    -DWEBVIEW_GTK=1 \
    `pkg-config --cflags --libs gtk+-3.0 webkit2gtk-4.0 glib-2.0 appindicator3-0.1` \
    -o bin/neutralino-linux \
    -no-pie \
    -Os \
    -I .

# Report success/failure based on whether the binary was produced.
if [ -e bin/neutralino-linux ]; then
    echo "OK: Neutralino binary is compiled in to bin/neutralino-linux"
else
    echo "ERR: Neutralino binary is not compiled"
fi
#!/bin/bash
# Launcher for rocketmq-streams jobs. Requires bash (not POSIX sh): the script
# uses BASH_SOURCE and array variables, which /bin/sh does not support.
#
# Usage: start.sh [job_namespace] [job_names] [jvm_config]
set -e

PROG_NAME=$0
JOB_NAMESPACE=$1
JOB_NAMES=$2
JVM_CONFIG=$3

# Default JVM sizing when the caller does not supply one.
if [ -z "${JVM_CONFIG}" ]; then
    JVM_CONFIG="-Xms2048m -Xmx2048m -Xss512k"
fi

# Resolve the install root relative to this script's location.
ROCKETMQ_STREAMS_HOME=$(cd $(dirname ${BASH_SOURCE[0]})/..; pwd)
ROCKETMQ_STREAMS_JOBS_DIR=$ROCKETMQ_STREAMS_HOME/jobs
ROCKETMQ_STREAMS_CONFIGURATION=$ROCKETMQ_STREAMS_HOME/conf
ROCKETMQ_STREAMS_EXT=$ROCKETMQ_STREAMS_HOME/ext
ROCKETMQ_STREAMS_DEPENDENCIES=$ROCKETMQ_STREAMS_HOME/lib
ROCKETMQ_STREAMS_LOGS=$ROCKETMQ_STREAMS_HOME/log/catalina.out

# Prefer JAVA_HOME's java when set; fall back to java on PATH.
if [ -z "${JAVA_HOME:-}" ]; then
    JAVA="java -server"
else
    JAVA="$JAVA_HOME/bin/java -server"
fi

# Assemble JVM options: caller-provided JAVA_OPTIONS, then sizing, then log config.
JAVA_OPTIONS=${JAVA_OPTIONS:-}
JVM_OPTS=()
if [ -n "${JAVA_OPTIONS}" ]; then
    JVM_OPTS+=("${JAVA_OPTIONS}")
fi
if [ -n "${JVM_CONFIG}" ]; then
    JVM_OPTS+=("${JVM_CONFIG}")
fi
JVM_OPTS+=( "-Dlog4j.configuration=$ROCKETMQ_STREAMS_CONFIGURATION/log4j.xml" )

# shellcheck disable=SC2068
# shellcheck disable=SC2039
# Dispatch on how many selectors were given: namespace+names, namespace only, or all jobs.
if [ -n "${JOB_NAMES}" ] && [ -n "${JOB_NAMESPACE}" ]; then
    eval exec $JAVA ${JVM_OPTS[@]} -classpath "$ROCKETMQ_STREAMS_DEPENDENCIES/*:$ROCKETMQ_STREAMS_EXT/*:$ROCKETMQ_STREAMS_CONFIGURATION/*" org.apache.rsqldb.runner.StartAction $ROCKETMQ_STREAMS_JOBS_DIR $JOB_NAMESPACE $JOB_NAMES "&" >>"$ROCKETMQ_STREAMS_LOGS" 2>&1
elif [ -n "${JOB_NAMESPACE}" ]; then
    eval exec $JAVA ${JVM_OPTS[@]} -classpath "$ROCKETMQ_STREAMS_DEPENDENCIES/*:$ROCKETMQ_STREAMS_EXT/*:$ROCKETMQ_STREAMS_CONFIGURATION/*" org.apache.rsqldb.runner.StartAction $ROCKETMQ_STREAMS_JOBS_DIR $JOB_NAMESPACE "&" >>"$ROCKETMQ_STREAMS_LOGS" 2>&1
else
    eval exec $JAVA ${JVM_OPTS[@]} -classpath "$ROCKETMQ_STREAMS_DEPENDENCIES/*:$ROCKETMQ_STREAMS_EXT/*:$ROCKETMQ_STREAMS_CONFIGURATION/*" org.apache.rsqldb.runner.StartAction $ROCKETMQ_STREAMS_JOBS_DIR "&" >>"$ROCKETMQ_STREAMS_LOGS" 2>&1
fi
|
<reponame>langbl28/gatsby-hello
import React from "react"
import SEO from "../components/seo"
import { StaticImage } from "gatsby-plugin-image"
// Landing page: art-directed hero image (square crop on small screens,
// full-ratio otherwise) next to the intro copy and resume/contact links.
export default function Home() {
  return (
    <section>
      <SEO title="UX/UI Portfolio" />
      <div className="container mt-5">
        <div className="row">
          <div className="col-12 col-sm-6">
            {/* Square-cropped variant, hidden on small viewports */}
            <StaticImage
              src="https://images.pexels.com/photos/4588435/pexels-photo-4588435.jpeg"
              aspectRatio={1 / 1}
              alt="a dog"
              className="hidden-sm mb-5"
            />
            {/* Full-ratio variant, hidden on small-and-down viewports */}
            <StaticImage
              src="https://images.pexels.com/photos/4588435/pexels-photo-4588435.jpeg"
              alt="a dog"
              className="hidden-sm-down"
            />
          </div>
          <div className="col-12 col-sm-6 align-self-center">
            <h1 className="mb-5">Green Bay Print Shop</h1>
            <h2 className="h5 mb-8">Creating Shirts</h2>
            <p>
              I’m a UI and UX designer based in Green Bay, Wisconsin. I'm
              passionate about creating empathic digital experiences. For me,
              it's all about connecting with users in a meaningful way.
            </p>
            <p className="mt-8">
              {/* Fixed: JSX requires `className`; a raw `class` attribute is
                  dropped by React and triggers a console warning. */}
              <a className="btn mr-3" href="#">
                Resume
              </a>
              <a className="btn" href="#">
                Contact
              </a>
            </p>
          </div>
        </div>
      </div>
    </section>
  )
}
|
<reponame>vanja-san/x-minecraft-launcher
const core = require('@actions/core');

// True when running outside CI (no CI env var). NOTE(review): currently unused.
const DRY = !process.env.CI;

// Publishes the GitHub Actions run number as the `build_number` step output.
async function main(output) {
    output('build_number', process.env.GITHUB_RUN_NUMBER);
}

// Use the Actions output helper when available; otherwise just log the pair.
main(core ? core.setOutput : (k, v) => {
    console.log(k)
    console.log(v)
});
|
<reponame>Hi2129/Wonton_master
import React, { Component } from 'react';
import { Button } from 'reactstrap'
import './RotaryButton.css';
/**
 * Rotary-knob widget: a read-only display flanked by "+" and "-" buttons.
 * Props: `value` (shown in the display) and `onCheckClick(op)`, invoked with
 * "+" or "-" when the corresponding button is pressed.
 */
export class RotaryButtonCore extends Component {
    static defaultProps = {
        name: "旋钮",
        ClassName: "RotaryButton",
        size: [3, 6, 3, 6]
    }

    render() {
        return (
            <div>
                {/* Display is disabled: the value is changed only via the buttons. */}
                <Button id="display" disabled>{this.props.value}</Button>
                <Button id="button1" onClick={() => { this.props.onCheckClick("+") }}>+</Button>
                <Button id="button2" onClick={() => { this.props.onCheckClick("-") }}>-</Button>
            </div>
        );
    }
}
|
// Returns the nesting depth of an object: 1 for an empty or flat object,
// 1 + the deepest child depth otherwise.
// Fixed: `typeof null === 'object'`, so the original recursed into null values
// and crashed on Object.keys(null); null values are now treated as leaves.
function deepestLevel(obj) {
  let depth = 0;
  const keys = Object.keys(obj);
  if (!keys.length) return 1;
  for (let i = 0; i < keys.length; i++) {
    const value = obj[keys[i]];
    if (value !== null && typeof value === 'object') {
      const tempdepth = deepestLevel(value);
      if (tempdepth > depth) {
        depth = tempdepth;
      }
    }
  }
  return 1 + depth;
}
-- Top 5 countries by purchase count (each row in `purchases` is one toy sold).
SELECT country, COUNT(*) AS number_of_toys_sold
FROM purchases
GROUP BY country
ORDER BY number_of_toys_sold DESC
LIMIT 5;
<gh_stars>0
package dynamicGeneration;
import java.util.ArrayList;
import java.util.List;
import dynamicGeneration.structures.CssProp;
/** Builds the base CSS rules shared by ".dm" overlay elements. */
public class GeneralProperties {

    /**
     * Returns the rule list for the ".dm" element and its ::before/::after
     * pseudo-elements (both rendered as absolutely positioned black blocks).
     *
     * @return mutable list of CSS property rules
     */
    public static List<CssProp> all(){
        // Selector literals hoisted to locals so each appears exactly once.
        final String dm = ".dm";
        final String before = ".dm:before";
        final String after = ".dm:after";

        List<CssProp> rules = new ArrayList<>();
        rules.add(new CssProp(dm, "position", "relative"));
        rules.add(new CssProp(before, "content", "''"));
        rules.add(new CssProp(before, "background", "black"));
        rules.add(new CssProp(before, "display", "block"));
        rules.add(new CssProp(before, "position", "absolute"));
        rules.add(new CssProp(before, "z-index", "1"));
        rules.add(new CssProp(after, "display", "block"));
        rules.add(new CssProp(after, "background", "black"));
        rules.add(new CssProp(after, "content", "''"));
        rules.add(new CssProp(after, "position", "absolute"));
        return rules;
    }
}
|
import time
import queue
from codetiming import Timer
def task(name, queue):
    """Cooperative worker: pull delay values from `queue`, sleep for each, yield between items.

    Implemented as a generator so a simple scheduler can interleave tasks by
    calling next() on each in turn (non-preemptive multitasking demo).
    """
    timer = Timer(text=f"Task {name} elapsed time: {{:.1f}}")
    while not queue.empty():
        delay = queue.get()
        print(f"Task {name} running")
        timer.start()
        # Synchronous sleep: blocks the whole program, not just this task.
        time.sleep(delay)
        timer.stop()
        # Hand control back to the scheduler after each work item.
        yield
def main():
    """
    This is the main entry point for the program
    """
    # Create the queue of work
    work_queue = queue.Queue()

    # Put some work in the queue
    for work in [15, 10, 5, 2]:
        work_queue.put(work)

    tasks = [task("One", work_queue), task("Two", work_queue)]

    # Run the tasks round-robin: each next() advances one task by one work item.
    done = False
    with Timer(text="\nTotal elapsed time: {:.1f}"):
        while not done:
            for t in tasks:
                try:
                    next(t)
                except StopIteration:
                    # Task exhausted its work; drop it from the rotation.
                    # NOTE(review): removing from `tasks` while iterating it skips
                    # the following task for one round -- harmless with two tasks,
                    # but worth confirming if more are added.
                    tasks.remove(t)
                    if len(tasks) == 0:
                        done = True
|
import { Component, OnInit, EventEmitter} from '@angular/core';
/**
 * Pagination widget: exposes the current item window (firstItem/lastItem) and
 * emits the selected page number through `pageChange`. Inputs: totalNbItems,
 * nbItemsPerPage and startPage (aliased to currentPage).
 */
@Component({
    selector: 'kpagination',
    templateUrl: './kpagination.html',
    styleUrls: ['./kpagination.css'],
    inputs: ['totalNbItems', 'nbItemsPerPage', 'currentPage:startPage'],
    outputs: ['pageChange']
})
export class KPagination implements OnInit {
    private _totalNbItems: number;
    private _nbItemsPerPage: number;
    currentPage = 1;
    private _nbPages: number;
    nbPagesSet: number[];
    // 1-based indexes of the first/last item visible on the current page.
    firstItem = 0;
    lastItem = 0;
    pageChange: EventEmitter<number> = new EventEmitter();

    constructor() {}

    // Utils

    // Recomputes firstItem/lastItem from currentPage and the page size.
    private updateItemsIndexesFromPage(): void {
        this.firstItem = this.nbItemsPerPage * (this.currentPage - 1) + 1;
        this.lastItem = Math.min(this.nbItemsPerPage * this.currentPage, this.totalNbItems);
    }

    // Recomputes the page count (once both inputs are set) and the item window.
    private updateComputedAttrs(): void {
        if (this._totalNbItems && this._nbItemsPerPage) {
            this.nbPages = Math.ceil(this._totalNbItems / this._nbItemsPerPage);
        }
        this.updateItemsIndexesFromPage();
    }

    // Attributes pointcuts: setters keep derived state in sync when inputs change.
    set totalNbItems(val: number) {
        this._totalNbItems = val;
        this.updateComputedAttrs();
    }
    get totalNbItems(): number { return this._totalNbItems; }

    set nbItemsPerPage(val: number) {
        this._nbItemsPerPage = val;
        this.updateComputedAttrs();
    }
    get nbItemsPerPage(): number { return this._nbItemsPerPage; }

    set nbPages(val: number) {
        this._nbPages = val;
    }
    get nbPages() {
        return this._nbPages;
    }

    // lifecycle
    ngOnInit() {
        this.updateComputedAttrs();
    }

    // ######### View Methods
    // ##### Go To Page

    // NOTE(review): switchPage does not clamp `page` to [1, nbPages]; the other
    // switch* helpers do the clamping before calling it.
    switchPage = (page: number): boolean => {
        this.currentPage = page;
        this.updateItemsIndexesFromPage();
        this.pageChange.next(page);
        // Disable standard link behavior
        return false;
    }

    switchToFirst = (): boolean => {
        return this.switchPage(1);
    }

    switchToPrev = (): boolean => {
        const prev = Math.max(1, this.currentPage - 1);
        return this.switchPage(prev);
    }

    switchToNext = (): boolean => {
        const next = Math.min(this.nbPages, this.currentPage + 1);
        return this.switchPage(next);
    }

    switchToLast = (): boolean => {
        return this.switchPage(this.nbPages);
    }

    // ##### Get appropriated image (arrows render greyed-out at the range ends)
    getFirstPageImage = () => 'pic_first' + (this.currentPage === 1 ? '_off' : '') + '.gif';
    getPrevPageImage = () => 'pic_prev' + (this.currentPage === 1 ? '_off' : '') + '.gif';
    getNextPageImage = () => 'pic_next' + (this.currentPage === this.nbPages ? '_off' : '') + '.gif';
    getLastPageImage = () => 'pic_last' + (this.currentPage === this.nbPages ? '_off' : '') + '.gif';
}
|
-- ***************************************************************************
-- File: 12_34.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
SPOOL 12_34.lis
DECLARE
  -- Figures returned by DBMS_SPACE.UNUSED_SPACE for one table segment.
  v_blocks_total   PLS_INTEGER;
  v_bytes_total    PLS_INTEGER;
  v_blocks_unused  PLS_INTEGER;
  v_bytes_unused   PLS_INTEGER;
  v_last_ext_file  PLS_INTEGER;
  v_last_ext_block PLS_INTEGER;
  v_last_block     PLS_INTEGER;
BEGIN
  -- Report header; column layout matches the RPAD/TO_CHAR formats below.
  DBMS_OUTPUT.PUT_LINE(
    'Current Blocks Bytes');
  DBMS_OUTPUT.PUT_LINE(
    'Schema Table Name Total Unused Total' ||
    ' Unused');
  DBMS_OUTPUT.PUT_LINE('------------------' ||
    '--------------------------------------------------------');
  -- Walk every table owned by the current user, alphabetically.
  FOR rec IN (SELECT table_name
                FROM user_tables
               ORDER BY table_name) LOOP
    DBMS_SPACE.UNUSED_SPACE(USER, rec.table_name, 'TABLE',
      v_blocks_total, v_bytes_total,
      v_blocks_unused, v_bytes_unused,
      v_last_ext_file, v_last_ext_block, v_last_block);
    -- One fixed-width report line per table.
    DBMS_OUTPUT.PUT_LINE(RPAD(USER,15) ||
      RPAD(rec.table_name,15) ||
      TO_CHAR(v_blocks_total, '999,999') ||
      TO_CHAR(v_blocks_unused, '999,999') ||
      TO_CHAR(v_bytes_total, '999,999,999') ||
      TO_CHAR(v_bytes_unused, '999,999,999'));
  END LOOP;
END;
/
SPOOL OFF
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.