text
stringlengths 1
1.05M
|
|---|
import React, { useState } from 'react';
import {
View,
Text,
Button,
FlatList,
} from 'react-native';
const App = () => {
const [diningList, setDiningList] = useState([]);
const addToList = () => {
setDiningList(diningList.concat({
key: Math.random().toString(),
name: 'Name of food',
isConsumed: false
}));
};
const removeFromList = (key) => {
setDiningList(diningList.filter(item => item.key !== key));
};
const markAsConsumed = (key) => {
setDiningList(diningList.map(item => {
if (item.key === key) {
item.isConsumed = true;
}
return item;
}));
};
return (
<View>
<Button title="Add" onPress={addToList} />
<FlatList
data={diningList}
renderItem={({ item }) => (
<View>
<Text>{item.name}</Text>
<Button
title="Remove"
onPress={() => {
removeFromList(item.key);
}}
/>
<Button
title={item.isConsumed ? 'Mark as Not Consumed' :
'Mark as Consumed'}
onPress={() => {
markAsConsumed(item.key);
}}
/>
</View>
)}
/>
</View>
);
};
export default App;
|
<filename>src/main/java/Grafic/MainWindow.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Grafic;
import java.awt.event.ActionEvent;
import java.awt.event.ComponentEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.util.Iterator;
import java.util.Set;
import javax.swing.JOptionPane;
import javax.swing.table.DefaultTableModel;
import javier.oobjetos.Pasajeros;
import javier.oobjetos.Tripulacion;
import javier.oobjetos.Vuelo;
import org.neodatis.odb.ODB;
import org.neodatis.odb.ODBFactory;
import org.neodatis.odb.Objects;
import org.neodatis.odb.core.query.IQuery;
import org.neodatis.odb.core.query.criteria.Where;
import org.neodatis.odb.impl.core.query.criteria.CriteriaQuery;
import org.neodatis.odb.ODBServer;
/**
*
* @author <NAME>
*/
public class MainWindow extends javax.swing.JFrame implements WindowListener {
/**
* Creates new form MainWindow
*/
// Creates the main window: builds the generated UI, then boots an embedded
// NeoDatis ODB server and connects a same-process client to it.
// NOTE(review): port 8000 and the database file path are hard-coded; no
// error handling if the port is in use or the file is unwritable — confirm
// whether startup failures should be surfaced to the user.
public MainWindow() {
initComponents();
// Open an embedded NeoDatis server listening on port 8000.
server = ODBFactory.openServer(8000);
// Register the logical base name "basedatos" backed by ./vuelos.neo
// (relative to the working directory).
server.addBase("basedatos", "./vuelos.neo");
// presumably 'true' runs the server in its own thread — verify against
// the NeoDatis ODBServer.startServer API.
server.startServer(true);
// In-process client connection used by the rest of this window.
odb = server.openClient("basedatos");
}
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jTabbedPanePrincipal = new javax.swing.JTabbedPane();
jPanelTripulacion = new javax.swing.JPanel();
jPanel3 = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
jLabel4 = new javax.swing.JLabel();
jTextFieldCodigoTripulacion = new javax.swing.JTextField();
jTextFieldNombreTripulacion = new javax.swing.JTextField();
jTextFieldcategoriaTripulacion = new javax.swing.JTextField();
jButtonConsultarTripulacion = new javax.swing.JButton();
jButtonModificarTripulacion = new javax.swing.JButton();
jButtoninsertarTripulacion = new javax.swing.JButton();
jButtonBorrarTripulacion = new javax.swing.JButton();
jPanel4 = new javax.swing.JPanel();
jButtonAceptarTripulacion = new javax.swing.JButton();
jButtonCancelarTripulacion = new javax.swing.JButton();
jPanelPasajeros = new javax.swing.JPanel();
jPanel5 = new javax.swing.JPanel();
jLabel5 = new javax.swing.JLabel();
jLabel6 = new javax.swing.JLabel();
jLabel7 = new javax.swing.JLabel();
jLabel8 = new javax.swing.JLabel();
jTextFieldCodigoPasajeros = new javax.swing.JTextField();
jTextFieldNombrePasajeros = new javax.swing.JTextField();
jTextFieldTelefonoPasajeros = new javax.swing.JTextField();
jButtonConsultarPasajeros = new javax.swing.JButton();
jButtonModificarPasajeros = new javax.swing.JButton();
jButtoninsertarPasajeros = new javax.swing.JButton();
jButtonBorrarPasajeros = new javax.swing.JButton();
jPanel6 = new javax.swing.JPanel();
jButtonAceptarPasajeros = new javax.swing.JButton();
jButtonCancelarPasajeros = new javax.swing.JButton();
jLabel9 = new javax.swing.JLabel();
jTextFieldDireccionPasajeros = new javax.swing.JTextField();
jPanel1 = new javax.swing.JPanel();
jPanel7 = new javax.swing.JPanel();
jLabel10 = new javax.swing.JLabel();
jLabel11 = new javax.swing.JLabel();
jLabel12 = new javax.swing.JLabel();
jLabel13 = new javax.swing.JLabel();
jTextFieldIdentificadorVuelos = new javax.swing.JTextField();
jTextFieldOrigenVuelos = new javax.swing.JTextField();
jTextFieldDestinoVuelos = new javax.swing.JTextField();
jButtonConsultarVuelos = new javax.swing.JButton();
jButtonModificarVuelos = new javax.swing.JButton();
jButtoninsertarVuelos = new javax.swing.JButton();
jButtonBorrarVuelos = new javax.swing.JButton();
jPanel8 = new javax.swing.JPanel();
jButtonAceptarVuelos = new javax.swing.JButton();
jButtonCancelarVuelos = new javax.swing.JButton();
jButtonGestionTripulantes = new javax.swing.JButton();
jButtonGestionPasajeros = new javax.swing.JButton();
jPanel2 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
jTableTripulacion = new javax.swing.JTable();
jScrollPane1 = new javax.swing.JScrollPane();
jTablePasajeros = new javax.swing.JTable();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
formComponentShown(evt);
}
});
addWindowListener(new java.awt.event.WindowAdapter() {
public void windowClosed(java.awt.event.WindowEvent evt) {
formWindowClosed(evt);
}
});
jPanelTripulacion.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jPanelTripulacionComponentShown(evt);
}
});
jPanel3.setBorder(javax.swing.BorderFactory.createEtchedBorder());
jLabel1.setText("Datos de Tripulante");
jLabel2.setText("Código:");
jLabel3.setText("Nombre:");
jLabel4.setText("Categoría:");
jButtonConsultarTripulacion.setText("Consultar");
jButtonConsultarTripulacion.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonConsultarTripulacionComponentShown(evt);
}
});
jButtonConsultarTripulacion.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonConsultarTripulacionActionPerformed(evt);
}
});
jButtonModificarTripulacion.setText("Modificar");
jButtonModificarTripulacion.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonModificarTripulacionComponentShown(evt);
}
});
jButtonModificarTripulacion.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonModificarTripulacionActionPerformed(evt);
}
});
jButtoninsertarTripulacion.setText("Insertar");
jButtoninsertarTripulacion.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtoninsertarTripulacionActionPerformed(evt);
}
});
jButtonBorrarTripulacion.setText("Borrar");
jButtonBorrarTripulacion.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonBorrarTripulacionActionPerformed(evt);
}
});
jButtonAceptarTripulacion.setText("Aceptar");
jButtonAceptarTripulacion.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonAceptarTripulacionComponentShown(evt);
}
});
jButtonAceptarTripulacion.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonAceptarTripulacionActionPerformed(evt);
}
});
jButtonCancelarTripulacion.setText("Cancelar");
jButtonCancelarTripulacion.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonCancelarTripulacionActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4);
jPanel4.setLayout(jPanel4Layout);
jPanel4Layout.setHorizontalGroup(
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel4Layout.createSequentialGroup()
.addGap(31, 31, 31)
.addComponent(jButtonAceptarTripulacion)
.addGap(18, 18, 18)
.addComponent(jButtonCancelarTripulacion)
.addContainerGap(51, Short.MAX_VALUE))
);
jPanel4Layout.setVerticalGroup(
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel4Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonAceptarTripulacion)
.addComponent(jButtonCancelarTripulacion))
.addContainerGap(15, Short.MAX_VALUE))
);
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel1)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addComponent(jLabel3)
.addGap(16, 16, 16))
.addGroup(jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel2)
.addComponent(jLabel4))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)))
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jTextFieldcategoriaTripulacion)
.addComponent(jTextFieldNombreTripulacion, javax.swing.GroupLayout.DEFAULT_SIZE, 271, Short.MAX_VALUE)
.addComponent(jTextFieldCodigoTripulacion))))
.addGap(18, 18, 18)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jButtonModificarTripulacion, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonConsultarTripulacion))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jButtoninsertarTripulacion, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonBorrarTripulacion, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(jPanel3Layout.createSequentialGroup()
.addGap(70, 70, 70)
.addComponent(jPanel4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldCodigoTripulacion, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel2))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldNombreTripulacion, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel3))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldcategoriaTripulacion, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel4)))
.addGroup(jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonConsultarTripulacion)
.addComponent(jButtoninsertarTripulacion))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonModificarTripulacion)
.addComponent(jButtonBorrarTripulacion))))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jPanel4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(41, Short.MAX_VALUE))
);
javax.swing.GroupLayout jPanelTripulacionLayout = new javax.swing.GroupLayout(jPanelTripulacion);
jPanelTripulacion.setLayout(jPanelTripulacionLayout);
jPanelTripulacionLayout.setHorizontalGroup(
jPanelTripulacionLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanelTripulacionLayout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, 544, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(110, Short.MAX_VALUE))
);
jPanelTripulacionLayout.setVerticalGroup(
jPanelTripulacionLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanelTripulacionLayout.createSequentialGroup()
.addGap(15, 15, 15)
.addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(381, Short.MAX_VALUE))
);
jTabbedPanePrincipal.addTab("Tripulación", jPanelTripulacion);
jPanel5.setBorder(javax.swing.BorderFactory.createEtchedBorder());
jLabel5.setText("Datos de Pasajero");
jLabel6.setText("Código:");
jLabel7.setText("Nombre:");
jLabel8.setText("Teléfono:");
jButtonConsultarPasajeros.setText("Consultar");
jButtonConsultarPasajeros.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonConsultarPasajerosComponentShown(evt);
}
});
jButtonConsultarPasajeros.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonConsultarPasajerosActionPerformed(evt);
}
});
jButtonModificarPasajeros.setText("Modificar");
jButtonModificarPasajeros.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonModificarPasajerosComponentShown(evt);
}
});
jButtonModificarPasajeros.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonModificarPasajerosActionPerformed(evt);
}
});
jButtoninsertarPasajeros.setText("Insertar");
jButtoninsertarPasajeros.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtoninsertarPasajerosActionPerformed(evt);
}
});
jButtonBorrarPasajeros.setText("Borrar");
jButtonBorrarPasajeros.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonBorrarPasajerosActionPerformed(evt);
}
});
jButtonAceptarPasajeros.setText("Aceptar");
jButtonAceptarPasajeros.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonAceptarPasajerosComponentShown(evt);
}
});
jButtonAceptarPasajeros.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonAceptarPasajerosActionPerformed(evt);
}
});
jButtonCancelarPasajeros.setText("Cancelar");
javax.swing.GroupLayout jPanel6Layout = new javax.swing.GroupLayout(jPanel6);
jPanel6.setLayout(jPanel6Layout);
jPanel6Layout.setHorizontalGroup(
jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel6Layout.createSequentialGroup()
.addGap(31, 31, 31)
.addComponent(jButtonAceptarPasajeros)
.addGap(18, 18, 18)
.addComponent(jButtonCancelarPasajeros)
.addContainerGap(51, Short.MAX_VALUE))
);
jPanel6Layout.setVerticalGroup(
jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel6Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonAceptarPasajeros)
.addComponent(jButtonCancelarPasajeros))
.addContainerGap(15, Short.MAX_VALUE))
);
jLabel9.setText("Dirección:");
javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5);
jPanel5.setLayout(jPanel5Layout);
jPanel5Layout.setHorizontalGroup(
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel8)
.addComponent(jLabel7)
.addComponent(jLabel6))
.addGap(23, 23, 23)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addComponent(jTextFieldCodigoPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, 285, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addComponent(jTextFieldNombrePasajeros, javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jTextFieldTelefonoPasajeros)))
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel5Layout.createSequentialGroup()
.addComponent(jLabel9)
.addGap(18, 18, 18)
.addComponent(jTextFieldDireccionPasajeros)))
.addGap(18, 18, 18)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jButtonModificarPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonConsultarPasajeros))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jButtoninsertarPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonBorrarPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(jPanel5Layout.createSequentialGroup()
.addComponent(jLabel5)
.addGap(0, 0, Short.MAX_VALUE)))
.addContainerGap())
.addGroup(jPanel5Layout.createSequentialGroup()
.addGap(72, 72, 72)
.addComponent(jPanel6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel5Layout.setVerticalGroup(
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel5)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldCodigoPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel6))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldNombrePasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel7))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldTelefonoPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel8)))
.addGroup(jPanel5Layout.createSequentialGroup()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonConsultarPasajeros)
.addComponent(jButtoninsertarPasajeros))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonModificarPasajeros)
.addComponent(jButtonBorrarPasajeros))))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldDireccionPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel9))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jPanel6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(30, Short.MAX_VALUE))
);
javax.swing.GroupLayout jPanelPasajerosLayout = new javax.swing.GroupLayout(jPanelPasajeros);
jPanelPasajeros.setLayout(jPanelPasajerosLayout);
jPanelPasajerosLayout.setHorizontalGroup(
jPanelPasajerosLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanelPasajerosLayout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel5, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(84, Short.MAX_VALUE))
);
jPanelPasajerosLayout.setVerticalGroup(
jPanelPasajerosLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanelPasajerosLayout.createSequentialGroup()
.addGap(15, 15, 15)
.addComponent(jPanel5, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(362, Short.MAX_VALUE))
);
jTabbedPanePrincipal.addTab("Pasajeros", jPanelPasajeros);
jPanel7.setBorder(javax.swing.BorderFactory.createEtchedBorder());
jLabel10.setText("Datos de Vuelo");
jLabel11.setText("Identificador");
jLabel12.setText("Origen:");
jLabel13.setText("Destino:");
jButtonConsultarVuelos.setText("Consultar");
jButtonConsultarVuelos.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonConsultarVuelosComponentShown(evt);
}
});
jButtonConsultarVuelos.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonConsultarVuelosActionPerformed(evt);
}
});
jButtonModificarVuelos.setText("Modificar");
jButtonModificarVuelos.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonModificarVuelosComponentShown(evt);
}
});
jButtonModificarVuelos.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonModificarVuelosActionPerformed(evt);
}
});
jButtoninsertarVuelos.setText("Insertar");
jButtoninsertarVuelos.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtoninsertarVuelosActionPerformed(evt);
}
});
jButtonBorrarVuelos.setText("Borrar");
jButtonBorrarVuelos.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonBorrarVuelosActionPerformed(evt);
}
});
jButtonAceptarVuelos.setText("Aceptar");
jButtonAceptarVuelos.addComponentListener(new java.awt.event.ComponentAdapter() {
public void componentShown(java.awt.event.ComponentEvent evt) {
jButtonAceptarVuelosComponentShown(evt);
}
});
jButtonAceptarVuelos.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonAceptarVuelosActionPerformed(evt);
}
});
jButtonCancelarVuelos.setText("Cancelar");
javax.swing.GroupLayout jPanel8Layout = new javax.swing.GroupLayout(jPanel8);
jPanel8.setLayout(jPanel8Layout);
jPanel8Layout.setHorizontalGroup(
jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel8Layout.createSequentialGroup()
.addGap(31, 31, 31)
.addComponent(jButtonAceptarVuelos)
.addGap(18, 18, 18)
.addComponent(jButtonCancelarVuelos)
.addContainerGap(51, Short.MAX_VALUE))
);
jPanel8Layout.setVerticalGroup(
jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel8Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonAceptarVuelos)
.addComponent(jButtonCancelarVuelos))
.addContainerGap(15, Short.MAX_VALUE))
);
jButtonGestionTripulantes.setText("Gestión de Tripulantes");
jButtonGestionTripulantes.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonGestionTripulantesActionPerformed(evt);
}
});
jButtonGestionPasajeros.setText("Gestión de Pasajeros");
javax.swing.GroupLayout jPanel7Layout = new javax.swing.GroupLayout(jPanel7);
jPanel7.setLayout(jPanel7Layout);
jPanel7Layout.setHorizontalGroup(
jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel7Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel7Layout.createSequentialGroup()
.addComponent(jLabel10)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel7Layout.createSequentialGroup()
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel7Layout.createSequentialGroup()
.addComponent(jLabel11)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTextFieldIdentificadorVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, 296, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel7Layout.createSequentialGroup()
.addComponent(jLabel12)
.addGap(17, 17, 17)
.addComponent(jTextFieldOrigenVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, 329, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel7Layout.createSequentialGroup()
.addComponent(jLabel13)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTextFieldDestinoVuelos)))
.addGap(18, 27, Short.MAX_VALUE)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel7Layout.createSequentialGroup()
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jButtonModificarVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonConsultarVuelos))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jButtoninsertarVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonBorrarVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, 85, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addComponent(jButtonGestionTripulantes, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 176, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(jPanel7Layout.createSequentialGroup()
.addGap(66, 66, 66)
.addComponent(jPanel8, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jButtonGestionPasajeros, javax.swing.GroupLayout.PREFERRED_SIZE, 176, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
jPanel7Layout.setVerticalGroup(
jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel7Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel10)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel7Layout.createSequentialGroup()
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldIdentificadorVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel11))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldOrigenVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel12))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextFieldDestinoVuelos, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel13)))
.addGroup(jPanel7Layout.createSequentialGroup()
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonConsultarVuelos)
.addComponent(jButtoninsertarVuelos))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButtonModificarVuelos)
.addComponent(jButtonBorrarVuelos))))
.addGap(18, 18, 18)
.addComponent(jButtonGestionTripulantes)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel8, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonGestionPasajeros))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jScrollPane2.setBorder(javax.swing.BorderFactory.createTitledBorder("Personal de tripulación : 0"));
jTableTripulacion.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null},
{null, null, null},
{null, null, null},
{null, null, null}
},
new String [] {
"Código", "Nombre", "Categoria"
}
) {
Class[] types = new Class [] {
java.lang.Integer.class, java.lang.Object.class, java.lang.String.class
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
});
jScrollPane2.setViewportView(jTableTripulacion);
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane2)
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
.addContainerGap(16, Short.MAX_VALUE)
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 164, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(15, 15, 15))
);
jScrollPane1.setBorder(javax.swing.BorderFactory.createTitledBorder("Numero de Pasajeros: 0"));
jTablePasajeros.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Codigo", "Nombre", "Teléfono", "Dirección"
}
) {
Class[] types = new Class [] {
java.lang.Integer.class, java.lang.String.class, java.lang.String.class, java.lang.Integer.class
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
});
jScrollPane1.setViewportView(jTablePasajeros);
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel7, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jScrollPane1))
.addContainerGap(48, Short.MAX_VALUE))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGap(15, 15, 15)
.addComponent(jPanel7, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 158, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(7, Short.MAX_VALUE))
);
jTabbedPanePrincipal.addTab("Vuelos", jPanel1);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jTabbedPanePrincipal, javax.swing.GroupLayout.PREFERRED_SIZE, 662, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(32, 32, 32))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jTabbedPanePrincipal, javax.swing.GroupLayout.Alignment.TRAILING)
);
pack();
}// </editor-fold>//GEN-END:initComponents
// Prepares the crew panel for inserting a new crew member: clears the text
// fields, makes all of them editable, and arms the Accept button.
private void jButtoninsertarTripulacionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtoninsertarTripulacionActionPerformed
    limpiarCamposTripulacion();
    switchCamposTripulacion(true, true, true);
    switchBotonesTripulacion(false, true, false, false);
    jButtonAceptarTripulacion.setEnabled(true);
    control = 1; // 1 = "insert", consumed by jButtonAceptarTripulacionActionPerformed
}//GEN-LAST:event_jButtoninsertarTripulacionActionPerformed
// Arms a "modify" of the previously looked-up crew member: the code field
// stays locked while name/category become editable.
// NOTE(review): unlike the insert handler this neither touches the button
// states nor enables the Accept button — confirm that is intended.
private void jButtonModificarTripulacionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonModificarTripulacionActionPerformed
    switchCamposTripulacion(false, true, true);
    control = 2; // 2 = "modify"
}//GEN-LAST:event_jButtonModificarTripulacionActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonModificarTripulacionComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonModificarTripulacionComponentShown
}//GEN-LAST:event_jButtonModificarTripulacionComponentShown
// Looks up a crew member by the numeric code typed in the "codigo" field
// and, when found, fills the name/category fields, remembers the object for
// later modify/delete actions, and enables the action buttons.
private void jButtonConsultarTripulacionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonConsultarTripulacionActionPerformed
    if (!jTextFieldCodigoTripulacion.getText().isEmpty()){
        int codigo;
        try {
            codigo = Integer.parseInt(jTextFieldCodigoTripulacion.getText());
        } catch (NumberFormatException ex) {
            // BUG FIX: a non-numeric code used to escape as an uncaught
            // NumberFormatException; report it to the user instead.
            JOptionPane.showMessageDialog(this, "El código debe ser un número entero");
            return;
        }
        IQuery query = new CriteriaQuery(Tripulacion.class, Where.equal("codigo", codigo));
        Objects<Tripulacion> resultado = odb.getObjects(query);
        if (resultado.size() == 0){
            JOptionPane.showMessageDialog(this, "Objeto no encontrado");
        }
        else {
            Tripulacion tripulante = resultado.getFirst();
            tripulanteBuscado = tripulante; // remembered for modify/delete
            jTextFieldNombreTripulacion.setText(tripulante.getNombre());
            jTextFieldcategoriaTripulacion.setText(tripulante.getCategoria());
            switchBotonesTripulacion(true, true, true, true);
        }
    }
    else {
        JOptionPane.showMessageDialog(this, "Introduces datos en el campo código");
    }
}//GEN-LAST:event_jButtonConsultarTripulacionActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonConsultarTripulacionComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonConsultarTripulacionComponentShown
}//GEN-LAST:event_jButtonConsultarTripulacionComponentShown
// Releases the NeoDatis resources when the window is closed: first the
// embedded server, then the client session.
private void formWindowClosed(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosed
    // Null guards: the close event can fire before initialization finished,
    // and calling close() on a null reference would throw an NPE.
    if (server != null) {
        server.close();
    }
    if (odb != null) {
        odb.close();
    }
}//GEN-LAST:event_formWindowClosed
// Confirms the pending crew operation selected by `control`:
// 1 = insert a new Tripulacion, 2 = modify `tripulanteBuscado`, 3 = delete it.
private void jButtonAceptarTripulacionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonAceptarTripulacionActionPerformed
    switch(control){
        case 1:
            // Insert: all three fields are required and the code must be unique.
            if(!(jTextFieldCodigoTripulacion.getText().isEmpty()||jTextFieldNombreTripulacion.getText().isEmpty()||jTextFieldcategoriaTripulacion.getText().isEmpty())){
                IQuery query = new CriteriaQuery(Tripulacion.class, Where.equal("codigo", Integer.parseInt(jTextFieldCodigoTripulacion.getText())));
                Objects<Tripulacion> resultado = odb.getObjects(query);
                if(resultado.size()!=0){
                    JOptionPane.showMessageDialog(this, "Hay un objeto en la BBDD con ese código");
                }
                else {
                    Tripulacion tripulante = new Tripulacion (
                        Integer.parseInt(jTextFieldCodigoTripulacion.getText()),
                        jTextFieldNombreTripulacion.getText(),
                        jTextFieldcategoriaTripulacion.getText());
                    odb.store(tripulante);
                    odb.commit();
                    JOptionPane.showMessageDialog(this, "Pos guardao'");
                    jPanelTripulacionComponentShown(new ComponentEvent (this, 0));
                }
            }
            else{
                JOptionPane.showMessageDialog(this, "Introduce datos en todos los campos");
            }
            break;
        case 2: // modify
            // BUG FIX: the duplicate-check query used to pass the raw String,
            // but "codigo" is an int field, so the query never matched and a
            // code collision was never detected. Parse the code first.
            IQuery query2 = new CriteriaQuery(Tripulacion.class, Where.equal("codigo", Integer.parseInt(jTextFieldCodigoTripulacion.getText())));
            Objects <Tripulacion> resultados = odb.getObjects(query2);
            if(Integer.parseInt(jTextFieldCodigoTripulacion.getText()) == tripulanteBuscado.getCodigo()){
                // Code unchanged: only the editable attributes are updated.
                tripulanteBuscado.setNombre(jTextFieldNombreTripulacion.getText());
                tripulanteBuscado.setCategoria(jTextFieldcategoriaTripulacion.getText());
                odb.store(tripulanteBuscado);
                odb.commit();
                JOptionPane.showMessageDialog(this, "Guardadito");
            }
            else if(resultados.size()!=0) {
                JOptionPane.showMessageDialog(this, "El código introducido ya existe en la BBDD");
            }
            else{
                // New, unique code: update every attribute including the code.
                tripulanteBuscado.setCodigo(Integer.parseInt(jTextFieldCodigoTripulacion.getText()));
                tripulanteBuscado.setNombre(jTextFieldNombreTripulacion.getText());
                tripulanteBuscado.setCategoria(jTextFieldcategoriaTripulacion.getText());
                odb.store(tripulanteBuscado);
                odb.commit();
                JOptionPane.showMessageDialog(this, "Guardadito");
            }
            jPanelTripulacionComponentShown(new ComponentEvent (this, 0));
            break;
        case 3: // delete, after user confirmation
            int BotonSi = JOptionPane.YES_NO_OPTION;
            int dialogResult = JOptionPane.showConfirmDialog (null, "¿Seguro quieres borrar a este tripulante?","Aviso",BotonSi);
            if(dialogResult == JOptionPane.YES_OPTION){
                odb.delete(tripulanteBuscado);
                odb.commit();
                limpiarCamposTripulacion();
                JOptionPane.showMessageDialog(this, "Borradito");
            }
            switchBotonesTripulacion(true,true,false,false);
            break;
        default:
            break;
    }
}//GEN-LAST:event_jButtonAceptarTripulacionActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonAceptarTripulacionComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonAceptarTripulacionComponentShown
}//GEN-LAST:event_jButtonAceptarTripulacionComponentShown
// Empty NetBeans-generated event stub for the main form.
private void formComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_formComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_formComponentShown
// Resets the crew panel to its idle state whenever it is shown: only the
// code field editable, only Consultar/Insertar buttons enabled.
private void jPanelTripulacionComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jPanelTripulacionComponentShown
    switchCamposTripulacion(true, false, false);
    switchBotonesTripulacion(true, true, false, false);
}//GEN-LAST:event_jPanelTripulacionComponentShown
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonConsultarPasajerosComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonConsultarPasajerosComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_jButtonConsultarPasajerosComponentShown
// Looks up a passenger by the numeric code typed in the "codigo" field and,
// when found, fills the detail fields, remembers the object for later
// modify/delete actions, and enables the action buttons.
private void jButtonConsultarPasajerosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonConsultarPasajerosActionPerformed
    if (!jTextFieldCodigoPasajeros.getText().isEmpty()){
        int codigo;
        try {
            codigo = Integer.parseInt(jTextFieldCodigoPasajeros.getText());
        } catch (NumberFormatException ex) {
            // BUG FIX: a non-numeric code used to escape as an uncaught
            // NumberFormatException; report it to the user instead.
            JOptionPane.showMessageDialog(this, "El código debe ser un número entero");
            return;
        }
        IQuery query = new CriteriaQuery(Pasajeros.class, Where.equal("codigo", codigo));
        Objects<Pasajeros> resultado = odb.getObjects(query);
        if(resultado.size() == 0){
            JOptionPane.showMessageDialog(this, "Objeto no encontrado");
        }
        else{
            Pasajeros pasajero = resultado.getFirst();
            pasajerobuscado = pasajero; // remembered for modify/delete
            jTextFieldNombrePasajeros.setText(pasajero.getNombre());
            jTextFieldTelefonoPasajeros.setText(pasajero.getTel());
            jTextFieldDireccionPasajeros.setText(pasajero.getDireccion());
            switchBotonesPasajeros(true, true, true, true);
        }
    }
    else{
        JOptionPane.showMessageDialog(this, "Introduzca datos en el campo código");
    }
}//GEN-LAST:event_jButtonConsultarPasajerosActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonModificarPasajerosComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonModificarPasajerosComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_jButtonModificarPasajerosComponentShown
// Arms a "modify" of the previously looked-up passenger.
// NOTE(review): unlike the crew equivalent this does not lock the code field
// or adjust the buttons — confirm that is intended.
private void jButtonModificarPasajerosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonModificarPasajerosActionPerformed
    control = 2; // 2 = "modify", consumed by jButtonAceptarPasajerosActionPerformed
}//GEN-LAST:event_jButtonModificarPasajerosActionPerformed
// Arms an "insert" of a new passenger.
// NOTE(review): unlike the crew equivalent this neither clears nor enables
// the fields — confirm that is intended.
private void jButtoninsertarPasajerosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtoninsertarPasajerosActionPerformed
    control = 1; // 1 = "insert"
}//GEN-LAST:event_jButtoninsertarPasajerosActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonAceptarPasajerosComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonAceptarPasajerosComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_jButtonAceptarPasajerosComponentShown
// Confirms the pending passenger operation selected by `control`:
// 1 = insert a new Pasajeros, 2 = modify `pasajerobuscado`, 3 = delete it.
private void jButtonAceptarPasajerosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonAceptarPasajerosActionPerformed
    switch(control){
        case 1: // insert
            // All four fields are required and the code must be unique.
            if(!(jTextFieldCodigoPasajeros.getText().isEmpty()||
                 jTextFieldNombrePasajeros.getText().isEmpty()||
                 jTextFieldDireccionPasajeros.getText().isEmpty()||
                 jTextFieldTelefonoPasajeros.getText().isEmpty())){
                IQuery query = new CriteriaQuery(Pasajeros.class, Where.equal("codigo", Integer.parseInt(jTextFieldCodigoPasajeros.getText())));
                Objects<Pasajeros> resultado = odb.getObjects(query);
                if(resultado.size()!=0){
                    JOptionPane.showMessageDialog(this, "Ya hay un objeto en la BBDD con ese código");
                }
                else {
                    Pasajeros pasajero = new Pasajeros(
                        Integer.parseInt(jTextFieldCodigoPasajeros.getText()),
                        jTextFieldNombrePasajeros.getText(),
                        jTextFieldTelefonoPasajeros.getText(),
                        jTextFieldDireccionPasajeros.getText());
                    odb.store(pasajero);
                    odb.commit();
                    JOptionPane.showMessageDialog(this, "Pos guardao'");
                    jPanelTripulacionComponentShown(new ComponentEvent (this, 0));
                }
            }
            else{
                JOptionPane.showMessageDialog(this, "Introduce datos en todos los campos");
            }
            break;
        case 2: // modify
            // BUG FIX: the duplicate-check query used to pass the raw String,
            // but "codigo" is an int field, so the query never matched and a
            // code collision was never detected. Parse the code first.
            IQuery query2 = new CriteriaQuery(Pasajeros.class, Where.equal("codigo", Integer.parseInt(jTextFieldCodigoPasajeros.getText())));
            Objects <Pasajeros> resultados = odb.getObjects(query2);
            if(Integer.parseInt(jTextFieldCodigoPasajeros.getText()) == pasajerobuscado.getCodigo()){
                // Code unchanged: only the editable attributes are updated.
                // BUG FIX: a stray setNombre(<codigo text>) call that was
                // immediately overwritten has been removed.
                pasajerobuscado.setNombre(jTextFieldNombrePasajeros.getText());
                pasajerobuscado.setTel(jTextFieldTelefonoPasajeros.getText());
                pasajerobuscado.setDireccion(jTextFieldDireccionPasajeros.getText());
                odb.store(pasajerobuscado);
                odb.commit();
                JOptionPane.showMessageDialog(this, "Guardadito");
            }
            else if(resultados.size()!=0) {
                JOptionPane.showMessageDialog(this, "El código introducido ya existe en la BBDD");
            }
            else{
                // New, unique code: update every attribute including the code.
                // BUG FIX: the new code used to be read from the *crew* code
                // field (jTextFieldCodigoTripulacion) — a copy-paste error.
                pasajerobuscado.setCodigo(Integer.parseInt(jTextFieldCodigoPasajeros.getText()));
                pasajerobuscado.setNombre(jTextFieldNombrePasajeros.getText());
                pasajerobuscado.setTel(jTextFieldTelefonoPasajeros.getText());
                pasajerobuscado.setDireccion(jTextFieldDireccionPasajeros.getText());
                odb.store(pasajerobuscado);
                odb.commit();
                JOptionPane.showMessageDialog(this, "Guardadito");
            }
            jPanelTripulacionComponentShown(new ComponentEvent (this, 0));
            break;
        case 3: // delete, after user confirmation
            int BotonSi = JOptionPane.YES_NO_OPTION;
            int dialogResult = JOptionPane.showConfirmDialog (null, "¿Seguro quieres borrar a este pasajero?","Aviso",BotonSi);
            if(dialogResult == JOptionPane.YES_OPTION){
                odb.delete(pasajerobuscado);
                odb.commit();
                limpiarCamposPasajeros();
                JOptionPane.showMessageDialog(this, "Borradito");
            }
            switchBotonesPasajeros(true,true,false,false);
            break;
        default:
            break;
    }
}//GEN-LAST:event_jButtonAceptarPasajerosActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonConsultarVuelosComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonConsultarVuelosComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_jButtonConsultarVuelosComponentShown
// Looks up a flight by its identifier, fills the origin/destination fields,
// and lists the flight's crew members in the crew table.
private void jButtonConsultarVuelosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonConsultarVuelosActionPerformed
    if (!jTextFieldIdentificadorVuelos.getText().isEmpty()){
        IQuery query = new CriteriaQuery(Vuelo.class, Where.equal("identificador", jTextFieldIdentificadorVuelos.getText()));
        Objects<Vuelo> resultado = odb.getObjects(query);
        if(resultado.size() == 0){
            JOptionPane.showMessageDialog(this,"Objeto no encontrado");
        }
        else{
            Vuelo vuelo = resultado.getFirst();
            vuelobuscado = vuelo; // remembered for modify/delete
            jTextFieldIdentificadorVuelos.setText(vuelo.getIdentificador());
            jTextFieldOrigenVuelos.setText(vuelo.getAeropuerto_origen());
            jTextFieldDestinoVuelos.setText(vuelo.getAeropuerto_destino());
            // Re-populate the crew table with this flight's crew.
            DefaultTableModel modeloTabla = (DefaultTableModel) jTableTripulacion.getModel();
            Set<Tripulacion> settripulacion = vuelobuscado.getTripulacionSet();
            // BUG FIX: setRowCount() was previously called on
            // getTripulacionSet().size() *before* the null check, so a flight
            // with a null crew set caused a NullPointerException.
            if(settripulacion != null){
                modeloTabla.setRowCount(settripulacion.size());
                int i = 0;
                // Typed for-each replaces the old raw Iterator loop; the
                // redundant jTableTripulacion.setModel(modeloTabla) call was
                // dropped because the model was obtained from the table itself.
                for (Tripulacion tripu : settripulacion) {
                    modeloTabla.setValueAt(tripu.getCodigo(), i, 0);
                    modeloTabla.setValueAt(tripu.getNombre(), i, 1);
                    modeloTabla.setValueAt(tripu.getCategoria(), i, 2);
                    i++;
                }
            }
        }
    }
    else{
        JOptionPane.showMessageDialog(this,"Introduzca datos en el campo código");
    }
}//GEN-LAST:event_jButtonConsultarVuelosActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonModificarVuelosComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonModificarVuelosComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_jButtonModificarVuelosComponentShown
// Arms a "modify" of the previously looked-up flight: the identifier field
// is locked so the key cannot change; origin/destination become editable.
private void jButtonModificarVuelosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonModificarVuelosActionPerformed
    controlvuelo = 2; // 2 = "modify", consumed by jButtonAceptarVuelosActionPerformed
    switchCamposVuelos(false,true,true);
    switchBotonesVuelo(true, true, false, false);
}//GEN-LAST:event_jButtonModificarVuelosActionPerformed
// Arms an "insert" of a new flight.
// NOTE(review): unlike the crew insert handler this neither clears nor
// enables the fields — confirm that is intended.
private void jButtoninsertarVuelosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtoninsertarVuelosActionPerformed
    controlvuelo = 1; // 1 = "insert"
}//GEN-LAST:event_jButtoninsertarVuelosActionPerformed
// Empty NetBeans-generated event stub (kept: the form wiring references it).
private void jButtonAceptarVuelosComponentShown(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_jButtonAceptarVuelosComponentShown
    // TODO add your handling code here:
}//GEN-LAST:event_jButtonAceptarVuelosComponentShown
// Confirms the pending flight operation selected by `controlvuelo`:
// 1 = insert a new Vuelo, 2 = modify `vuelobuscado`, 3 = delete it.
private void jButtonAceptarVuelosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonAceptarVuelosActionPerformed
    switch(controlvuelo){
        case 1: // insert
            // All three fields are required and the identifier must be unique.
            if(!(jTextFieldIdentificadorVuelos.getText().isEmpty()||jTextFieldOrigenVuelos.getText().isEmpty()||jTextFieldDestinoVuelos.getText().isEmpty())){
                IQuery query = new CriteriaQuery(Vuelo.class, Where.equal("identificador", jTextFieldIdentificadorVuelos.getText()));
                Objects<Vuelo> resultado = odb.getObjects(query);
                if(resultado.size()!=0){
                    JOptionPane.showMessageDialog(this, "Ya hay un objeto en la BBDD con ese código");
                }
                else {
                    Vuelo vuelo = new Vuelo(
                        jTextFieldIdentificadorVuelos.getText(),
                        jTextFieldOrigenVuelos.getText(),
                        jTextFieldDestinoVuelos.getText());
                    odb.store(vuelo);
                    odb.commit();
                    JOptionPane.showMessageDialog(this, "Pos guardao'");
                    jPanelTripulacionComponentShown(new ComponentEvent (this, 0));
                }
            }
            else{
                JOptionPane.showMessageDialog(this, "Introduce datos en todos los campos");
            }
            break;
        case 2: // modify
            // The identifier field is disabled while modifying (see
            // jButtonModificarVuelosActionPerformed), so only origin and
            // destination can change. The previous unused duplicate-check
            // query — which was also mistyped as Objects<Pasajeros> for a
            // Vuelo query — has been removed.
            vuelobuscado.setAeropuerto_destino(jTextFieldDestinoVuelos.getText());
            vuelobuscado.setAeropuerto_origen(jTextFieldOrigenVuelos.getText());
            odb.store(vuelobuscado);
            odb.commit();
            JOptionPane.showMessageDialog(this, "Guardadito");
            jPanelTripulacionComponentShown(new ComponentEvent (this, 0));
            switchBotonesVuelo(true,true,true,true);
            switchCamposVuelos(true,true,true);
            break;
        case 3: // delete, after user confirmation
            int BotonSi = JOptionPane.YES_NO_OPTION;
            // BUG FIX: the confirmation message was missing its closing "?".
            int dialogResult = JOptionPane.showConfirmDialog (null, "¿Seguro quieres borrar este vuelo?","Aviso",BotonSi);
            if(dialogResult == JOptionPane.YES_OPTION){
                odb.delete(vuelobuscado);
                odb.commit();
                // BUG FIX: this used to call limpiarCamposPasajeros(), which
                // clears the *passenger* fields; clear the flight fields.
                jTextFieldIdentificadorVuelos.setText("");
                jTextFieldOrigenVuelos.setText("");
                jTextFieldDestinoVuelos.setText("");
                JOptionPane.showMessageDialog(this, "Borradito");
            }
            break;
        default:
            break;
    }
}//GEN-LAST:event_jButtonAceptarVuelosActionPerformed
// Opens the crew-assignment child window for the current flight, passing
// this window so the child can call back into it.
private void jButtonGestionTripulantesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonGestionTripulantesActionPerformed
    ventanatripulacion = new TripulacionVuelo(this);
    ventanatripulacion.setVisible(true);
}//GEN-LAST:event_jButtonGestionTripulantesActionPerformed
// Arms a "delete" of the previously looked-up crew member and leaves only
// the Borrar button enabled until the user confirms via Aceptar.
private void jButtonBorrarTripulacionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonBorrarTripulacionActionPerformed
    control = 3; // 3 = "delete", consumed by jButtonAceptarTripulacionActionPerformed
    switchBotonesTripulacion(false,false,false,true);
}//GEN-LAST:event_jButtonBorrarTripulacionActionPerformed
/*consultar, insertar, modificar, borrar*/
// Cancels any pending crew operation: resets the state machine, restores the
// idle button set, clears the fields and disarms Accept.
private void jButtonCancelarTripulacionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonCancelarTripulacionActionPerformed
    control = 0; // 0 = no pending operation
    switchBotonesTripulacion(true,true,false,false);
    limpiarCamposTripulacion();
    jButtonAceptarTripulacion.setEnabled(false);
}//GEN-LAST:event_jButtonCancelarTripulacionActionPerformed
// Arms a "delete" of the previously looked-up passenger.
// NOTE(review): the crew equivalent passes (false,false,false,true) here —
// i.e. it keeps only Borrar enabled — while this enables Consultar/Insertar
// and disables Borrar. Confirm which behavior is intended.
private void jButtonBorrarPasajerosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonBorrarPasajerosActionPerformed
    control = 3; // 3 = "delete", consumed by jButtonAceptarPasajerosActionPerformed
    switchBotonesPasajeros(true, true, false, false);
}//GEN-LAST:event_jButtonBorrarPasajerosActionPerformed
// Arms a "delete" of the previously looked-up flight.
private void jButtonBorrarVuelosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonBorrarVuelosActionPerformed
    controlvuelo = 3; // 3 = "delete", consumed by jButtonAceptarVuelosActionPerformed
}//GEN-LAST:event_jButtonBorrarVuelosActionPerformed
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
     */
    // NetBeans-generated boilerplate: scan installed LaFs for Nimbus and
    // fall back silently (log only) to the default look and feel.
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException ex) {
        java.util.logging.Logger.getLogger(MainWindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (InstantiationException ex) {
        java.util.logging.Logger.getLogger(MainWindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (IllegalAccessException ex) {
        java.util.logging.Logger.getLogger(MainWindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(MainWindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }
    //</editor-fold>
    /* Create and display the form */
    // UI must be created on the Event Dispatch Thread.
    java.awt.EventQueue.invokeLater(new Runnable() {
        public void run() {
            new MainWindow().setVisible(true);
        }
    });
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButtonAceptarPasajeros;
private javax.swing.JButton jButtonAceptarTripulacion;
private javax.swing.JButton jButtonAceptarVuelos;
private javax.swing.JButton jButtonBorrarPasajeros;
private javax.swing.JButton jButtonBorrarTripulacion;
private javax.swing.JButton jButtonBorrarVuelos;
private javax.swing.JButton jButtonCancelarPasajeros;
private javax.swing.JButton jButtonCancelarTripulacion;
private javax.swing.JButton jButtonCancelarVuelos;
private javax.swing.JButton jButtonConsultarPasajeros;
private javax.swing.JButton jButtonConsultarTripulacion;
private javax.swing.JButton jButtonConsultarVuelos;
private javax.swing.JButton jButtonGestionPasajeros;
private javax.swing.JButton jButtonGestionTripulantes;
private javax.swing.JButton jButtonModificarPasajeros;
private javax.swing.JButton jButtonModificarTripulacion;
private javax.swing.JButton jButtonModificarVuelos;
private javax.swing.JButton jButtoninsertarPasajeros;
private javax.swing.JButton jButtoninsertarTripulacion;
private javax.swing.JButton jButtoninsertarVuelos;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel10;
private javax.swing.JLabel jLabel11;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JPanel jPanel4;
private javax.swing.JPanel jPanel5;
private javax.swing.JPanel jPanel6;
private javax.swing.JPanel jPanel7;
private javax.swing.JPanel jPanel8;
private javax.swing.JPanel jPanelPasajeros;
private javax.swing.JPanel jPanelTripulacion;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTabbedPane jTabbedPanePrincipal;
private javax.swing.JTable jTablePasajeros;
private javax.swing.JTable jTableTripulacion;
private javax.swing.JTextField jTextFieldCodigoPasajeros;
private javax.swing.JTextField jTextFieldCodigoTripulacion;
private javax.swing.JTextField jTextFieldDestinoVuelos;
private javax.swing.JTextField jTextFieldDireccionPasajeros;
private javax.swing.JTextField jTextFieldIdentificadorVuelos;
private javax.swing.JTextField jTextFieldNombrePasajeros;
private javax.swing.JTextField jTextFieldNombreTripulacion;
private javax.swing.JTextField jTextFieldOrigenVuelos;
private javax.swing.JTextField jTextFieldTelefonoPasajeros;
private javax.swing.JTextField jTextFieldcategoriaTripulacion;
// End of variables declaration//GEN-END:variables
private static ODB odb = null;
private static ODBServer server = null;
private Tripulacion tripulanteBuscado = null;
private Pasajeros pasajerobuscado = null;
private int control = 0;
private int controlvuelo = 0;
public Vuelo vuelobuscado = null;
TripulacionVuelo ventanatripulacion;
/** Returns the shared NeoDatis session (presumably used by child windows
 *  such as TripulacionVuelo — confirm against callers). */
public static ODB getOdb(){
    return odb;
}
/**
 * Replaces the shared NeoDatis session.
 * BUG FIX: the old no-arg setOdb() executed {@code MainWindow.odb = odb;}
 * with no parameter in scope, i.e. it assigned the static field to itself —
 * a silent no-op. This overload actually stores the given session.
 *
 * @param odb the session to share
 */
public static void setOdb(ODB odb){
    MainWindow.odb = odb;
}
/** @deprecated no-op kept for source compatibility; use {@link #setOdb(ODB)}. */
@Deprecated
public static void setOdb(){
}
/* Enables/disables the three crew text fields (codigo, nombre, categoria). */
private void switchCamposTripulacion(Boolean codigo, Boolean nombre, Boolean categoria){
    jTextFieldCodigoTripulacion.setEnabled(codigo);
    jTextFieldNombreTripulacion.setEnabled(nombre);
    jTextFieldcategoriaTripulacion.setEnabled(categoria);
}
/* Clears the three crew text fields. */
private void limpiarCamposTripulacion(){
    jTextFieldCodigoTripulacion.setText("");
    jTextFieldNombreTripulacion.setText("");
    jTextFieldcategoriaTripulacion.setText("");
}
/* Clears the four passenger text fields. */
private void limpiarCamposPasajeros() {
    jTextFieldCodigoPasajeros.setText("");
    jTextFieldDireccionPasajeros.setText("");
    jTextFieldNombrePasajeros.setText("");
    jTextFieldTelefonoPasajeros.setText("");
}
/*consultar, insertar, modificar, borrar*/
/* Enables/disables the four crew action buttons, in the order above. */
private void switchBotonesTripulacion(Boolean consulta, Boolean insertar, Boolean modificar, Boolean borrar ){
    jButtonConsultarTripulacion.setEnabled(consulta);
    jButtoninsertarTripulacion.setEnabled(insertar);
    jButtonModificarTripulacion.setEnabled(modificar);
    jButtonBorrarTripulacion.setEnabled(borrar);
}
/* Enables/disables the four passenger action buttons (same order). */
private void switchBotonesPasajeros(Boolean consulta, Boolean insertar, Boolean modificar, Boolean borrar ){
    jButtonConsultarPasajeros.setEnabled(consulta);
    jButtoninsertarPasajeros.setEnabled(insertar);
    jButtonModificarPasajeros.setEnabled(modificar);
    jButtonBorrarPasajeros.setEnabled(borrar);
}
/* Enables/disables the four flight action buttons (same order). */
private void switchBotonesVuelo(Boolean consulta, Boolean insertar, Boolean modificar, Boolean borrar ){
    jButtonConsultarVuelos.setEnabled(consulta);
    jButtoninsertarVuelos.setEnabled(insertar);
    jButtonModificarVuelos.setEnabled(modificar);
    jButtonBorrarVuelos.setEnabled(borrar);
}
/* Enables/disables the three flight text fields (identificador, origen, destino). */
private void switchCamposVuelos (Boolean codigo, Boolean origen, Boolean destino){
    jTextFieldIdentificadorVuelos.setEnabled(codigo);
    jTextFieldOrigenVuelos.setEnabled(origen);
    jTextFieldDestinoVuelos.setEnabled(destino);
}
// WindowListener implementation. Only windowClosed does real work; the rest
// are required empty overrides of the interface.
@Override
public void windowOpened(WindowEvent we) {
}
@Override
public void windowClosing(WindowEvent we) {
}
// Refreshes the flight view by re-running the flight lookup.
// NOTE(review): MainWindow also has its own formWindowClosed handler that
// closes the database, so this listener presumably observes a *child*
// window (e.g. TripulacionVuelo) closing — confirm where it is registered.
@Override
public void windowClosed(WindowEvent we) {
    System.out.println("Cerrada");
    jButtonConsultarVuelosActionPerformed(new ActionEvent(this,0,""));
}
@Override
public void windowIconified(WindowEvent we) {
}
@Override
public void windowDeiconified(WindowEvent we) {
}
@Override
public void windowActivated(WindowEvent we) {
}
@Override
public void windowDeactivated(WindowEvent we) {
}
// Auto-generated stub (note: it shadows the jButtonAceptarTripulacion field
// name); never called by the visible code and throws if invoked.
private void jButtonAceptarTripulacion(boolean b) {
    throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
|
<reponame>Pleksus2022/pleksus-api
import { Module } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';
import { USER } from 'src/common/models/models';
import { UserSchema } from './schema/user.schema';
import { UsersService } from './users.service';
import { UsersController } from './users.controller';
import { SendImagesController } from './send-images/send-images.controller';
// Users feature module: registers the User schema with Mongoose and wires
// the users and image-upload controllers around the shared UsersService.
@Module({
  imports: [
    // forFeatureAsync builds the schema via a factory (a plain passthrough
    // here, but the factory form allows attaching hooks/plugins later).
    MongooseModule.forFeatureAsync([
      { name: USER.name, useFactory: () => UserSchema },
    ])
  ],
  providers: [UsersService],
  // Exported so other modules can inject UsersService.
  exports: [UsersService],
  controllers: [UsersController, SendImagesController],
})
export class UsersModule {}
|
/**
 * Returns the extension of the file named by `filePath` (without the dot),
 * or '' when there is none.
 *
 * BUG FIX: the extension is now taken from the basename only. Previously a
 * dot inside a directory name leaked through — e.g. "/a.b/file" returned
 * "b/file" instead of "". Dotfiles (".gitignore") and trailing dots still
 * yield '' as before.
 */
function getFileExtension(filePath: string): string {
    // Strip the directory part (handles both '/' and '\' separators).
    const sepIndex = Math.max(filePath.lastIndexOf('/'), filePath.lastIndexOf('\\'));
    const baseName = filePath.slice(sepIndex + 1);
    const lastDotIndex = baseName.lastIndexOf('.');
    if (lastDotIndex <= 0 || lastDotIndex === baseName.length - 1) {
        return ''; // No valid file extension found
    }
    return baseName.slice(lastDotIndex + 1);
}
// Test cases
console.log(getFileExtension("/path/to/file/document.pdf")); // Output: "pdf"
console.log(getFileExtension("/path/to/another/file/.gitignore")); // Output: ""
|
#!/bin/bash
# Launch the triggerfs command-line client located in the current directory.
./triggerfs-cli
|
<gh_stars>100-1000
// https://uva.onlinejudge.org/external/6/657.pdf
#include<bits/stdc++.h>
using namespace std;
using vs=vector<string>;
using vi=vector<int>;
using vvi=vector<vi>;
// UVA 657 (linked above): each throw is an n x m grid where '*'/'X' cells
// form dice surfaces and 'X' cells form the dots on them. Two flood fills:
// first label whole dice, then label dot clusters, then count how many
// distinct dot clusters sit on each die and print the counts sorted.
int main(){
    ios::sync_with_stdio(0);
    cin.tie(0);
    for(int t=1;;t++){
        int n,m;
        cin>>m>>n;  // width m, then height n; "0 0" terminates the input
        if(!n)break;
        vs a(n);
        for(int i=0;i<n;i++)cin>>a[i];
        // s[i][j] = 1-based id of the die component ('*' or 'X') at (i,j);
        // 0 means unvisited/background.
        vvi s(n,vi(m));
        function<void(int,int,int)>dfs=[&](int i,int j,int k){
            s[i][j]=k;
            // 4-neighborhood offsets (up, left, right, down).
            int y[]={-1,0,0,1};
            int x[]={0,-1,1,0};
            for(int l=0;l<4;l++){
                int u=i+y[l],v=j+x[l];
                if(u>=0&&u<n&&v>=0&&v<m&&!s[u][v]&&(a[u][v]=='*'||a[u][v]=='X'))
                    dfs(u,v,k);
            }
        };
        // Label every die component; k ends at (number of dice) + 1.
        int k=1;
        for(int i=0;i<n;i++)
            for(int j=0;j<m;j++)
                if(!s[i][j]&&(a[i][j]=='*'||a[i][j]=='X'))
                    dfs(i,j,k++);
        // r[i][j] = 1-based id of the dot component ('X' only) at (i,j).
        vvi r(n,vi(m));
        function<void(int,int,int)>dfs2=[&](int i,int j,int k){
            r[i][j]=k;
            int y[]={-1,0,0,1};
            int x[]={0,-1,1,0};
            for(int l=0;l<4;l++){
                int u=i+y[l],v=j+x[l];
                if(u>=0&&u<n&&v>=0&&v<m&&!r[u][v]&&a[u][v]=='X')
                    dfs2(u,v,k);
            }
        };
        // Label every dot component; l ends at (number of dots) + 1.
        int l=1;
        for(int i=0;i<n;i++)
            for(int j=0;j<m;j++)
                if(!r[i][j]&&a[i][j]=='X')
                    dfs2(i,j,l++);
        // b[die id] = number of distinct dot components on that die;
        // c[dot id] marks dot components already counted (each once).
        vi b(k),c(l);
        for(int i=0;i<n;i++)
            for(int j=0;j<m;j++)
                if(r[i][j]&&!c[r[i][j]]){
                    c[r[i][j]]=1;
                    b[s[i][j]]++;  // the dot lies on die s[i][j]
                }
        // Output: counts in non-decreasing order, space-separated.
        sort(b.begin(),b.end());
        cout<<"Throw "<<t<<"\n";
        // " \n"[i==k-1] picks ' ' between values and '\n' after the last.
        for(int i=1;i<k;i++)cout<<b[i]<<" \n"[i==k-1];
        cout<<"\n";
    }
}
|
#!/bin/sh
# Stop the "linode-manager" process managed by forever (Node.js process manager).
forever stop linode-manager
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import {Like, Repository} from 'typeorm';
import {DesignerCollection} from "./designer-collection.entity";
import {IconTwin} from "../icon-twins/icon-twin.entity";
/**
 * CRUD service around the DesignerCollection TypeORM repository.
 */
@Injectable()
export class DesignerCollectionService {
    constructor(@InjectRepository(DesignerCollection) private designerCollectionRepository: Repository<DesignerCollection>) { }

    /** Finds a collection by exact name (undefined when absent). */
    async findDesignerCollection(name: string): Promise<DesignerCollection> {
        // The previous `new Promise(resolve => ...)` wrapper was redundant:
        // an async function already returns its value wrapped in a promise.
        return this.designerCollectionRepository.findOne({ name });
    }

    /** Finds a collection by primary key (undefined when absent). */
    async findById(id: number): Promise<DesignerCollection> {
        return this.designerCollectionRepository.findOne({ id });
    }

    /** Lists collections, optionally filtered by a name substring. */
    async getDesignerCollections(name: string): Promise<DesignerCollection[]> {
        if (name) {
            return this.designerCollectionRepository.find({
                where: [{ name: Like(`%${name}%`) }],
            });
        }
        return this.designerCollectionRepository.find();
    }

    /** Fetches a single collection by id. */
    async getDesignerCollection(id: number): Promise<DesignerCollection> {
        return this.designerCollectionRepository.findOne(id);
    }

    /**
     * Creates a collection with the given name.
     * NOTE(review): the `designerCollection` parameter is unused (kept for
     * interface compatibility) — only `name` is persisted; confirm intent.
     */
    async createDesignerCollection(name: string, designerCollection: DesignerCollection) {
        const newDesignerCollection = new DesignerCollection();
        newDesignerCollection.name = name;
        return this.designerCollectionRepository.save(newDesignerCollection);
    }

    /**
     * Updates the listed fields and returns the refreshed row.
     * BUG FIX: the update was not awaited, so the follow-up findOne could
     * race it and return stale data.
     */
    async updateDesignerCollection(state: boolean, owner_id: number, license_type: string, name: string, id: number, designerCollection: DesignerCollection) {
        await this.designerCollectionRepository.update({ id: id }, { state: state, name: name, license_type: license_type, owner_id: owner_id });
        return this.designerCollectionRepository.findOne({ id });
    }

    /**
     * Soft-deletes a collection by flagging is_deleted.
     * BUG FIX: await the update before re-reading (same race as above).
     */
    async deleteDesignerCollection(id: number) {
        await this.designerCollectionRepository.update({ id: id }, { is_deleted: true });
        return this.designerCollectionRepository.find({ id });
    }
}
|
<gh_stars>1-10
/* test4033.exec.cpp */
//----------------------------------------------------------------------------------------
//
// Project: CCore 3.50
//
// Tag: HCore
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2017 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <CCore/test/test.h>
#include <CCore/inc/FileSystem.h>
namespace App {

namespace Private_4033 {

} // namespace Private_4033

using namespace Private_4033;

/* Testit<4033> */

// Human-readable name registered for test number 4033.
template<>
const char *const Testit<4033>::Name="Test4033 exec";

// Runs the test: launches an external executable through the FileSystem
// abstraction. The argument string contains non-ASCII (Cyrillic) text,
// presumably to exercise argument encoding — confirm against exectest.
template<>
bool Testit<4033>::Main()
 {
  FileSystem fs;

  fs.exec("."_c,"../../../tools/PTPServer/root/exectest.exe"_c,"ключ гайка молоток"_c);

  return true;
 }

} // namespace App
|
# Function to flatten a nested dictionary
def flatten_dictionary(dictionary, flattened=None):
    """Flatten a nested dictionary into a single-level dictionary.

    Nested keys are joined with a dot: ``{"a": {"b": 1}}`` -> ``{"a.b": 1}``.

    Args:
        dictionary: The (possibly nested) dict to flatten. Keys are assumed
            to be strings, since nested keys are joined with ``"."``.
        flattened: Optional dict to accumulate results into; a new dict is
            created when omitted.

    Returns:
        The flattened dict (the same object passed as ``flattened`` if given).
    """
    # PEP 8: compare to None with `is`, not `==`.
    if flattened is None:
        flattened = {}
    for key, value in dictionary.items():
        if not isinstance(value, dict):
            # Leaf value: store it under the current key.
            flattened[key] = value
        else:
            # Sub-dictionary: flatten it recursively, then prefix its keys
            # with the current key and a dot separator.
            for sub_key, sub_value in flatten_dictionary(value).items():
                flattened[key + "." + sub_key] = sub_value
    return flattened
|
def replace_vowels(sentence):
    """Return a copy of ``sentence`` with every vowel replaced by '_'.

    Vowels are matched case-insensitively (both 'a' and 'A' are replaced).
    All other characters are kept unchanged.
    """
    vowels = "aeiou"
    return "".join("_" if ch.lower() in vowels else ch for ch in sentence)
# Test code (demo invocation; runs at import time)
result = replace_vowels("The quick brown fox jumps over the lazy dog.")
print(result)  # Output: Th_ q__ck br_wn f_x j_mps _v_r th_ l_zy d_g.
|
/*
Jameleon - An automation testing tool..
Copyright (C) 2003-2007 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.util;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import net.sf.jameleon.bean.FunctionalPoint;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
 * JUnit 3 tests for InstanceSerializer: round-trips a FunctionalPoint
 * through serialization, covering auto-created directories, relative paths,
 * input streams, and non-ASCII (UTF-8) content.
 */
public class InstanceSerializerTest extends TestCase {
    // File name used for the serialization fixtures created by the tests.
    protected final static String SER_FILE = "foobarFile.txt";
    // Source directory of this test, used to exercise a longer relative path.
    protected final static String PACKAGE_DIR = "tst/java/net/sf/jameleon/util/";
    // Fixture object that gets serialized/deserialized in each test.
    protected FunctionalPoint fp;

    public InstanceSerializerTest( String name ) {
        super( name );
    }

    //JUnit Methods
    public static void main(String args[]) {
        junit.textui.TestRunner.run( suite() );
    }

    public static Test suite() {
        return new TestSuite( InstanceSerializerTest.class );
    }

    // Creates the scratch directory and a fresh fixture before each test.
    public void setUp() {
        File dir = new File("tst/_tmp");
        dir.mkdir();
        fp = new FunctionalPoint();
    }

    // Deletes fixture files from both candidate locations after each test.
    public void tearDown() {
        File f = new File(SER_FILE);
        f.delete();
        f = new File(PACKAGE_DIR+SER_FILE);
        f.delete();
        fp = null;
    }

    // Serializing into a directory that does not exist yet must succeed:
    // the serializer is expected to create missing parent directories.
    public void testSerializeWithUncreatedDir() throws IOException{
        File f = new File(JameleonUtility.fixFileSeparators("tst/_tmp/serialize/uncreated/someFile.txt"));
        try{
            InstanceSerializer.serialize(fp, f);
        }catch (FileNotFoundException fnfe){
            fail("The directory should be auto-created");
        }finally{
            f.delete();
        }
    }

    // Serializing to a bare file name (no parent directory) must not throw.
    public void testSerializeWithNoParentDir() throws Exception{
        File f = new File("uncreatedFile.txt");
        try{
            InstanceSerializer.serialize(fp, f);
        }finally{
            f.delete();
        }
    }

    // Round-trip via a file path: author and steps must survive.
    public void testDeserializeFunctionalPoint()
        throws IOException, ClassNotFoundException{
        fp.setAuthor("Kay");
        fp.addStep("Serialize Me");
        File f = new File(JameleonUtility.fixFileSeparators("tst/_tmp/"+SER_FILE));
        InstanceSerializer.serialize(fp, f);
        //Just to be sure.
        fp = null;
        fp = (FunctionalPoint)InstanceSerializer.deserialize(JameleonUtility.fixFileSeparators("tst/_tmp/"+SER_FILE));
        assertNotNull("Object should not be null", fp);
        assertEquals("Object author is not the same", "Kay", fp.getAuthor());
        assertEquals("Object steps", 1, fp.getSteps().size());
    }

    // Round-trip via an InputStream instead of a path.
    public void testDeserializeFunctionalPointFromInputStream()
        throws IOException, ClassNotFoundException{
        fp.setAuthor("Kay");
        fp.addStep("Serialize Me");
        File f = new File(JameleonUtility.fixFileSeparators(PACKAGE_DIR+SER_FILE));
        InstanceSerializer.serialize(fp, f);
        assertTrue(f.getAbsolutePath() + " does not exist", f.exists());
        //Just to be sure.
        fp = null;
        FileInputStream fis = new FileInputStream(f);
        fp = (FunctionalPoint)InstanceSerializer.deserialize(fis);
        assertNotNull("Object should not be null", fp);
        assertEquals("Object author", "Kay", fp.getAuthor());
        assertEquals("Object steps", 1, fp.getSteps().size());
    }

    // Round-trip with non-ASCII step text to verify UTF-8 survives.
    public void testDeserializeFunctionalPointWithUTF8Characters()
        throws IOException, ClassNotFoundException{
        fp.setAuthor("Christian");
        fp.addStep("한굴 [Korean]");
        File f = new File(JameleonUtility.fixFileSeparators("tst/_tmp/"+SER_FILE));
        InstanceSerializer.serialize(fp, f);
        //Just to be sure.
        fp = null;
        fp = (FunctionalPoint)InstanceSerializer.deserialize(JameleonUtility.fixFileSeparators("tst/_tmp/"+SER_FILE));
        assertNotNull("Object should not be null", fp);
        assertEquals("Object author is not the same", "Christian", fp.getAuthor());
        assertEquals("Object steps", 1, fp.getSteps().size());
        assertEquals("Steps text", "한굴 [Korean]", fp.getSteps().get(0));
    }
}
|
#pragma once

#include <utility>

// Token-pasting helper used to build unique identifier names.
#define CONCAT(left, right) left##right

#if defined(__COUNTER__)
#define MAKE_UNIQUE_NAME CONCAT(__defer__, __COUNTER__)
#elif defined(__LINE__)
#define MAKE_UNIQUE_NAME CONCAT(__defer__, __LINE__)
#else
#error The __COUNTER__ and __LINE__ directives are not defined.
#endif

// Runs `code` when the enclosing scope exits (Go-style defer).
#define DEFER(code) auto MAKE_UNIQUE_NAME = ::emptify::Defer([&] { code; });

namespace emptify
{
    // Scope guard that invokes the stored callable from its destructor.
    //
    // Fix over the previous version: the class was implicitly copyable and
    // movable with a default destructor, so copying a guard ran the callable
    // twice and a moved-from guard still fired on destruction. Copying and
    // assignment are now deleted; moving transfers responsibility for the
    // call via the `active_` flag.
    template <typename Function>
    class Deferral
    {
    private:
        Function f_;
        bool active_ = true;  // false once moved-from; suppresses the call

    public:
        explicit Deferral(Function&& f) noexcept
            : f_(std::forward<Function>(f))
        {
        }

        Deferral(Deferral&& other) noexcept
            : f_(std::move(other.f_)), active_(other.active_)
        {
            other.active_ = false;
        }

        Deferral(const Deferral&) = delete;
        Deferral& operator=(const Deferral&) = delete;
        Deferral& operator=(Deferral&&) = delete;

        ~Deferral()
        {
            if (active_) {
                f_();
            }
        }
    };

    // Factory so the callable's type is deduced; std::forward preserves the
    // value category of `f` when handing it to the Deferral constructor.
    template <typename Function>
    Deferral<Function> Defer(Function&& f) noexcept
    {
        return Deferral<Function>(std::forward<Function>(f));
    }
}
|
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.query.impl;
import com.hazelcast.nio.serialization.Data;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Store indexes out of turn.
 * <p>
 * Maps each indexed attribute value to the entries holding that value; no
 * ordering is maintained, so range queries scan the whole map. Entries whose
 * indexed value is null are kept in a separate map. The copy-on-write vs
 * in-place mutation strategy is selected once in the constructor via
 * {@link IndexCopyBehavior}.
 */
public class UnsortedIndexStore extends BaseIndexStore {

    // Entries whose indexed attribute is null. Volatile because the
    // copy-on-write functors replace the whole map reference.
    private volatile Map<Data, QueryableEntry> recordsWithNullValue;
    private final ConcurrentMap<Comparable, Map<Data, QueryableEntry>> recordMap
            = new ConcurrentHashMap<Comparable, Map<Data, QueryableEntry>>(1000);

    private final IndexFunctor<Comparable, QueryableEntry> addFunctor;
    private final IndexFunctor<Comparable, Data> removeFunctor;

    public UnsortedIndexStore(IndexCopyBehavior copyOn) {
        super(copyOn);
        if (copyOn == IndexCopyBehavior.COPY_ON_WRITE) {
            addFunctor = new CopyOnWriteAddFunctor();
            removeFunctor = new CopyOnWriteRemoveFunctor();
            recordsWithNullValue = Collections.emptyMap();
        } else {
            addFunctor = new AddFunctor();
            removeFunctor = new RemoveFunctor();
            recordsWithNullValue = new ConcurrentHashMap<Data, QueryableEntry>();
        }
    }

    @Override
    Object newIndexInternal(Comparable newValue, QueryableEntry record) {
        return addFunctor.invoke(newValue, record);
    }

    @Override
    Object removeIndexInternal(Comparable oldValue, Data recordKey) {
        return removeFunctor.invoke(oldValue, recordKey);
    }

    @Override
    public void clear() {
        takeWriteLock();
        try {
            recordsWithNullValue.clear();
            recordMap.clear();
        } finally {
            releaseWriteLock();
        }
    }

    /**
     * Returns all entries whose value lies in the closed interval
     * [from, to]. The bounds are swapped if given in descending order.
     * Runs a full scan of the value map since no ordering is kept.
     */
    @Override
    public Set<QueryableEntry> getSubRecordsBetween(Comparable from, Comparable to) {
        takeReadLock();
        try {
            MultiResultSet results = createMultiResultSet();
            Comparable paramFrom = from;
            Comparable paramTo = to;
            int trend = paramFrom.compareTo(paramTo);
            if (trend == 0) {
                // Degenerate range [x, x]: a single exact lookup suffices.
                Map<Data, QueryableEntry> records = recordMap.get(paramFrom);
                if (records != null) {
                    copyToMultiResultSet(results, records);
                }
                return results;
            }
            if (trend < 0) {
                Comparable oldFrom = paramFrom;
                paramFrom = to;
                paramTo = oldFrom;
            }
            // After the swap above, paramFrom >= paramTo; collect all values
            // between them (inclusive on both ends).
            for (Map.Entry<Comparable, Map<Data, QueryableEntry>> recordMapEntry : recordMap.entrySet()) {
                Comparable value = recordMapEntry.getKey();
                if (value.compareTo(paramFrom) <= 0 && value.compareTo(paramTo) >= 0) {
                    Map<Data, QueryableEntry> records = recordMapEntry.getValue();
                    if (records != null) {
                        copyToMultiResultSet(results, records);
                    }
                }
            }
            return results;
        } finally {
            releaseReadLock();
        }
    }

    /**
     * Returns all entries matching the comparison against searchedValue
     * (e.g. LESSER means entry value &lt; searchedValue). Full scan.
     */
    @Override
    public Set<QueryableEntry> getSubRecords(ComparisonType comparisonType, Comparable searchedValue) {
        takeReadLock();
        try {
            MultiResultSet results = createMultiResultSet();
            for (Map.Entry<Comparable, Map<Data, QueryableEntry>> recordMapEntry : recordMap.entrySet()) {
                Comparable value = recordMapEntry.getKey();
                boolean valid;
                // Note: result = searchedValue.compareTo(value), so the
                // comparisons below are inverted relative to the entry value.
                int result = searchedValue.compareTo(value);
                switch (comparisonType) {
                    case LESSER:
                        valid = result > 0;
                        break;
                    case LESSER_EQUAL:
                        valid = result >= 0;
                        break;
                    case GREATER:
                        valid = result < 0;
                        break;
                    case GREATER_EQUAL:
                        valid = result <= 0;
                        break;
                    case NOT_EQUAL:
                        valid = result != 0;
                        break;
                    default:
                        throw new IllegalStateException("Unrecognized comparisonType: " + comparisonType);
                }
                if (valid) {
                    Map<Data, QueryableEntry> records = recordMapEntry.getValue();
                    if (records != null) {
                        copyToMultiResultSet(results, records);
                    }
                }
            }
            return results;
        } finally {
            releaseReadLock();
        }
    }

    /** Returns the entries holding exactly the given value (null-aware). */
    @Override
    public Set<QueryableEntry> getRecords(Comparable value) {
        takeReadLock();
        try {
            if (value instanceof IndexImpl.NullObject) {
                return toSingleResultSet(recordsWithNullValue);
            } else {
                return toSingleResultSet(recordMap.get(value));
            }
        } finally {
            releaseReadLock();
        }
    }

    /** Returns the union of entries holding any of the given values. */
    @Override
    public Set<QueryableEntry> getRecords(Set<Comparable> values) {
        takeReadLock();
        try {
            MultiResultSet results = createMultiResultSet();
            for (Comparable value : values) {
                Map<Data, QueryableEntry> records;
                if (value instanceof IndexImpl.NullObject) {
                    records = recordsWithNullValue;
                } else {
                    records = recordMap.get(value);
                }
                if (records != null) {
                    copyToMultiResultSet(results, records);
                }
            }
            return results;
        } finally {
            releaseReadLock();
        }
    }

    /**
     * Adds entry to the given index map without copying it.
     * Needs to be invoked in a thread-safe way.
     *
     * @see IndexCopyBehavior
     */
    private class AddFunctor implements IndexFunctor<Comparable, QueryableEntry> {
        @Override
        public Object invoke(Comparable attribute, QueryableEntry entry) {
            if (attribute instanceof IndexImpl.NullObject) {
                return recordsWithNullValue.put(entry.getKeyData(), entry);
            } else {
                Map<Data, QueryableEntry> records = recordMap.get(attribute);
                if (records == null) {
                    records = new ConcurrentHashMap<Data, QueryableEntry>(1, LOAD_FACTOR, 1);
                    recordMap.put(attribute, records);
                }
                return records.put(entry.getKeyData(), entry);
            }
        }
    }

    /**
     * Adds entry to the given index map copying it to secure exclusive access.
     * Needs to be invoked in a thread-safe way.
     *
     * @see IndexCopyBehavior
     */
    private class CopyOnWriteAddFunctor implements IndexFunctor<Comparable, QueryableEntry> {
        @Override
        public Object invoke(Comparable attribute, QueryableEntry entry) {
            Object oldValue;
            if (attribute instanceof IndexImpl.NullObject) {
                HashMap<Data, QueryableEntry> copy = new HashMap<Data, QueryableEntry>(recordsWithNullValue);
                oldValue = copy.put(entry.getKeyData(), entry);
                recordsWithNullValue = copy;
            } else {
                Map<Data, QueryableEntry> records = recordMap.get(attribute);
                if (records == null) {
                    records = new HashMap<Data, QueryableEntry>();
                }
                records = new HashMap<Data, QueryableEntry>(records);
                oldValue = records.put(entry.getKeyData(), entry);
                recordMap.put(attribute, records);
            }
            return oldValue;
        }
    }

    /**
     * Removes entry from the given index map without copying it.
     * Needs to be invoked in a thread-safe way.
     *
     * @see IndexCopyBehavior
     */
    private class RemoveFunctor implements IndexFunctor<Comparable, Data> {
        @Override
        public Object invoke(Comparable attribute, Data indexKey) {
            Object oldValue;
            if (attribute instanceof IndexImpl.NullObject) {
                oldValue = recordsWithNullValue.remove(indexKey);
            } else {
                Map<Data, QueryableEntry> records = recordMap.get(attribute);
                if (records != null) {
                    oldValue = records.remove(indexKey);
                    // Consistency fix: use isEmpty() like CopyOnWriteRemoveFunctor
                    // (was records.size() == 0).
                    if (records.isEmpty()) {
                        recordMap.remove(attribute);
                    }
                } else {
                    oldValue = null;
                }
            }
            return oldValue;
        }
    }

    /**
     * Removes entry from the given index map copying it to secure exclusive access.
     * Needs to be invoked in a thread-safe way.
     *
     * @see IndexCopyBehavior
     */
    private class CopyOnWriteRemoveFunctor implements IndexFunctor<Comparable, Data> {
        @Override
        public Object invoke(Comparable attribute, Data indexKey) {
            Object oldValue;
            if (attribute instanceof IndexImpl.NullObject) {
                HashMap<Data, QueryableEntry> copy = new HashMap<Data, QueryableEntry>(recordsWithNullValue);
                oldValue = copy.remove(indexKey);
                recordsWithNullValue = copy;
            } else {
                Map<Data, QueryableEntry> records = recordMap.get(attribute);
                if (records != null) {
                    records = new HashMap<Data, QueryableEntry>(records);
                    oldValue = records.remove(indexKey);
                    if (records.isEmpty()) {
                        recordMap.remove(attribute);
                    } else {
                        recordMap.put(attribute, records);
                    }
                } else {
                    oldValue = null;
                }
            }
            return oldValue;
        }
    }

    @Override
    public String toString() {
        return "UnsortedIndexStore{"
                + "recordMap=" + recordMap.size()
                + '}';
    }
}
|
<filename>src/trace/listener.ts
import { Context } from "aws-lambda";
import {
addLambdaFunctionTagsToXray,
TraceContext,
readStepFunctionContextFromEvent,
StepFunctionContext,
} from "./context";
import { patchHttp, unpatchHttp } from "./patch-http";
import { TraceContextService } from "./trace-context-service";
import { extractTriggerTags, extractHTTPStatusCodeTag } from "./trigger";
import { logDebug, tagObject } from "../utils";
import { didFunctionColdStart } from "../utils/cold-start";
import { datadogLambdaVersion } from "../constants";
import { Source, ddtraceVersion } from "./constants";
import { patchConsole } from "./patch-console";
import { SpanContext, TraceOptions, TracerWrapper } from "./tracer-wrapper";
import { SpanInferrer } from "./span-inferrer";
import { SpanWrapper } from "./span-wrapper";
/** Custom extractor turning an incoming event + context into a TraceContext. */
export type TraceExtractor = (event: any, context: Context) => TraceContext;

/** Configuration for the tracing behavior of the Lambda wrapper. */
export interface TraceConfig {
  /**
   * Whether to automatically patch all outgoing http requests with Datadog's hybrid tracing headers.
   * @default true.
   */
  autoPatchHTTP: boolean;
  /**
   * Whether to capture the lambda payload and response in Datadog.
   */
  captureLambdaPayload: boolean;
  /**
   * Whether to create inferred spans for managed services
   */
  createInferredSpan: boolean;
  /**
   * Whether to automatically patch console.log with Datadog's tracing ids.
   */
  injectLogContext: boolean;
  /**
   * Whether to merge traces produced from dd-trace with X-Ray
   * @default false
   */
  mergeDatadogXrayTraces: boolean;
  /**
   * Custom trace extractor function, used in place of the default header
   * extraction when provided.
   */
  traceExtractor?: TraceExtractor;
}
/**
 * Orchestrates tracing across one Lambda invocation: patches console/http at
 * start, tags the spans at the end of user code, and creates the aws.lambda
 * span around the handler via onWrap.
 */
export class TraceListener {
  private contextService: TraceContextService;
  private context?: Context;
  private stepFunctionContext?: StepFunctionContext;
  private tracerWrapper: TracerWrapper;
  private inferrer: SpanInferrer;
  // Span inferred from the triggering managed service (SQS, API GW, ...).
  private inferredSpan?: SpanWrapper;
  // Wrapper around the tracer's current span, captured in onEndingInvocation.
  private wrappedCurrentSpan?: SpanWrapper;
  // Tags extracted from the trigger event; also forwarded to X-Ray.
  private triggerTags?: { [key: string]: string };
  private lambdaSpanParentContext?: SpanContext;

  public get currentTraceHeaders() {
    return this.contextService.currentTraceHeaders;
  }

  constructor(private config: TraceConfig) {
    this.tracerWrapper = new TracerWrapper();
    this.contextService = new TraceContextService(this.tracerWrapper);
    this.inferrer = new SpanInferrer(this.tracerWrapper);
  }

  /**
   * Prepares tracing state at the start of an invocation: optionally patches
   * console and http, extracts the parent trace context, creates the inferred
   * span, and reads trigger/step-function tags from the event.
   */
  public onStartInvocation(event: any, context: Context) {
    const tracerInitialized = this.tracerWrapper.isTracerAvailable;
    if (this.config.injectLogContext) {
      patchConsole(console, this.contextService);
      logDebug("Patched console output with trace context");
    } else {
      logDebug("Didn't patch console output with trace context");
    }
    // If the DD tracer is initialized then it's doing http patching so we don't again here
    if (this.config.autoPatchHTTP && !tracerInitialized) {
      logDebug("Patching HTTP libraries");
      patchHttp(this.contextService);
    } else {
      logDebug("Not patching HTTP libraries", { autoPatchHTTP: this.config.autoPatchHTTP, tracerInitialized });
    }
    const rootTraceHeaders = this.contextService.extractHeadersFromContext(event, context, this.config.traceExtractor);
    // The aws.lambda span needs to have a parented to the Datadog trace context from the
    // incoming event if available or the X-Ray trace context if hybrid tracing is enabled
    let parentSpanContext: SpanContext | undefined;
    if (this.contextService.traceSource === Source.Event || this.config.mergeDatadogXrayTraces) {
      parentSpanContext = rootTraceHeaders ? this.tracerWrapper.extract(rootTraceHeaders) ?? undefined : undefined;
      logDebug("Attempting to find parent for the aws.lambda span");
    } else {
      logDebug("Didn't attempt to find parent for aws.lambda span", {
        mergeDatadogXrayTraces: this.config.mergeDatadogXrayTraces,
        traceSource: this.contextService.traceSource,
      });
    }
    if (this.config.createInferredSpan) {
      this.inferredSpan = this.inferrer.createInferredSpan(event, context, parentSpanContext);
    }
    // The inferred span (if any) sits between the trigger and the lambda span.
    this.lambdaSpanParentContext = this.inferredSpan?.span || parentSpanContext;
    this.context = context;
    this.triggerTags = extractTriggerTags(event, context);
    this.stepFunctionContext = readStepFunctionContextFromEvent(event);
  }

  /**
   * onEndingInvocation runs after the user function has returned
   * but before the wrapped function has returned
   * this is needed to apply tags to the lambda span
   * before it is flushed to logs or extension
   *
   * @param event
   * @param result
   * @param shouldTagPayload
   */
  public onEndingInvocation(event: any, result: any, shouldTagPayload = false) {
    // Guard clause if something has gone horribly wrong
    // so we won't crash user code.
    if (!this.tracerWrapper.currentSpan) return;
    this.wrappedCurrentSpan = new SpanWrapper(this.tracerWrapper.currentSpan, {});
    if (shouldTagPayload) {
      tagObject(this.tracerWrapper.currentSpan, "function.request", event);
      tagObject(this.tracerWrapper.currentSpan, "function.response", result);
    }
    if (this.triggerTags) {
      const statusCode = extractHTTPStatusCodeTag(this.triggerTags, result);
      // Store the status tag in the listener to send to Xray on invocation completion
      this.triggerTags["http.status_code"] = statusCode!;
      if (this.tracerWrapper.currentSpan) {
        this.tracerWrapper.currentSpan.setTag("http.status_code", statusCode);
      }
      if (this.inferredSpan) {
        this.inferredSpan.setTag("http.status_code", statusCode);
      }
    }
  }

  /** Final cleanup: forwards trigger tags to X-Ray, unpatches http, and
   * finishes the inferred span. */
  public async onCompleteInvocation() {
    // Create a new dummy Datadog subsegment for function trigger tags so we
    // can attach them to X-Ray spans when hybrid tracing is used
    if (this.triggerTags) {
      addLambdaFunctionTagsToXray(this.triggerTags);
    }
    // If the DD tracer is initialized it manages patching of the http lib on its own
    const tracerInitialized = this.tracerWrapper.isTracerAvailable;
    if (this.config.autoPatchHTTP && !tracerInitialized) {
      logDebug("Unpatching HTTP libraries");
      unpatchHttp();
    }
    if (this.inferredSpan) {
      logDebug("Finishing inferred span");
      // Async-triggered inferred spans end when the lambda span starts;
      // sync ones end now.
      const finishTime = this.inferredSpan.isAsync() ? this.wrappedCurrentSpan?.startTime() : Date.now();
      this.inferredSpan.finish(finishTime);
    }
  }

  /** Wraps the handler in the aws.lambda span, tagging it with function
   * metadata, trigger tags and step-function context. */
  public onWrap<T = (...args: any[]) => any>(func: T): T {
    const options: TraceOptions = {};
    if (this.context) {
      logDebug("Creating the aws.lambda span");
      const functionArn = (this.context.invokedFunctionArn ?? "").toLowerCase();
      // ARN format: arn:aws:lambda:region:account:function:name[:alias];
      // 8 parts means an alias/version suffix is present — confirm upstream.
      const tk = functionArn.split(":");
      options.tags = {
        cold_start: didFunctionColdStart(),
        function_arn: tk.length > 7 ? tk.slice(0, 7).join(":") : functionArn,
        function_version: tk.length > 7 ? tk[7] : "$LATEST",
        request_id: this.context.awsRequestId,
        resource_names: this.context.functionName,
        functionname: this.context?.functionName?.toLowerCase(),
        datadog_lambda: datadogLambdaVersion,
        dd_trace: ddtraceVersion,
      };
      if (
        (this.contextService.traceSource === Source.Xray && this.config.mergeDatadogXrayTraces) ||
        this.contextService.traceSource === Source.Event
      ) {
        options.tags["_dd.parent_source"] = this.contextService.traceSource;
      }
      if (this.triggerTags) {
        options.tags = { ...options.tags, ...this.triggerTags };
      }
    }
    if (this.stepFunctionContext) {
      logDebug("Applying step function context to the aws.lambda span");
      options.tags = {
        ...options.tags,
        ...this.stepFunctionContext,
      };
    }
    if (this.lambdaSpanParentContext) {
      options.childOf = this.lambdaSpanParentContext;
    }
    options.type = "serverless";
    options.service = "aws.lambda";
    if (this.context) {
      options.resource = this.context.functionName;
    }
    return this.tracerWrapper.wrap("aws.lambda", options, func);
  }
}
|
<gh_stars>10-100
package com.semmle.ts.ast;
import com.semmle.js.ast.SourceLocation;
import com.semmle.js.ast.Visitor;
import java.util.List;
/** A union type such as <tt>number | string | boolean</tt>. */
public class UnionTypeExpr extends TypeExpression {
  /** The alternatives making up this union; never fewer than two. */
  private final List<ITypeExpression> elementTypes;

  public UnionTypeExpr(SourceLocation loc, List<ITypeExpression> members) {
    super("UnionTypeExpr", loc);
    this.elementTypes = members;
  }

  /** The members of the union; always contains at least two types. */
  public List<ITypeExpression> getElementTypes() {
    return elementTypes;
  }

  @Override
  public <C, R> R accept(Visitor<C, R> v, C c) {
    return v.visit(this, c);
  }
}
|
<filename>shared-types/transaction-category-mapping.d.ts
/**
 * Portion of a transaction attributed to a single category.
 */
interface ITransactionCategoryMapping {
  // Database identifier; presumably a Mongo ObjectId — TODO confirm.
  _id?: any;
  // Identifier of the owning user.
  owner?: string;
  // Amount of the transaction attributed to this category.
  amount: number;
  description?: string;
  // Identifier of the transaction this mapping belongs to.
  transaction_id?: string;
  // Identifier of the category the amount is mapped to.
  category_id: string;
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.bookOpen = void 0;
var bookOpen = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M2 3h6a4 4 0 0 1 4 4v14a3 3 0 0 0-3-3H2z"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M22 3h-6a4 4 0 0 0-4 4v14a3 3 0 0 1 3-3h7z"
},
"children": []
}],
"attribs": {
"fill": "none",
"stroke": "currentColor",
"stroke-width": "2",
"stroke-linecap": "round",
"stroke-linejoin": "round"
}
};
exports.bookOpen = bookOpen;
|
<filename>main.go
package main
import (
"flag"
"log"
"time"
"github.com/andrewloable/obs-html-recorder/config"
"github.com/andrewloable/obs-html-recorder/obs"
"github.com/andrewloable/obs-html-recorder/profile"
)
// main parses CLI flags describing the browser geometry, target URL and
// recording duration, then drives OBS to record the page and shuts it down.
func main() {
	widthFlag := flag.Int("w", 1920, "width of the browser")
	heightFlag := flag.Int("h", 1080, "height of the browser")
	urlFlag := flag.String("url", "https://fast.com", "the url of the html to be recorded")
	secondsFlag := flag.Int("s", 10, "time in seconds to record the html")
	flag.Parse()

	_, err := config.ReadConfig()
	if err != nil {
		log.Println(err)
		return
	}
	if config.AppConfig.IsReady {
		log.Println(config.AppConfig)
	}

	prof := profile.Profile{
		Width:  *widthFlag,
		Height: *heightFlag,
	}
	log.Println("set profile ", prof)

	client, err := obs.InitiateObsRecorder(prof)
	if err != nil {
		// Fix: bail out on a failed initiation. Previously execution fell
		// through and dereferenced the unusable client below.
		log.Println("initiation error ", err)
		return
	}

	log.Println("record html")
	obs.RecordHTML(client, *urlFlag, *secondsFlag, prof)
	// Give OBS a moment to flush the recording before disconnecting.
	time.Sleep(time.Second * 2)
	client.Disconnect()
	obs.TerminateObs()
}
|
<filename>spec/cloaked_spec.rb
# frozen_string_literal: true
# Specs for the Cloaked mixin: each PostWith* model declares a cloaked key
# with different options, and we assert the generated public id honors them.
RSpec.describe Cloaked do
  it 'has a version number' do
    expect(Cloaked::VERSION).not_to be nil
  end

  # Default options: key generated via SecureRandom.urlsafe_base64.
  describe 'with default values' do
    subject { PostWithDefaultOptions.new }
    before do
      # Stub the generator so the produced value is predictable.
      @stubbed_base64 = SecureRandom.urlsafe_base64(Cloaked::DEFAULT_SIZE)
      allow(SecureRandom).to receive(:urlsafe_base64).and_return(@stubbed_base64)
    end
    it 'generates random string for key with SecureRandom.urlsafe_base64' do
      # Generation happens during validation.
      subject.valid?
      expect(subject.public_id).to eq(@stubbed_base64)
    end
  end

  # method: :hex option switches the generator to SecureRandom.hex.
  describe 'with :hex method option' do
    subject { PostWithHexOption.new }
    before do
      @stubbed_hex = SecureRandom.hex(Cloaked::DEFAULT_SIZE)
      allow(SecureRandom).to receive(:hex).and_return(@stubbed_hex)
    end
    it 'generates random string for key with SecureRandom.hex' do
      subject.valid?
      expect(subject.public_id).to eq(@stubbed_hex)
    end
  end

  # method: :uuid option switches the generator to SecureRandom.uuid.
  describe 'with :uuid method option' do
    subject { PostWithUuidOption.new }
    before do
      @stubbed_uuid = SecureRandom.uuid
      allow(SecureRandom).to receive(:uuid).and_return(@stubbed_uuid)
    end
    it 'generates random string for key with SecureRandom.uuid' do
      subject.valid?
      expect(subject.public_id).to eq(@stubbed_uuid)
    end
  end

  # size option controls the generated string length.
  describe 'with size option' do
    subject { PostWithSizeOption.new }
    it 'generates random string with a custom length' do
      subject.valid?
      expect(subject.public_id.length).to eq(32)
    end
  end

  # prefix option prepends a fixed marker to the generated id.
  describe 'with prefix option' do
    subject { PostWithPrefixOption.new }
    it 'generates random string that starts with the prefix' do
      subject.valid?
      expect(subject.public_id).to start_with('CLK.')
    end
  end

  # Declaring several keys at once: each gets its own independent value.
  describe 'with an array of keys' do
    subject { PostWithArrayOfKeys.new }
    before { subject.valid? }
    it 'generates a random string for each key' do
      expect(subject.public_id).to be_present
      expect(subject.public_thread_id).to be_present
    end
    it 'generates unique values for each key' do
      expect(subject.public_id).not_to eq(subject.public_thread_id)
    end
    it 'generates accepts options' do
      expect(subject.public_id.length).to eq(36)
      expect(subject.public_thread_id.length).to eq(36)
    end
  end
end
|
#!/usr/bin/env bash
# XXX: this script is intended to be run from
# a fresh Digital Ocean droplet with Ubuntu
# upon its completion, you must either reset
# your terminal or run `source ~/.profile`

# Abort on the first failing command so a partial toolchain
# install (e.g. a failed download) does not go unnoticed.
set -e

# change this to a specific release or branch
BRANCH=master

sudo apt-get update -y
sudo apt-get upgrade -y
sudo apt-get install -y make

# get and unpack golang
# -f: fail on HTTP errors instead of saving an error page as the tarball
curl -fO https://storage.googleapis.com/golang/go1.10.linux-amd64.tar.gz
tar -xvf go1.10.linux-amd64.tar.gz

# move go binary and add to path
mv go /usr/local
echo "export PATH=\$PATH:/usr/local/go/bin" >> ~/.profile

# create the goApps directory, set GOPATH, and put it on PATH
mkdir goApps
echo "export GOPATH=/root/goApps" >> ~/.profile
echo "export PATH=\$PATH:\$GOPATH/bin" >> ~/.profile

source ~/.profile

# get the code and move into repo
REPO=github.com/cosmos/cosmos-sdk
go get $REPO
cd $GOPATH/src/$REPO

# build & install master
git checkout $BRANCH
make get_tools
make get_vendor_deps
make install
make install_examples
|
<reponame>vivekthangathurai/github-coverage-reporter-plugin<filename>src/main/java/io/jenkins/plugins/gcr/models/CoverageType.java
package io.jenkins.plugins.gcr.models;
import java.util.Arrays;
import java.util.stream.Stream;
/**
 * Supported coverage-report formats, keyed by the identifier string used in
 * job configuration.
 */
public enum CoverageType {

    JACOCO("jacoco"),
    COBERTURA("cobertura"),
    SONARQUBE("sonarqube");

    /** Stable configuration identifier for this coverage type (immutable). */
    private final String identifier;

    CoverageType(String identifier) {
        this.identifier = identifier;
    }

    public String getIdentifier() {
        return identifier;
    }

    /**
     * Looks up the type whose identifier equals the given string.
     *
     * @param identifier the identifier to match (case-sensitive)
     * @return the matching type, or {@code null} if none matches
     */
    public static CoverageType fromIdentifier(String identifier) {
        return Arrays.stream(CoverageType.values())
                .filter(c -> c.getIdentifier().equals(identifier))
                .findFirst()
                .orElse(null);
    }
}
|
<filename>Chapter10-AdvancedAWSCloudFormation/10-02-CR-Lambda-Function.js
/**
* This is a custom resource handler that creates an S3 bucket
* and then populates it with test data.
*/
var aws = require("aws-sdk");
var s3 = new aws.S3();
const SUCCESS = 'SUCCESS';
const FAILED = 'FAILED';
const KEY = 'test_data.csv';
// Custom-resource entry point. Dispatches on event.RequestType:
// Delete -> remove the test object then the bucket; Update -> no-op;
// otherwise (Create) -> create the bucket and upload the test CSV.
// Every path must end with sendResponse, or CloudFormation hangs
// until the stack operation times out.
exports.handler = function(event, context) {
    console.info('mycrlambda event', event);

    // When CloudFormation requests a delete,
    // remove the object and the bucket.
    if (event.RequestType == "Delete") {
        let params = {
            Bucket: event.ResourceProperties.BucketName,
            Key: KEY
        };

        s3.deleteObject(params, function(err, data) {
            if (err) {
                console.log(err, err.stack);
                sendResponse(event, context, FAILED);
            } else {
                console.log('Deleted object', data);

                // The object is removed first: a bucket must be empty
                // before it can be deleted.
                let params = {
                    Bucket: event.ResourceProperties.BucketName
                };

                s3.deleteBucket(params, function(err, data) {
                    if (err) {
                        console.log(err, err.stack);
                        sendResponse(event, context, FAILED);
                    } else {
                        console.log("Deleted bucket", data);
                        sendResponse(event, context, SUCCESS);
                    }
                });
            }
        });

        return;
    }

    if (event.RequestType == "Update") {
        // Nothing to do here
        sendResponse(event, context, SUCCESS);
        return;
    }

    // Create path: make the bucket, then populate it.
    var params = {
        Bucket: event.ResourceProperties.BucketName
    };

    s3.createBucket(params, function(err, data) {
        if (err) {
            console.log(err, err.stack);
            sendResponse(event, context, FAILED, data);
        } else {
            console.log('Created bucket ' +
                event.ResourceProperties.BucketName);

            // Now that we have created the bucket,
            // populate it with test data
            params = {
                Body: '1,\"A\"\n2,\"B\"\n3,\"C\"',
                Bucket: event.ResourceProperties.BucketName,
                Key: KEY
            };

            s3.putObject(params, function(err, data) {
                if (err) {
                    console.log(err, err.stack);
                    sendResponse(event, context, FAILED, data);
                } else {
                    console.log('Created object test_data.csv');
                    sendResponse(event, context, SUCCESS, data);
                }
            });
        }
    });
};
/**
 * Send a response to the pre-signed URL provided by CloudFormation.
 *
 * @param event   the custom-resource event (supplies ResponseURL, StackId, ...)
 * @param context Lambda context; context.done() is called when the PUT finishes
 * @param status  SUCCESS or FAILED
 * @param data    optional attributes made available via Fn::GetAtt
 */
function sendResponse(event, context, status, data) {
    var body = JSON.stringify({
        Status: status,
        Reason: "",
        PhysicalResourceId: context.logStreamName,
        StackId: event.StackId,
        RequestId: event.RequestId,
        LogicalResourceId: event.LogicalResourceId,
        Data: data
    });

    console.log("body:\n", body);

    var https = require("https");
    var url = require("url");

    var parsedUrl = url.parse(event.ResponseURL);
    var options = {
        hostname: parsedUrl.hostname,
        port: 443,
        path: parsedUrl.path,
        method: "PUT",
        headers: {
            "content-type": "",
            // Fix: use the UTF-8 byte length, not the string length.
            // body.length counts UTF-16 code units, so any multi-byte
            // character would make content-length too small and truncate
            // the response CloudFormation receives.
            "content-length": Buffer.byteLength(body)
        }
    };

    var request = https.request(options, function(response) {
        console.log("response.statusCode: " +
            response.statusCode);
        console.log("response.headers: " +
            JSON.stringify(response.headers));
        context.done();
    });

    request.on("error", function(error) {
        console.log("sendResponse Error:" + error);
        context.done();
    });

    request.write(body);
    request.end();
}
|
import * as vscode from "vscode";
import env from "@esbuild-env";
import { createWebviewManager, IWebviewManager } from "./webview-handler";
import { createEventHubAdapter } from "./events/event-manager";
import { Commands } from "./commands";
import { loadSnowpackConfig } from "./debug/snowpack-dev";
import { createCoreAPI } from "./modules/core-module";
import { createMessageHandler } from "./messages/message-manager";
import type { CoreEvents } from "./app/message-protocol";
import { createModuleManager } from "./modules/module-manager";
/**
 * Extension entry point: wires up the module manager, message/event plumbing
 * and the webview manager, then registers all extension commands. In dev mode
 * it additionally loads the snowpack dev-server config and opens the webview.
 */
export function activate(context: vscode.ExtensionContext) {
  const globalEventHubAdapter = createEventHubAdapter<CoreEvents>();
  const globalModuleManager = createModuleManager(
    createCoreAPI(context, globalEventHubAdapter)
  );
  const globalMessageHandler = createMessageHandler({
    moduleManager: globalModuleManager,
    eventAdapter: globalEventHubAdapter,
  });
  const webviewManager = createWebviewManager(
    "ui",
    "Extension UI of React",
    context
  );
  // Everything pushed here is disposed by VS Code on deactivation.
  context.subscriptions.push(webviewManager);
  context.subscriptions.push(globalEventHubAdapter);
  context.subscriptions.push(globalModuleManager.api.ScriptService);
  const { open: doOpen, reload, close } = webviewManager;
  const open = async function (this: IWebviewManager) {
    await globalModuleManager.api.ScriptService.check();
    doOpen.call(this);
    // Attach the message handler only on first open. (Fix: the original used
    // `handler ?? attach(...)` as a bare expression statement for this
    // null-check; an explicit `if` states the intent.)
    if (webviewManager.messageHandler == null) {
      webviewManager.attach(globalMessageHandler);
    }
    globalEventHubAdapter.attach(webviewManager.panel!.webview);
    webviewManager.onClose(() =>
      globalEventHubAdapter.detach(webviewManager.panel!.webview)
    );
  };
  context.subscriptions.push(
    vscode.commands.registerCommand(
      Commands.WebviewControll.Open,
      open.bind(webviewManager)
    )
  );
  context.subscriptions.push(
    vscode.commands.registerCommand(
      Commands.WebviewControll.Close,
      close.bind(webviewManager)
    )
  );
  context.subscriptions.push(
    vscode.commands.registerCommand(
      Commands.WebviewControll.Reload,
      reload.bind(webviewManager)
    )
  );
  context.subscriptions.push(
    vscode.commands.registerCommand(
      Commands.ScriptControl.ExecuteCurrentScript,
      () => globalModuleManager.api.ScriptService.executeCurrent()
    )
  );
  context.subscriptions.push(
    vscode.commands.registerCommand(
      Commands.ScriptControl.ForceCheckUserScriptsFolder,
      () => globalModuleManager.api.ScriptService.check(true)
    )
  );
  if (env.ENV === "dev") {
    loadSnowpackConfig(context).then((config) => {
      webviewManager.devServerConfig = {
        port: config.devOptions.port,
        hmrSocketPort: config.devOptions.hmrPort ?? config.devOptions.port,
      };
      vscode.commands.executeCommand(Commands.WebviewControll.Open).then(() => {
        console.log("Successfully opened webview");
      });
    });
  }
}
/**
 * Extension teardown hook. Intentionally empty: every resource created in
 * {@link activate} is registered on `context.subscriptions`, so the host
 * disposes them automatically.
 */
export function deactivate(): void {
  // Nothing to clean up manually.
}
|
def extract_license_info(file_path: str) -> str:
    """Extract the leading ``#``-comment header of a file as one string.

    Reads consecutive comment lines from the top of the file, strips the
    leading ``#`` markers and joins the text with single spaces. Stops at
    the first non-comment (or blank) line.

    Bug fixed: the previous version used ``line.replace('#', '')``, which
    removed EVERY ``#`` in the line — corrupting license text that itself
    contains a ``#`` (e.g. issue references). Only leading markers are
    stripped now.

    :param file_path: path of the file to inspect
    :return: the concatenated license header text ('' if none)
    """
    pieces = []
    with open(file_path, 'r') as file:
        # Stream line by line; no need to load the whole file into memory.
        for line in file:
            stripped = line.strip()
            if not stripped.startswith('#'):
                break  # the comment header has ended
            pieces.append(stripped.lstrip('#').strip())
    return ' '.join(pieces).strip()
|
#pragma once
#include <string>
// Returns true when `symbol` is an opening bracket character.
bool isOpeningBracket(const char &symbol);
// Returns true when `symbol` is a closing bracket character.
bool isClosingBracket(const char &symbol);
// Returns true when `symbol1` and `symbol2` form a matching open/close pair.
bool isFamilyBracket(const char &symbol1, const char &symbol2);
// Returns true when the bracket sequence in `str` is balanced.
bool balanceCheck(const std::string &str);
// Runs the self-tests; returns true when all of them pass.
bool test();
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package test;
import java.net.URL;
import org.fhwa.c2cri.testmodel.NRTM;
import org.fhwa.c2cri.testmodel.Need;
import org.fhwa.c2cri.testmodel.OtherRequirement;
import org.fhwa.c2cri.testmodel.Requirement;
/**
 * Manual smoke test for {@link NRTM}: loads two NRTM query CSV files from
 * fixed local paths and dumps every need with its requirements and related
 * "other" requirements to standard output.
 *
 * @author TransCore ITS, LLC
 * Last Updated: 1/8/2014
 */
public class TestNRTM {
    /**
     * Entry point; prints the loaded NRTM contents.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param args the arguments (unused)
     */
    public static void main(String[] args) {
        try {
            final URL primaryPath = new URL("file:/c:/temp/RI_NRTM_QUERY.csv");
            final URL secondaryPath = new URL("file:/c:/temp/2306_NRTM_QUERY.csv");
            final NRTM nrtm = new NRTM(primaryPath, secondaryPath);
            System.out.println("\n\n");
            int index = 0;
            // Walk needs -> requirements -> other requirements and print each leaf.
            for (Need need : nrtm.getUserNeeds().needs) {
                index++;
                System.out.println(index + " Need->" + need.getTitle() + " " + (need.isExtension() ? "Extension" : ""));
                for (Requirement requirement : need.getProjectRequirements().requirements) {
                    for (OtherRequirement other : requirement.getOtherRequirements().otherRequirements) {
                        System.out.println(need.getProjectRequirements().lh_requirementsMap.containsKey(requirement.getTitle()) + " " + requirement.getTitle() + " -- " + other.getOtherRequirement());
                    }
                }
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
|
#!/bin/bash
# Copyright 2015 and onwards Sanford Ryza, Juliet Hougland, Uri Laserson, Sean Owen and Joshua Wills
#
# See LICENSE file for further information.
# Usage: <script> SYMBOL DEST_DIR
#
# The URL must be quoted: unquoted '&' characters would make the shell
# background the curl command and drop the rest of the query string, and
# unquoted $1/$2 would be word-split on whitespace.
curl -o "$2/$1.csv" "https://ichart.yahoo.com/table.csv?s=$1&a=0&b=1&c=2000&d=0&e=31&f=2013&g=d&ignore=.csv"
|
def bubble_sort(lst):
    """Sort ``lst`` in place in ascending order using bubble sort.

    Repeatedly bubbles the largest remaining element to the end of the
    unsorted prefix. Exits early when a full pass performs no swap, which
    makes already-sorted input O(n) instead of O(n^2).

    :param lst: mutable sequence of mutually comparable items
    :return: the same list object, now sorted
    """
    n = len(lst)
    for i in range(n):
        swapped = False
        # After pass i, the last i elements are already in place.
        for j in range(n - i - 1):
            if lst[j] > lst[j + 1]:
                lst[j], lst[j + 1] = lst[j + 1], lst[j]
                swapped = True
        if not swapped:
            break  # no swaps in a full pass: the list is sorted
    return lst
# Quick demonstration run on a sample list.
sample = [12, 9, 4, 99, 120, 1]
print(bubble_sort(sample))
|
import { Vec2 } from "../vectors/Vec2";
import { addVectors } from "./addVectors";
describe("addVectors", () => {
  it("adds two vectors of the same type.", () => {
    const vector = new Vec2(1, 2);
    const result = addVectors(vector, vector);
    // @ts-ignore
    expect(result.x).toEqual(2);
    // @ts-ignore
    expect(result.y).toEqual(4);
  });
  it("adds multiple vectors of the same type.", () => {
    const vector = new Vec2(1, 2);
    const result = addVectors(vector, vector, vector);
    // @ts-ignore
    expect(result.x).toEqual(3);
    // @ts-ignore
    expect(result.y).toEqual(6);
  });
});
|
export * from "./Downloads";
|
<reponame>Morlack/unleash
'use strict';
const joi = require('joi');
const Controller = require('../controller');
const { clientMetricsSchema } = require('./metrics-schema');
/**
 * HTTP controller for the client metrics endpoint. Validates posted metrics
 * against `clientMetricsSchema` and persists both the metrics payload and
 * the reporting client instance.
 */
class ClientMetricsController extends Controller {
    constructor({ clientMetricsStore, clientInstanceStore }, getLogger) {
        super();
        this.logger = getLogger('/api/client/metrics');
        this.clientMetricsStore = clientMetricsStore;
        this.clientInstanceStore = clientInstanceStore;
        this.post('/', this.registerMetrics);
    }

    // POST /: accept a metrics payload from a client SDK.
    async registerMetrics(req, res) {
        const { error, value } = joi.validate(req.body, clientMetricsSchema);
        if (error) {
            this.logger.warn('Invalid metrics posted', error);
            return res.status(400).json(error);
        }
        try {
            await this.clientMetricsStore.insert(value);
            await this.clientInstanceStore.insert({
                appName: value.appName,
                instanceId: value.instanceId,
                clientIp: req.ip,
            });
            // 202: accepted for processing, nothing to return.
            res.status(202).end();
        } catch (e) {
            this.logger.error('failed to store metrics', e);
            res.status(500).end();
        }
    }
}
module.exports = ClientMetricsController;
|
#!/bin/bash
# Additional configuration and packages that our Vagrantbox requires.
# We will need php7.0, so register the PPA first, THEN refresh the package
# index, THEN install — the original ran `apt-get install php7.0` before the
# update that makes the new PPA's packages resolvable.
sudo add-apt-repository -y ppa:ondrej/php
sudo apt-get -y update
sudo apt-get -y install php7.0
# Base php7.0 packages plus extensions (mbstring, dom, ...) that some of the
# composer dependencies require. PDO support ships inside php7.0 / the
# php7.0-mysql package; bare "pdo", "pdo_mysql", "pdo_pgsql", "pgsql" and
# "soap" are not valid apt package names (they would abort the whole
# install) — the pgsql and soap extensions are php7.0-pgsql / php7.0-soap.
sudo apt-get -y install php7.0-mysql php7.0-pgsql php7.0-soap libapache2-mod-php7.0 php7.0-mbstring php7.0-dom php7.0-curl php7.0-zip
sudo a2dismod php5
sudo a2enmod php7.0
sudo apachectl restart
# Make sure composer has a recent version. This probably
# only suppresses the yellow banner.
sudo composer self-update
|
/*
* C compiler file mcdpriv.h, version 1
* (Private interfaces within machine-dependent back end).
* Copyright (C) Acorn Computers Ltd., 1988, Codemist Ltd 1994
* SPDX-Licence-Identifier: Apache-2.0
*/
/*
* RCS $Revision$
* Checkin $Date$
* Revising $Author$
*/
#ifndef __mcdpriv_h
#define __mcdpriv_h 1
#include "mcdep.h"
#ifndef JOPCODEDEF_ONLY
#include "codebuf.h"
#endif
/* Peephole/addressing flag bits (carried in PendingOp.peep). */
#define P_RET 1
#define P_CMPZ 2
#define P_PRE 4 /* Used for pre-auto-index */
#define P_POST 8 /* Used for post-auto-index */
/* $$$$$ things definitely inapplicable to thumb removed */
#define P_BASEALIGNED 0x800
/* Attribute bits stored per jopcode in a_joptable[].bits: which register
 * operands the opcode reads/sets, and whether it touches memory or calls. */
#define _a_read_r1 0x1
#define _a_read_r2 0x2
#define _a_read_r3 0x4
#define _a_read_r4 0x8
#define _a_set_r1 0x10
#define _a_set_r2 0x20
#define _a_modify_mem 0x40
#define _a_call 0x80
#define _a_regmask_r1 0x100
#define _a_regmask_r3 0x200
#define _a_gap_r1 0x400
#define _a_gap_r2 0x800
#define _a_read_mem 0x1000
extern struct JopTable a_joptable[];
/* X-macro style table: exactly one translation unit defines
 * DEFINE_A_JOPTABLE so the a_with_bits() entries below expand into the
 * actual a_joptable definition; everywhere else they expand to nothing. */
#ifdef DEFINE_A_JOPTABLE
#  if defined ENABLE_CG || defined ENABLE_REGS || defined ENABLE_CSE
#    define a_with_bits(n, b) {n, b},
#  else
#    define a_with_bits(n, b) { b },
#  endif
struct JopTable a_joptable[] = {
#else
#  define a_with_bits(n, b)
#endif
#define a_attributes(op) (a_joptable[(op-J_LAST_JOPCODE-1) & J_TABLE_BITS].bits)
#ifndef THUMB_INLINE_ASSEMBLER
#define J_BICR (J_LAST_JOPCODE+1L)
    a_with_bits("BICR", _a_read_r3+_a_read_r2+_a_set_r1)
#define J_LAST_A_JOPCODE J_BICR
#else
#define J_LAST_A_JOPCODE J_LAST_JOPCODE
#endif
#ifdef DEFINE_A_JOPTABLE
    0
};
#endif
#ifndef JOPCODEDEF_ONLY
typedef struct { /* like struct Icode but with RealReg's and a peep field */
    Icode ic;
    int32 peep; /* opcode extension */
    int32 dataflow;
    int32 cond;
} PendingOp;
extern bool gen_pass;
extern int32 current_procnum;
extern List3 *label_values, *label_references;
void a_pr_jopcode(PendingOp *p);
extern char frameregnames[(16-9)*2];
/* register number -> name for sb (or v6), ip, fp, sl, sp
 * (variable because dependent on calling standard
 * in force). Also does lr and pc for simplicity.
 */
extern void setcallingstandard(char);
/* One of 'A', 'R', 'U', 'M', default set by compiler
 * build, value alterable by configuration option.
 */
extern int32 pcs_flags;
/* Beware: these values are also written in the config tool */
#define PCS_CALLCHANGESPSR 1
#define PCS_FPE3 2
#define PCS_NOSTACKCHECK 4
#define PCS_REENTRANT 8
#define PCS_FPREGARGS 16 /* never in pcs_flags, only a pcs config value */
                         /* but mustn't overlap with pcs_flags bits */
#define PCS_NOFP 32
#define PCS_SOFTFP 64
#define PCS_INTERWORK 128
#define PCS_ACCESS_CONSTDATA_WITH_ADR 256
#define PCS_ZH_MASK 0xff /* options passed from driver via -zh argument */
                         /* excludes access_constdata_with_adr */
#ifndef PCS_DEFAULTS
#  define PCS_DEFAULTS 0
#endif
#define localcg_debug(n) (debugging(DEBUG_LOCALCG) && (localcg_debugcount & (n)))
#define NONLEAF (PROC_ARGPUSH | PROC_ARGADDR | PROC_BIGSTACK | BLKCALL)
/* STACKCHECK (NONLEAF subset) defines when stack check is REALLY needed */
#define STACKCHECK (PROC_BIGSTACK | BLKCALL)
#define MOVC_LOOP_THRESHOLD 24
extern int32 movc_workregs(const PendingOp *const p);
extern bool movc_preserving_r1r2(PendingOp *p, bool dead_r2);
/* Per-operand usage predicates for a PendingOp (driven by a_attributes). */
extern int32 a_loads_r1(const PendingOp * const p);
extern int32 a_uses_r1(const PendingOp * const p);
extern int32 a_reads_r1(const PendingOp * const p);
extern int32 a_loads_r2(const PendingOp * const p);
extern int32 a_uses_r2(const PendingOp * const p);
extern int32 a_reads_r2(const PendingOp * const p);
extern int32 a_uses_r3(const PendingOp * const p);
extern int32 a_uses_r4(const PendingOp * const p);
extern int32 a_modifies_mem(const PendingOp * const p);
extern int32 a_uses_mem(const PendingOp * const p);
extern bool a_corrupts_r1(PendingOp const* p);
extern bool a_corrupts_r2(PendingOp const* p);
extern bool setspsr(const Icode * const ic);
typedef struct
{
    uint32 use, def, corrupt, dead;
} RegisterUsage;
#define regs_in(u) ((u)->use)
#define regs_out(u) ((u)->def)
#define regs_corrupt(u) ((u)->corrupt)
#define regs_dead(u) ((u)->dead)
#define regs_read(u) ((u)->use)
#define regs_written(u) ((u)->def | (u)->corrupt)
#define regs_free(u) ((u)->corrupt | (u)->dead)
#define regs_used(u) ((u)->use | (u)->def | (u)->corrupt)
/* returns the complete register usage of c */
/* NOTE(review): this declaration has no explicit return type (implicit int,
 * invalid in C99 and later) — confirm the intended type at the definition. */
extern GetRegisterUsage(const PendingOp *c, RegisterUsage *u);
extern char *CheckJopcodeP(const PendingOp *p, CheckJopcode_flags flags);
extern int32 power_of_two(int32 n);
extern int32 regofregbit(int32 m);
extern void show_inst_direct(PendingOp *p);
extern void peephole_op(PendingOp *p, bool flush);
extern void peephole_reinit(void);
extern void peephole_init(void);
extern void peephole_tidy(void);
#ifdef TARGET_HAS_AOF
#define aof_fpreg xr_objflg1 /* fn passes FP args in FP registers */
#define aof_usessb xr_objflg2 /* defined fn 'uses' sb */
#define aof_leaf xr_objflg3 /* defined fn is a leaf */
extern Symstr *data_sym, *bss_sym, *adcon_sym;
typedef struct CommonDef {
    struct CommonDef *next;
    DataDesc data;
    Symstr *name;
    int index;
    int32 refsize;
    int32 stringpos;
} CommonDef;
extern CommonDef *commondefs;
#else
#define aof_fpreg 0 /* fn passes FP args in FP registers */
#define aof_usessb 0 /* defined fn 'uses' sb */
#define aof_leaf 0 /* defined fn is a leaf */
#endif
extern DataDesc adconpool;
extern Symstr *adconpool_lab;
extern int adconpool_find(int32 w, int32 flavour, Symstr *sym);
extern void adconpool_flush(void);
extern void adconpool_init(void);
void localcg_endcode(void);
extern int integer_load_max;
extern int ldm_regs_max;
void target_lib_variant(char *b);
#endif /* JOPCODEDEF_ONLY */
#endif
/* end of thumb/mcdpriv.h */
|
#!/bin/sh
## Simplistic ReaPack index.xml generator
## v0.1.1 (2018-08-07)
##
## Copyright (C) 2016-2018 Przemyslaw Pawelczyk <przemoc@gmail.com>
##
## This script is licensed under the terms of the MIT license.
## https://opensource.org/licenses/MIT
cd "${0%/*}"
# Read the repository name (-n/--name) and URL template (-U/--url-template)
# from a reapack-index style configuration file, if one is present.
if [ -r .reapack-index.conf ]; then
	while read -r LINE; do
		LINE=$(echo "$LINE" | sed 's,^[ \t]*,,;s,[ \t]*$,,')
		if [ "$LINE" != "${LINE#-n}" ] \
		|| [ "$LINE" != "${LINE#--name}" ]
		then
			name=$(echo "$LINE" | sed 's,^[^ \t]*[ \t]*,,')
		fi
		if [ "$LINE" != "${LINE#-U}" ] \
		|| [ "$LINE" != "${LINE#--url-template}" ]
		then
			url_template=$(echo "$LINE" | sed 's,^[^ \t]*[ \t]*,,')
		fi
	done <.reapack-index.conf
fi
# Optional local overrides (REPO_* variables used below).
if [ -r index.conf ]; then
	. ./index.conf
fi
# Warn (but continue) when the working tree is dirty, since file metadata
# is taken from git history.
STATUS=$(git status --porcelain | sed -r '/index\.(conf|sh|xml)$/d')
if [ -n "$STATUS" ]; then
	echo "$0: You have uncommited changes in working tree:" >&2
	echo "$STATUS" | sed 's,^,'"$0"': ,' >&2
fi
# Minimal XML emitter: beg/end adjust the indent level, iprint prints a
# printf-format line at the current indentation (2 spaces per level).
INDENT=0
beg() { INDENT=$((INDENT+1)); }
end() { INDENT=$((INDENT-1)); }
iprint() { FMT=$1; shift; printf "%*s$FMT\n" $((INDENT*2)) "" "$@"; }
catdirs(){ find . -mindepth 1 -type d \( -name '.*' -prune -o -print \) \
	| LC_ALL=C sort; }
files() { find -maxdepth 1 -type f | LC_ALL=C sort; }
# Emit the whole index.xml in a subshell redirected to the file.
(
echo '<?xml version="1.0" encoding="utf-8"?>'
COCKOSFORUM="http://forum.cockos.com/"
REAPACK_IVER=1
HEADCOMMIT=$(git rev-parse HEAD)
iprint '<index version="%s" name="%s" commit="%s">' \
	"$REAPACK_IVER" "$name" "$HEADCOMMIT"
beg
iprint '<metadata>'
beg
if [ -n "$REPO_COCKOSFORUM_TID" ]; then
	iprint '<link rel="%s" href="%s">%s</link>' \
		website \
		"${COCKOSFORUM}showthread.php?t=$REPO_COCKOSFORUM_TID" \
		"Cockos Forum thread"
fi
if [ -n "$REPO_AUTH_COCKOSFORUM_UID" ]; then
	iprint '<link rel="%s" href="%s">%s</link>' \
		website \
		"${COCKOSFORUM}member.php?u=$REPO_AUTH_COCKOSFORUM_UID" \
		"Cockos Forum user"
fi
if [ -n "$REPO_URL" ]; then
	iprint '<link rel="%s" href="%s">%s</link>' \
		website "$REPO_URL" Repository
fi
end
iprint '</metadata>'
# One <category> per top-level directory; one <reapack> per known file type.
catdirs | while read -r DIR; do
	DIR=${DIR#./}
	iprint '<category name="%s">' "$DIR"
	beg
	( cd "$DIR"; files | while read -r FILE; do
		# Description: prefer a 'descfmt = "..."' line, else a
		# 'ReaScript Name:'/'JSFX Name:' header line.
		FILEDESC=$(
			sed '/^[ \t]*descfmt = "/!d;s,,,;s,".*,,;s, ([^)]*),,g;q' "$FILE" \
		)
		if [ -z "$FILEDESC" ]; then
			FILEDESC=$(
				sed -r '/^.*(ReaScript|JSFX) Name: [ \t]*/!d;s,,,;q' "$FILE" \
			)
		fi
		FILEVERS=$(sed -r '/^.*[Vv]er(sion)?:?[ \t]*v?/!d;s,,,;s,[ \t].*,,;q' "$FILE")
		FILEVERS=${FILEVERS:-1.0}
		FILEAUTH=$(sed -r '/^.*[Aa]uthor:?[ \t]*/!d;s,,,;q' "$FILE")
		FILEAUTH=${FILEAUTH:-$REPO_AUTH}
		FILETIME=$(git log -1 --date=iso-strict --pretty=tformat:%ad "$FILE")
		FILECOMM=$(git log -1 --pretty=tformat:%H "$FILE")
		FILE=${FILE#./}
		FILETYPE=
		case "$FILE" in
		*.eel)		FILETYPE=script ;;
		*.jsfx)		FILETYPE=effect ;;
		*.lua)		FILETYPE=script ;;
		*.py)		FILETYPE=script ;;
		*.theme)	FILETYPE=theme ;;
		*)		continue ;;
		esac
		# ReaPack-index half-compatibility
		# (variables referenced by the user-supplied $url_template)
		commit=$FILECOMM
		path="$DIR/$FILE"
		version=$FILEVERS
		eval FILEURL="\"$url_template\""
		#
		FILEURL=$(echo "$FILEURL" | sed 's, ,%20,g')
		if [ -z "$FILEDESC" ]; then
			FILEDESC=$(
				echo "$FILE" | sed -r 's,^([^ ]* - |[^_]*_),,;s,\.[^.]*$,,' \
			)
		fi
		iprint '<reapack name="%s" desc="%s" type="%s">' \
			"$FILE" "$FILEDESC" "$FILETYPE"
		beg
		iprint '<version name="%s" author="%s" time="%s">' \
			"$FILEVERS" "$FILEAUTH" "$FILETIME"
		beg
		iprint '<source main="true">%s</source>' \
			"$FILEURL"
		end
		iprint '</version>'
		end
		iprint '</reapack>'
	done )
	end
	iprint '</category>'
done
end
iprint '</index>'
) >index.xml
|
# bounding box
bbox=[-18.0, 18.0, -1.0, 20.0, -18.0, 18.0]
# Obstacle definitions
obstacle={
size=1
geo = broken_t.obj
color = (0.9, 0.7, 0.3)
position = (4, 0)
angle = 0
}
# Flock definitions
shepherd={
type = simple
size = 1
geo = ../../shepherd/behaviors.py/env/robot2.g
color = (0.1,0.4,0.1)
mass = 0.2
view_radius = 500
view_angle = 360
dist_center = (-17,17)
dist_dev = 3
separation = 0
cohesion = 0
alignment = 0
obst_repulsion = 0
#steering destination
goal = (-15,-15)
# ODE stuff
damping= 1
max_force = 10.0
target_attract=15
# define roadmap parameters here
roadmap_n = 80
roadmap_k = 5
}
# regular flock
flock={
type=scared
size = 6
geo = ../../shepherd/behaviors.py/env/robot2.g
color = (0.1,0.1,.5)
mass = 5
view_radius= 5
view_angle= 360
dist_center = (-15,12)
dist_dev = 3
afraid = 15
separation = 1
cohesion = 5
alignment = 3
obst_repulsion = 10
damping = 1
max_force = 30.0
}
|
<filename>src/shared/scheduler/TaskSchedulerData.h
/* Copyright (c) 2019 Skyward Experimental Rocketry
* Authors: <NAME>, <NAME>, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <math/Stats.h>
#include <cstdint>
#include <ostream>
#ifndef BOARDCORE_SRC_SHARED_SCHEDULER_SCHEDULERDATA_H
#define BOARDCORE_SRC_SHARED_SCHEDULER_SCHEDULERDATA_H
/**
 * Statistics for a task
 *
 * Aggregates the activation, period and workload statistics collected for a
 * single scheduled task, and provides CSV-style serialization helpers.
 */
struct TaskStatResult
{
    uint8_t id;                  ///< Task id
    StatsResult activationStats; ///< Task activation stats
    StatsResult periodStats;     ///< Task period stats
    StatsResult workloadStats;   ///< Task workload stats
    // CSV header line; the column order must match print() below.
    static std::string header()
    {
        return "id,act_min,act_max,act_mean,act_stddev,act_nsamples,"
               "period_min,period_max,period_mean,period_stddev,period_"
               "nsamples,"
               "workload_min,workload_max,workload_mean,workload_stddev,"
               "workload_"
               "nsamples\n";
    }
    // Writes one CSV row (id, then activation/period/workload stats) to os.
    void print(std::ostream& os) const
    {
        os << (int)id << "," << activationStats.minValue << ","
           << activationStats.maxValue << "," << activationStats.mean << ","
           << activationStats.stdev << "," << activationStats.nSamples << ","
           << periodStats.minValue << "," << periodStats.maxValue << ","
           << periodStats.mean << "," << periodStats.stdev << ","
           << periodStats.nSamples << "," << workloadStats.minValue << ","
           << workloadStats.maxValue << "," << workloadStats.mean << ","
           << workloadStats.stdev << "," << workloadStats.nSamples << "\n";
    }
};
#endif
|
#!/bin/sh
# CocoaPods "embed frameworks" build phase: copies each linked framework into
# the app bundle, strips architectures invalid for the current build, and
# re-signs the result when code signing is enabled.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework at $1 into the app's Frameworks folder.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # NOTE(review): ${PIPESTATUS[0]} and [[ ]] are bashisms but the shebang is
    # /bin/sh — confirm Xcode always runs this phase under bash.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
  install_framework "$BUILT_PRODUCTS_DIR/EVReflection/EVReflection.framework"
  install_framework "$BUILT_PRODUCTS_DIR/EverliveSDK/EverliveSDK.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Kingfisher/Kingfisher.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SwiftyJSON/SwiftyJSON.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
  install_framework "$BUILT_PRODUCTS_DIR/EVReflection/EVReflection.framework"
  install_framework "$BUILT_PRODUCTS_DIR/EverliveSDK/EverliveSDK.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Kingfisher/Kingfisher.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SwiftyJSON/SwiftyJSON.framework"
fi
|
#pragma once
#include "ShaderEnum.h"
#include "ShaderClass.h"
#include "impl/SimpleShader.h"
#include "impl/LightShader.h"
/**
 * Owns the shader program instances and hands them out by type.
 */
class ShaderManager
{
private:
    DetailedArray<Enum::ShaderType> mShaderTypes;
    std::unique_ptr<SimpleShader> mSimpleShader;
    std::unique_ptr<LightShader> mLightShader;

public:
    ShaderManager() {}
    ~ShaderManager() {}

    // Instantiates and compiles every shader program managed by this class.
    void createShaders()
    {
        mSimpleShader = std::make_unique<SimpleShader>();
        mSimpleShader->compile();
        mLightShader = std::make_unique<LightShader>();
        mLightShader->compile();
    }

    // Looks up a shader by type; returns nullptr for unknown types.
    ShaderClass *getShader(Enum::ShaderType type)
    {
        switch (type)
        {
        case Enum::ShaderType::Simple:
            return (ShaderClass *)getSimpleShader();
        case Enum::ShaderType::Light:
            return (ShaderClass *)getLightShader();
        default:
            return nullptr;
        }
    }

    SimpleShader *getSimpleShader() { return mSimpleShader.get(); }
    LightShader *getLightShader() { return mLightShader.get(); }
};
|
package os.failsafe.executor.schedule;
import java.time.LocalDateTime;
import java.util.Optional;
/**
 * Strategy that decides when a task should (next) be executed.
 */
public interface Schedule {
    /**
     * With a {@link Schedule} you can either plan a one time execution in future or a recurring execution.
     *
     * <p>For a <b>one-time</b> execution just let this method return {@link Optional#empty()} after your planned execution time has passed.</p>
     *
     * <p>A <b>recurring execution</b> requires this method to always return the next planned time for execution. For example see {@link DailySchedule}.</p>
     *
     * @param currentTime the current time after the task finished its execution successfully
     * @return the next planned execution time, or {@link Optional#empty()} when no further execution is planned
     */
    Optional<LocalDateTime> nextExecutionTime(LocalDateTime currentTime);
}
|
<gh_stars>1-10
const BaseComponent = require('../BaseComponent');
module.exports = class ImageComponent extends BaseComponent {
build(parent) {
return {
/**
* Returns a random image URL
*
* @param {mixed} inOpts {width = 640, height = 480, category}
*/
image: (inOpts = {}) => {
const categories = [
'abstract',
'animals',
'business',
'cats',
'city',
'food',
'nightlife',
'fashion',
'people',
'nature',
'sports',
'technics',
'transport',
];
const cat =
inOpts.category && categories.includes(inOpts.category)
? inOpts.category
: parent.random.arrayElement(categories);
const url = `http://placeimg.com/${inOpts.width || 640}/${inOpts.height || 480}`;
return `${url}/${cat}?${parent.random.number()}`;
},
};
}
};
|
-- Basic person records keyed by a numeric id.
CREATE TABLE people (
    id INTEGER PRIMARY KEY, -- unique row identifier
    name VARCHAR(255),      -- person's name
    age INTEGER             -- person's age
);
|
function kotsadm() {
local src="$DIR/addons/kotsadm/1.37.0"
local dst="$DIR/kustomize/kotsadm"
try_1m_stderr object_store_create_bucket kotsadm
kotsadm_rename_postgres_pvc_1-12-2 "$src"
cp "$src/kustomization.yaml" "$dst/"
cp "$src/operator.yaml" "$dst/"
cp "$src/postgres.yaml" "$dst/"
cp "$src/kotsadm.yaml" "$dst/"
kotsadm_secret_cluster_token
kotsadm_secret_authstring
kotsadm_secret_password
kotsadm_secret_postgres
kotsadm_secret_dex_postgres
kotsadm_secret_s3
kotsadm_secret_session
kotsadm_api_encryption_key
if [ -n "$PROMETHEUS_VERSION" ]; then
kotsadm_api_patch_prometheus
fi
if [ -n "$PROXY_ADDRESS" ]; then
KUBERNETES_CLUSTER_IP=$(kubectl get services kubernetes --no-headers | awk '{ print $3 }')
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-kotsadm-proxy.yaml" > "$DIR/kustomize/kotsadm/kotsadm-proxy.yaml"
insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-proxy.yaml
fi
if [ "$AIRGAP" == "1" ]; then
cp "$DIR/addons/kotsadm/1.37.0/kotsadm-airgap.yaml" "$DIR/kustomize/kotsadm/kotsadm-airgap.yaml"
insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-airgap.yaml
fi
if [ -n "$INSTALLATION_ID" ]; then
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-kotsadm-installation-id.yaml" > "$DIR/kustomize/kotsadm/kotsadm-installation-id.yaml"
insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-installation-id.yaml
fi
kotsadm_cacerts_file
kotsadm_kubelet_client_secret
kotsadm_metadata_configmap $src $dst
if [ -z "$KOTSADM_HOSTNAME" ]; then
KOTSADM_HOSTNAME="$PUBLIC_ADDRESS"
fi
if [ -z "$KOTSADM_HOSTNAME" ]; then
KOTSADM_HOSTNAME="$PRIVATE_ADDRESS"
fi
cat "$src/tmpl-start-kotsadm-web.sh" | sed "s/###_HOSTNAME_###/$KOTSADM_HOSTNAME:8800/g" > "$dst/start-kotsadm-web.sh"
kubectl create configmap kotsadm-web-scripts --from-file="$dst/start-kotsadm-web.sh" --dry-run -oyaml > "$dst/kotsadm-web-scripts.yaml"
kubectl delete pod kotsadm-migrations &> /dev/null || true;
kubectl delete deployment kotsadm-web &> /dev/null || true; # replaced by 'kotsadm' deployment in 1.12.0
kubectl delete service kotsadm-api &> /dev/null || true; # replaced by 'kotsadm-api-node' service in 1.12.0
# removed in 1.19.0
kubectl delete deployment kotsadm-api &> /dev/null || true
kubectl delete service kotsadm-api-node &> /dev/null || true
kubectl delete serviceaccount kotsadm-api &> /dev/null || true
kubectl delete clusterrolebinding kotsadm-api-rolebinding &> /dev/null || true
kubectl delete clusterrole kotsadm-api-role &> /dev/null || true
kotsadm_namespaces "$src" "$dst"
kubectl apply -k "$dst/"
kotsadm_kurl_proxy "$src" "$dst"
kotsadm_ready_spinner
kubectl label pvc kotsadm-postgres-kotsadm-postgres-0 velero.io/exclude-from-backup- kots.io/backup=velero --overwrite
kotsadm_cli $src
}
function kotsadm_join() {
kotsadm_cli "$DIR/addons/kotsadm/1.37.0"
}
function kotsadm_outro() {
local mainPod=$(kubectl get pods --selector app=kotsadm --no-headers | grep -E '(ContainerCreating|Running)' | head -1 | awk '{ print $1 }')
if [ -z "$mainPod" ]; then
mainPod="<main-pod>"
fi
printf "\n"
printf "\n"
printf "Kotsadm: ${GREEN}http://$KOTSADM_HOSTNAME:${KOTSADM_UI_BIND_PORT:-8800}${NC}\n"
if [ -n "$KOTSADM_PASSWORD" ]; then
printf "Login with password (will not be shown again): ${GREEN}$KOTSADM_PASSWORD${NC}\n"
else
printf "You can log in with your existing password. If you need to reset it, run ${GREEN}kubectl kots reset-password default${NC}\n"
fi
printf "\n"
printf "\n"
}
function kotsadm_secret_cluster_token() {
local CLUSTER_TOKEN=$(kubernetes_secret_value default kotsadm-cluster-token kotsadm-cluster-token)
if [ -z "$CLUSTER_TOKEN" ]; then
# check under old name
CLUSTER_TOKEN=$(kubernetes_secret_value default kotsadm-auto-create-cluster-token token)
if [ -n "$CLUSTER_TOKEN" ]; then
kubectl delete secret kotsadm-auto-create-cluster-token
else
CLUSTER_TOKEN=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c16)
fi
fi
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-cluster-token.yaml" > "$DIR/kustomize/kotsadm/secret-cluster-token.yaml"
insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-cluster-token.yaml
# ensure all pods that consume the secret will be restarted
kubernetes_scale_down default deployment kotsadm
kubernetes_scale_down default deployment kotsadm-operator
}
function kotsadm_secret_authstring() {
local AUTHSTRING=$(kubernetes_secret_value default kotsadm-authstring kotsadm-authstring)
if [ -z "$AUTHSTRING" ]; then
AUTHSTRING="Kots $(< /dev/urandom tr -dc A-Za-z0-9 | head -c32)"
fi
if [[ ! "$AUTHSTRING" =~ ^'Kots ' && ! "$AUTHSTRING" =~ ^'Bearer ' ]]; then
AUTHSTRING="Kots $(< /dev/urandom tr -dc A-Za-z0-9 | head -c32)"
fi
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-authstring.yaml" > "$DIR/kustomize/kotsadm/secret-authstring.yaml"
insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-authstring.yaml
}
function kotsadm_secret_password() {
local BCRYPT_PASSWORD=$(kubernetes_secret_value default kotsadm-password passwordBcrypt)
if [ -z "$BCRYPT_PASSWORD" ]; then
# global, used in outro
KOTSADM_PASSWORD=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c9)
BCRYPT_PASSWORD=$(echo "$KOTSADM_PASSWORD" | $DIR/bin/bcrypt --cost=14)
fi
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-password.yaml" > "$DIR/kustomize/kotsadm/secret-password.yaml"
insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-password.yaml
kubernetes_scale_down default deployment kotsadm
}
function kotsadm_secret_postgres() {
local POSTGRES_PASSWORD=$(kubernetes_secret_value default kotsadm-postgres password)
if [ -z "$POSTGRES_PASSWORD" ]; then
POSTGRES_PASSWORD=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c16)
fi
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-postgres.yaml" > "$DIR/kustomize/kotsadm/secret-postgres.yaml"
insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-postgres.yaml
kubernetes_scale_down default deployment kotsadm
kubernetes_scale_down default deployment kotsadm-postgres
kubernetes_scale_down default deployment kotsadm-migrations
}
function kotsadm_secret_dex_postgres() {
local DEX_PGPASSWORD=$(kubernetes_secret_value default kotsadm-dex-postgres PGPASSWORD)
if [ -z "$DEX_PGPASSWORD" ]; then
DEX_PGPASSWORD=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c32)
fi
render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-dex-postgres.yaml" > "$DIR/kustomize/kotsadm/secret-dex-postgres.yaml"
insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-dex-postgres.yaml
kubernetes_scale_down default deployment kotsadm
}
# Render the kotsadm S3 (snapshots) secret; defaults the Velero bucket
# name to "velero" when unset.
function kotsadm_secret_s3() {
    if [ -z "$VELERO_LOCAL_BUCKET" ]; then
        VELERO_LOCAL_BUCKET=velero
    fi
    render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-s3.yaml" > "$DIR/kustomize/kotsadm/secret-s3.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-s3.yaml
}
# Render the kotsadm session-signing key secret (used for JWTs), reusing
# any existing cluster value so active sessions stay valid.
function kotsadm_secret_session() {
    local JWT_SECRET=$(kubernetes_secret_value default kotsadm-session key)
    if [ -z "$JWT_SECRET" ]; then
        JWT_SECRET=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c16)
    fi
    render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-session.yaml" > "$DIR/kustomize/kotsadm/secret-session.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-session.yaml
    kubernetes_scale_down default deployment kotsadm
}
# Render the kotsadm API encryption-key secret, reusing any existing key
# so previously encrypted data remains readable.
function kotsadm_api_encryption_key() {
    local API_ENCRYPTION=$(kubernetes_secret_value default kotsadm-encryption encryptionKey)
    if [ -z "$API_ENCRYPTION" ]; then
        # 24 byte key + 12 byte nonce, base64 encoded. This is separate from the base64 encoding used
        # in secrets with kubectl. Kotsadm expects the value to be encoded when read as an env var.
        # (was: "< /dev/urandom cat | head -c36" - the cat was a no-op)
        API_ENCRYPTION=$(head -c36 /dev/urandom | base64)
    fi
    render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-secret-api-encryption.yaml" > "$DIR/kustomize/kotsadm/secret-api-encryption.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-api-encryption.yaml
    kubernetes_scale_down default deployment kotsadm
}
# Register the strategic-merge patch that points the kotsadm API at the
# in-cluster Prometheus, and copy the patch into the kustomize dir.
function kotsadm_api_patch_prometheus() {
    insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" api-prometheus.yaml
    cp "$DIR/addons/kotsadm/1.37.0/patches/api-prometheus.yaml" "$DIR/kustomize/kotsadm/api-prometheus.yaml"
}
# Create the kotsadm-application-metadata configmap from application.yaml.
# $1 = addon source dir, $2 = kustomize destination dir.
function kotsadm_metadata_configmap() {
    local src="$1"
    local dst="$2"
    # The application.yaml pre-exists from airgap bundle OR
    # gets created below if user specified the app-slug and metadata exists.
    if [ "$AIRGAP" != "1" ] && [ -n "$KOTSADM_APPLICATION_SLUG" ]; then
        # If slug exists, but there's no branding, then replicated.app will return nothing.
        # (application.yaml will remain empty)
        echo "Retrieving app metadata: url=$REPLICATED_APP_URL, slug=$KOTSADM_APPLICATION_SLUG"
        curl $REPLICATED_APP_URL/metadata/$KOTSADM_APPLICATION_SLUG > "$src/application.yaml"
    fi
    # only create the configmap when the file exists and is non-empty
    if test -s "$src/application.yaml"; then
        cp "$src/application.yaml" "$dst/"
        kubectl create configmap kotsadm-application-metadata --from-file="$dst/application.yaml" --dry-run -oyaml > "$dst/kotsadm-application-metadata.yaml"
        insert_resources $dst/kustomization.yaml kotsadm-application-metadata.yaml
    fi
}
# Render and apply kurl-proxy (the TLS-terminating front end for kotsadm).
# $1 = addon source dir, $2 = kustomize destination dir.
function kotsadm_kurl_proxy() {
    local src="$1/kurl-proxy"
    local dst="$2/kurl-proxy"
    mkdir -p "$dst"
    cp "$src/kustomization.yaml" "$dst/"
    cp "$src/rbac.yaml" "$dst/"
    render_yaml_file "$src/tmpl-service.yaml" > "$dst/service.yaml"
    render_yaml_file "$src/tmpl-deployment.yaml" > "$dst/deployment.yaml"
    # the proxy serves the kotsadm-tls cert; make sure it exists first
    kotsadm_tls_secret
    kubectl apply -k "$dst/"
}
# Generate a self-signed 1-year TLS certificate for the kotsadm service and
# store it as the kotsadm-tls secret. No-op if the secret already exists.
function kotsadm_tls_secret() {
    if kubernetes_resource_exists default secret kotsadm-tls; then
        return 0
    fi
    # openssl config: SANs cover every in-cluster service name plus the
    # node's private (and optionally public) address
    cat > kotsadm.cnf <<EOF
[ req ]
default_bits = 2048
prompt = no
default_md = sha256
req_extensions = req_ext
distinguished_name = dn
[ dn ]
CN = kotsadm.default.svc.cluster.local
[ req_ext ]
subjectAltName = @alt_names
[ v3_ext ]
authorityKeyIdentifier=keyid,issuer:always
basicConstraints=CA:TRUE,pathlen:0
keyUsage=nonRepudiation,digitalSignature,keyEncipherment,keyCertSign
extendedKeyUsage=serverAuth
subjectAltName=@alt_names
[ alt_names ]
DNS.1 = kotsadm
DNS.2 = kotsadm.default
DNS.3 = kotsadm.default.svc
DNS.4 = kotsadm.default.svc.cluster
DNS.5 = kotsadm.default.svc.cluster.local
IP.1 = $PRIVATE_ADDRESS
EOF
    if [ -n "$PUBLIC_ADDRESS" ]; then
        echo "IP.2 = $PUBLIC_ADDRESS" >> kotsadm.cnf
    fi
    # -extensions v3_ext selects the [ v3_ext ] section above for the self-signed cert
    openssl req -newkey rsa:2048 -nodes -keyout kotsadm.key -config kotsadm.cnf -x509 -days 365 -out kotsadm.crt -extensions v3_ext
    kubectl -n default create secret tls kotsadm-tls --key=kotsadm.key --cert=kotsadm.crt
    # NOTE(review): annotation presumably signals that the cert may be replaced
    # via anonymous upload (kurl-proxy behavior) - confirm consumer
    kubectl -n default annotate secret kotsadm-tls acceptAnonymousUploads=1
    # clean up key material from the working directory
    rm kotsadm.cnf kotsadm.key kotsadm.crt
}
# Create the kubelet-client-cert secret from the distro's apiserver client
# cert/key and server CA (distro-specific helpers are selected via
# $K8S_DISTRO prefix). No-op if the secret already exists.
function kotsadm_kubelet_client_secret() {
    if kubernetes_resource_exists default secret kubelet-client-cert; then
        return 0
    fi
    kubectl -n default create secret generic kubelet-client-cert \
        --from-file=client.crt="$(${K8S_DISTRO}_get_client_kube_apiserver_crt)" \
        --from-file=client.key="$(${K8S_DISTRO}_get_client_kube_apiserver_key)" \
        --from-file="$(${K8S_DISTRO}_get_server_ca)"
}
# Install the kots kubectl plugin on master nodes. Downloads the release
# tarball unless airgapped (in which case it must already exist in assets).
# $1 = addon source dir.
function kotsadm_cli() {
    local src="$1"
    # only masters need the CLI
    if ! kubernetes_is_master; then
        return 0
    fi
    if [ ! -f "$src/assets/kots.tar.gz" ] && [ "$AIRGAP" != "1" ]; then
        mkdir -p "$src/assets"
        curl -L "https://github.com/replicatedhq/kots/releases/download/v1.37.0/kots_linux_amd64.tar.gz" > "$src/assets/kots.tar.gz"
    fi
    pushd "$src/assets"
    tar xf "kots.tar.gz"
    mkdir -p "$KUBECTL_PLUGINS_PATH"
    mv kots "$KUBECTL_PLUGINS_PATH/kubectl-kots"
    popd
    # https://github.com/replicatedhq/kots/issues/149
    # kots links against libdevmapper.so.1.02.1; some distros only ship .1.02
    if [ ! -e /usr/lib64/libdevmapper.so.1.02.1 ] && [ -e /usr/lib64/libdevmapper.so.1.02 ]; then
        ln -s /usr/lib64/libdevmapper.so.1.02 /usr/lib64/libdevmapper.so.1.02.1
    fi
}
# copy pgdata from pvc named kotsadm-postgres to new pvc named kotsadm-postgres-kotsadm-postgres-0
# used by StatefulSet in 1.12.2+
# $1 = addon source dir containing kotsadm-postgres-rename-pvc.yaml.
function kotsadm_rename_postgres_pvc_1-12-2() {
    local src="$1"
    # the old Deployment must be gone before its PVC can be detached
    if kubernetes_resource_exists default deployment kotsadm-postgres; then
        kubectl delete deployment kotsadm-postgres
    fi
    # nothing to migrate if the old PVC does not exist
    if ! kubernetes_resource_exists default pvc kotsadm-postgres; then
        return 0
    fi
    printf "${YELLOW}Renaming PVC kotsadm-postgres to kotsadm-postgres-kotsadm-postgres-0${NC}\n"
    # run the copy pod and block (no timeout) until it completes
    kubectl apply -f "$src/kotsadm-postgres-rename-pvc.yaml"
    spinner_until -1 kotsadm_postgres_pvc_renamed
    kubectl delete pod kotsadm-postgres-rename-pvc
    kubectl delete pvc kotsadm-postgres
}
# Success probe for the PVC rename: true once the copy pod's first
# container has terminated with reason "Completed".
function kotsadm_postgres_pvc_renamed {
    local status=$(kubectl get pod kotsadm-postgres-rename-pvc -ojsonpath='{ .status.containerStatuses[0].state.terminated.reason }')
    [ "$status" = "Completed" ]
}
# Pre-create the comma-separated namespaces listed in
# KOTSADM_APPLICATION_NAMESPACES; already-existing namespaces are ignored.
# NOTE(review): the src/dst parameters are unused in this function.
function kotsadm_namespaces() {
    local src="$1"
    local dst="$2"
    IFS=',' read -ra KOTSADM_APPLICATION_NAMESPACES_ARRAY <<< "$KOTSADM_APPLICATION_NAMESPACES"
    for NAMESPACE in "${KOTSADM_APPLICATION_NAMESPACES_ARRAY[@]}"; do
        kubectl create ns "$NAMESPACE" 2>/dev/null || true
    done
}
# Health probe for the kotsadm deployment: succeeds only when exactly one
# pod exists, none of its conditions is Ready=False, and it shows
# '1/1 Running'.
function kotsadm_health_check() {
    # Get pods below will initially return only 0 lines
    # Then it will return 1 line: "PodScheduled=True"
    # Finally, it will return 4 lines. And this is when we want to grep until "Ready=False" is not shown, and '1/1 Running' is
    if [ "$(kubectl get pods -l app=kotsadm -o jsonpath="{range .items[*]}{range .status.conditions[*]}{ .type }={ .status }{'\n'}{end}{end}" 2>/dev/null | wc -l)" -ne 4 ]; then
        # if this returns more than 4 lines, there are multiple copies of the pod running, which is a failure
        return 1
    fi
    # BUG FIX: the original wrapped 'grep -q' in $(...) and tested with -n;
    # grep -q never prints, so the check always passed. Use the exit code.
    if kubectl get pods -l app=kotsadm --field-selector=status.phase=Running -o jsonpath="{range .items[*]}{range .status.conditions[*]}{ .type }={ .status }{'\n'}{end}{end}" 2>/dev/null | grep -q Ready=False; then
        # if there is a pod with Ready=False, then kotsadm is not ready
        return 1
    fi
    if ! kubectl get pods -l app=kotsadm --field-selector=status.phase=Running 2>/dev/null | grep '1/1' | grep -q 'Running'; then
        # when kotsadm is ready, it will be '1/1 Running'
        return 1
    fi
    return 0
}
# Wait up to 120s for kotsadm to pass kotsadm_health_check; on timeout,
# dump recent pod logs and abort the install.
function kotsadm_ready_spinner() {
    sleep 1 # ensure that kubeadm has had time to begin applying and scheduling the kotsadm pods
    if ! spinner_until 120 kotsadm_health_check; then
        kubectl logs -l app=kotsadm --all-containers --tail 10
        bail "The kotsadm deployment in the kotsadm addon failed to deploy successfully."
    fi
}
# Find the host's CA certificates bundle; if one exists, register a patch
# that mounts it into the kotsadm pod.
# See https://github.com/golang/go/blob/ec4051763d439e7108bc673dd0b1bf1cbbc5dfc5/src/crypto/x509/root_linux.go
# BUG FIX: the original placed '# comments' after line-continuation
# backslashes ("... \ # Debian"), which bash parses as extra (space) array
# elements; comments on their own element lines are safe.
function kotsadm_cacerts_file() {
    local sslDirectories
    sslDirectories=(
        "/etc/ssl/certs/ca-certificates.crt"                  # Debian/Ubuntu/Gentoo etc.
        "/etc/pki/tls/certs/ca-bundle.crt"                    # Fedora/RHEL 6
        "/etc/ssl/ca-bundle.pem"                              # OpenSUSE
        "/etc/pki/tls/cacert.pem"                             # OpenELEC
        "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem"   # CentOS/RHEL 7
        "/etc/ssl/cert.pem"                                   # Alpine Linux
    )
    local cert_file
    for cert_file in "${sslDirectories[@]}"; do
        if [ -f "$cert_file" ]; then
            KOTSADM_TRUSTED_CERT_MOUNT="${cert_file}"
            break
        fi
    done
    if [ -n "$KOTSADM_TRUSTED_CERT_MOUNT" ]; then
        render_yaml_file "$DIR/addons/kotsadm/1.37.0/tmpl-kotsadm-cacerts.yaml" > "$DIR/kustomize/kotsadm/kotsadm-cacerts.yaml"
        insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-cacerts.yaml
    fi
}
|
<filename>src/main/scala/net/koseburak/api/AppointmentChecker.scala
package net.koseburak.api
import cats.effect.Sync
import cats.implicits._
import io.chrisdavenport.log4cats.Logger
import net.koseburak.model.AppointmentHttpResponse
import net.koseburak.model.AppointmentHttpResponse.ErrorResponse
import org.http4s.Uri
import org.http4s.circe._
import org.http4s.client.Client
/** Abstraction over a service that looks up appointment availability. */
sealed trait AppointmentChecker[F[_]] {
  /**
   * Check available appointments
   */
  def check: F[AppointmentHttpResponse]
}
/**
 * Checks the Irish GNIB (Burgh Quay registration office) endpoint for
 * available appointments matching the given category/sub-category/type.
 */
class GnibAppointmentChecker[F[_]: Sync](
  category: String,
  subCategory: String,
  typ: String,
  client: Client[F],
  logger: Logger[F]
) extends AppointmentChecker[F] {
  // Endpoint that serves appointment availability as JSON.
  private val base = Uri.uri("https://burghquayregistrationoffice.inis.gov.ie/Website/AMSREG/AMSRegWeb.nsf/(getAppsNear)")
  // Query expected by the endpoint; "readform" is sent present-but-empty.
  // NOTE(review): parameter semantics inferred from names - confirm against the endpoint.
  private val query = base.setQueryParams(
    Map(
      "readform" -> Seq(""),
      "cat" -> Seq(category),
      "sbcat" -> Seq(subCategory),
      "typ" -> Seq(typ)
    )
  )
  /**
   * Check available appointments: GET the endpoint, decode the JSON body,
   * log the decoded response, and convert any failure into an
   * ErrorResponse instead of a failed effect.
   */
  def check: F[AppointmentHttpResponse] =
    client
      .expect(query)(jsonOf[F, AppointmentHttpResponse])
      .flatMap { response =>
        logger.info(response.toString).map(_ => response)
      }
      .recoverWith {
        case ex =>
          logger.error(ex)("Request failed.").map(_ => ErrorResponse(ex.getMessage))
      }
}
|
#!/usr/bin/env bash
# Container entrypoint: install the crontab, start crond in the background,
# then replace this shell with nginx in the foreground.
echo "Starting run.sh"

# Start from the default crontab; optionally rewrite the schedule so the
# job runs every CRONJOB_ITERATION minutes.
cat /var/www/html/config/crontab.default > /var/www/html/config/crontab
if [[ -n "${CRONJOB_ITERATION}" ]]; then
    # NOTE(review): this replaces every "0" in the file, not only the minute
    # field - fragile if crontab.default ever contains other zeros.
    sed -i -e "s/0/1-59\/${CRONJOB_ITERATION}/g" /var/www/html/config/crontab
fi
crontab /var/www/html/config/crontab

echo "Starting Cronjob"
crond -l 2 -f &

echo "Starting nginx"
# exec replaces the shell; nothing after this line runs unless exec itself
# fails. The original's trailing "exit 0" would have masked that failure.
exec nginx -g "daemon off;"
|
<reponame>nrc34/angular2<gh_stars>0
import {Injectable} from 'angular2/core';
import {IAnimal} from './IAnimal';
@Injectable()
export class FirebaseService {
    // Firebase REST endpoint for the animals collection.
    fbUrl: string = 'https://crackling-heat-1694.firebaseio.com/animals/';
    // Firebase reference handle (legacy Firebase SDK, loaded globally).
    fbRef: any;
    // In-memory cache of animals, kept in sync via Firebase events.
    animals:IAnimal[];
    // Last raw snapshot received from the 'value' event.
    snapshot:any;
    public isFirstTimeLoad:boolean;
    constructor(){
        this.animals = [];
        this.isFirstTimeLoad = true;
        this.fbRef = new Firebase(this.fbUrl);
        // NOTE(review): both callbacks below fire asynchronously, after the
        // constructor has already set isFirstTimeLoad = false at the bottom,
        // so the 'value' branch guarded by isFirstTimeLoad likely never runs
        // and initial population happens via 'child_added' - confirm intent.
        this.fbRef.on('value', (snapshot)=>{
            //console.log(snapshot.val());
            this.snapshot = snapshot.val();
            if(this.isFirstTimeLoad)
                for(var item in this.snapshot){
                    //console.log(this.snapshot[item]);
                    this.animals.push(JSON.parse(this.snapshot[item]));
                    //console.log(JSON.parse(this.snapshot[item]));
                }
        });
        this.fbRef.on('child_added', (snapshot)=>{
            console.log(JSON.parse(snapshot.val()));
            if(!this.isFirstTimeLoad)
                this.animals.push(JSON.parse(snapshot.val()));
        });
        this.isFirstTimeLoad = false;
    }
    // Persist a new animal as a JSON string under a generated push ref,
    // storing the ref's full URL as `key` (used later by removeAnimal).
    setAnimal(animal:IAnimal){
        //const body:IAnimal = {type:animal.type, name:animal.name, age:animal.age};
        var newRef = this.fbRef.push();
        const body = JSON.stringify({key: newRef.toString(),
            type:animal.type,
            name:animal.name,
            age:animal.age});
        newRef.set(body);
    }
    // Return the live in-memory list (shared reference, not a copy).
    getAnimals():IAnimal[]{
        return this.animals;
    }
    // Delete an animal using its stored ref URL as the Firebase path.
    removeAnimal(animal:IAnimal):void{
        var path2remove = animal.key;
        var ref2remove = new Firebase(path2remove);
        ref2remove.remove();
    }
}
|
<gh_stars>1-10
package zsync
import (
"fmt"
"io"
"testing"
"time"
)
var _ io.ReadWriter = &Buffer{}
// TestBuffer exercises Buffer with concurrent writers and readers.
// Skipped by default: the goroutine interleaving is nondeterministic so
// there is nothing stable to assert.
// NOTE(review): intent inferred from t.Skip() and the fan-out - presumably
// meant to be run manually (e.g. under -race); confirm.
func TestBuffer(t *testing.T) {
	t.Skip()
	buf := NewBuffer(nil)
	go buf.Write([]byte("one "))
	go fmt.Println(buf.String())
	go buf.Write([]byte("two "))
	go fmt.Println(buf.String())
	go buf.Write([]byte("three "))
	go fmt.Println(buf.String())
	go buf.Write([]byte("four "))
	go fmt.Println(buf.String())
	go buf.Write([]byte("five "))
	go fmt.Println(buf.String())
	go buf.Write([]byte("six "))
	go fmt.Println(buf.String())
	time.Sleep(50 * time.Millisecond)
	fmt.Println(buf.String())
}
|
// Log every own (non-inherited) enumerable property of `object` to the
// console as "key value" pairs, in enumeration order.
function printProperties(object) {
  for (const key of Object.keys(object)) {
    console.log(key, object[key]);
  }
}
|
export * from './Status.js'
export * from './TestFile.js'
export * from './TestSuite.js'
export * from './Test.js'
export * from './Run.js'
export * from './RunTestFile.js'
export * from './Settings.js'
|
import java.util.ArrayList;
public class JobInterviewSimulator {
// list of questions
private ArrayList<String> questions;
// constructor
public JobInterviewSimulator(ArrayList<String> questions) {
this.questions = questions;
}
// method for submitting questions
public void submitQuestions() {
// code for presenting the questions to the user
// and receiving answers
}
// method for evaluating the answers
public void evaluateAnswers() {
// code for evaluating the answers
}
// method for displaying the results
public void displayResults() {
// code for displaying the results to the user
}
}
|
import os
from dataclasses import asdict, dataclass

from elasticsearch import Elasticsearch
@dataclass
class Author:
    """Author record indexed into Elasticsearch.

    BUG FIX: AuthorManager.add_author calls dataclasses.asdict(author),
    which raises TypeError on a plain class; @dataclass (with the same
    positional field order as the old __init__) fixes that while keeping
    the constructor signature Author(id, first_name, last_name).
    """
    id: str
    first_name: str
    last_name: str
class AuthorManager:
    """Indexes Author records into Elasticsearch."""

    def add_author(self, author: Author) -> None:
        """Index `author` into the index named by the ES_AUTHORS_INDEX env var.

        Stores the author's fields plus a derived ``full_name``.
        NOTE(review): asdict() requires Author to be a dataclass - as written
        above, Author is a plain class, so this raises TypeError; confirm.
        """
        full_name = f"{author.first_name} {author.last_name}"
        body = {**asdict(author), "full_name": full_name}
        es = self._get_elastic()
        es.index(index=os.environ['ES_AUTHORS_INDEX'], id=author.id, body=body)

    def _get_elastic(self) -> Elasticsearch:
        # New client per call, pointed at the ES_URL env var.
        return Elasticsearch(os.environ['ES_URL'])
|
# Train a Multinomial Naive Bayes classifier that routes customer emails
# into categories using bag-of-words counts, then report test accuracy.
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.model_selection import train_test_split
# Create a list of labels
# NOTE(review): `labels` is never used below; the actual classes come from
# the 'label' column of emails.csv - confirm they match.
labels = ["Billing", "Feedback", "Shipping"]
# Read in the data from the customer emails
# (expects columns 'text' and 'label')
data = pd.read_csv("emails.csv")
# Get the text from the data
text = data['text']
# Create a CountVectorizer object for the texts
vectorizer = CountVectorizer()
X = vectorizer.fit_transform(text)
# Create a Multinomial Naive Bayes model
model = MultinomialNB()
# Split the data into training and test sets (80/20)
X_train, X_test, y_train, y_test = train_test_split(X, data['label'], test_size=0.2)
# Fit the model to the training data
model.fit(X_train, y_train)
# Get the accuracy of the model on the test data
score = model.score(X_test, y_test)
print("Model accuracy: ", score)
|
let computerGuess;
let userGuesses = [];
let attempts = 0;
let maxGuesses;
let low = 1;
let high = 100;
// Update the visual range indicator to reflect the current [low, high]
// window: text readout plus three flex bars (below-range, in-range,
// above-range).
// BUG FIX: this function was defined twice, byte-for-byte identical; the
// second definition silently shadowed the first. Deduplicated to one copy.
function updateRange() {
  const rangeOutput = document.getElementById("rangeOutput");
  rangeOutput.innerText = `${low} - ${high}`;
  rangeOutput.style.marginLeft = low + "%";
  rangeOutput.style.marginRight = 100 - high + "%";
  rangeOutput.classList.add("flash");

  const lowValue = document.getElementById("low");
  lowValue.style.flex = low + "%";
  lowValue.style.background = "#ef7b54";

  const space = document.getElementById("space");
  space.style.flex = high - low + "%";
  space.style.background = "#83E1D0";

  const highValue = document.getElementById("high");
  // when high is 100 the assignment below is "0%" anyway; the explicit zero
  // is kept from the original for clarity of intent
  if (high == 100) highValue.style.flex = 0;
  highValue.style.flex = 100 - high + "%";
  highValue.style.background = "#ef7b54";
}
// Reveal the new-game button and lock the input once a round ends.
function gameEnded() {
  document.getElementById("newGameButton").style.display = "inline";
  document.getElementById("inputBox").setAttribute("readonly", "readonly"); // (attr name, attr value)
}

// Restart by reloading the page (all game state lives in globals).
function newGame() {
  window.location.reload();
}

// Pick the secret number (1-100) and hide game UI until a mode is chosen.
function init() {
  computerGuess = Math.floor(Math.random() * 100 + 1);
  document.getElementById("newGameButton").style.display = "none";
  document.getElementById("gameArea").style.display = "none";
}

// Swap the welcome screen for the game board.
function startGameView() {
  document.getElementById("welcomeScreen").style.display = "none";
  document.getElementById("gameArea").style.display = "block";
}

// Difficulty presets: number of allowed guesses.
function easyMode() {
  maxGuesses = 10;
  startGameView();
}

function hardMode() {
  maxGuesses = 5;
  startGameView();
}
// Compare the user's guess against the secret number, record it, narrow
// the displayed range, and end the round on a win or when the guess limit
// is reached.
// FIX: the original duplicated the win/lose logic across the in-limit and
// out-of-limit branches and styled them inconsistently (green only on a
// late win, red only on a too-high loss). Outcomes are now uniform.
function compareGuess() {
  const userGuess = Number(document.getElementById("inputBox").value);
  userGuesses.push(" " + userGuess);
  document.getElementById("guesses").innerHTML = userGuesses;
  attempts++;
  document.getElementById("attempts").innerHTML = attempts;

  const textOutput = document.getElementById("textOutput");
  if (userGuess === computerGuess) {
    // win - same styling regardless of which attempt it was
    textOutput.style.color = "#1eff00";
    textOutput.innerHTML = "Correct! You got it in " + attempts + " attempts";
    gameEnded();
  } else if (attempts < maxGuesses) {
    // still in the game: narrow the range and hint the direction
    if (userGuess > computerGuess) {
      if (userGuess < high) high = userGuess;
      textOutput.innerHTML = "Your guess is too high";
    } else {
      if (userGuess > low) low = userGuess;
      textOutput.innerHTML = "Your guess is too low";
    }
    document.getElementById("inputBox").value = "";
  } else {
    // out of attempts - same styling for both directions
    textOutput.style.color = "red";
    textOutput.innerHTML = "YOU LOSE!, <br> The number was " + computerGuess;
    gameEnded();
  }
  updateRange();
}
// NOTE(review): `keys`, `scroller`, `concat`, `calculate`, `reset`, `del`
// and `move` are not defined anywhere in this file - this wiring appears
// to belong to a different (calculator-style) script; confirm it was meant
// to be included here.
for (let i = 0; i < keys.length-3; i++) {
  keys[i].addEventListener("click", concat);
}
keys[15].addEventListener('click',calculate);
keys[16].addEventListener('click',reset);
keys[17].addEventListener('click',del);
scroller.addEventListener("click",move);
|
#!/bin/bash
# Build actors and their retargeted animations with the builder tool.
# The commented-out armadillo/sword section is retained for reference.
build='builder/builder'
src_dir='data/animations/'
dst_dir='data/animations/'
# NOTE(review): actor_dir is declared but unused below.
actor_dir='data/built/'
ybot_dir='data/animations/ybot_retargeted/fbx/'
sampling_frequency='--sampling_frequency 15'
#$build 'data/models_actors/armadillo.fbx' 'data/built/armadillo' '--actor' '--root_bone' 'mixamorig:Hips' #'--scale' '0.01'
#for i in $src_dir*.fbx; do
#if [[ $i == *"sword"* ]]; then
#echo "building " $i
#$build $i $dst_dir`basename $i .fbx` '--animation' '--target_actor' 'data/built/armadillo.actor' $sampling_frequency
#fi
#done
# xbot: build the actor, then every xbot animation against it
$build 'data/models_actors/xbot.fbx' 'data/built/xbot' '--actor' '--root_bone' 'mixamorig:Hips' '--scale' '0.01'
for i in $src_dir*.fbx; do
    if [[ $i == *"xbot"* ]]; then
        echo "building " $i
        $build $i $dst_dir`basename $i .fbx` '--animation' '--target_actor' 'data/built/xbot.actor' $sampling_frequency
    fi
done
printf "\n"
# ybot: same flow as xbot
$build 'data/models_actors/ybot.fbx' 'data/built/ybot' '--actor' '--root_bone' 'mixamorig:Hips' '--scale' '0.01'
for i in $src_dir*.fbx; do
    if [[ $i == *"ybot"* ]]; then
        echo "building " $i
        $build $i $dst_dir`basename $i .fbx` '--animation' '--target_actor' 'data/built/ybot.actor' $sampling_frequency
    fi
done
printf "\n"
# retargeted ybot animations: build everything in the directory
for i in $ybot_dir*.fbx; do
    echo "building " $i
    $build $i $dst_dir`basename $i .fbx` '--animation' '--target_actor' 'data/built/ybot.actor' $sampling_frequency
done
|
<gh_stars>1-10
import { Component, OnInit } from '@angular/core';
import {Router} from "@angular/router";
import { AuthService } from 'src/app/services/solid.auth.service';
import { RdfService } from 'src/app/services/rdf.service';
@Component({
  selector: 'app-navbar',
  templateUrl: './navbar.component.html',
  // BUG FIX: a stylesheet *path* belongs in styleUrls; the 'styles'
  // property expects inline CSS strings, so './navbar.component.css' was
  // being treated as literal CSS and the stylesheet never applied.
  styleUrls: ['./navbar.component.css']
})
export class NavbarComponent implements OnInit {
  constructor(private router: Router, private auth: AuthService, private rdf: RdfService) { }

  ngOnInit() {
  }

  /**
   * Derive a short username from the Solid session webId, e.g.
   * "https://user.solid.community/profile" -> "user".
   * Returns undefined (after logging) when the session/webId is unavailable.
   */
  getUsername(): string {
    try {
      let id = this.rdf.session.webId;
      let username = id.replace('https://', '');
      let user = username.split('.')[0];
      return user;
    }
    catch (error) {
      console.log(`Error webId: ${error}`);
    }
  }

  /** Navigate to the chat view. */
  goToChat() {
    this.router.navigateByUrl('/chat');
  }

  /** Sign out of the Solid session. */
  logout() {
    this.auth.solidSignOut();
  }
}
|
def generate_report(data):
    """Build a name -> salary report from employee records.

    Args:
        data: iterable of dicts, each with 'name' and 'salary' keys.

    Returns:
        dict mapping each employee's name to their salary (later duplicates
        overwrite earlier ones, matching plain dict assignment).
    """
    return {employee['name']: employee['salary'] for employee in data}
|
def smallestNumber(nums):
    """Return the smallest value in nums.

    Assumes nums is non-empty (raises IndexError otherwise, like the
    original indexing-based version).
    """
    result = nums[0]
    for value in nums[1:]:
        if value < result:
            result = value
    return result


nums = [4, 5, 6, 7, 8]
smallestNum = smallestNumber(nums)
print('The smallest number is', smallestNum)
|
#include <opencv2/opencv.hpp>
using namespace cv;
// Convert five bits (b1 = MSB ... b5 = LSB) into the integer 0..31 they
// encode.
int FromBinary(bool b1, bool b2, bool b3, bool b4, bool b5) {
    const bool bits[5] = {b1, b2, b3, b4, b5};
    int value = 0;
    for (bool bit : bits) {
        value = (value << 1) | (bit ? 1 : 0);
    }
    return value;
}
// Stub: read/validate a box region of `size` spanning p1..p2 in `image`
// and extract its contents; returns true on success.
// NOTE(review): unimplemented placeholder - always returns true.
bool ReadBox(Mat image, int size, Point p1, Point p2) {
    // Implementation to read a box from the input image
    // Use the provided size, p1, and p2 to define the box region and extract relevant information
    // Return true if successful, false otherwise
    return true; // Placeholder return value
}
// Stub: locate `templ` in a source image via cv::matchTemplate-style
// matching with the given method, returning the match location.
// NOTE(review): unimplemented placeholder - returns a default Point.
Point MatchingMethod(int method, Mat templ, void* result_data) {
    Mat result;
    // Perform template matching using the specified method and store the result in the 'result' Mat
    // Use the templ and result_data parameters as needed for template matching
    Point matchLoc;
    // Find the location of the matched template in the source image and store it in matchLoc
    return matchLoc; // Placeholder return value
}
|
// Return the value of query-string parameter `field` from `url` (falls
// back to window.location.href when url is not given); null when absent.
// Matching is case-insensitive on the field name.
export function getQueryString(field, url) {
  const target = url ? url : window.location.href;
  const pattern = new RegExp('[?&]' + field + '=([^&#]*)', 'i');
  const match = pattern.exec(target);
  return match ? match[1] : null;
}
// POST `data` to `url` via XMLHttpRequest, parsing the response as a
// Document; when the request loads, invoke `callback` with `self` bound
// as `this` and the XHR object as the argument.
export function postData(url, data, callback, self) {
  const request = new XMLHttpRequest();
  request.responseType = 'document';
  request.open('POST', url, true);
  request.onload = function() {
    callback.call(self, this);
  };
  request.send(data);
}
|
#!/bin/bash
# Install and configure WordPress under /var/www/html.
#Get Wordpress installer
# setenforce 0 makes SELinux permissive for this boot; the sed rewrites the
# config so SELinux is fully disabled after the next reboot.
sudo setenforce 0
sudo sed -i 's/permissive/disabled/' /etc/sysconfig/selinux
wget http://wordpress.org/latest.tar.gz
tar -xzf latest.tar.gz
# NOTE(review): assumes the script runs from $HOME so the tarball extracted
# to ~/wordpress - confirm.
sudo rsync -avP ~/wordpress/ /var/www/html/
sudo mkdir -p /var/www/html/wp-content/uploads
sudo chown -R apache:apache /var/www/html/*
#Configure WP
# @@{...}@@ placeholders are substituted by an external deployment tool
# NOTE(review): templating engine inferred from placeholder syntax - confirm.
cd /var/www/html
cp wp-config-sample.php wp-config.php
sed -i '/DB_NAME/s/database_name_here/wordpress/g' wp-config.php
sed -i '/DB_USER/s/username_here/@@{WP_DB_USER}@@/g' wp-config.php
sed -i '/DB_PASSWORD/s/password_here/@@{WP_DB_PASSWORD}@@/g' wp-config.php
sed -i '/DB_HOST/s/localhost/@@{MYSQL.address}@@/g' wp-config.php
sudo systemctl restart httpd
|
# The Book of Ruby - http://www.sapphiresteel.com
# The Book of Ruby - http://www.sapphiresteel.com
# Demonstrates nested method definitions: X#y and X#z do not exist until
# X#x has been called once (the inner defs execute when x runs), so calling
# ob.y before ob.x would raise NoMethodError.
class X
  def x
    print( "x:" )
    # defines instance method y the first time x runs
    def y
      print("y:")
    end
    # defines instance method z, which itself calls y
    def z
      print( "z:" )
      y
    end
  end
end

ob = X.new
ob.x   # prints "x:" and defines y and z
puts
ob.y   # prints "y:"
puts
ob.z   # prints "z:y:"
|
# Generate API documentation for SERemoteWebDriver.h with appledoc,
# writing HTML output to ~/Desktop/help.
appledoc SERemoteWebDriver.h --project-name selenium --project-company "Appium" --company-id com.appium --output ~/Desktop/help .
|
import {IFFFieldModel, IFFFieldModelProps } from ".";

// Props for a textarea form field; currently identical to the base props.
export interface IFFTextAreaFieldModelProps extends IFFFieldModelProps {}

// Model for a textarea form field, narrowing `props` to the textarea type.
export interface IFFTextAreaFieldModel extends IFFFieldModel {
  props: IFFTextAreaFieldModelProps;
}
|
<reponame>cotarr/collab-backend-api<gh_stars>0
// -----------------------------------------------------------------------------
//
// ExpressJs Web Server
//
// Public Routes:
// /status
// /.well-known/security.txt (only if configured)
//
// Secure Routes:
// /secure
// /v1/* (Mock REST API)
// -----------------------------------------------------------------------------
'use strict';
// native node packages
const http = require('http');
// express packages
const express = require('express');
const logger = require('morgan');
const compression = require('compression');
const app = express();
const { authInit, requireAccessToken } = require('@cotarr/collab-backend-token-auth');
// Routes
const routes = require('./routes/index');
// Custom Modules
const checkVhost = require('./middlewares/check-vhost');
const securityContact = require('./utils/security-contact');
// Configuration
const config = require('./config');
const logConfig = require('./utils/log-config');
const nodeEnv = process.env.NODE_ENV || 'development';
// refuse to start in production with the placeholder client secret
if (nodeEnv === 'production') {
  if (config.oauth2.clientSecret === 'ssh-secret') {
    console.error('Error, oauth2 client secret must be changed for production');
    process.exit(1);
  }
}
// body parser for accepting JSON
app.use(express.json());
// Submission data from <form> elements can be disabled by removing
// the urlencoded bodyparser (x-www-form-urlencoded not parsed).
app.use(express.urlencoded({ extended: false }));
// gzip responses in production only
if (nodeEnv === 'production') {
  app.use(compression());
}
// HTTP access log
app.use(logger(logConfig.format, logConfig.options));
//
// Initialize the authorization middleware
//
authInit({
  authURL: config.oauth2.authURL,
  clientId: config.oauth2.clientId,
  clientSecret: config.oauth2.clientSecret,
  tokenCacheSeconds: config.oauth2.tokenCacheSeconds,
  tokenCacheCleanSeconds: config.oauth2.tokenCacheCleanSeconds
});
//
// /status Is the server alive? (public, unauthenticated)
//
app.get('/status', (req, res) => res.json({ status: 'ok' }));
// Route for security.txt (public)
app.get('/.well-known/security.txt', securityContact);
// From this point, reject all requests not maching vhost domain name
app.use(checkVhost.rejectNotVhost);
//
// /secure Secure route for confirming credentials remotely
//
app.get('/secure',
  requireAccessToken(),
  (req, res) => res.json({ secure: 'ok' })
);
// ---------------------------
// Routes for mock REST API (token-protected)
// ---------------------------
app.use('/v1', requireAccessToken(), routes);
// ---------------------------------
// T E S T E R R O R
// ---------------------------------
// app.get('/error', (req, res, next) => { throw new Error('Test error'); });
// ---------------------------------
// E R R O R H A N D L E R S
// ---------------------------------
//
// catch 404 Not Found
//
// Fallthrough 404 handler: any request not matched above gets a
// plain-text "Not Found" response.
app.use(function (req, res, next) {
  const err = new Error(http.STATUS_CODES[404]);
  err.status = 404;
  return res.set('Content-Type', 'text/plain').status(err.status).send(err.message);
});
//
// Custom error handler
//
// Final error handler: plain-text "Status: <code>, <message>" response;
// the stack trace is appended only outside production.
app.use(function (err, req, res, next) {
  // per Node docs, if response in progress, must be returned to default error handler
  if (res.headersSent) return next(err);
  const status = err.status || 500;
  let message = http.STATUS_CODES[status] || 'Unknown Error Occurred';
  if ((err.message) && (message !== err.message)) message += ', ' + err.message;
  message = 'Status: ' + status.toString() + ', ' + message;
  if (nodeEnv === 'production') {
    console.log(message);
    return res.set('Content-Type', 'text/plain').status(status).send(message);
  } else {
    console.log(err);
    return res.set('Content-Type', 'text/plain').status(status).send(message + '\n' + err.stack);
  }
});
module.exports = app;
|
#!/bin/bash
REPOS_DIR=/etc/yum.repos.d
DISTRO_NAME=centos8
LSB_RELEASE=redhat-lsb-core
EXCLUDE_UPGRADE=fuse,mercury,daos,daos-\*
# Hook for distro-specific dnf bootstrap; intentionally a no-op on EL8.
bootstrap_dnf() {
    :
}
# Hook run after adding the group repo; intentionally a no-op on EL.
group_repo_post() {
    # Nothing to do for EL
    :
}
# EL8-specific setup: debuginfo repo, pinned avocado (<70), clustershell,
# and nfs-utils.
distro_custom() {
    # install the debuginfo repo in case we get segfaults
    cat <<"EOF" > $REPOS_DIR/CentOS-Debuginfo.repo
[core-0-debuginfo]
name=CentOS-8 - Debuginfo
baseurl=http://debuginfo.centos.org/8/$basearch/
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-Debug-8
enabled=0
EOF
    # force install of avocado 69.x
    # (remove distro packages first so pip versions take precedence)
    dnf -y erase avocado{,-common} \
        python2-avocado{,-plugins-{output-html,varianter-yaml-to-mux}} \
        python3-pyyaml
    pip3 install "avocado-framework<70.0"
    pip3 install "avocado-framework-plugin-result-html<70.0"
    pip3 install "avocado-framework-plugin-varianter-yaml-to-mux<70.0"
    pip3 install clustershell
    # ensure nfs-utils is present
    if ! rpm -q nfs-utils; then
        dnf -y install nfs-utils
    fi
}
# Provision a test node: reserve DAOS ports, optionally purge packages,
# register requested Jenkins artifact repos, install requested RPMs, and
# bring the system fully up to date. Exits the shell on completion/failure.
post_provision_config_nodes() {
    bootstrap_dnf
    # Reserve port ranges 31416-31516 for DAOS and CART servers
    echo 31416-31516 > /proc/sys/net/ipv4/ip_local_reserved_ports
    if $CONFIG_POWER_ONLY; then
        rm -f $REPOS_DIR/*.hpdd.intel.com_job_daos-stack_job_*_job_*.repo
        time dnf -y erase fio fuse ior-hpc mpich-autoload \
            ompi argobots cart daos daos-client dpdk \
            fuse-libs libisa-l libpmemobj mercury mpich \
            openpa pmix protobuf-c spdk libfabric libpmem \
            libpmemblk munge-libs munge slurm \
            slurm-example-configs slurmctld slurm-slurmmd
    fi
    time dnf repolist
    # the group repo is always on the test image
    #add_group_repo
    #add_local_repo
    time dnf repolist
    # INST_REPOS entries have the form "repo[@branch[:build_number]]"
    if [ -n "$INST_REPOS" ]; then
        local repo
        for repo in $INST_REPOS; do
            branch="master"
            build_number="lastSuccessfulBuild"
            if [[ $repo = *@* ]]; then
                branch="${repo#*@}"
                repo="${repo%@*}"
                if [[ $branch = *:* ]]; then
                    build_number="${branch#*:}"
                    branch="${branch%:*}"
                fi
            fi
            # slashes in branch names must be double-URL-encoded for Jenkins
            local repo_url="${JENKINS_URL}"job/daos-stack/job/"${repo}"/job/"${branch//\//%252F}"/"${build_number}"/artifact/artifacts/$DISTRO_NAME/
            dnf config-manager --add-repo="${repo_url}"
            disable_gpg_check "$repo_url"
        done
    fi
    # remove any existing copies before reinstalling the requested RPMs
    if [ -n "$INST_RPMS" ]; then
        # shellcheck disable=SC2086
        time dnf -y erase $INST_RPMS
    fi
    rm -f /etc/profile.d/openmpi.sh
    rm -f /tmp/daos_control.log
    time dnf -y install $LSB_RELEASE
    # shellcheck disable=SC2086
    if [ -n "$INST_RPMS" ] &&
       ! time dnf -y install $INST_RPMS; then
        rc=${PIPESTATUS[0]}
        dump_repos
        exit "$rc"
    fi
    distro_custom
    # now make sure everything is fully up-to-date
    if ! time dnf -y upgrade \
        --exclude "$EXCLUDE_UPGRADE"; then
        dump_repos
        exit 1
    fi
    if [ -f /etc/do-release ]; then
        cat /etc/do-release
    fi
    cat /etc/os-release
    exit 0
}
|
#!/bin/bash
# submit all sims
# NOTE(review): argument meanings inferred from names - confirm against
# submit_sim.sh (estimator, n-sims, concurrency?, job name).
./submit_sim.sh "aipw" 1000 5 "sim-aipw"
./submit_sim.sh "ipw" 1000 5 "sim-ipw"
|
<reponame>Nikscorp/datadog-mock
// Copyright (c) 2017-2018, <NAME> <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"context"
"log"
"net"
"os"
"strings"
"syscall"
)
// Sink forwards datagrams received over UDP to an output channel.
type Sink struct {
	out chan<- []byte
}
// NewSink creates new UDP sink with channel for complete events
func NewSink(out chan<- []byte) Sink {
	return Sink{
		out: out,
	}
}
// ReadFromUDP pumps datagrams from conn to r.out until the connection is
// closed, or a zero-length read arrives with no error.
func (r *Sink) ReadFromUDP(conn *net.UDPConn) {
	buf := make([]byte, 0xffff)
	for {
		n, _, err := conn.ReadFromUDP(buf)
		switch {
		case (n == 0 && err == nil):
			return
		case err != nil && strings.Contains(err.Error(), "use of closed network connection"):
			return
		// NOTE(review): net package errors are wrapped (*net.OpError), so
		// direct comparison with syscall.EAGAIN/EWOULDBLOCK is unlikely to
		// ever match; consider errors.Is. Left as-is.
		case err == syscall.EAGAIN || err == syscall.EWOULDBLOCK:
			continue
		case n == 0:
			continue
		case err != nil:
			log.Println(err)
			continue
		default:
		}
		// copy out of the shared read buffer before handing off
		var result = make([]byte, n)
		copy(result, buf[0:n])
		r.out <- result
	}
}
// Run starts UDP sink with UPD stream source.
// Listens on :8125 (the statsd default port) and forwards datagrams in a
// background goroutine until ctx is done; exits the process on bind errors.
func (r *Sink) Run(ctx context.Context) {
	addr, err := net.ResolveUDPAddr("udp", ":8125")
	if err != nil {
		log.Println(err)
		os.Exit(1)
	}
	conn, err := net.ListenUDP("udp", addr)
	if err != nil {
		log.Println(err)
		os.Exit(1)
	}
	defer conn.Close()
	go r.ReadFromUDP(conn)
	<-ctx.Done()
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Services;
import Controllers.ControllerAnimal;
import Objects.*;
import java.util.ArrayList;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.table.AbstractTableModel;
/**
*
* @author Gilberto
*/
/**
 * Swing table model backing the animal listing JTable.
 *
 * <p>Rows are {@link Animal} objects; columns are photo, name, sex, age and
 * species. Mutating operations (add/remove/update) are forwarded to a
 * {@link ControllerAnimal} before the in-memory list and the table view are
 * refreshed.
 */
public class MyTableModel extends AbstractTableModel {

    private static final String[] columnNames
            = {"Foto", "Nome", "Sexo", "Idade", "Espécie"};
    private ArrayList<Animal> animals;
    private ControllerAnimal controllerAnimal;

    /**
     * Loads every animal with the given adoption status.
     *
     * @param isAdotado whether to list adopted animals (true) or available ones
     * @throws Exception if the controller cannot load the list
     */
    public MyTableModel(boolean isAdotado) throws Exception {
        this.controllerAnimal = new ControllerAnimal();
        this.animals = this.controllerAnimal.getList(isAdotado);
        // Forces getValueAt() calls so every cell is rendered in the JTable.
        fireTableRowsInserted(0, this.animals.size() - 1);
    }

    /**
     * Loads the animals matching the given example object.
     *
     * @throws Exception if the controller query fails
     */
    public MyTableModel(Animal animal) throws Exception {
        this.controllerAnimal = new ControllerAnimal();
        this.animals = this.controllerAnimal.retrieve(animal);
        fireTableRowsInserted(0, this.animals.size() - 1);
    }

    /**
     * Wraps an already-loaded list. No controller is created, so the
     * persistence operations (addRow/removeRow/updateRow) must not be called
     * on instances built this way.
     */
    public MyTableModel(ArrayList<Animal> a) {
        this.animals = a;
        fireTableRowsInserted(0, a.size() - 1);
    }

    /** @return the backing list of animals (live reference, not a copy). */
    public ArrayList<Animal> getListAnimal() {
        return animals;
    }

    @Override
    public String getColumnName(int column) {
        return columnNames[column];
    }

    /**
     * Persists the animal and reloads the list for its adoption status.
     */
    public void addRow(Animal d) throws Exception {
        this.controllerAnimal.insert(d);
        this.animals = this.controllerAnimal.getList(d.isAdotado());
        fireTableRowsInserted(0, this.animals.size() - 1);
        // AbstractTableModel.setValueAt is a no-op by default; kept for parity
        // with the original call sequence in case a subclass overrides it.
        setValueAt(d, this.animals.size() - 1, 0);
    }

    /** Deletes the animal at {@code index} and removes its table row. */
    public void removeRow(int index) throws Exception {
        if (index != -1) {
            Animal a = this.animals.get(index);
            this.controllerAnimal.delete(a);
            this.animals.remove(index);
            fireTableRowsDeleted(index, index);
        }
    }

    /** Replaces the row at {@code index} with {@code d} and persists it. */
    public void updateRow(int index, Animal d) throws Exception {
        if (index != -1) {
            // The incoming object has no id filled in; reuse the current one.
            d.setId(this.animals.get(index).getId());
            this.controllerAnimal.update(d);
            this.animals.set(index, d);
            fireTableRowsUpdated(index, index);
        }
    }

    /** @return the animal at {@code index}, or null when nothing is selected. */
    public Animal getRowSelected(int index) {
        if (index != -1) {
            return this.animals.get(index);
        }
        return null;
    }

    @Override
    public int getRowCount() {
        return this.animals.size();
    }

    @Override
    public int getColumnCount() {
        return columnNames.length;
    }

    @Override
    public Object getValueAt(int rowIndex, int columnIndex) {
        Animal animal = this.animals.get(rowIndex);
        switch (columnIndex) {
            case 0:
                // Build the scaled photo only when the photo column is asked
                // for; the original built it for every column of every row.
                return Services.Controll_Images.montarImagem(
                        animal.getImagem(), 200, -1);
            case 1:
                return animal.getNome();
            case 2:
                return animal.getSexo();
            case 3:
                return animal.getIdade() + " ";
            case 4:
                return animal.getEspecie();
        }
        return null;
    }

    @Override
    public Class getColumnClass(int column) {
        if (column == 0) {
            return Icon.class;
        }
        // Guard: getValueAt(0, column) would throw on an empty model.
        if (this.animals.isEmpty()) {
            return Object.class;
        }
        return getValueAt(0, column).getClass();
    }
}
|
<gh_stars>0
// pool-model.js - A mongoose model
//
// See http://mongoosejs.com/docs/models.html
// for more of what you can do here.
import addressSchema from './schemas/addressSchema';
// Lifecycle states a pool may be in, persisted in the schema's `status`
// field below. The `pending_*` values appear to mark in-flight transitions
// (NOTE(review): inferred from the names — confirm against the state machine).
const PoolStatus = {
  PENDING_DEPLOYMENT: 'pending_deployment',
  ACTIVE: 'active',
  PENDING_CLOSE_POOL: 'pending_close_pool',
  CLOSED: 'closed',
  PENDING_TOKEN_BATCH: 'pending_token_batch',
  PAYOUT_ENABLED: 'payout_enabled',
  PENDING_ENABLE_REFUNDS: 'pending_enable_refunds',
  REFUNDS_ENABLED: 'refunds_enabled',
  PAUSED: 'paused',
};
/**
 * Builds the `pool` mongoose model on the app's mongoose client.
 *
 * @param {object} app - application object exposing `mongooseClient` via `app.get`.
 * @returns the mongoose Model for the `pool` collection.
 */
function createModel(app) {
  const mongooseClient = app.get('mongooseClient');
  const { Schema } = mongooseClient;
  const AddressSchema = addressSchema(Schema);
  const pool = new Schema(
    {
      status: {
        type: String,
        // BUG FIX: was `require: true`, which mongoose silently ignores;
        // the validator option is spelled `required`.
        required: true,
        enum: Object.values(PoolStatus),
        default: PoolStatus.PENDING_DEPLOYMENT,
      },
      owner: { type: String, required: true },
      ownerAddress: { type: String, required: true },
      contractAddress: { type: String },
      maxAllocation: { type: Number, required: true },
      fee: { type: Number, required: true },
      feePayoutCurrency: { type: String, required: true },
      payoutAddress: { type: String },
      payoutTxData: { type: String },
      adminPayoutAddress: { type: String, required: true },
      poolbaseFee: { type: Number, required: true },
      inputsHash: { type: String },
      name: { type: String, required: true },
      description: { type: String },
      disclaimer: { type: String },
      minContribution: { type: Number, required: true },
      maxContribution: { type: Number, required: true },
      admins: [AddressSchema],
      whitelist: [AddressSchema],
      contributionCount: { type: Number },
      tokenBatchCount: { type: Number },
      grossInvested: { type: Number },
      netInvested: { type: Number },
      transactions: [{ type: String }],
      pendingTx: { type: Schema.Types.Mixed },
    },
    {
      // Adds createdAt/updatedAt fields automatically.
      timestamps: true,
    },
  );
  return mongooseClient.model('pool', pool);
}
// Export the status enum alongside the model factory so callers can both
// create the model and reference valid status values.
module.exports = {
  PoolStatus,
  createModel,
};
|
export PYTHONPATH="$(pwd)"
export CUDA_VISIBLE_DEVICES="1"
OUTDIR=checkpoints/slkces/multi_m2m_dds/

# decode SRC TGT: translate the test split for one language pair with the
# shared multilingual checkpoint and write hypotheses to
# "$OUTDIR"/test_<src><tgt>.log. Factored out because the original repeated
# this 12-line invocation verbatim for all four directions.
decode() {
    local src=$1
    local tgt=$2
    python generate.py data-bin/ted_slkces/ \
        --task multilingual_translation \
        --gen-subset test \
        --path "$OUTDIR"/checkpoint_best.pt \
        --batch-size 16 \
        --lenpen 1.5 \
        --remove-bpe sentencepiece \
        --lang-pairs "slk-eng,ces-eng,eng-slk,eng-ces" \
        --encoder-langtok 'tgt' \
        --source-lang "$src" --target-lang "$tgt" \
        --skip-invalid-size-inputs-valid-test \
        --beam 5 > "$OUTDIR"/test_"$src""$tgt".log
}

# All four translation directions, same checkpoint and options.
decode slk eng
decode ces eng
decode eng slk
decode eng ces
#grep ^H checkpoints/tag_fw_slk-eng/fwtrans_test.log | cut -f3 > checkpoints/tag_fw_slk-eng/fwtrans_test.decode
|
def smallest_divisible(n):
    """Return the smallest positive integer evenly divisible by all of 1..n.

    This is lcm(1, 2, ..., n). The original implementation multiplied in the
    whole of each number it was not yet divisible by, overshooting the LCM
    (e.g. n=4 produced 24, but 12 is divisible by 1..4). Folding with
    gcd gives the exact LCM: lcm(a, b) = a * b // gcd(a, b).

    Args:
        n: upper bound of the divisor range (n < 2 yields 1).

    Returns:
        The least common multiple of 1..n as an int.
    """
    from math import gcd

    result = 1
    for i in range(2, n + 1):
        result = result * i // gcd(result, i)
    return result
|
echo "This file will reorganize your bed file, such that lines correspond to a chromosome are grouped together. And the starting coordinate in lines are ordered ascendingly."
echo "reorganize_bed_file.sh <input bed file> <output bed file>"
echo "input and output files should be in gzip format. You should always do that!"
echo "this file assume that there is a header line in the input file, which should NOT be copied to the output file"
echo "this code also assumes that the chrom_symbol in the input bed file is chr<chrom_index>. Please go into this file to fix the code if you want to adjust it. "
input_file=$(readlink -f "$1")
output_fn=$(readlink -f "$2")
# Remove any stale outputs so the appends below start from a clean file.
rm -f "$output_fn"
rm -f "${output_fn}.gz"
declare -a chrom_list=(1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 X Y M)
# For each chromosome in turn: keep only that chromosome's rows and sort them
# numerically by start coordinate (column 2), appending to the output. This
# yields rows grouped by chromosome and ascending within each group. The
# header line is dropped implicitly because its first column is not chr*.
for chrom_index in "${chrom_list[@]}"
do
    if [[ $input_file =~ \.t?gz$ ]]; # check if the input file has extension .gz or .tgz
    then
        zcat "$input_file" | awk -F'\t' -v chrom_symbol="chr${chrom_index}" 'BEGIN {OFS="\t"} { if ($1 == chrom_symbol) print $0; }' | sort -k2n >> "$output_fn" # fix the chrom_symbol format if needed. It's mentioned in the instruction of this code
    else
        cat "$input_file" | awk -F'\t' -v chrom_symbol="chr${chrom_index}" 'BEGIN {OFS="\t"}{ if ($1 == chrom_symbol) print $0; }' | sort -k2n >> "$output_fn" # fix the chrom_symbol format if needed. It's mentioned in the instruction of this code
    fi
    echo "Done processing for chromosome $chrom_index"
done
# BUG FIX: the final message always claimed a gzipped result, but the script
# never actually compressed the output. Create ${output_fn}.gz for real.
gzip -f "$output_fn"
echo "We zipped your output bed file. Therefore, the bed file name is now: ${output_fn}.gz"
|
# Select and initialize the requested distributed-training wrapper.
wrapper = args.distributed_wrapper
if wrapper == "DataParallel":
    initialize_data_parallel_training(
        world_size=args.distributed_world_size,
        buffer_size=2**28,
        process_group=process_group,
    )
elif wrapper == "SlowMo":
    # SlowMo relies on the gossip package; refuse to proceed without it.
    if _GOSSIP_DISABLED:
        raise ImportError("Gossip-based communication is disabled. SlowMo cannot be used.")
    initialize_slowmo_training(process_group)
else:
    raise ValueError("Unsupported distributed wrapper: {}".format(args.distributed_wrapper))
|
#!/bin/bash
# https://gist.github.com/verdimrc/a10dd3ea00a34b0ffb3e8ee8d5cde8b5#file-bash-sh-L20-L34
#
# Utility function to get script's directory (deal with Mac OSX quirkiness).
# This function is ambidextrous as it works on both Linux and OSX.
# Resolve the directory containing this script. Works on both Linux and
# macOS; macOS needs GNU readlink (greadlink, from coreutils).
get_bin_dir() {
    local READLINK=readlink
    if [[ $(uname) == 'Darwin' ]]; then
        READLINK=greadlink
        # BUG FIX: the original `[ $(which greadlink) == '' ]` expands to
        # `[ == '' ]` when greadlink is missing — a syntax error instead of
        # the intended error branch. Quote the substitution and use the
        # builtin `command -v` instead of `which`.
        if [[ "$(command -v greadlink)" == '' ]]; then
            echo '[ERROR] Mac OSX requires greadlink. Install with "brew install greadlink"' >&2
            exit 1
        fi
    fi
    local BIN_DIR=$(dirname "$($READLINK -f ${BASH_SOURCE[0]})")
    echo -n ${BIN_DIR}
}
BIN_DIR=$(get_bin_dir)
# Install some useful CLI toolkits
sudo yum install -y htop tree dstat dos2unix tig
sudo /usr/bin/pip3 install --no-cache-dir nbdime ranger-fm
# Configure the ranger file manager with relative line numbers.
mkdir -p /home/ec2-user/.config/ranger/
echo set line_numbers relative >> /home/ec2-user/.config/ranger/rc.conf
# Per-user setup helpers that live next to this script.
# NOTE(review): 'Firstname Lastname' / first.last@email.abc look like
# placeholders — fill in real values before running.
${BIN_DIR}/adjust-sm-git.sh 'Firstname Lastname' first.last@email.abc
${BIN_DIR}/change-fontsize.sh
${BIN_DIR}/fix-osx-keymap.sh
${BIN_DIR}/patch-bash-config.sh
${BIN_DIR}/fix-ipython.sh
${BIN_DIR}/init-vim.sh
# NOTE(review): fsid,fsapid,mountpoint appear to be placeholder arguments —
# confirm real values before running.
${BIN_DIR}/mount-efs-accesspoint.sh fsid,fsapid,mountpoint
# These require jupyter lab restarted and browser reloaded, to see the changes.
${BIN_DIR}/patch-jupyter-config.sh
package cronUC

// New returns the provided use-case interactor unchanged.
// NOTE(review): this identity pass-through presumably exists as a uniform
// constructor hook for dependency wiring — confirm no initialization is
// intended here.
func New(uc *UCInteractor) *UCInteractor {
	return uc
}
|
#!/usr/bin/env bash
#
# Mirror the vendored forecaster package's trees into the project root:
# config (DI configuration and api-key), src (main sources), test (unit tests).
for dir in config src test; do
    rsync -av vendor/Lundmark/forecaster/${dir} ./
done
|
import React, { Component } from 'react';
import { Circle } from 'react-google-maps';
class MapViewCircle extends Component {
render() {
return (
<Circle
draggable={this.props.draggable}
center={{ lat: this.props.center.latitude, lng: this.props.center.longitude }}
radius={this.props.radius}
options={{
strokeColor : this.props.strokeColor,
fillColor : this.props.fillColor
}}
/>
);
}
}
export default MapViewCircle;
|
//
// AppDelegate.h
// CoreDataSample
//
// Created by king on 2021/4/21.
//
#import <UIKit/UIKit.h>
// Application delegate entry point for the CoreDataSample app.
@interface AppDelegate : UIResponder <UIApplicationDelegate>
// Root window of the application.
// NOTE(review): a window property here suggests the pre-SceneDelegate
// lifecycle — confirm against the app's Info.plist.
@property (nonatomic, strong) UIWindow *window;
@end
|
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.record = void 0;
// Descriptor for an 8x8 "record" glyph (a filled circle path, offset by 1px).
// NOTE(review): the {viewBox, children[{name, attribs}]} shape looks like the
// format consumed by an icon-base renderer — confirm against the consumer.
var record = {
  "viewBox": "0 0 8 8",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M3 0c-1.66 0-3 1.34-3 3s1.34 3 3 3 3-1.34 3-3-1.34-3-3-3z",
      "transform": "translate(1 1)"
    }
  }]
};
exports.record = record;
|
/********GET QUESTIONS WITH ANSWERS FOR A PARTICULAR PRODUCT ID********/
//http://192.168.3.11/qa/questions/?product_id=1&count=5
{
"product_id": "1",
"results": [
{
"question_id": 3,
"question_body": "Does this product run big or small?",
"question_date": "2019-01-17T00:00:00.000Z",
"asker_name": "jbilas",
"question_helpfulness": 8,
"reported": 0,
"answers": {}
},
{
"question_id": 5,
"question_body": "Can I wash it?",
"question_date": "2018-02-08T00:00:00.000Z",
"asker_name": "cleopatra",
"question_helpfulness": 7,
"reported": 0,
"answers": {
"46": {
"id": 46,
"body": "I've thrown it in the wash and it seems fine",
"date": "2018-02-08T00:00:00.000Z",
"answerer_name": "marcanthony",
"helpfulness": 8,
"photos": []
},
"64": {
"id": 64,
"body": "It says not to",
"date": "2018-03-08T00:00:00.000Z",
"answerer_name": "ceasar",
"helpfulness": 0,
"photos": []
},
"96": {
"id": 96,
"body": "I wouldn't machine wash it",
"date": "2018-03-08T00:00:00.000Z",
"answerer_name": "ceasar",
"helpfulness": 0,
"photos": []
},
"101": {
"id": 101,
"body": "Only if you want to ruin it!",
"date": "2018-03-08T00:00:00.000Z",
"answerer_name": "ceasar",
"helpfulness": 5,
"photos": []
},
"107": {
"id": 107,
"body": "Yes",
"date": "2018-03-08T00:00:00.000Z",
"answerer_name": "Seller",
"helpfulness": 4,
"photos": []
}
}
},
{
"question_id": 4,
"question_body": "How long does it last?",
"question_date": "2019-07-06T00:00:00.000Z",
"asker_name": "funnygirl",
"question_helpfulness": 6,
"reported": 0,
"answers": {
"65": {
"id": 65,
"body": "It runs small",
"date": "2019-11-17T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 1,
"photos": [
"https://images.unsplash.com/photo-1470116892389-0de5d9770b2c?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1567&q=80",
"https://images.unsplash.com/photo-1536922645426-5d658ab49b81?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1650&q=80"
]
},
"89": {
"id": 89,
"body": "Showing no wear after a few months!",
"date": "2019-09-06T00:00:00.000Z",
"answerer_name": "sillyguy",
"helpfulness": 8,
"photos": []
}
}
},
{
"question_id": 1,
"question_body": "What fabric is the top made of?",
"question_date": "2018-01-04T00:00:00.000Z",
"asker_name": "yankeelover",
"question_helpfulness": 1,
"reported": 0,
"answers": {
"5": {
"id": 5,
"body": "Something pretty soft but I can't be sure",
"date": "2018-01-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 5,
"photos": [
"https://images.unsplash.com/photo-1530519729491-aea5b51d1ee1?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1651&q=80",
"https://images.unsplash.com/photo-1511127088257-53ccfcc769fa?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1650&q=80",
"https://images.unsplash.com/photo-1500603720222-eb7a1f997356?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1653&q=80"
]
},
"7": {
"id": 7,
"body": "Its the best! Seriously magic fabric",
"date": "2018-01-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 7,
"photos": []
},
"8": {
"id": 8,
"body": "DONT BUY IT! It's bad for the environment",
"date": "2018-01-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 8,
"photos": []
},
"57": {
"id": 57,
"body": "Suede",
"date": "2018-11-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 7,
"photos": []
},
"95": {
"id": 95,
"body": "Supposedly suede, but I think its synthetic",
"date": "2018-12-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 3,
"photos": []
}
}
}
]
}
//http://18.224.37.110/qa/questions/?product_id=2&count=5
{
"product_id": "2",
"results": [
{
"question_id": 11,
"question_body": "Where does this product ship from?",
"question_date": "2018-01-06T00:00:00.000Z",
"asker_name": "jbilas",
"question_helpfulness": 24,
"reported": 0,
"answers": {
"23": {
"id": 23,
"body": "It ships from the facility in Tulsa",
"date": "2018-01-06T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 25,
"photos": []
},
"31": {
"id": 31,
"body": "Mine was delivered from Oklahoma",
"date": "2018-01-06T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 23,
"photos": []
}
}
},
{
"question_id": 13,
"question_body": "What fabric is the bottom made of?",
"question_date": "2019-02-18T00:00:00.000Z",
"asker_name": "cleopatra",
"question_helpfulness": 7,
"reported": 0,
"answers": {
"2": {
"id": 2,
"body": "Some kind of recycled rubber, works great!",
"date": "2019-03-18T00:00:00.000Z",
"answerer_name": "marcanthony",
"helpfulness": 2,
"photos": []
},
"16": {
"id": 16,
"body": "Rubber",
"date": "2019-03-18T00:00:00.000Z",
"answerer_name": "Seller",
"helpfulness": 7,
"photos": []
},
"35": {
"id": 35,
"body": "Its a rubber sole",
"date": "2019-03-18T00:00:00.000Z",
"answerer_name": "marcanthony",
"helpfulness": 7,
"photos": []
},
"47": {
"id": 47,
"body": "The rubber on the bottom wears thin quickly",
"date": "2019-02-18T00:00:00.000Z",
"answerer_name": "marcanthony",
"helpfulness": 9,
"photos": []
}
}
},
{
"question_id": 9,
"question_body": "I'm allergic to dye #17, does this product contain any?",
"question_date": "2019-01-24T00:00:00.000Z",
"asker_name": "l33tgamer",
"question_helpfulness": 6,
"reported": 0,
"answers": {
"49": {
"id": 49,
"body": "Yes",
"date": "2019-11-24T00:00:00.000Z",
"answerer_name": "n00bgamer",
"helpfulness": 3,
"photos": []
}
}
},
{
"question_id": 10,
"question_body": "Why is this product cheaper here than other sites?",
"question_date": "2018-04-24T00:00:00.000Z",
"asker_name": "toofast",
"question_helpfulness": 5,
"reported": 0,
"answers": {}
},
{
"question_id": 14,
"question_body": "Where is this product made?",
"question_date": "2018-10-04T00:00:00.000Z",
"asker_name": "jbilas",
"question_helpfulness": 4,
"reported": 0,
"answers": {
"17": {
"id": 17,
"body": "China",
"date": "2018-08-04T00:00:00.000Z",
"answerer_name": "Seller",
"helpfulness": 8,
"photos": []
}
}
}
]
}
//http://192.168.3.11/qa/questions/?product_id=3&count=5
{
"product_id": "3",
"results": [
{
"question_id": 24,
"question_body": "Where does this product ship from?",
"question_date": "2017-11-04T00:00:00.000Z",
"asker_name": "toofast",
"question_helpfulness": 17,
"reported": 0,
"answers": {
"62": {
"id": 62,
"body": "Mine was delivered from Oklahoma",
"date": "2017-11-04T00:00:00.000Z",
"answerer_name": "toofast",
"helpfulness": 14,
"photos": []
},
"73": {
"id": 73,
"body": "It ships from the facility in Tulsa",
"date": "2017-11-04T00:00:00.000Z",
"answerer_name": "toofast",
"helpfulness": 19,
"photos": []
}
}
},
{
"question_id": 22,
"question_body": "Is this product sustainable?",
"question_date": "2018-09-04T00:00:00.000Z",
"asker_name": "cleopatra",
"question_helpfulness": 12,
"reported": 0,
"answers": {
"25": {
"id": 25,
"body": "Its made from sustainable parts and manufactured in a green facility",
"date": "2018-10-04T00:00:00.000Z",
"answerer_name": "marcanthony",
"helpfulness": 17,
"photos": []
}
}
},
{
"question_id": 25,
"question_body": "Where is this product made?",
"question_date": "2018-08-12T00:00:00.000Z",
"asker_name": "thegrimreaker",
"question_helpfulness": 9,
"reported": 0,
"answers": {
"74": {
"id": 74,
"body": "Taiwan",
"date": "2018-09-12T00:00:00.000Z",
"answerer_name": "thegrimreaker",
"helpfulness": 0,
"photos": []
}
}
},
{
"question_id": 17,
"question_body": "Does this product run big or small?",
"question_date": "2018-11-12T00:00:00.000Z",
"asker_name": "coolkid",
"question_helpfulness": 8,
"reported": 0,
"answers": {
"18": {
"id": 18,
"body": "Runs small, I'd say",
"date": "2018-01-12T00:00:00.000Z",
"answerer_name": "warmkid",
"helpfulness": 9,
"photos": []
}
}
},
{
"question_id": 19,
"question_body": "Why is this product cheaper here than other sites?",
"question_date": "2018-11-28T00:00:00.000Z",
"asker_name": "jbilas",
"question_helpfulness": 6,
"reported": 0,
"answers": {}
}
]
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
/********GET ANSWERS RELATED TO A PARTICULAR QUESTION ID********/
//http://192.168.3.11/qa/questions/1/answers?page=1
{
"question": "1",
"page": 0,
"count": 5,
"results": [
{
"answer_id": 8,
"body": "DONT BUY IT! It's bad for the environment",
"date": "2018-01-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 8,
"photos": []
},
{
"answer_id": 7,
"body": "Its the best! Seriously magic fabric",
"date": "2018-01-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 7,
"photos": []
},
{
"answer_id": 57,
"body": "Suede",
"date": "2018-11-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 7,
"photos": []
},
{
"answer_id": 5,
"body": "Something pretty soft but I can't be sure",
"date": "2018-01-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 5,
"photos": [
{
"id": 1,
"url": "https://images.unsplash.com/photo-1530519729491-aea5b51d1ee1?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1651&q=80"
},
{
"id": 2,
"url": "https://images.unsplash.com/photo-1511127088257-53ccfcc769fa?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1650&q=80"
},
{
"id": 3,
"url": "https://images.unsplash.com/photo-1500603720222-eb7a1f997356?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1653&q=80"
}
]
},
{
"answer_id": 95,
"body": "Supposedly suede, but I think its synthetic",
"date": "2018-12-04T00:00:00.000Z",
"answerer_name": "metslover",
"helpfulness": 3,
"photos": []
}
]
}
//http://172.16.58.3/qa/questions/2/answers?page=1
{
"question": "2",
"page": 0,
"count": 5,
"results": [
{
"answer_id": 102,
"body": "Some kind of recycled rubber, works great!",
"date": "2019-06-28T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 6,
"photos": []
},
{
"answer_id": 84,
"body": "Rubber",
"date": "2019-06-28T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 3,
"photos": []
},
{
"answer_id": 30,
"body": "Its a rubber sole",
"date": "2019-06-28T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 2,
"photos": [
{
"id": 13,
"url": "https://images.unsplash.com/photo-1528318269466-69d920af5dad?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=crop&w=1650&q=80"
}
]
},
{
"answer_id": 75,
"body": "The rubber on the bottom wears thin quickly",
"date": "2019-06-28T00:00:00.000Z",
"answerer_name": "dschulman",
"helpfulness": 2,
"photos": []
}
]
}
//http://172.16.58.3/qa/questions/3/answers?page=1
{
"question": "3",
"page": 0,
"count": 5,
"results": []
}
|
# Read an index from stdin and print the matching entry of a 1-based lookup
# table (index 1 -> "ABC", index 2 -> "chokudai"; slot 0 is a "," filler).
read a
b=(, ABC chokudai)
echo ${b[$a]}
|
<reponame>zcong1993/mongoose-cache
import { Redis } from 'ioredis'
import { Model, Document } from 'mongoose'
/** Options shared by the cache helpers. */
export interface Context {
  /** ioredis client instance used as the cache backend. */
  redis: Redis
  /** When false, caching is presumably skipped — NOTE(review): confirm default. */
  enable?: boolean
  /** Extra cache keys — NOTE(review): presumably invalidated together; confirm. */
  externalKeys?: string[]
  /** NOTE(review): untyped extra query payload — confirm intended shape. */
  extQuery?: any
}
/** {@link Context} plus the mongoose model whose queries are involved. */
export interface ContextWithModel<T extends Document, QueryHelpers = {}>
  extends Context {
  model: Model<T, QueryHelpers>
}
|
#!/usr/bin/env bash
# Strict mode: -e exits on any command error, -u errors on undefined
# variables, and -o pipefail makes a pipeline fail if any stage fails.
set -e
set -u
set -o pipefail
log() {
# Print "$*" prefixed with an ISO-like timestamp and the caller's
# file:line:function, taken from bash's call-stack arrays
# (BASH_SOURCE / BASH_LINENO / FUNCNAME index 1 = the caller).
local fname=${BASH_SOURCE[1]##*/}
echo -e "$(date '+%Y-%m-%dT%H:%M:%S') (${fname}:${BASH_LINENO[0]}:${FUNCNAME[1]}) $*"
}
min() {
    # Echo the smallest of the integer arguments.
    # Usage: min N1 [N2 ...]  (at least one argument is required)
    local smallest candidate
    smallest=$1
    # "$@" still contains $1, but comparing it against itself is harmless.
    for candidate in "$@"; do
        if [ "${candidate}" -le "${smallest}" ]; then
            smallest="${candidate}"
        fi
    done
    echo "${smallest}"
}
SECONDS=0 # Bash builtin: elapsed wall-clock seconds, used for stage timing logs.
# General configuration
stage=1 # Processes starts from the specified stage.
stop_stage=10000 # Processes is stopped at the specified stage.
skip_data_prep=false # Skip data preparation stages.
skip_train=false # Skip training stages.
skip_eval=false # Skip decoding and evaluation stages.
skip_upload=true # Skip packing and uploading stages.
skip_upload_hf=true # Skip uploading to hugging face stages.
ngpu=1 # The number of gpus ("0" uses cpu, otherwise use gpu).
num_nodes=1 # The number of nodes.
nj=32 # The number of parallel jobs.
inference_nj=32 # The number of parallel jobs in decoding.
gpu_inference=false # Whether to perform gpu decoding.
dumpdir=dump # Directory to dump features.
expdir=exp # Directory to save experiments.
python=python3 # Specify python to execute espnet commands.
# Data preparation related
local_data_opts= # The options given to local/data.sh.
# Speed perturbation related
speed_perturb_factors= # perturbation factors, e.g. "0.9 1.0 1.1" (separated by space).
# Feature extraction related
feats_type=raw # Feature type (raw or fbank_pitch).
audio_format=flac # Audio format: wav, flac, wav.ark, flac.ark (only in feats_type=raw).
fs=16k # Sampling rate.
min_wav_duration=0.1 # Minimum duration in second.
max_wav_duration=20 # Maximum duration in second.
# Tokenization related
oov="<unk>" # Out of vocabulary symbol.
blank="<blank>" # CTC blank symbol.
sos_eos="<sos/eos>" # sos and eos symbol.
token_joint=false # whether to use a single bpe system for both source and target languages
src_case=lc.rm # Source-side text case tag (lc.rm: lowercase, punctuation removed — TODO confirm).
src_token_type=bpe # Tokenization type (char or bpe) for source languages.
src_nbpe=30 # The number of BPE vocabulary for source language.
src_bpemode=unigram # Mode of BPE for source language (unigram or bpe).
src_bpe_input_sentence_size=100000000 # Size of input sentence for BPE for source language.
src_bpe_nlsyms= # non-linguistic symbols list, separated by a comma, for BPE of source language
src_bpe_char_cover=1.0 # character coverage when modeling BPE for source language
tgt_case=tc # Target-side text case tag (tc: truecase — TODO confirm).
tgt_token_type=bpe # Tokenization type (char or bpe) for target language.
tgt_nbpe=30 # The number of BPE vocabulary for target language.
tgt_bpemode=unigram # Mode of BPE (unigram or bpe) for target language.
tgt_bpe_input_sentence_size=100000000 # Size of input sentence for BPE for target language.
tgt_bpe_nlsyms= # non-linguistic symbols list, separated by a comma, for BPE for target language.
tgt_bpe_char_cover=1.0 # character coverage when modeling BPE for target language.
# Ngram model related
use_ngram=false # Whether to use an n-gram model during decoding.
ngram_exp= # Directory for the n-gram experiment (defaults to "${expdir}/ngram" below).
ngram_num=3 # Order of the n-gram model.
# Language model related
use_lm=true # Use language model for ST decoding.
lm_tag= # Suffix to the result dir for language model training.
lm_exp= # Specify the directory path for LM experiment.
# If this option is specified, lm_tag is ignored.
lm_stats_dir= # Specify the directory path for LM statistics.
lm_config= # Config for language model training.
lm_args= # Arguments for language model training, e.g., "--max_epoch 10".
# Note that it will overwrite args in lm config.
use_word_lm=false # Whether to use word language model.
num_splits_lm=1 # Number of splitting for lm corpus.
# shellcheck disable=SC2034
word_vocab_size=10000 # Size of word vocabulary.
# ST model related
st_tag= # Suffix to the result dir for st model training.
st_exp= # Specify the directory path for ST experiment.
# If this option is specified, st_tag is ignored.
st_stats_dir= # Specify the directory path for ST statistics.
st_config= # Config for st model training.
st_args= # Arguments for st model training, e.g., "--max_epoch 10".
# Note that it will overwrite args in st config.
pretrained_asr= # Pretrained model to load.
ignore_init_mismatch=false # Ignore parameter mismatches when initializing from a pretrained model.
feats_normalize=global_mvn # Normalization layer type.
num_splits_st=1 # Number of splitting for st corpus.
src_lang=es # source language abbrev. id (e.g., es)
tgt_lang=en # target language abbrev. id (e.g., en)
# Upload model related
hf_repo= # Hugging Face repository id used when uploading the trained model.
# Decoding related
use_k2=false # Whether to use k2 based decoder
use_streaming=false # Whether to use streaming decoding
batch_size=1 # Batch size used during decoding.
inference_tag= # Suffix to the result dir for decoding.
inference_config= # Config for decoding.
inference_args= # Arguments for decoding, e.g., "--lm_weight 0.1".
# Note that it will overwrite args in inference config.
inference_lm=valid.loss.ave.pth # Language model path for decoding.
inference_ngram=${ngram_num}gram.bin # N-gram model file name used for decoding.
inference_st_model=valid.acc.ave.pth # ST model path for decoding.
# e.g.
# inference_st_model=train.loss.best.pth
# inference_st_model=3epoch.pth
# inference_st_model=valid.acc.best.pth
# inference_st_model=valid.loss.ave.pth
download_model= # Download a model from Model Zoo and use it for decoding.
# [Task dependent] Set the datadir name created by local/data.sh
train_set= # Name of training set.
valid_set= # Name of validation set used for monitoring/tuning network training.
test_sets= # Names of test sets. Multiple items (e.g., both dev and eval sets) can be specified.
src_bpe_train_text= # Text file path of bpe training set for source language.
tgt_bpe_train_text= # Text file path of bpe training set for target language.
lm_train_text= # Text file path of language model training set.
lm_dev_text= # Text file path of language model development set.
lm_test_text= # Text file path of language model evaluation set.
nlsyms_txt=none # Non-linguistic symbol list if existing.
cleaner=none # Text cleaner.
g2p=none # g2p method (needed if token_type=phn).
lang=noinfo # The language type of corpus.
score_opts= # The options given to sclite scoring.
local_score_opts= # The options given to local/score.sh.
st_speech_fold_length=800 # fold_length for speech data during ST training.
st_text_fold_length=150 # fold_length for text data during ST training.
lm_fold_length=150 # fold_length for LM training.
# Usage text printed on argument errors. The heredoc is unquoted, so the
# "default=..." values are expanded once, at definition time.
# Fixes: --score_opts/--local_score_opts previously printed the literal
# text "{score_opts}" (missing "$"); --skip_upload_hf was undocumented.
help_message=$(cat << EOF
Usage: $0 --train-set "<train_set_name>" --valid-set "<valid_set_name>" --test_sets "<test_set_names>"
Options:
# General configuration
--stage # Processes starts from the specified stage (default="${stage}").
--stop_stage # Processes is stopped at the specified stage (default="${stop_stage}").
--skip_data_prep # Skip data preparation stages (default="${skip_data_prep}").
--skip_train # Skip training stages (default="${skip_train}").
--skip_eval # Skip decoding and evaluation stages (default="${skip_eval}").
--skip_upload # Skip packing and uploading stages (default="${skip_upload}").
--skip_upload_hf # Skip uploading to hugging face stages (default="${skip_upload_hf}").
--ngpu # The number of gpus ("0" uses cpu, otherwise use gpu, default="${ngpu}").
--num_nodes # The number of nodes (default="${num_nodes}").
--nj # The number of parallel jobs (default="${nj}").
--inference_nj # The number of parallel jobs in decoding (default="${inference_nj}").
--gpu_inference # Whether to perform gpu decoding (default="${gpu_inference}").
--dumpdir # Directory to dump features (default="${dumpdir}").
--expdir # Directory to save experiments (default="${expdir}").
--python # Specify python to execute espnet commands (default="${python}").
# Data preparation related
--local_data_opts # The options given to local/data.sh (default="${local_data_opts}").
# Speed perturbation related
--speed_perturb_factors # speed perturbation factors, e.g. "0.9 1.0 1.1" (separated by space, default="${speed_perturb_factors}").
# Feature extraction related
--feats_type # Feature type (raw, fbank_pitch or extracted, default="${feats_type}").
--audio_format # Audio format: wav, flac, wav.ark, flac.ark (only in feats_type=raw, default="${audio_format}").
--fs # Sampling rate (default="${fs}").
--min_wav_duration # Minimum duration in second (default="${min_wav_duration}").
--max_wav_duration # Maximum duration in second (default="${max_wav_duration}").
# Tokenization related
--oov # Out of vocabulary symbol (default="${oov}").
--blank # CTC blank symbol (default="${blank}").
--sos_eos # sos and eos symbole (default="${sos_eos}").
--token_joint=false # Whether to use a single bpe system for both source and target languages.
# if set as true, will use tgt_* for processing (default="${token_joint}").
--src_token_type=bpe # Tokenization type (char or bpe) for source languages. (default="${src_token_type}").
--src_nbpe=30 # The number of BPE vocabulary for source language. (default="${src_nbpe}").
--src_bpemode=unigram # Mode of BPE for source language (unigram or bpe). (default="${src_bpemode}").
--src_bpe_input_sentence_size=100000000 # Size of input sentence for BPE for source language. (default="${src_bpe_input_sentence_size}").
--src_bpe_nlsyms= # Non-linguistic symbols list, separated by a comma, for BPE of source language. (default="${src_bpe_nlsyms}").
--src_bpe_char_cover=1.0 # Character coverage when modeling BPE for source language. (default="${src_bpe_char_cover}").
--tgt_token_type=bpe # Tokenization type (char or bpe) for target language. (default="${tgt_token_type}").
--tgt_nbpe=30 # The number of BPE vocabulary for target language. (default="${tgt_nbpe}").
--tgt_bpemode=unigram # Mode of BPE (unigram or bpe) for target language. (default="${tgt_bpemode}").
--tgt_bpe_input_sentence_size=100000000 # Size of input sentence for BPE for target language. (default="${tgt_bpe_input_sentence_size}").
--tgt_bpe_nlsyms= # Non-linguistic symbols list, separated by a comma, for BPE for target language. (default="${tgt_bpe_nlsyms}").
--tgt_bpe_char_cover=1.0 # Character coverage when modeling BPE for target language. (default="${tgt_bpe_char_cover}").
# Language model related
--lm_tag # Suffix to the result dir for language model training (default="${lm_tag}").
--lm_exp # Specify the directory path for LM experiment.
# If this option is specified, lm_tag is ignored (default="${lm_exp}").
--lm_stats_dir # Specify the directory path for LM statistics (default="${lm_stats_dir}").
--lm_config # Config for language model training (default="${lm_config}").
--lm_args # Arguments for language model training (default="${lm_args}").
# e.g., --lm_args "--max_epoch 10"
# Note that it will overwrite args in lm config.
--use_word_lm # Whether to use word language model (default="${use_word_lm}").
--word_vocab_size # Size of word vocabulary (default="${word_vocab_size}").
--num_splits_lm # Number of splitting for lm corpus (default="${num_splits_lm}").
# ST model related
--st_tag # Suffix to the result dir for st model training (default="${st_tag}").
--st_exp # Specify the directory path for ST experiment.
# If this option is specified, st_tag is ignored (default="${st_exp}").
--st_stats_dir # Specify the directory path for ST statistics (default="${st_stats_dir}").
--st_config # Config for st model training (default="${st_config}").
--st_args # Arguments for st model training (default="${st_args}").
# e.g., --st_args "--max_epoch 10"
# Note that it will overwrite args in st config.
--pretrained_asr= # Pretrained model to load (default="${pretrained_asr}").
--ignore_init_mismatch= # Ignore mismatch parameter init with pretrained model (default="${ignore_init_mismatch}").
--feats_normalize # Normalizaton layer type. (default="${feats_normalize}").
--num_splits_st # Number of splitting for lm corpus. (default="${num_splits_st}").
--src_lang= # source language abbrev. id (e.g., es). (default="${src_lang}")
--tgt_lang= # target language abbrev. id (e.g., en). (default="${tgt_lang}")
# Decoding related
--inference_tag # Suffix to the result dir for decoding (default="${inference_tag}").
--inference_config # Config for decoding (default="${inference_config}").
--inference_args # Arguments for decoding (default="${inference_args}").
# e.g., --inference_args "--lm_weight 0.1"
# Note that it will overwrite args in inference config.
--inference_lm # Language model path for decoding (default="${inference_lm}").
--inference_st_model # ST model path for decoding (default="${inference_st_model}").
--download_model # Download a model from Model Zoo and use it for decoding (default="${download_model}").
# [Task dependent] Set the datadir name created by local/data.sh
--train_set # Name of training set (required).
--valid_set # Name of validation set used for monitoring/tuning network training (required).
--test_sets # Names of test sets.
# Multiple items (e.g., both dev and eval sets) can be specified (required).
--src_bpe_train_text # Text file path of bpe training set for source language.
--tgt_bpe_train_text # Text file path of bpe training set for target language
--lm_train_text # Text file path of language model training set.
--lm_dev_text # Text file path of language model development set (default="${lm_dev_text}").
--lm_test_text # Text file path of language model evaluation set (default="${lm_test_text}").
--nlsyms_txt # Non-linguistic symbol list if existing (default="${nlsyms_txt}").
--cleaner # Text cleaner (default="${cleaner}").
--g2p # g2p method (default="${g2p}").
--lang # The language type of corpus (default=${lang}).
--score_opts # The options given to sclite scoring (default="${score_opts}").
--local_score_opts # The options given to local/score.sh (default="${local_score_opts}").
--st_speech_fold_length # fold_length for speech data during ST training (default="${st_speech_fold_length}").
--st_text_fold_length # fold_length for text data during ST training (default="${st_text_fold_length}").
--lm_fold_length # fold_length for LM training (default="${lm_fold_length}").
EOF
)
log "$0 $*"
# Save command line args for logging (they will be lost after utils/parse_options.sh)
run_args=$(pyscripts/utils/print_args.py $0 "$@")
. utils/parse_options.sh
if [ $# -ne 0 ]; then
log "${help_message}"
log "Error: No positional arguments are required."
exit 2
fi
. ./path.sh
. ./cmd.sh
# Check required arguments
[ -z "${train_set}" ] && { log "${help_message}"; log "Error: --train_set is required"; exit 2; };
[ -z "${valid_set}" ] && { log "${help_message}"; log "Error: --valid_set is required"; exit 2; };
[ -z "${test_sets}" ] && { log "${help_message}"; log "Error: --test_sets is required"; exit 2; };
# Check feature type and pick the matching dump directory; any other value
# is a usage error.
case "${feats_type}" in
    raw) data_feats=${dumpdir}/raw ;;
    fbank_pitch) data_feats=${dumpdir}/fbank_pitch ;;
    fbank) data_feats=${dumpdir}/fbank ;;
    extracted) data_feats=${dumpdir}/extracted ;;
    *)
        log "${help_message}"
        log "Error: not supported: --feats_type ${feats_type}"
        exit 2
        ;;
esac
# Extra files for translation process
utt_extra_files="text.${src_case}.${src_lang} text.${tgt_case}.${tgt_lang}"
# Use the same text as ST for bpe training if not specified.
[ -z "${src_bpe_train_text}" ] && src_bpe_train_text="${data_feats}/${train_set}/text.${src_case}.${src_lang}"
[ -z "${tgt_bpe_train_text}" ] && tgt_bpe_train_text="${data_feats}/${train_set}/text.${tgt_case}.${tgt_lang}"
# Use the same text as ST for lm training if not specified.
[ -z "${lm_train_text}" ] && lm_train_text="${data_feats}/${train_set}/text.${tgt_case}.${tgt_lang}"
# Use the same text as ST for lm development if not specified.
[ -z "${lm_dev_text}" ] && lm_dev_text="${data_feats}/${valid_set}/text.${tgt_case}.${tgt_lang}"
# Use the text of the 1st evaldir if lm_test is not specified
# (${test_sets%% *} strips everything after the first space).
[ -z "${lm_test_text}" ] && lm_test_text="${data_feats}/${test_sets%% *}/text.${tgt_case}.${tgt_lang}"
# Check tokenization type
# Lay out the token-list / BPE-model directory structure under data/.
if [ "${lang}" != noinfo ]; then
token_listdir=data/${lang}_token_list
else
token_listdir=data/token_list
fi
# The tgt bpedir is set for all cases when using bpe
tgt_bpedir="${token_listdir}/tgt_bpe_${tgt_bpemode}${tgt_nbpe}"
tgt_bpeprefix="${tgt_bpedir}"/bpe
tgt_bpemodel="${tgt_bpeprefix}".model
tgt_bpetoken_list="${tgt_bpedir}"/tokens.txt
tgt_chartoken_list="${token_listdir}"/char/tgt_tokens.txt
if "${token_joint}"; then
# if token_joint, the bpe training will use both src_lang and tgt_lang to train a single bpe model,
# so the source side simply aliases the target-side paths.
src_bpedir="${tgt_bpedir}"
src_bpeprefix="${tgt_bpeprefix}"
src_bpemodel="${tgt_bpemodel}"
src_bpetoken_list="${tgt_bpetoken_list}"
src_chartoken_list="${tgt_chartoken_list}"
else
src_bpedir="${token_listdir}/src_bpe_${src_bpemode}${src_nbpe}"
src_bpeprefix="${src_bpedir}"/bpe
src_bpemodel="${src_bpeprefix}".model
src_bpetoken_list="${src_bpedir}"/tokens.txt
src_chartoken_list="${token_listdir}"/char/src_tokens.txt
fi
# NOTE: keep for future development.
# shellcheck disable=SC2034
tgt_wordtoken_list="${token_listdir}"/word/tgt_tokens.txt
if "${token_joint}"; then
src_wordtoken_list="${tgt_wordtoken_list}"
else
src_wordtoken_list="${token_listdir}"/word/src_tokens.txt
fi
# Set token types for src and tgt langs
# For char/word tokenization no sentencepiece model is needed, so the
# corresponding *_bpemodel is set to the sentinel value "none".
if [ "${src_token_type}" = bpe ]; then
src_token_list="${src_bpetoken_list}"
elif [ "${src_token_type}" = char ]; then
src_token_list="${src_chartoken_list}"
src_bpemodel=none
elif [ "${src_token_type}" = word ]; then
src_token_list="${src_wordtoken_list}"
src_bpemodel=none
else
log "Error: not supported --src_token_type '${src_token_type}'"
exit 2
fi
if [ "${tgt_token_type}" = bpe ]; then
tgt_token_list="${tgt_bpetoken_list}"
elif [ "${tgt_token_type}" = char ]; then
tgt_token_list="${tgt_chartoken_list}"
tgt_bpemodel=none
elif [ "${tgt_token_type}" = word ]; then
tgt_token_list="${tgt_wordtoken_list}"
tgt_bpemodel=none
else
log "Error: not supported --tgt_token_type '${tgt_token_type}'"
exit 2
fi
# Word-level LM is not implemented yet: fail fast if requested.
if ${use_word_lm}; then
log "Error: Word LM is not supported yet"
exit 2
# NOTE: these are the settings word-LM support would use; they were
# unreachable dead statements after "exit 2", so they are kept only as
# commented-out reminders.
# lm_token_list="${tgt_wordtoken_list}"
# lm_token_type=word
else
# The LM shares the target-language token inventory.
lm_token_list="${tgt_token_list}"
lm_token_type="${tgt_token_type}"
fi
# Set tag for naming of model directory
# The tags concatenate config name, feature type, language, token type/case,
# vocabulary size, sanitized extra args, and a speed-perturbation marker, so
# the experiment directory name encodes the full configuration.
if [ -z "${st_tag}" ]; then
if [ -n "${st_config}" ]; then
st_tag="$(basename "${st_config}" .yaml)_${feats_type}"
else
st_tag="train_${feats_type}"
fi
if [ "${lang}" != noinfo ]; then
st_tag+="_${lang}_${tgt_token_type}_${tgt_case}"
else
st_tag+="_${tgt_token_type}_${tgt_case}"
fi
if [ "${tgt_token_type}" = bpe ]; then
st_tag+="${tgt_nbpe}"
fi
# Add overwritten arg's info
if [ -n "${st_args}" ]; then
st_tag+="$(echo "${st_args}" | sed -e "s/--/\_/g" -e "s/[ |=/]//g")"
fi
if [ -n "${speed_perturb_factors}" ]; then
st_tag+="_sp"
fi
fi
if [ -z "${lm_tag}" ]; then
if [ -n "${lm_config}" ]; then
lm_tag="$(basename "${lm_config}" .yaml)"
else
lm_tag="train"
fi
if [ "${lang}" != noinfo ]; then
lm_tag+="_${lang}_${lm_token_type}"
else
lm_tag+="_${lm_token_type}"
fi
if [ "${lm_token_type}" = bpe ]; then
lm_tag+="${tgt_nbpe}"
fi
# Add overwritten arg's info
if [ -n "${lm_args}" ]; then
lm_tag+="$(echo "${lm_args}" | sed -e "s/--/\_/g" -e "s/[ |=/]//g")"
fi
fi
# The directory used for collect-stats mode
if [ -z "${st_stats_dir}" ]; then
if [ "${lang}" != noinfo ]; then
st_stats_dir="${expdir}/st_stats_${feats_type}_${lang}_${tgt_token_type}"
else
st_stats_dir="${expdir}/st_stats_${feats_type}_${tgt_token_type}"
fi
if [ "${tgt_token_type}" = bpe ]; then
st_stats_dir+="${tgt_nbpe}"
fi
if [ -n "${speed_perturb_factors}" ]; then
st_stats_dir+="_sp"
fi
fi
if [ -z "${lm_stats_dir}" ]; then
if [ "${lang}" != noinfo ]; then
lm_stats_dir="${expdir}/lm_stats_${lang}_${lm_token_type}"
else
lm_stats_dir="${expdir}/lm_stats_${lm_token_type}"
fi
if [ "${lm_token_type}" = bpe ]; then
lm_stats_dir+="${tgt_nbpe}"
fi
fi
# The directory used for training commands
if [ -z "${st_exp}" ]; then
st_exp="${expdir}/st_${st_tag}"
fi
if [ -z "${lm_exp}" ]; then
lm_exp="${expdir}/lm_${lm_tag}"
fi
if [ -z "${ngram_exp}" ]; then
ngram_exp="${expdir}/ngram"
fi
if [ -z "${inference_tag}" ]; then
if [ -n "${inference_config}" ]; then
inference_tag="$(basename "${inference_config}" .yaml)"
else
inference_tag=inference
fi
# Add overwritten arg's info
# NOTE(review): this sed strips "[ |=]" while the st/lm variants above strip
# "[ |=/]" (also "/") — looks inconsistent; confirm whether intentional.
if [ -n "${inference_args}" ]; then
inference_tag+="$(echo "${inference_args}" | sed -e "s/--/\_/g" -e "s/[ |=]//g")"
fi
if "${use_lm}"; then
inference_tag+="_lm_$(basename "${lm_exp}")_$(echo "${inference_lm}" | sed -e "s/\//_/g" -e "s/\.[^.]*$//g")"
fi
if "${use_ngram}"; then
inference_tag+="_ngram_$(basename "${ngram_exp}")_$(echo "${inference_ngram}" | sed -e "s/\//_/g" -e "s/\.[^.]*$//g")"
fi
inference_tag+="_st_model_$(echo "${inference_st_model}" | sed -e "s/\//_/g" -e "s/\.[^.]*$//g")"
if "${use_k2}"; then
inference_tag+="_use_k2"
fi
fi
# ========================== Main stages start from here. ==========================
if ! "${skip_data_prep}"; then
if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
log "Stage 1: Data preparation for data/${train_set}, data/${valid_set}, etc."
# [Task dependent] Need to create data.sh for new corpus
local/data.sh ${local_data_opts}
fi
if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
if [ -n "${speed_perturb_factors}" ]; then
log "Stage 2: Speed perturbation: data/${train_set} -> data/${train_set}_sp"
for factor in ${speed_perturb_factors}; do
if [[ $(bc <<<"${factor} != 1.0") == 1 ]]; then
scripts/utils/perturb_data_dir_speed.sh --utt_extra_files "${utt_extra_files}" \
"${factor}" "data/${train_set}" "data/${train_set}_sp${factor}"
_dirs+="data/${train_set}_sp${factor} "
else
# If speed factor is 1, same as the original
_dirs+="data/${train_set} "
fi
done
utils/combine_data.sh --extra_files "${utt_extra_files}" "data/${train_set}_sp" ${_dirs}
for extra_file in ${utt_extra_files}; do
python pyscripts/utils/remove_duplicate_keys.py data/"${train_set}_sp"/${extra_file} > data/"${train_set}_sp"/${extra_file}.tmp
mv data/"${train_set}_sp"/${extra_file}.tmp data/"${train_set}_sp"/${extra_file}
done
else
log "Skip stage 2: Speed perturbation"
fi
fi
if [ -n "${speed_perturb_factors}" ]; then
train_set="${train_set}_sp"
fi
if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
if [ "${feats_type}" = raw ]; then
log "Stage 3: Format wav.scp: data/ -> ${data_feats}"
# ====== Recreating "wav.scp" ======
# Kaldi-wav.scp, which can describe the file path with unix-pipe, like "cat /some/path |",
# shouldn't be used in training process.
# "format_wav_scp.sh" dumps such pipe-style-wav to real audio file
# and it can also change the audio-format and sampling rate.
# If nothing is need, then format_wav_scp.sh does nothing:
# i.e. the input file format and rate is same as the output.
for dset in "${train_set}" "${valid_set}" ${test_sets}; do
if [ "${dset}" = "${train_set}" ] || [ "${dset}" = "${valid_set}" ]; then
_suf="/org"
else
_suf=""
fi
utils/copy_data_dir.sh --validate_opts --non-print data/"${dset}" "${data_feats}${_suf}/${dset}"
# expand the utt_extra_files for multi-references
expand_utt_extra_files=""
for extra_file in ${utt_extra_files}; do
# with regex to suuport multi-references
for single_file in $(ls data/"${dset}"/${extra_file}*); do
cp ${single_file} "${data_feats}${_suf}/${dset}"
expand_utt_extra_files="${expand_utt_extra_files} $(basename ${single_file})"
done
done
echo "${expand_utt_extra_files}"
utils/fix_data_dir.sh --utt_extra_files "${expand_utt_extra_files}" "${data_feats}${_suf}/${dset}"
for extra_file in ${expand_utt_extra_files}; do
LC_ALL=C sort -u -k1,1 "${data_feats}${_suf}/${dset}/${extra_file}" -o "${data_feats}${_suf}/${dset}/${extra_file}"
done
rm -f ${data_feats}${_suf}/${dset}/{segments,wav.scp,reco2file_and_channel,reco2dur}
_opts=
if [ -e data/"${dset}"/segments ]; then
# "segments" is used for splitting wav files which are written in "wav".scp
# into utterances. The file format of segments:
# <segment_id> <record_id> <start_time> <end_time>
# "e.g. call-861225-A-0050-0065 call-861225-A 5.0 6.5"
# Where the time is written in seconds.
_opts+="--segments data/${dset}/segments "
fi
# shellcheck disable=SC2086
scripts/audio/format_wav_scp.sh --nj "${nj}" --cmd "${train_cmd}" \
--audio-format "${audio_format}" --fs "${fs}" ${_opts} \
"data/${dset}/wav.scp" "${data_feats}${_suf}/${dset}"
echo "${feats_type}" > "${data_feats}${_suf}/${dset}/feats_type"
done
elif [ "${feats_type}" = fbank_pitch ]; then
log "[Require Kaldi] Stage 3: ${feats_type} extract: data/ -> ${data_feats}"
for dset in "${train_set}" "${valid_set}" ${test_sets}; do
if [ "${dset}" = "${train_set}" ] || [ "${dset}" = "${valid_set}" ]; then
_suf="/org"
else
_suf=""
fi
# 1. Copy datadir
utils/copy_data_dir.sh --validate_opts --non-print data/"${dset}" "${data_feats}${_suf}/${dset}"
# expand the utt_extra_files for multi-references
expand_utt_extra_files=""
for extra_file in ${utt_extra_files}; do
# with regex to suuport multi-references
for single_file in $(ls data/"${dset}"/${extra_file}*); do
cp ${single_file} "${data_feats}${_suf}/${dset}"
expand_utt_extra_files="${expand_utt_extra_files} $(basename ${single_file})"
done
done
for extra_file in ${expand_utt_extra_files}; do
LC_ALL=C sort -u -k1,1 "${data_feats}${_suf}/${dset}/${extra_file}" -o "${data_feats}${_suf}/${dset}/${extra_file}"
done
# 2. Feature extract
_nj=$(min "${nj}" "$(<"${data_feats}${_suf}/${dset}/utt2spk" wc -l)")
steps/make_fbank_pitch.sh --nj "${_nj}" --cmd "${train_cmd}" "${data_feats}${_suf}/${dset}"
utils/fix_data_dir.sh --utt_extra_files "${expand_utt_extra_files}*" "${data_feats}${_suf}/${dset}"
# 3. Derive the the frame length and feature dimension
scripts/feats/feat_to_shape.sh --nj "${_nj}" --cmd "${train_cmd}" \
"${data_feats}${_suf}/${dset}/feats.scp" "${data_feats}${_suf}/${dset}/feats_shape"
# 4. Write feats_dim
head -n 1 "${data_feats}${_suf}/${dset}/feats_shape" | awk '{ print $2 }' \
| cut -d, -f2 > ${data_feats}${_suf}/${dset}/feats_dim
# 5. Write feats_type
echo "${feats_type}" > "${data_feats}${_suf}/${dset}/feats_type"
done
elif [ "${feats_type}" = fbank ]; then
log "Stage 3: ${feats_type} extract: data/ -> ${data_feats}"
log "${feats_type} is not supported yet."
exit 1
elif [ "${feats_type}" = extracted ]; then
log "Stage 3: ${feats_type} extract: data/ -> ${data_feats}"
# Assuming you don't have wav.scp, but feats.scp is created by local/data.sh instead.
for dset in "${train_set}" "${valid_set}" ${test_sets}; do
if [ "${dset}" = "${train_set}" ] || [ "${dset}" = "${valid_set}" ]; then
_suf="/org"
else
_suf=""
fi
# Generate dummy wav.scp to avoid error by copy_data_dir.sh
<data/"${dset}"/cmvn.scp awk ' { print($1,"<DUMMY>") }' > data/"${dset}"/wav.scp
utils/copy_data_dir.sh --validate_opts --non-print data/"${dset}" "${data_feats}${_suf}/${dset}"
# expand the utt_extra_files for multi-references
expand_utt_extra_files=""
for extra_file in ${utt_extra_files}; do
# with regex to suuport multi-references
for single_file in $(ls data/"${dset}"/${extra_file}*); do
cp ${single_file} "${data_feats}${_suf}/${dset}"
expand_utt_extra_files="${expand_utt_extra_files} $(basename ${single_file})"
done
done
utils/fix_data_dir.sh --utt_extra_files "${expand_utt_extra_files}*" "${data_feats}${_suf}/${dset}"
for extra_file in ${expand_utt_extra_files}; do
LC_ALL=C sort -u -k1,1 "${data_feats}${_suf}/${dset}/${extra_file}" -o "${data_feats}${_suf}/${dset}/${extra_file}"
done
# Derive the the frame length and feature dimension
_nj=$(min "${nj}" "$(<"${data_feats}${_suf}/${dset}/utt2spk" wc -l)")
scripts/feats/feat_to_shape.sh --nj "${_nj}" --cmd "${train_cmd}" \
"${data_feats}${_suf}/${dset}/feats.scp" "${data_feats}${_suf}/${dset}/feats_shape"
pyscripts/feats/feat-to-shape.py "scp:head -n 1 ${data_feats}${_suf}/${dset}/feats.scp |" - | \
awk '{ print $2 }' | cut -d, -f2 > "${data_feats}${_suf}/${dset}/feats_dim"
echo "${feats_type}" > "${data_feats}${_suf}/${dset}/feats_type"
done
else
log "Error: not supported: --feats_type ${feats_type}"
exit 2
fi
fi
if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
log "Stage 4: Remove long/short data: ${data_feats}/org -> ${data_feats}"
# NOTE(kamo): Not applying to test_sets to keep original data
for dset in "${train_set}" "${valid_set}"; do
# Copy data dir
utils/copy_data_dir.sh --validate_opts --non-print "${data_feats}/org/${dset}" "${data_feats}/${dset}"
cp "${data_feats}/org/${dset}/feats_type" "${data_feats}/${dset}/feats_type"
for utt_extra_file in ${utt_extra_files}; do
cp "${data_feats}/org/${dset}/${utt_extra_file}" "${data_feats}/${dset}"
done
# Remove short utterances
_feats_type="$(<${data_feats}/${dset}/feats_type)"
if [ "${_feats_type}" = raw ]; then
_fs=$(python3 -c "import humanfriendly as h;print(h.parse_size('${fs}'))")
_min_length=$(python3 -c "print(int(${min_wav_duration} * ${_fs}))")
_max_length=$(python3 -c "print(int(${max_wav_duration} * ${_fs}))")
# utt2num_samples is created by format_wav_scp.sh
<"${data_feats}/org/${dset}/utt2num_samples" \
awk -v min_length="${_min_length}" -v max_length="${_max_length}" \
'{ if ($2 > min_length && $2 < max_length ) print $0; }' \
>"${data_feats}/${dset}/utt2num_samples"
<"${data_feats}/org/${dset}/wav.scp" \
utils/filter_scp.pl "${data_feats}/${dset}/utt2num_samples" \
>"${data_feats}/${dset}/wav.scp"
else
# Get frame shift in ms from conf/fbank.conf
_frame_shift=
if [ -f conf/fbank.conf ] && [ "$(<conf/fbank.conf grep -c frame-shift)" -gt 0 ]; then
# Assume using conf/fbank.conf for feature extraction
_frame_shift="$(<conf/fbank.conf grep frame-shift | sed -e 's/[-a-z =]*\([0-9]*\)/\1/g')"
fi
if [ -z "${_frame_shift}" ]; then
# If not existing, use the default number in Kaldi (=10ms).
# If you are using different number, you have to change the following value manually.
_frame_shift=10
fi
_min_length=$(python3 -c "print(int(${min_wav_duration} / ${_frame_shift} * 1000))")
_max_length=$(python3 -c "print(int(${max_wav_duration} / ${_frame_shift} * 1000))")
cp "${data_feats}/org/${dset}/feats_dim" "${data_feats}/${dset}/feats_dim"
<"${data_feats}/org/${dset}/feats_shape" awk -F, ' { print $1 } ' \
| awk -v min_length="${_min_length}" -v max_length="${_max_length}" \
'{ if ($2 > min_length && $2 < max_length) print $0; }' \
>"${data_feats}/${dset}/feats_shape"
<"${data_feats}/org/${dset}/feats.scp" \
utils/filter_scp.pl "${data_feats}/${dset}/feats_shape" \
>"${data_feats}/${dset}/feats.scp"
fi
# Remove empty text
<"${data_feats}/org/${dset}/text" \
awk ' { if( NF != 1 ) print $0; } ' >"${data_feats}/${dset}/text"
# fix_data_dir.sh leaves only utts which exist in all files
utils/fix_data_dir.sh --utt_extra_files "${utt_extra_files}" "${data_feats}/${dset}"
for utt_extra_file in ${utt_extra_files}; do
python pyscripts/utils/remove_duplicate_keys.py ${data_feats}/${dset}/${utt_extra_file} \
> ${data_feats}/${dset}/${utt_extra_file}.tmp
mv ${data_feats}/${dset}/${utt_extra_file}.tmp ${data_feats}/${dset}/${utt_extra_file}
done
done
# shellcheck disable=SC2002
cat ${lm_train_text} | awk ' { if( NF != 1 ) print $0; } ' > "${data_feats}/lm_train.txt"
fi
# Stage 5: Generate the token lists (vocabularies) for target (5a) and source (5b)
# languages, via SentencePiece BPE or character/word tokenization.
if [ ${stage} -le 5 ] && [ ${stop_stage} -ge 5 ]; then
# Combine source and target texts when using joint tokenization
if "${token_joint}"; then
log "Merge src and target data if joint BPE"
cat $tgt_bpe_train_text > ${data_feats}/${train_set}/text.${src_lang}_${tgt_lang}
[ ! -z "${src_bpe_train_text}" ] && cat ${src_bpe_train_text} >> ${data_feats}/${train_set}/text.${src_lang}_${tgt_lang}
# Set the new text as the target text
tgt_bpe_train_text="${data_feats}/${train_set}/text.${src_lang}_${tgt_lang}"
fi
# First generate tgt lang
if [ "${tgt_token_type}" = bpe ]; then
log "Stage 5a: Generate token_list from ${tgt_bpe_train_text} using BPE for tgt_lang"
mkdir -p "${tgt_bpedir}"
# Drop the utterance-id column (field 1); SentencePiece trains on raw text.
# shellcheck disable=SC2002
cat ${tgt_bpe_train_text} | cut -f 2- -d" " > "${tgt_bpedir}"/train.txt
if [ -n "${tgt_bpe_nlsyms}" ]; then
_opts_spm="--user_defined_symbols=${tgt_bpe_nlsyms}"
else
_opts_spm=""
fi
spm_train \
--input="${tgt_bpedir}"/train.txt \
--vocab_size="${tgt_nbpe}" \
--model_type="${tgt_bpemode}" \
--model_prefix="${tgt_bpeprefix}" \
--character_coverage=${tgt_bpe_char_cover} \
--input_sentence_size="${tgt_bpe_input_sentence_size}" \
${_opts_spm}
# Assemble the token list: <blank> first, OOV second, BPE pieces, sos/eos last.
{
echo "${blank}"
echo "${oov}"
# Remove <unk>, <s>, </s> from the vocabulary
<"${tgt_bpeprefix}".vocab awk '{ if( NR != 1 && NR != 2 && NR != 3 ){ print $1; } }'
echo "${sos_eos}"
} > "${tgt_token_list}"
elif [ "${tgt_token_type}" = char ] || [ "${tgt_token_type}" = word ]; then
log "Stage 5a: Generate character level token_list from ${tgt_bpe_train_text} for tgt_lang"
_opts="--non_linguistic_symbols ${nlsyms_txt}"
# shellcheck disable=SC2002
cat ${tgt_bpe_train_text} | cut -f 2- -d" " > "${data_feats}"/token_train.txt
# The first symbol in token_list must be "<blank>" and the last must be also sos/eos:
# 0 is reserved for CTC-blank for ST and also used as ignore-index in the other task
${python} -m espnet2.bin.tokenize_text \
--token_type "${tgt_token_type}" \
--input "${data_feats}/token_train.txt" --output "${tgt_token_list}" ${_opts} \
--field 2- \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--write_vocabulary true \
--add_symbol "${blank}:0" \
--add_symbol "${oov}:1" \
--add_symbol "${sos_eos}:-1"
else
log "Error: not supported --token_type '${tgt_token_type}'"
exit 2
fi
# Create word-list for word-LM training
if ${use_word_lm} && [ "${tgt_token_type}" != word ]; then
log "Generate word level token_list from ${data_feats}/lm_train.txt"
${python} -m espnet2.bin.tokenize_text \
--token_type word \
--input "${data_feats}/lm_train.txt" --output "${lm_token_list}" \
--field 2- \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--write_vocabulary true \
--vocabulary_size "${word_vocab_size}" \
--add_symbol "${blank}:0" \
--add_symbol "${oov}:1" \
--add_symbol "${sos_eos}:-1"
fi
# Then generate src lang
if "${token_joint}"; then
log "Stage 5b: Skip separate token construction for src_lang when setting ${token_joint} as true"
else
if [ "${src_token_type}" = bpe ]; then
log "Stage 5b: Generate token_list from ${src_bpe_train_text} using BPE for src_lang"
mkdir -p "${src_bpedir}"
# shellcheck disable=SC2002
cat ${src_bpe_train_text} | cut -f 2- -d" " > "${src_bpedir}"/train.txt
if [ -n "${src_bpe_nlsyms}" ]; then
_opts_spm="--user_defined_symbols=${src_bpe_nlsyms}"
else
_opts_spm=""
fi
spm_train \
--input="${src_bpedir}"/train.txt \
--vocab_size="${src_nbpe}" \
--model_type="${src_bpemode}" \
--model_prefix="${src_bpeprefix}" \
--character_coverage=${src_bpe_char_cover} \
--input_sentence_size="${src_bpe_input_sentence_size}" \
${_opts_spm}
{
echo "${blank}"
echo "${oov}"
# Remove <unk>, <s>, </s> from the vocabulary
<"${src_bpeprefix}".vocab awk '{ if( NR != 1 && NR != 2 && NR != 3 ){ print $1; } }'
echo "${sos_eos}"
} > "${src_token_list}"
elif [ "${src_token_type}" = char ] || [ "${src_token_type}" = word ]; then
log "Stage 5b: Generate character level token_list from ${src_bpe_train_text} for src_lang"
_opts="--non_linguistic_symbols ${nlsyms_txt}"
# NOTE(review): reuses "${data_feats}"/token_train.txt, overwriting the tgt-side
# scratch file written in stage 5a — harmless since 5a has already consumed it.
# shellcheck disable=SC2002
cat ${src_bpe_train_text} | cut -f 2- -d" " > "${data_feats}"/token_train.txt
# The first symbol in token_list must be "<blank>" and the last must be also sos/eos:
# 0 is reserved for CTC-blank for ST and also used as ignore-index in the other task
${python} -m espnet2.bin.tokenize_text \
--token_type "${src_token_type}" \
--input "${data_feats}/token_train.txt" --output "${src_token_list}" ${_opts} \
--field 2- \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--write_vocabulary true \
--add_symbol "${blank}:0" \
--add_symbol "${oov}:1" \
--add_symbol "${sos_eos}:-1"
else
log "Error: not supported --token_type '${src_token_type}'"
exit 2
fi
fi
fi
else
log "Skip the stages for data preparation"
fi
# ========================== Data preparation is done here. ==========================
if ! "${skip_train}"; then
if "${use_lm}"; then
# Stage 6: Collect LM statistics (shape files) used later for batch-bin batching.
if [ ${stage} -le 6 ] && [ ${stop_stage} -ge 6 ]; then
log "Stage 6: LM collect stats: train_set=${data_feats}/lm_train.txt, dev_set=${lm_dev_text}"
_opts=
if [ -n "${lm_config}" ]; then
# To generate the config file: e.g.
# % python3 -m espnet2.bin.lm_train --print_config --optim adam
_opts+="--config ${lm_config} "
fi
# 1. Split the key file
_logdir="${lm_stats_dir}/logdir"
mkdir -p "${_logdir}"
# Get the minimum number among ${nj} and the number lines of input files
_nj=$(min "${nj}" "$(<${data_feats}/lm_train.txt wc -l)" "$(<${lm_dev_text} wc -l)")
key_file="${data_feats}/lm_train.txt"
split_scps=""
for n in $(seq ${_nj}); do
split_scps+=" ${_logdir}/train.${n}.scp"
done
# shellcheck disable=SC2086
utils/split_scp.pl "${key_file}" ${split_scps}
key_file="${lm_dev_text}"
split_scps=""
for n in $(seq ${_nj}); do
split_scps+=" ${_logdir}/dev.${n}.scp"
done
# shellcheck disable=SC2086
utils/split_scp.pl "${key_file}" ${split_scps}
# 2. Generate run.sh
log "Generate '${lm_stats_dir}/run.sh'. You can resume the process from stage 6 using this script"
mkdir -p "${lm_stats_dir}"; echo "${run_args} --stage 6 \"\$@\"; exit \$?" > "${lm_stats_dir}/run.sh"; chmod +x "${lm_stats_dir}/run.sh"
# 3. Submit jobs
log "LM collect-stats started... log: '${_logdir}/stats.*.log'"
# NOTE: --*_shape_file doesn't require length information if --batch_type=unsorted,
# but it's used only for deciding the sample ids.
# shellcheck disable=SC2086
${train_cmd} JOB=1:"${_nj}" "${_logdir}"/stats.JOB.log \
${python} -m espnet2.bin.lm_train \
--collect_stats true \
--use_preprocessor true \
--bpemodel "${tgt_bpemodel}" \
--token_type "${lm_token_type}"\
--token_list "${lm_token_list}" \
--non_linguistic_symbols "${nlsyms_txt}" \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--train_data_path_and_name_and_type "${data_feats}/lm_train.txt,text,text" \
--valid_data_path_and_name_and_type "${lm_dev_text},text,text" \
--train_shape_file "${_logdir}/train.JOB.scp" \
--valid_shape_file "${_logdir}/dev.JOB.scp" \
--output_dir "${_logdir}/stats.JOB" \
${_opts} ${lm_args} || { cat "${_logdir}"/stats.1.log; exit 1; }
# 4. Aggregate shape files
_opts=
for i in $(seq "${_nj}"); do
_opts+="--input_dir ${_logdir}/stats.${i} "
done
# shellcheck disable=SC2086
${python} -m espnet2.bin.aggregate_stats_dirs ${_opts} --output_dir "${lm_stats_dir}"
# Append the num-tokens at the last dimensions. This is used for batch-bins count
<"${lm_stats_dir}/train/text_shape" \
awk -v N="$(<${lm_token_list} wc -l)" '{ print $0 "," N }' \
>"${lm_stats_dir}/train/text_shape.${lm_token_type}"
<"${lm_stats_dir}/valid/text_shape" \
awk -v N="$(<${lm_token_list} wc -l)" '{ print $0 "," N }' \
>"${lm_stats_dir}/valid/text_shape.${lm_token_type}"
fi
# Stage 7: Neural LM training (optionally with the corpus split into
# ${num_splits_lm} subsets to bound memory usage).
if [ ${stage} -le 7 ] && [ ${stop_stage} -ge 7 ]; then
log "Stage 7: LM Training: train_set=${data_feats}/lm_train.txt, dev_set=${lm_dev_text}"
_opts=
if [ -n "${lm_config}" ]; then
# To generate the config file: e.g.
# % python3 -m espnet2.bin.lm_train --print_config --optim adam
_opts+="--config ${lm_config} "
fi
if [ "${num_splits_lm}" -gt 1 ]; then
# If you met a memory error when parsing text files, this option may help you.
# The corpus is split into subsets and each subset is used for training one by one in order,
# so the memory footprint can be limited to the memory required for each dataset.
_split_dir="${lm_stats_dir}/splits${num_splits_lm}"
if [ ! -f "${_split_dir}/.done" ]; then
rm -f "${_split_dir}/.done"
${python} -m espnet2.bin.split_scps \
--scps "${data_feats}/lm_train.txt" "${lm_stats_dir}/train/text_shape.${lm_token_type}" \
--num_splits "${num_splits_lm}" \
--output_dir "${_split_dir}"
touch "${_split_dir}/.done"
else
log "${_split_dir}/.done exists. Spliting is skipped"
fi
_opts+="--train_data_path_and_name_and_type ${_split_dir}/lm_train.txt,text,text "
_opts+="--train_shape_file ${_split_dir}/text_shape.${lm_token_type} "
_opts+="--multiple_iterator true "
else
_opts+="--train_data_path_and_name_and_type ${data_feats}/lm_train.txt,text,text "
_opts+="--train_shape_file ${lm_stats_dir}/train/text_shape.${lm_token_type} "
fi
# NOTE(kamo): --fold_length is used only if --batch_type=folded and it's ignored in the other case
log "Generate '${lm_exp}/run.sh'. You can resume the process from stage 7 using this script"
mkdir -p "${lm_exp}"; echo "${run_args} --stage 7 \"\$@\"; exit \$?" > "${lm_exp}/run.sh"; chmod +x "${lm_exp}/run.sh"
log "LM training started... log: '${lm_exp}/train.log'"
if echo "${cuda_cmd}" | grep -e queue.pl -e queue-freegpu.pl &> /dev/null; then
# SGE can't include "/" in a job name
jobname="$(basename ${lm_exp})"
else
jobname="${lm_exp}/train.log"
fi
# Launch (possibly multi-node, multi-GPU distributed) training.
# TODO(jiatong): fix bpe
# shellcheck disable=SC2086
${python} -m espnet2.bin.launch \
--cmd "${cuda_cmd} --name ${jobname}" \
--log "${lm_exp}"/train.log \
--ngpu "${ngpu}" \
--num_nodes "${num_nodes}" \
--init_file_prefix "${lm_exp}"/.dist_init_ \
--multiprocessing_distributed true -- \
${python} -m espnet2.bin.lm_train \
--ngpu "${ngpu}" \
--use_preprocessor true \
--bpemodel "${tgt_bpemodel}" \
--token_type "${lm_token_type}"\
--token_list "${lm_token_list}" \
--non_linguistic_symbols "${nlsyms_txt}" \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--valid_data_path_and_name_and_type "${lm_dev_text},text,text" \
--valid_shape_file "${lm_stats_dir}/valid/text_shape.${lm_token_type}" \
--fold_length "${lm_fold_length}" \
--resume true \
--output_dir "${lm_exp}" \
${_opts} ${lm_args}
fi
# Stage 8: Evaluate the trained LM by computing perplexity on the test text.
if [ ${stage} -le 8 ] && [ ${stop_stage} -ge 8 ]; then
log "Stage 8: Calc perplexity: ${lm_test_text}"
_opts=
# TODO(kamo): Parallelize?
log "Perplexity calculation started... log: '${lm_exp}/perplexity_test/lm_calc_perplexity.log'"
# shellcheck disable=SC2086
${cuda_cmd} --gpu "${ngpu}" "${lm_exp}"/perplexity_test/lm_calc_perplexity.log \
${python} -m espnet2.bin.lm_calc_perplexity \
--ngpu "${ngpu}" \
--data_path_and_name_and_type "${lm_test_text},text,text" \
--train_config "${lm_exp}"/config.yaml \
--model_file "${lm_exp}/${inference_lm}" \
--output_dir "${lm_exp}/perplexity_test" \
${_opts}
log "PPL: ${lm_test_text}: $(cat ${lm_exp}/perplexity_test/ppl)"
fi
else
log "Stage 6-8: Skip lm-related stages: use_lm=${use_lm}"
fi
# Prepare the n-gram experiment directory up front (written by stage 9,
# read by the decoding stage when --use_ngram is enabled).
if "${use_ngram}"; then
mkdir -p ${ngram_exp}
fi
# Stage 9: Train a count-based n-gram LM with KenLM (lmplz) and compile it
# to binary format (build_binary) for fast loading at decode time.
if [ ${stage} -le 9 ] && [ ${stop_stage} -ge 9 ]; then
if "${use_ngram}"; then
log "Stage 9: Ngram Training: train_set=${data_feats}/lm_train.txt"
# BUGFIX: use "cut -f 2-" to keep the whole token sequence after the
# utterance-id; "cut -f 2" kept only the first token of each utterance,
# so the n-gram LM was trained on one word per line. This also matches
# the neural-LM stages, which tokenize with --field 2-.
cut -f 2- -d " " ${data_feats}/lm_train.txt | lmplz -S "20%" --discount_fallback -o ${ngram_num} - >${ngram_exp}/${ngram_num}gram.arpa
build_binary -s ${ngram_exp}/${ngram_num}gram.arpa ${ngram_exp}/${ngram_num}gram.bin
else
log "Stage 9: Skip ngram stages: use_ngram=${use_ngram}"
fi
fi
# Stage 10: Collect ST statistics (speech/text shape files) for batching.
if [ ${stage} -le 10 ] && [ ${stop_stage} -ge 10 ]; then
_st_train_dir="${data_feats}/${train_set}"
_st_valid_dir="${data_feats}/${valid_set}"
log "Stage 10: ST collect stats: train_set=${_st_train_dir}, valid_set=${_st_valid_dir}"
_opts=
if [ -n "${st_config}" ]; then
# To generate the config file: e.g.
# % python3 -m espnet2.bin.st_train --print_config --optim adam
_opts+="--config ${st_config} "
fi
# Choose scp/loader type from the prepared feature type (raw audio vs. kaldi feats).
_feats_type="$(<${_st_train_dir}/feats_type)"
if [ "${_feats_type}" = raw ]; then
_scp=wav.scp
if [[ "${audio_format}" == *ark* ]]; then
_type=kaldi_ark
else
# "sound" supports "wav", "flac", etc.
_type=sound
fi
_opts+="--frontend_conf fs=${fs} "
else
_scp=feats.scp
_type=kaldi_ark
_input_size="$(<${_st_train_dir}/feats_dim)"
_opts+="--input_size=${_input_size} "
fi
# 1. Split the key file
_logdir="${st_stats_dir}/logdir"
mkdir -p "${_logdir}"
# Get the minimum number among ${nj} and the number lines of input files
_nj=$(min "${nj}" "$(<${_st_train_dir}/${_scp} wc -l)" "$(<${_st_valid_dir}/${_scp} wc -l)")
key_file="${_st_train_dir}/${_scp}"
split_scps=""
for n in $(seq "${_nj}"); do
split_scps+=" ${_logdir}/train.${n}.scp"
done
# shellcheck disable=SC2086
utils/split_scp.pl "${key_file}" ${split_scps}
key_file="${_st_valid_dir}/${_scp}"
split_scps=""
for n in $(seq "${_nj}"); do
split_scps+=" ${_logdir}/valid.${n}.scp"
done
# shellcheck disable=SC2086
utils/split_scp.pl "${key_file}" ${split_scps}
# 2. Generate run.sh
log "Generate '${st_stats_dir}/run.sh'. You can resume the process from stage 10 using this script"
mkdir -p "${st_stats_dir}"; echo "${run_args} --stage 10 \"\$@\"; exit \$?" > "${st_stats_dir}/run.sh"; chmod +x "${st_stats_dir}/run.sh"
# 3. Submit jobs
log "ST collect-stats started... log: '${_logdir}/stats.*.log'"
# NOTE: --*_shape_file doesn't require length information if --batch_type=unsorted,
# but it's used only for deciding the sample ids.
# TODO(jiatong): fix different bpe model
# shellcheck disable=SC2086
${train_cmd} JOB=1:"${_nj}" "${_logdir}"/stats.JOB.log \
${python} -m espnet2.bin.st_train \
--collect_stats true \
--use_preprocessor true \
--bpemodel "${tgt_bpemodel}" \
--src_bpemodel "${src_bpemodel}" \
--token_type "${tgt_token_type}" \
--src_token_type "${src_token_type}" \
--token_list "${tgt_token_list}" \
--src_token_list "${src_token_list}" \
--non_linguistic_symbols "${nlsyms_txt}" \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--train_data_path_and_name_and_type "${_st_train_dir}/${_scp},speech,${_type}" \
--train_data_path_and_name_and_type "${_st_train_dir}/text.${tgt_case}.${tgt_lang},text,text" \
--train_data_path_and_name_and_type "${_st_train_dir}/text.${src_case}.${src_lang},src_text,text" \
--valid_data_path_and_name_and_type "${_st_valid_dir}/${_scp},speech,${_type}" \
--valid_data_path_and_name_and_type "${_st_valid_dir}/text.${tgt_case}.${tgt_lang},text,text" \
--valid_data_path_and_name_and_type "${_st_valid_dir}/text.${src_case}.${src_lang},src_text,text" \
--train_shape_file "${_logdir}/train.JOB.scp" \
--valid_shape_file "${_logdir}/valid.JOB.scp" \
--output_dir "${_logdir}/stats.JOB" \
${_opts} ${st_args} || { cat "${_logdir}"/stats.1.log; exit 1; }
# 4. Aggregate shape files
_opts=
for i in $(seq "${_nj}"); do
_opts+="--input_dir ${_logdir}/stats.${i} "
done
# shellcheck disable=SC2086
${python} -m espnet2.bin.aggregate_stats_dirs ${_opts} --output_dir "${st_stats_dir}"
# Append the num-tokens at the last dimensions. This is used for batch-bins count
<"${st_stats_dir}/train/text_shape" \
awk -v N="$(<${tgt_token_list} wc -l)" '{ print $0 "," N }' \
>"${st_stats_dir}/train/text_shape.${tgt_token_type}"
<"${st_stats_dir}/train/src_text_shape" \
awk -v N="$(<${src_token_list} wc -l)" '{ print $0 "," N }' \
>"${st_stats_dir}/train/src_text_shape.${src_token_type}"
<"${st_stats_dir}/valid/text_shape" \
awk -v N="$(<${tgt_token_list} wc -l)" '{ print $0 "," N }' \
>"${st_stats_dir}/valid/text_shape.${tgt_token_type}"
<"${st_stats_dir}/valid/src_text_shape" \
awk -v N="$(<${src_token_list} wc -l)" '{ print $0 "," N }' \
>"${st_stats_dir}/valid/src_text_shape.${src_token_type}"
fi
# Stage 11: ST model training (optionally with corpus splitting and
# distributed multi-node/multi-GPU launch).
if [ ${stage} -le 11 ] && [ ${stop_stage} -ge 11 ]; then
_st_train_dir="${data_feats}/${train_set}"
_st_valid_dir="${data_feats}/${valid_set}"
log "Stage 11: ST Training: train_set=${_st_train_dir}, valid_set=${_st_valid_dir}"
_opts=
if [ -n "${st_config}" ]; then
# To generate the config file: e.g.
# % python3 -m espnet2.bin.st_train --print_config --optim adam
_opts+="--config ${st_config} "
fi
# Mirror the raw-vs-extracted-feature selection done in stage 10.
_feats_type="$(<${_st_train_dir}/feats_type)"
if [ "${_feats_type}" = raw ]; then
_scp=wav.scp
# "sound" supports "wav", "flac", etc.
if [[ "${audio_format}" == *ark* ]]; then
_type=kaldi_ark
else
_type=sound
fi
_fold_length="$((st_speech_fold_length * 100))"
_opts+="--frontend_conf fs=${fs} "
else
_scp=feats.scp
_type=kaldi_ark
_fold_length="${st_speech_fold_length}"
_input_size="$(<${_st_train_dir}/feats_dim)"
_opts+="--input_size=${_input_size} "
fi
if [ "${feats_normalize}" = global_mvn ]; then
# Default normalization is utterance_mvn and changes to global_mvn
_opts+="--normalize=global_mvn --normalize_conf stats_file=${st_stats_dir}/train/feats_stats.npz "
fi
if [ "${num_splits_st}" -gt 1 ]; then
# If you met a memory error when parsing text files, this option may help you.
# The corpus is split into subsets and each subset is used for training one by one in order,
# so the memory footprint can be limited to the memory required for each dataset.
_split_dir="${st_stats_dir}/splits${num_splits_st}"
if [ ! -f "${_split_dir}/.done" ]; then
rm -f "${_split_dir}/.done"
${python} -m espnet2.bin.split_scps \
--scps \
"${_st_train_dir}/${_scp}" \
"${_st_train_dir}/text.${tgt_case}.${tgt_lang}" \
"${_st_train_dir}/text.${src_case}.${src_lang}" \
"${st_stats_dir}/train/speech_shape" \
"${st_stats_dir}/train/text_shape.${tgt_token_type}" \
"${st_stats_dir}/train/src_text_shape.${src_token_type}" \
--num_splits "${num_splits_st}" \
--output_dir "${_split_dir}"
touch "${_split_dir}/.done"
else
log "${_split_dir}/.done exists. Spliting is skipped"
fi
_opts+="--train_data_path_and_name_and_type ${_split_dir}/${_scp},speech,${_type} "
_opts+="--train_data_path_and_name_and_type ${_split_dir}/text.${tgt_case}.${tgt_lang},text,text "
_opts+="--train_data_path_and_name_and_type ${_split_dir}/text.${src_case}.${src_lang},src_text,text "
_opts+="--train_shape_file ${_split_dir}/speech_shape "
_opts+="--train_shape_file ${_split_dir}/text_shape.${tgt_token_type} "
_opts+="--train_shape_file ${_split_dir}/src_text_shape.${src_token_type} "
_opts+="--multiple_iterator true "
else
_opts+="--train_data_path_and_name_and_type ${_st_train_dir}/${_scp},speech,${_type} "
_opts+="--train_data_path_and_name_and_type ${_st_train_dir}/text.${tgt_case}.${tgt_lang},text,text "
_opts+="--train_data_path_and_name_and_type ${_st_train_dir}/text.${src_case}.${src_lang},src_text,text "
_opts+="--train_shape_file ${st_stats_dir}/train/speech_shape "
_opts+="--train_shape_file ${st_stats_dir}/train/text_shape.${tgt_token_type} "
_opts+="--train_shape_file ${st_stats_dir}/train/src_text_shape.${src_token_type} "
fi
log "Generate '${st_exp}/run.sh'. You can resume the process from stage 11 using this script"
mkdir -p "${st_exp}"; echo "${run_args} --stage 11 \"\$@\"; exit \$?" > "${st_exp}/run.sh"; chmod +x "${st_exp}/run.sh"
# NOTE(kamo): --fold_length is used only if --batch_type=folded and it's ignored in the other case
log "ST training started... log: '${st_exp}/train.log'"
if echo "${cuda_cmd}" | grep -e queue.pl -e queue-freegpu.pl &> /dev/null; then
# SGE can't include "/" in a job name
jobname="$(basename ${st_exp})"
else
jobname="${st_exp}/train.log"
fi
# TODO(jiatong): fix bpe
# shellcheck disable=SC2086
${python} -m espnet2.bin.launch \
--cmd "${cuda_cmd} --name ${jobname}" \
--log "${st_exp}"/train.log \
--ngpu "${ngpu}" \
--num_nodes "${num_nodes}" \
--init_file_prefix "${st_exp}"/.dist_init_ \
--multiprocessing_distributed true -- \
${python} -m espnet2.bin.st_train \
--use_preprocessor true \
--bpemodel "${tgt_bpemodel}" \
--token_type "${tgt_token_type}" \
--token_list "${tgt_token_list}" \
--src_bpemodel "${src_bpemodel}" \
--src_token_type "${src_token_type}" \
--src_token_list "${src_token_list}" \
--non_linguistic_symbols "${nlsyms_txt}" \
--cleaner "${cleaner}" \
--g2p "${g2p}" \
--valid_data_path_and_name_and_type "${_st_valid_dir}/${_scp},speech,${_type}" \
--valid_data_path_and_name_and_type "${_st_valid_dir}/text.${tgt_case}.${tgt_lang},text,text" \
--valid_data_path_and_name_and_type "${_st_valid_dir}/text.${src_case}.${src_lang},src_text,text" \
--valid_shape_file "${st_stats_dir}/valid/speech_shape" \
--valid_shape_file "${st_stats_dir}/valid/text_shape.${tgt_token_type}" \
--valid_shape_file "${st_stats_dir}/valid/src_text_shape.${src_token_type}" \
--resume true \
--init_param ${pretrained_asr} \
--ignore_init_mismatch ${ignore_init_mismatch} \
--fold_length "${_fold_length}" \
--fold_length "${st_text_fold_length}" \
--fold_length "${st_text_fold_length}" \
--output_dir "${st_exp}" \
${_opts} ${st_args}
fi
else
log "Skip the training stages"
fi
# If --download_model is given, fetch a packed model from the model zoo and
# point st_exp (and optionally lm_exp) at the unpacked files for decoding.
if [ -n "${download_model}" ]; then
log "Use ${download_model} for decoding and evaluation"
st_exp="${expdir}/${download_model}"
mkdir -p "${st_exp}"
# If the model already exists, you can skip downloading
espnet_model_zoo_download --unpack true "${download_model}" > "${st_exp}/config.txt"
# Get the path of each file
# NOTE(review): config.txt is a printed dict; sed extracts the quoted values.
_st_model_file=$(<"${st_exp}/config.txt" sed -e "s/.*'st_model_file': '\([^']*\)'.*$/\1/")
_st_train_config=$(<"${st_exp}/config.txt" sed -e "s/.*'st_train_config': '\([^']*\)'.*$/\1/")
# Create symbolic links
ln -sf "${_st_model_file}" "${st_exp}"
ln -sf "${_st_train_config}" "${st_exp}"
inference_st_model=$(basename "${_st_model_file}")
# If the pack also contains an LM, link it and use it for decoding.
if [ "$(<${st_exp}/config.txt grep -c lm_file)" -gt 0 ]; then
_lm_file=$(<"${st_exp}/config.txt" sed -e "s/.*'lm_file': '\([^']*\)'.*$/\1/")
_lm_train_config=$(<"${st_exp}/config.txt" sed -e "s/.*'lm_train_config': '\([^']*\)'.*$/\1/")
lm_exp="${expdir}/${download_model}/lm"
mkdir -p "${lm_exp}"
ln -sf "${_lm_file}" "${lm_exp}"
ln -sf "${_lm_train_config}" "${lm_exp}"
inference_lm=$(basename "${_lm_file}")
fi
fi
if ! "${skip_eval}"; then
# Stage 12: Decode every test set with the trained ST model (parallel jobs),
# then merge the per-job hypothesis files.
if [ ${stage} -le 12 ] && [ ${stop_stage} -ge 12 ]; then
log "Stage 12: Decoding: training_dir=${st_exp}"
if ${gpu_inference}; then
_cmd="${cuda_cmd}"
_ngpu=1
else
_cmd="${decode_cmd}"
_ngpu=0
fi
_opts=
if [ -n "${inference_config}" ]; then
_opts+="--config ${inference_config} "
fi
# Optional shallow fusion with a neural LM and/or n-gram LM.
if "${use_lm}"; then
if "${use_word_lm}"; then
_opts+="--word_lm_train_config ${lm_exp}/config.yaml "
_opts+="--word_lm_file ${lm_exp}/${inference_lm} "
else
_opts+="--lm_train_config ${lm_exp}/config.yaml "
_opts+="--lm_file ${lm_exp}/${inference_lm} "
fi
fi
if "${use_ngram}"; then
_opts+="--ngram_file ${ngram_exp}/${inference_ngram}"
fi
# 2. Generate run.sh
log "Generate '${st_exp}/${inference_tag}/run.sh'. You can resume the process from stage 12 using this script"
mkdir -p "${st_exp}/${inference_tag}"; echo "${run_args} --stage 12 \"\$@\"; exit \$?" > "${st_exp}/${inference_tag}/run.sh"; chmod +x "${st_exp}/${inference_tag}/run.sh"
for dset in ${test_sets}; do
_data="${data_feats}/${dset}"
_dir="${st_exp}/${inference_tag}/${dset}"
_logdir="${_dir}/logdir"
mkdir -p "${_logdir}"
_feats_type="$(<${_data}/feats_type)"
if [ "${_feats_type}" = raw ]; then
_scp=wav.scp
if [[ "${audio_format}" == *ark* ]]; then
_type=kaldi_ark
else
_type=sound
fi
else
_scp=feats.scp
_type=kaldi_ark
fi
# 1. Split the key file
key_file=${_data}/${_scp}
split_scps=""
_nj=$(min "${inference_nj}" "$(<${key_file} wc -l)")
if "${use_streaming}"; then
st_inference_tool="espnet2.bin.st_inference_streaming"
else
st_inference_tool="espnet2.bin.st_inference"
fi
for n in $(seq "${_nj}"); do
split_scps+=" ${_logdir}/keys.${n}.scp"
done
# shellcheck disable=SC2086
utils/split_scp.pl "${key_file}" ${split_scps}
# 2. Submit decoding jobs
log "Decoding started... log: '${_logdir}/st_inference.*.log'"
# shellcheck disable=SC2086
${_cmd} --gpu "${_ngpu}" JOB=1:"${_nj}" "${_logdir}"/st_inference.JOB.log \
${python} -m ${st_inference_tool} \
--batch_size ${batch_size} \
--ngpu "${_ngpu}" \
--data_path_and_name_and_type "${_data}/${_scp},speech,${_type}" \
--key_file "${_logdir}"/keys.JOB.scp \
--st_train_config "${st_exp}"/config.yaml \
--st_model_file "${st_exp}"/"${inference_st_model}" \
--output_dir "${_logdir}"/output.JOB \
${_opts} ${inference_args}
# 3. Concatenates the output files from each jobs
for f in token token_int score text; do
for i in $(seq "${_nj}"); do
cat "${_logdir}/output.${i}/1best_recog/${f}"
done | LC_ALL=C sort -k1 >"${_dir}/${f}"
done
done
fi
# Stage 13: Score translation quality with sacrebleu (BLEU/chrF/TER).
# For each test set: build trn-format reference/hypothesis files, detokenize,
# and score case-sensitive ("tc") and case-insensitive ("lc") conditions,
# with optional multi-reference scoring; finally summarize to RESULTS.md.
if [ ${stage} -le 13 ] && [ ${stop_stage} -ge 13 ]; then
log "Stage 13: Scoring"
for dset in ${test_sets}; do
_data="${data_feats}/${dset}"
_dir="${st_exp}/${inference_tag}/${dset}"
# TODO(jiatong): add asr scoring and inference
_scoredir="${_dir}/score_bleu"
mkdir -p "${_scoredir}"
# Reference: word-tokenize and append "(spk-utt)" ids (trn format).
paste \
<(<"${_data}/text.${tgt_case}.${tgt_lang}" \
${python} -m espnet2.bin.tokenize_text \
-f 2- --input - --output - \
--token_type word \
--non_linguistic_symbols "${nlsyms_txt}" \
--remove_non_linguistic_symbols true \
--cleaner "${cleaner}" \
) \
<(<"${_data}/utt2spk" awk '{ print "(" $2 "-" $1 ")" }') \
>"${_scoredir}/ref.trn.org"
# NOTE(kamo): Don't use cleaner for hyp
paste \
<(<"${_dir}/text" \
${python} -m espnet2.bin.tokenize_text \
-f 2- --input - --output - \
--token_type word \
--non_linguistic_symbols "${nlsyms_txt}" \
--remove_non_linguistic_symbols true \
) \
<(<"${_data}/utt2spk" awk '{ print "(" $2 "-" $1 ")" }') \
>"${_scoredir}/hyp.trn.org"
# remove utterance id
perl -pe 's/\([^\)]+\)//g;' "${_scoredir}/ref.trn.org" > "${_scoredir}/ref.trn"
perl -pe 's/\([^\)]+\)//g;' "${_scoredir}/hyp.trn.org" > "${_scoredir}/hyp.trn"
# detokenizer
detokenizer.perl -l ${tgt_lang} -q < "${_scoredir}/ref.trn" > "${_scoredir}/ref.trn.detok"
detokenizer.perl -l ${tgt_lang} -q < "${_scoredir}/hyp.trn" > "${_scoredir}/hyp.trn.detok"
if [ ${tgt_case} = "tc" ]; then
echo "Case sensitive BLEU result (single-reference)" >> ${_scoredir}/result.tc.txt
sacrebleu "${_scoredir}/ref.trn.detok" \
-i "${_scoredir}/hyp.trn.detok" \
-m bleu chrf ter \
>> ${_scoredir}/result.tc.txt
log "Write a case-sensitive BLEU (single-reference) result in ${_scoredir}/result.tc.txt"
fi
# detokenize & remove punctuation except apostrophe
remove_punctuation.pl < "${_scoredir}/ref.trn.detok" > "${_scoredir}/ref.trn.detok.lc.rm"
remove_punctuation.pl < "${_scoredir}/hyp.trn.detok" > "${_scoredir}/hyp.trn.detok.lc.rm"
echo "Case insensitive BLEU result (single-reference)" >> ${_scoredir}/result.lc.txt
sacrebleu -lc "${_scoredir}/ref.trn.detok.lc.rm" \
-i "${_scoredir}/hyp.trn.detok.lc.rm" \
-m bleu chrf ter \
>> ${_scoredir}/result.lc.txt
log "Write a case-insensitive BLEU (single-reference) result in ${_scoredir}/result.lc.txt"
# process multi-references cases
multi_references=$(ls "${_data}/text.${tgt_case}.${tgt_lang}".* || echo "")
if [ "${multi_references}" != "" ]; then
case_sensitive_refs=""
case_insensitive_refs=""
for multi_reference in ${multi_references}; do
ref_idx="${multi_reference##*.}"
paste \
<(<${multi_reference} \
${python} -m espnet2.bin.tokenize_text \
-f 2- --input - --output - \
--token_type word \
--non_linguistic_symbols "${nlsyms_txt}" \
--remove_non_linguistic_symbols true \
--cleaner "${cleaner}" \
) \
<(<"${_data}/utt2spk" awk '{ print "(" $2 "-" $1 ")" }') \
>"${_scoredir}/ref.trn.org.${ref_idx}"
# remove utterance id, detokenize, then lowercase & strip punctuation
perl -pe 's/\([^\)]+\)//g;' "${_scoredir}/ref.trn.org.${ref_idx}" > "${_scoredir}/ref.trn.${ref_idx}"
detokenizer.perl -l ${tgt_lang} -q < "${_scoredir}/ref.trn.${ref_idx}" > "${_scoredir}/ref.trn.detok.${ref_idx}"
remove_punctuation.pl < "${_scoredir}/ref.trn.detok.${ref_idx}" > "${_scoredir}/ref.trn.detok.lc.rm.${ref_idx}"
case_sensitive_refs="${case_sensitive_refs} ${_scoredir}/ref.trn.detok.${ref_idx}"
case_insensitive_refs="${case_insensitive_refs} ${_scoredir}/ref.trn.detok.lc.rm.${ref_idx}"
done
if [ ${tgt_case} = "tc" ]; then
echo "Case sensitive BLEU result (multi-references)" >> ${_scoredir}/result.tc.txt
# BUGFIX: score the case-preserved hypothesis (hyp.trn.detok) against the
# case-sensitive references; previously the lowercased/punctuation-stripped
# hypothesis (hyp.trn.detok.lc.rm) was used, which invalidated the
# case-sensitive metric (cf. the single-reference tc branch above).
sacrebleu ${case_sensitive_refs} \
-i ${_scoredir}/hyp.trn.detok -m bleu chrf ter \
>> ${_scoredir}/result.tc.txt
log "Write a case-sensitive BLEU (multi-reference) result in ${_scoredir}/result.tc.txt"
fi
echo "Case insensitive BLEU result (multi-references)" >> ${_scoredir}/result.lc.txt
sacrebleu -lc ${case_insensitive_refs} \
-i ${_scoredir}/hyp.trn.detok.lc.rm -m bleu chrf ter \
>> ${_scoredir}/result.lc.txt
log "Write a case-insensitive BLEU (multi-reference) result in ${_scoredir}/result.lc.txt"
fi
done
# Show results in Markdown syntax
scripts/utils/show_translation_result.sh --case $tgt_case "${st_exp}" > "${st_exp}"/RESULTS.md
cat "${st_exp}"/RESULTS.md
fi
else
log "Skip the evaluation stages"
fi
# Zip file produced by stage 14: <st_exp>/<exp-name>_<model-basename>.zip
packed_model="${st_exp}/${st_exp##*/}_${inference_st_model%.*}.zip"
if ! "${skip_upload}"; then
# Stage 14: Pack the trained model (config, weights, stats, BPE models,
# results) into a single zip for distribution/upload.
if [ ${stage} -le 14 ] && [ ${stop_stage} -ge 14 ]; then
log "Stage 14: Pack model: ${packed_model}"
_opts=
if "${use_lm}"; then
_opts+="--lm_train_config ${lm_exp}/config.yaml "
_opts+="--lm_file ${lm_exp}/${inference_lm} "
_opts+="--option ${lm_exp}/perplexity_test/ppl "
_opts+="--option ${lm_exp}/images "
fi
if [ "${feats_normalize}" = global_mvn ]; then
_opts+="--option ${st_stats_dir}/train/feats_stats.npz "
fi
if [ "${tgt_token_type}" = bpe ]; then
_opts+="--option ${tgt_bpemodel} "
_opts+="--option ${src_bpemodel} "
fi
if [ "${nlsyms_txt}" != none ]; then
_opts+="--option ${nlsyms_txt} "
fi
# FIX: RESULTS.md was previously listed twice via --option; pack it once.
# shellcheck disable=SC2086
${python} -m espnet2.bin.pack st \
--st_train_config "${st_exp}"/config.yaml \
--st_model_file "${st_exp}"/"${inference_st_model}" \
${_opts} \
--option "${st_exp}"/RESULTS.md \
--option "${st_exp}"/images \
--outpath "${packed_model}"
fi
# Stage 15: Upload the packed model to Zenodo (requires ACCESS_TOKEN).
# The record is uploaded unpublished; the user must publish it manually.
if [ ${stage} -le 15 ] && [ ${stop_stage} -ge 15 ]; then
log "Stage 15: Upload model to Zenodo: ${packed_model}"
# To upload your model, you need to do:
#   1. Sign up to Zenodo: https://zenodo.org/
#   2. Create access token: https://zenodo.org/account/settings/applications/tokens/new/
#   3. Set your environment: % export ACCESS_TOKEN="<your token>"
if command -v git &> /dev/null; then
_creator_name="$(git config user.name)"
_checkout="
git checkout $(git show -s --format=%H)"
else
_creator_name="$(whoami)"
_checkout=""
fi
# /some/where/espnet/egs2/foo/st1/ -> foo/st1
_task="$(pwd | rev | cut -d/ -f2 | rev)"
# foo/st1 -> foo
_corpus="${_task%/*}"
_model_name="${_creator_name}/${_corpus}_$(basename ${packed_model} .zip)"
# Generate description file (HTML rendered on the Zenodo record page; the
# heredoc is unquoted so the embedded $(...) and ${...} expand here).
cat << EOF > "${st_exp}"/description
This model was trained by ${_creator_name} using ${_task} recipe in <a href="https://github.com/espnet/espnet/">espnet</a>.
<p>&nbsp;</p>
<ul>
<li><strong>Python API</strong><pre><code class="language-python">See https://github.com/espnet/espnet_model_zoo</code></pre></li>
<li><strong>Evaluate in the recipe</strong><pre>
<code class="language-bash">git clone https://github.com/espnet/espnet
cd espnet${_checkout}
pip install -e .
cd $(pwd | rev | cut -d/ -f1-3 | rev)
./run.sh --skip_data_prep false --skip_train true --download_model ${_model_name}</code>
</pre></li>
<li><strong>Results</strong><pre><code>$(cat "${st_exp}"/RESULTS.md)</code></pre></li>
<li><strong>ST config</strong><pre><code>$(cat "${st_exp}"/config.yaml)</code></pre></li>
<li><strong>LM config</strong><pre><code>$(if ${use_lm}; then cat "${lm_exp}"/config.yaml; else echo NONE; fi)</code></pre></li>
</ul>
EOF
# NOTE(kamo): The model file is uploaded here, but not published yet.
#   Please confirm your record at Zenodo and publish it by yourself.
# shellcheck disable=SC2086
espnet_model_zoo_upload \
--file "${packed_model}" \
--title "ESPnet2 pretrained model, ${_model_name}, fs=${fs}, lang=${lang}" \
--description_file "${st_exp}"/description \
--creator_name "${_creator_name}" \
--license "CC-BY-4.0" \
--use_sandbox false \
--publish false
fi
else
log "Skip the uploading stages"
fi
# Stage 16: Upload the packed model to a HuggingFace repository (requires
# git-lfs and the hf_repo variable; the repo is cloned, filled, and pushed).
if ! "${skip_upload_hf}"; then
if [ ${stage} -le 16 ] && [ ${stop_stage} -ge 16 ]; then
[ -z "${hf_repo}" ] && \
log "ERROR: You need to setup the variable hf_repo with the name of the repository located at HuggingFace" && \
exit 1
log "Stage 16: Upload model to HuggingFace: ${hf_repo}"
gitlfs=$(git lfs --version 2> /dev/null || true)
[ -z "${gitlfs}" ] && \
log "ERROR: You need to install git-lfs first" && \
exit 1
# Local clone directory: slashes in the repo name become underscores.
dir_repo=${expdir}/hf_${hf_repo//"/"/"_"}
[ ! -d "${dir_repo}" ] && git clone https://huggingface.co/${hf_repo} ${dir_repo}
if command -v git &> /dev/null; then
_creator_name="$(git config user.name)"
_checkout="git checkout $(git show -s --format=%H)"
else
_creator_name="$(whoami)"
_checkout=""
fi
# /some/where/espnet/egs2/foo/asr1/ -> foo/asr1
_task="$(pwd | rev | cut -d/ -f2 | rev)"
# foo/asr1 -> foo
_corpus="${_task%/*}"
_model_name="${_creator_name}/${_corpus}_$(basename ${packed_model} .zip)"
# copy files in ${dir_repo}
unzip -o ${packed_model} -d ${dir_repo}
# Generate description file
# The variables below are referenced by the README template expanded via eval.
# shellcheck disable=SC2034
hf_task=speech-translation
# shellcheck disable=SC2034
espnet_task=ST
# shellcheck disable=SC2034
task_exp=${st_exp}
eval "echo \"$(cat scripts/utils/TEMPLATE_HF_Readme.md)\"" > "${dir_repo}"/README.md
# Commit and push only if there is something to commit.
this_folder=${PWD}
cd ${dir_repo}
if [ -n "$(git status --porcelain)" ]; then
git add .
git commit -m "Update model"
fi
git push
cd ${this_folder}
fi
else
log "Skip the uploading to HuggingFace stage"
fi
# All requested stages completed; report total wall-clock time.
log "Successfully finished. [elapsed=${SECONDS}s]"
|
// Print the integers from 100 through 300 (inclusive), one per line.
let value = 100;
while (value <= 300) {
  console.log(value);
  value += 1;
}
|
#! /bin/sh
# Regenerate the third-party license acknowledgements shown in the app's
# Settings screen, using the LicensePlist tool vendored via CocoaPods.
./Pods/LicensePlist/license-plist --output-path RaccoonWallet/Resources/Settings.bundle
|
<reponame>jorgerodcan/cordovaDevelopment
/* Version 0.1 of F5 Steganography Software by <NAME> 1999 */
/*********************************************************/
/* JPEG Decoder */
/* <NAME> */
/* EE590 Directed Research */
/* Dr. Ortega */
/* Fall 1997 */
/* */
/* HuffTable.class: */
/* Extracts Huffman table from image header */
/* data. Instantiate one class for each table */
/* in the file header. */
/* */
/* Methods: */
/* getHUFFVAL(), returns HUFFVAL array */
/* getVALPTR(), returns VALPTR array */
/* getMAXCODE(), returns MAXCODE array */
/* getMINCODE(), returns MINCODE array */
/* */
/********************** 11/4/97 **************************/
/*
* /////////////// DISCLAIMER///////////////////////////////// This software is
* provided by the author and contributors ``as is'' and any express or implied
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose are dis- claimed. In no
* event shall the author or con- tributors be liable for any direct, indirect,
* incidental, special, exemplary, or consequen- tial damages (including, but
* not limited to, procurement of substitute goods or services; loss of use,
* data, or profits; or business interruption) however caused and on any theory
* of liability, whether in contract, strict liability, or tort (including
* negligence or otherwise) arising in any way out of the use of this software,
* even if advised of the poss- ibility of such damage.
* //////////////////////////////////////////////////////
*/
// westfeld
package net.f5.ortega;
import java.io.DataInputStream;
import java.io.IOException;
public class HuffTable {
    // Instance variables
    //
    // Array names follow the JPEG standard (ITU-T T.81, Annex C and F):
    //   BITS[i]   (1..16) — number of Huffman codes of length i, read from the DHT segment
    //   HUFFVAL   — code values, in order of increasing code length
    //   HUFFSIZE  — size (bit length) of each code; HUFFCODE — the codes themselves
    //   EHUFCO/EHUFSI — encoder lookup tables, indexed by value (built in Order_codes)
    //   MINCODE/MAXCODE/VALPTR — decoder lookup tables, indexed by code length (F.15)
    private final int[] BITS = new int[17];
    private final int[] HUFFVAL = new int[256];
    private final int[] HUFFCODE = new int[257];
    private final int[] HUFFSIZE = new int[257];
    private final int[] EHUFCO = new int[257];
    private final int[] EHUFSI = new int[257];
    private final int[] MINCODE = new int[17];
    private final int[] MAXCODE = new int[18];
    private final int[] VALPTR = new int[17];
    // Total table length in bytes: 19 fixed header bytes + the number of values read.
    private final int Ln;
    // Scratch registers shared by the flow-chart routines below; the routines
    // depend on these being instance state, so they are NOT thread-safe.
    private int SI, I, J, K, LASTK, CODE;
    // Declare input steam
    DataInputStream dis;

    // Constructor Method
    // Reads one Huffman table from the stream and builds both the encoder
    // (EHUFCO/EHUFSI) and decoder (MINCODE/MAXCODE/VALPTR) lookup tables.
    // The declared length parameter `l` is accepted but unused; Ln is
    // recomputed from the actual table data instead.
    public HuffTable(final DataInputStream d, final int l) {
        this.dis = d;
        // System.out.println("Laenge="+l);
        // Get table data from input stream
        this.Ln = 19 + getTableData();
        // System.out.println(Ln);
        Generate_size_table(); // Flow Chart C.1
        Generate_code_table(); // Flow Chart C.2
        Order_codes(); // Flow Chart C.3
        Decoder_tables(); // Generate decoder tables Flow Chart F.15
    }

    // Builds MINCODE/MAXCODE/VALPTR from BITS and HUFFCODE.
    // MAXCODE[i] == -1 marks a code length with no codes; otherwise VALPTR[i]
    // is the index into HUFFVAL of the first value whose code has length i.
    private void Decoder_tables() {
        // Decoder table generation Flow Chart F.15
        this.I = 0;
        this.J = 0;
        while (true) {
            if (++this.I > 16)
                return;
            if (this.BITS[this.I] == 0) {
                this.MAXCODE[this.I] = -1;
            } else {
                this.VALPTR[this.I] = this.J;
                this.MINCODE[this.I] = this.HUFFCODE[this.J];
                this.J = this.J + this.BITS[this.I] - 1;
                this.MAXCODE[this.I] = this.HUFFCODE[this.J++];
            }
        }
    }

    // Assigns canonical Huffman codes: codes of equal length are consecutive
    // integers; the code is left-shifted each time the length increases.
    // Terminates on the 0 sentinel written by Generate_size_table().
    private void Generate_code_table() {
        // Generate Code table Flow Chart C.2
        this.K = 0;
        this.CODE = 0;
        this.SI = this.HUFFSIZE[0];
        while (true) {
            this.HUFFCODE[this.K++] = this.CODE++;
            if (this.HUFFSIZE[this.K] == this.SI) {
                continue;
            }
            if (this.HUFFSIZE[this.K] == 0) {
                break;
            }
            while (true) {
                this.CODE <<= 1;
                this.SI++;
                if (this.HUFFSIZE[this.K] == this.SI) {
                    break;
                }
            }
        }
    }

    // Expands BITS into HUFFSIZE: for each length I (1..16) emits BITS[I]
    // entries with value I, then terminates the list with a 0 sentinel.
    // LASTK ends up as the total number of codes.
    private void Generate_size_table() {
        // Generate HUFFSIZE table Flow Chart C.1
        this.K = 0;
        this.I = 1;
        this.J = 1;
        while (true) {
            if (this.J > this.BITS[this.I]) {
                this.J = 1;
                this.I++;
                if (this.I > 16) {
                    break;
                }
            } else {
                this.HUFFSIZE[this.K++] = this.I;
                this.J++;
            }
        }
        this.HUFFSIZE[this.K] = 0;
        this.LASTK = this.K;
    }

    // Reads one unsigned byte from the stream; returns -1 on I/O error
    // (callers do not distinguish error from data — pre-existing behavior).
    private int getByte() {
        try {
            return this.dis.readUnsignedByte();
        } catch (final IOException e) {
            return -1;
        }
    }

    // IO MethodS
    public int[] getHUFFVAL() {
        return this.HUFFVAL;
    }

    // Returns the total table length (19 header bytes + value count).
    public int getLen() {
        return this.Ln;
    }

    public int[] getMAXCODE() {
        return this.MAXCODE;
    }

    public int[] getMINCODE() {
        return this.MINCODE;
    }

    // Reads the 16-entry BITS list then the HUFFVAL values from the stream;
    // returns the number of values read (sum of BITS).
    private int getTableData() {
        // Get BITS list
        int count = 0;
        for (int x = 1; x < 17; x++) {
            this.BITS[x] = getByte();
            count += this.BITS[x];
        }
        // Read in HUFFVAL
        for (int x = 0; x < count; x++) {
            // System.out.println(Ln);
            this.HUFFVAL[x] = getByte();
        }
        return count;
    }

    // Note: the file header advertises this as getVALPRT() — that is a typo
    // in the header comment; the actual accessor is getVALPTR().
    public int[] getVALPTR() {
        return this.VALPTR;
    }

    // Fills the encoder tables: for each value HUFFVAL[K], stores its code and
    // code size indexed by the value itself (EHUFCO/EHUFSI).
    private void Order_codes() {
        // Order Codes Flow Chart C.3
        this.K = 0;
        while (true) {
            this.I = this.HUFFVAL[this.K];
            this.EHUFCO[this.I] = this.HUFFCODE[this.K];
            this.EHUFSI[this.I] = this.HUFFSIZE[this.K++];
            if (this.K >= this.LASTK) {
                break;
            }
        }
    }
}
|
#!/bin/bash
# Launch the SkyWalking UI application.
# Expects the environment to provide:
#   JAVA_OPTS                   — generic JVM options
#   SKYWALKING_COLLECTOR_OPTS   — collector-specific JVM options
#   SKYWALKING_CLASSPATH        — classpath containing the UI jars
java ${JAVA_OPTS} ${SKYWALKING_COLLECTOR_OPTS} -classpath ${SKYWALKING_CLASSPATH} org.skywalking.apm.ui.ApplicationStartUp
|
<reponame>akashgp09/opencollective-api
import { GraphQLList } from 'graphql';
import models from '../../../models';
import { Forbidden, ValidationFailed } from '../../errors';
import { AccountReferenceInput, fetchAccountWithReference } from '../input/AccountReferenceInput';
import { MemberInvitation } from '../object/MemberInvitation';
/**
 * GraphQL query returning the pending member invitations for a collective
 * and/or a member account. Requires the caller to be an admin of at least
 * one of the referenced accounts.
 */
const MemberInvitationsQuery = {
  type: new GraphQLList(MemberInvitation),
  description: '[AUTHENTICATED] Returns the pending invitations',
  args: {
    memberAccount: {
      type: AccountReferenceInput,
      description: 'Reference to an account of member',
    },
    account: {
      type: AccountReferenceInput,
      description: 'Reference to the Collective account',
    },
  },
  async resolve(collective, args, { remoteUser }) {
    if (!remoteUser) {
      throw new Forbidden('Only collective admins can see pending invitations');
    }
    if (!(args.account || args.memberAccount)) {
      throw new ValidationFailed('You must provide a reference either for collective or member collective');
    }
    let { memberAccount, account } = args;
    if (account) {
      account = await fetchAccountWithReference(account, { throwIfMissing: true });
    }
    if (memberAccount) {
      memberAccount = await fetchAccountWithReference(memberAccount, { throwIfMissing: true });
    }
    // Must be an admin to see pending invitations
    const isAdminOfAccount = account && remoteUser.isAdminOfCollective(account);
    const isAdminOfMemberAccount = memberAccount && remoteUser.isAdminOfCollective(memberAccount);
    // If not admin of account or member account throw forbidden.
    // BUG FIX: the error was previously constructed without `throw`, so
    // non-admins fell through and received results.
    if (!(isAdminOfAccount || isAdminOfMemberAccount)) {
      throw new Forbidden('Only collective admins can see pending invitations');
    }
    // BUG FIX: the filter previously read `args.CollectiveId` /
    // `args.MemberCollectiveId`, which do not exist in `args` (only
    // `account` / `memberAccount` do), so the WHERE clause was always empty.
    const where: Record<string, unknown> = {};
    if (account) {
      where.CollectiveId = account.id;
    }
    if (memberAccount) {
      where.MemberCollectiveId = memberAccount.id;
    }
    return models.MemberInvitation.findAll({
      where,
      include: [
        { association: 'collective', required: true, attributes: [] },
        { association: 'memberCollective', required: true, attributes: [] },
      ],
    });
  },
};
export default MemberInvitationsQuery;
|
#!/bin/bash
# --------------------------------------------------------------------------
# OpenMS -- Open-Source Mass Spectrometry
# --------------------------------------------------------------------------
# Copyright The OpenMS Team -- Eberhard Karls University Tuebingen,
# ETH Zurich, and Freie Universitaet Berlin 2002-2018.
#
# This software is released under a three-clause BSD license:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of any author or any participating institution
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
# For a full list of authors, refer to the file AUTHORS.
# --------------------------------------------------------------------------
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL ANY OF THE AUTHORS OR THE CONTRIBUTING
# INSTITUTIONS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# --------------------------------------------------------------------------
# $Maintainer: Stephan Aiche $
# $Authors: Stephan Aiche $
# --------------------------------------------------------------------------
# This script finds all the required qt libs for the OpenMS installation
# we assume to have 3 arguments
# 1. an executable linking against all shipped libraries
# 2. the path where the OpenMS libs where build
# 3. the path were the external libs should be placed
# Require exactly three arguments: executables dir, libs dir, target dir.
# BUG FIX: exit with a non-zero status on a usage error (was plain `exit`,
# i.e. status 0, which made failures invisible to callers).
if [ ! $# == 3 ]; then
  echo "Usage: $0 /path/to/executables /path/to/libs /target/path"
  exit 1
fi
# All files under the executable and library paths are candidates to scan.
# (Quoted so paths containing spaces do not break `find`; the word-split of
# $ldd_targets below still assumes individual file names have no spaces,
# which matches the original behavior.)
ldd_targets=$(find "$1" "$2" -type f)
target_path="$3"
globalSOs=()
for exe in $ldd_targets; do
  # Collect the resolved paths of every Qt shared library this file links to.
  currentSOs=$(ldd "$exe" | grep "libQt" | sed 's/.* => \(.*\) .*/\1/g')
  for so in ${currentSOs}; do
    globalSOs+=("$so")
  done
done
# make list unique
uniqSOs=$(echo "${globalSOs[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' ')
# Copy each unique Qt library into the target directory.
for so in ${uniqSOs[@]}; do
  cp "${so}" "${target_path}"
done
|
import fs from "fs";
import { IDevSettings } from "./IDevSettings";
/**
 * IDevSettings implementation backed by a local JSON settings file
 * (Azure-Functions-style: values are read from the file's `Values` section).
 */
export class FileSettings implements IDevSettings {
    // Raw values parsed from the settings file's `Values` object.
    private readonly localSettings: {
        patient_tests_database: string;
        mongo_connection_string: string;
        allow_self_signed_mongo_cert: string;
        audit_api_url: string;
        enable_audit_integration_tests: boolean;
        audit_auth_key: string;
    };
    // Collection names are fixed; only the database/connection details vary.
    public get patientCollection(): string { return "patients";}
    public get testCollection(): string { return "tests";}
    public get patientTestDatabase(): string { return this.localSettings.patient_tests_database;}
    public get mongoConnectionString(): string { return this.localSettings.mongo_connection_string;}
    // Stored as a string in the file; JSON.parse converts "true"/"false" to a
    // boolean. Defaults to true when the key is missing or empty.
    public get allowSelfSignedMongoCert(): boolean { return JSON.parse(this.localSettings.allow_self_signed_mongo_cert || "true");}
    // Returns undefined when no audit API URL is configured; throws (via the
    // URL constructor) if the configured value is not a valid URL.
    public get auditAPIUrl(): URL | undefined { return (this.localSettings.audit_api_url ? new URL(this.localSettings.audit_api_url) : undefined); }
    public get auditAuthKey(): string | undefined { return this.localSettings.audit_auth_key; }
    public get enableAuditIntegrationTests(): boolean { return this.localSettings.enable_audit_integration_tests; }
    // Reads the settings file synchronously at construction time; throws if
    // the file is missing or is not valid JSON with a `Values` section.
    public constructor(filePath = "local.settings.json") {
        const localSettingsContent = fs.readFileSync(filePath).toString();
        this.localSettings = JSON.parse(localSettingsContent).Values;
    }
}
|
<reponame>ksilo/LiuAlgoTrader<filename>liualgotrader/trading/gemini.py
import asyncio
import base64
import hashlib
import hmac
import json
import os
import queue
import ssl
import time
import traceback
from datetime import date, datetime, timedelta
from threading import Thread
from typing import Dict, List, Optional, Tuple
import pandas as pd
import requests
import websocket
from pytz import timezone
from liualgotrader.common import config
from liualgotrader.common.assets import get_asset_min_qty, round_asset
from liualgotrader.common.tlog import tlog
from liualgotrader.common.types import Order, QueueMapper, ThreadFlags, Trade
from liualgotrader.trading.base import Trader
utctz = timezone("UTC")
class GeminiTrader(Trader):
    """Trader implementation for the Gemini exchange (sandbox endpoints).

    REST requests are signed with the ``GEMINI_API_KEY`` /
    ``GEMINI_API_SECRET`` environment variables; order/trade updates are
    streamed from the ``/v1/order/events`` websocket and fanned out to the
    queues registered in the QueueMapper.
    """

    gemini_api_key: Optional[str] = os.getenv("GEMINI_API_KEY")
    gemini_api_secret: Optional[str] = os.getenv("GEMINI_API_SECRET")
    base_url = "https://api.sandbox.gemini.com"
    base_websocket = "wss://api.sandbox.gemini.com"
    # Last nonce sent, used to avoid sending the same millisecond nonce twice
    # (Gemini rejects duplicate nonces).
    last_nonce = None

    def __init__(self, qm: QueueMapper = None):
        self.running_task: Optional[Thread] = None
        self.hb_task: Optional[Thread] = None
        self.send_hb = True
        self.ws = None
        self.flags: Optional[ThreadFlags] = None
        super().__init__(qm)

    @classmethod
    def _generate_request_headers(cls, payload: Dict) -> Dict:
        """Sign `payload` and build the headers for a private REST call.

        Mutates `payload` by inserting a millisecond `nonce`. Raises
        AssertionError if the API credentials are not configured.
        """
        if not cls.gemini_api_secret or not cls.gemini_api_key:
            raise AssertionError(
                "both env variables GEMINI_API_KEY and GEMINI_API_SECRET must be set up"
            )
        t = datetime.now()
        payload_nonce = int(time.mktime(t.timetuple()) * 1000)
        if cls.last_nonce and cls.last_nonce == payload_nonce:
            payload_nonce += 1
        cls.last_nonce = payload_nonce
        payload["nonce"] = str(payload_nonce)
        encoded_payload = json.dumps(payload).encode()
        b64 = base64.b64encode(encoded_payload)
        signature = hmac.new(
            cls.gemini_api_secret.encode(), b64, hashlib.sha384
        ).hexdigest()
        return {
            "Content-Type": "text/plain",
            "Content-Length": "0",
            "X-GEMINI-APIKEY": cls.gemini_api_key,
            "X-GEMINI-PAYLOAD": b64,
            "X-GEMINI-SIGNATURE": signature,
            "Cache-Control": "no-cache",
        }

    def _generate_ws_headers(self, payload: Dict) -> Dict:
        """Sign `payload` and build the headers for the order-events websocket.

        Unlike the REST variant, the payload is sent decoded (str, not bytes).
        """
        if not self.gemini_api_secret or not self.gemini_api_key:
            raise AssertionError(
                "both env variables GEMINI_API_KEY and GEMINI_API_SECRET must be set up"
            )
        t = datetime.now()
        payload_nonce = str(int(time.mktime(t.timetuple()) * 1000))
        payload["nonce"] = payload_nonce
        encoded_payload = json.dumps(payload).encode()
        b64 = base64.b64encode(encoded_payload)
        signature = hmac.new(
            self.gemini_api_secret.encode(), b64, hashlib.sha384
        ).hexdigest()
        return {
            "X-GEMINI-APIKEY": self.gemini_api_key,
            "X-GEMINI-PAYLOAD": b64.decode(),
            "X-GEMINI-SIGNATURE": signature,
        }

    @classmethod
    def _get_order_event_type(cls, order_data: Dict) -> Order.EventType:
        """Map an order-status payload to canceled / fill / partial_fill."""
        return (
            Order.EventType.canceled
            if order_data["is_cancelled"] == True
            else Order.EventType.fill
            if order_data["remaining_amount"] == "0"
            else Order.EventType.partial_fill
        )

    @classmethod
    def _get_trade_event_type(cls, trade_data: Dict) -> Order.EventType:
        """Map a websocket trade message `type` to an Order.EventType.

        Note: "cancel_rejected" is mapped to canceled (pre-existing behavior).
        """
        return (
            Order.EventType.canceled
            if trade_data["type"] == "cancelled"
            else Order.EventType.rejected
            if trade_data["type"] == "rejected"
            else Order.EventType.canceled
            if trade_data["type"] == "cancel_rejected"
            else Order.EventType.fill
            if trade_data["remaining_amount"] == "0"
            else Order.EventType.partial_fill
        )

    @classmethod
    def _get_order_side(cls, order_data: Dict) -> Order.FillSide:
        return (
            Order.FillSide.buy
            if order_data["side"] == "buy"
            else Order.FillSide.sell
        )

    @classmethod
    def _order_from_dict(cls, order_data: Dict) -> Order:
        """Build an Order from a Gemini order-status REST payload."""
        trades = order_data.get("trades", [])
        trade_fees: float = 0.0 + sum(float(t["fee_amount"]) for t in trades)
        return Order(
            order_id=order_data["order_id"],
            symbol=order_data["symbol"].lower(),
            filled_qty=float(order_data["executed_amount"]),
            event=cls._get_order_event_type(order_data),
            price=float(order_data["price"]),
            side=cls._get_order_side(order_data),
            submitted_at=pd.Timestamp(
                ts_input=order_data["timestampms"], unit="ms", tz="UTC"
            ),
            avg_execution_price=float(order_data["avg_execution_price"]),
            remaining_amount=float(order_data["remaining_amount"]),
            trade_fees=trade_fees,
        )

    @classmethod
    def _trade_from_dict(cls, trade_dict: Dict) -> Trade:
        """Build a Trade from a websocket order-event message.

        Fill quantity/fee/liquidity are only present on "fill" events; other
        event types default to 0.0 / "".
        """
        tlog(f"GEMINI GOING TO SEND {trade_dict}")
        return Trade(
            order_id=trade_dict["order_id"],
            symbol=trade_dict["symbol"].lower(),
            event=cls._get_trade_event_type(trade_dict),
            filled_qty=float(trade_dict["fill"]["amount"])
            if "fill" in trade_dict
            else 0.0,
            trade_fee=float(
                trade_dict["fill"]["fee"] if "fill" in trade_dict else 0.0
            )
            if "fill" in trade_dict
            else 0.0,
            filled_avg_price=float(trade_dict["avg_execution_price"] or 0.0),
            liquidity=trade_dict["fill"]["liquidity"]
            if "fill" in trade_dict
            else "",
            updated_at=pd.Timestamp(
                ts_input=trade_dict["timestampms"], unit="ms", tz="UTC"
            ),
            side=Order.FillSide[trade_dict["side"]],
        )

    async def is_fractionable(self, symbol: str) -> bool:
        # Crypto quantities are always fractionable.
        return True

    def check_error(self, result: Dict):
        """Raise AssertionError if a Gemini response payload reports an error."""
        if result.get("result") == "error":
            raise AssertionError(
                f"[EXCEPTION] {result['reason']}:{result['message']}"
            )

    async def is_order_completed(
        self, order_id: str, external_order_id: Optional[str] = None
    ) -> Tuple[Order.EventType, float, float, float]:
        """Return (event, avg price, filled qty, fees) for an order id."""
        order = await self.get_order(order_id)
        return (
            order.event,
            order.avg_execution_price,
            order.filled_qty,
            order.trade_fees,
        )

    def get_market_schedule(
        self,
    ) -> Tuple[Optional[datetime], Optional[datetime]]:
        # Crypto markets trade around the clock: open = midnight, close =
        # 23:59:59 UTC of the current day.
        return (
            datetime.today().replace(
                hour=0, minute=0, second=0, microsecond=0, tzinfo=utctz
            ),
            datetime.today().replace(
                hour=23, minute=59, second=59, microsecond=0, tzinfo=utctz
            ),
        )

    def get_trading_days(
        self, start_date: date, end_date: date = date.today()
    ) -> pd.DataFrame:
        # Every calendar day is a trading day. NOTE(review): the
        # `date.today()` default is evaluated once at import time, so a
        # long-running process will keep the import-time date; callers that
        # care should pass end_date explicitly.
        return pd.DataFrame(
            index=pd.date_range(start=start_date, end=end_date)
        )

    def get_position(self, symbol: str) -> float:
        """Return the available balance for `symbol`, or 0.0 if not held."""
        symbol = symbol.lower()
        endpoint = "/v1/balances"
        url = self.base_url + endpoint
        payload = {
            "request": endpoint,
        }
        headers = self._generate_request_headers(payload)
        response = requests.post(url, data=None, headers=headers)
        if response.status_code == 200:
            for b in response.json():
                if b["currency"] == symbol:
                    return float(b["amount"])
            return 0.0
        raise AssertionError(
            f"HTTP ERROR {response.status_code} {response.text}"
        )

    async def get_order(
        self, order_id: str, client_order_id: Optional[str] = None
    ) -> Order:
        """Fetch an order (including trades) by exchange order id."""
        endpoint = "/v1/order/status"
        url = self.base_url + endpoint
        payload = {
            "request": endpoint,
            "order_id": order_id,
            "include_trades": True,
        }
        headers = self._generate_request_headers(payload)
        response = requests.post(url, data=None, headers=headers)
        if response.status_code == 200:
            order_data = response.json()
            self.check_error(order_data)
            return self._order_from_dict(order_data)
        raise AssertionError(
            f"HTTP ERROR {response.status_code} {response.text}"
        )

    def is_market_open_today(self) -> bool:
        # Crypto markets never close.
        return True

    def get_time_market_close(self) -> Optional[timedelta]:
        """Time remaining until the synthetic 23:59:59 UTC 'close'."""
        return datetime.today().replace(
            hour=23, minute=59, second=59, microsecond=0, tzinfo=utctz
        ) - datetime.now().replace(tzinfo=utctz)

    async def reconnect(self):
        await self.close()
        await self.run()

    @classmethod
    def heartbeat(cls, flags: ThreadFlags):
        """Thread body: POST /v1/heartbeat every 20s while flags.run is set."""
        tlog("GEMINI HEARTBEAT thread starting")
        while flags.run:
            tlog("GEMINI HEARTBEAT")
            endpoint = "/v1/heartbeat"
            url = cls.base_url + endpoint
            payload = {
                "request": endpoint,
            }
            headers = cls._generate_request_headers(payload)
            response = requests.post(url, data=None, headers=headers)
            if response.status_code != 200:
                raise AssertionError(
                    f"HEARTHBEAT HTTP ERROR {response.status_code} {response.text}"
                )
            time.sleep(20)
        tlog("GEMINI HEARTBEAT thread terminated")

    @classmethod
    def on_message(cls, ws, msgs):
        """Websocket callback: convert fill/cancel events to trade updates
        and fan them out to all registered queues."""
        msgs = json.loads(msgs)
        if type(msgs) != list:
            return
        for msg in msgs:
            if msg["type"] in [
                "fill",
                "cancel_rejected",
                "cancelled",
                "rejected",
            ]:
                trade = cls._trade_from_dict(msg)
                tlog(f"GEMINI TRADING UPDATE:{trade}")
                to_send = {
                    "EV": "trade_update",
                    "symbol": trade.symbol.lower(),
                    "trade": trade.__dict__,
                }
                try:
                    qs = cls.get_instance().queues
                    if qs:
                        for q in qs.get_allqueues():
                            q.put(to_send, timeout=1)
                except queue.Full as f:
                    # BUG FIX: this message previously referenced an undefined
                    # name `symbol`, raising NameError inside the handler and
                    # masking the queue.Full error.
                    tlog(
                        f"[EXCEPTION] process_message(): queue for {trade.symbol} is FULL:{f}, sleeping for 2 seconds and re-trying."
                    )
                    raise

    @classmethod
    def on_error(cls, ws, error):
        tlog(f"[ERROR] GeminiTrader {error}")

    @classmethod
    def on_close(cls, ws, close_status_code, close_msg):
        tlog(f"on_close(): status={close_status_code}, close_msg={close_msg}")

    async def run(self):
        """Start the websocket listener and heartbeat threads (idempotent)."""
        if not self.running_task:
            tlog("starting Gemini listener")
            endpoint = "/v1/order/events"
            payload = {"request": endpoint}
            headers = self._generate_ws_headers(payload)
            self.ws = websocket.WebSocketApp(
                f"{self.base_websocket}{endpoint}?eventTypeFilter=cancel_rejected&eventTypeFilter=cancelled&eventTypeFilter=rejected&eventTypeFilter=fill&eventTypeFilter=closed&heartbeat=true",
                on_message=self.on_message,
                on_error=self.on_error,
                on_close=self.on_close,
                header=headers,
            )
            self.running_task = Thread(
                target=self.ws.run_forever,
                args=(None, {"cert_reqs": ssl.CERT_NONE}),
            )
            self.flags = ThreadFlags(run=True)
            self.hb_task = Thread(target=self.heartbeat, args=(self.flags,))
            self.running_task.start()
            self.hb_task.start()
        return self.running_task

    async def close(self):
        """Stop the websocket and heartbeat threads and clear all state."""
        if self.running_task and self.running_task.is_alive():
            tlog(f"close task {self.running_task}")
            self.ws.keep_running = False
            self.flags.run = False
            self.running_task.join()
            self.hb_task.join()
            tlog("task terminated")
        self.ws = None
        self.running_task = None
        self.hb_task = None
        self.flags = None

    async def get_tradeable_symbols(self) -> List[str]:
        endpoint = "/v1/symbols"
        url = self.base_url + endpoint
        response = requests.get(url)
        if response.status_code == 200:
            return response.json()
        raise AssertionError(
            f"HTTP ERROR {response.status_code} {response.text}"
        )

    async def get_shortable_symbols(self) -> List[str]:
        # Shorting is not supported on Gemini.
        return []

    async def is_shortable(self, symbol) -> bool:
        return False

    async def cancel_order(self, order: Order) -> bool:
        # NOTE(review): despite the `-> bool` annotation this returns the
        # Gemini order-status dict (truthy); kept as-is because callers may
        # rely on the payload.
        endpoint = "/v1/order/cancel"
        url = self.base_url + endpoint
        payload = {"request": endpoint, "order_id": order.order_id}
        headers = self._generate_request_headers(payload)
        response = requests.post(url, data=None, headers=headers)
        if response.status_code == 200:
            order_status = response.json()
            self.check_error(order_status)
            return order_status
        raise AssertionError(
            f"HTTP ERROR {response.status_code} {response.text}"
        )

    async def submit_order(
        self,
        symbol: str,
        qty: float,
        side: str,
        order_type: str,
        time_in_force: str = None,
        limit_price: str = None,
        stop_price: str = None,
        client_order_id: str = None,
        extended_hours: bool = None,
        order_class: str = None,
        take_profit: dict = None,
        stop_loss: dict = None,
        trail_price: str = None,
        trail_percent: str = None,
        on_behalf_of: str = None,
    ) -> Order:
        """Submit a limit order ('exchange limit'); market orders are rejected.

        Raises AssertionError for market orders, sub-minimum quantities,
        Gemini-reported errors, and non-200 HTTP responses (the latter also
        shuts down the listener threads).
        """
        symbol = symbol.lower()
        if order_type == "market":
            raise AssertionError(
                "GEMINI does not support market orders, use limit orders"
            )
        if float(qty) < get_asset_min_qty(symbol):
            raise AssertionError(
                f"GEMINI requested quantity of {qty} is below minimum for {symbol}"
            )
        endpoint = "/v1/order/new"
        url = self.base_url + endpoint
        qty = round_asset(symbol, float(qty))
        payload = {
            "request": endpoint,
            "symbol": symbol,
            "amount": str(qty),
            # Fallback price for non-limit orders; market orders are rejected
            # above, so in practice limit_price is always used.
            "price": str(limit_price)
            if order_type == "limit"
            else str(60000.0 * qty),
            "side": side,
            "type": "exchange limit",
            "client_order_id": client_order_id,
            "options": ["immediate-or-cancel"]
            if order_type == "market"
            else [],
        }
        headers = self._generate_request_headers(payload)
        response = requests.post(url, data=None, headers=headers)
        if response.status_code == 200:
            new_order = response.json()
            self.check_error(new_order)
            return self._order_from_dict(new_order)
        if self.flags:
            self.flags.run = False
            await self.close()
        raise AssertionError(
            f"HTTP ERROR {response.status_code} {response.text}"
        )
|
import Console from '@/utils/Console'
// import I18n from "@/mixins/I18n";
// Vue mixin that migrates persisted store/localStorage state from older
// schema versions when the app starts.
export default {
  created () {
    // dark mode: migrate an old boolean preference to the new string enum.
    const dark = this.$store.getters['settings/dark']
    if (typeof dark === 'boolean') {
      // BUG FIX: previously read `this.dark` (undefined on the mixin), so a
      // saved boolean preference was always migrated to 'light'. Use the
      // local `dark` value instead.
      this.$store.commit('settings/switchDark', dark ? 'dark' : 'light')
    } else if (dark === null || dark === undefined) {
      this.$store.commit('settings/switchDark', 'system')
    }
    // new data: drop the pre-namespaced data keys and refetch.
    const oldDataKeys = ['items', 'limitations', 'stages', 'trends', 'zones', 'personalMatrix', 'globalMatrix']
    if (Object.keys(this.$store.state.data).some(key => ~oldDataKeys.indexOf(key))) {
      Console.info('StoreUpgrader', 'deleting old data structure')
      for (const key of oldDataKeys) {
        delete this.$store.state.data[key]
      }
      this.$store.dispatch('data/fetch', true)
    }
    // remove deprecated penguin-stats-cacheTTL (cacheUpdatedAt)
    localStorage.removeItem('penguin-stats-cacheTTL')
    // drop the removed excludedStages setting from persisted settings
    const settings = JSON.parse(localStorage.getItem('penguin-stats-settings'))
    if (settings && settings.settings && settings.settings.excludedStages) {
      delete settings.settings.excludedStages
      localStorage.setItem('penguin-stats-settings', JSON.stringify(settings))
    }
    // if (this.$store.getters["settings/language"] === "zh") {
    //   this.changeLocale("zh-CN")
    // }
  }
}
|
-- Average number of orders per calendar month, averaged across years.
-- BUG FIX: the original AVG(COUNT(order_id)) nests aggregate functions,
-- which is invalid SQL. Compute per-(year, month) counts in a derived
-- table first, then average them per month.
SELECT order_month, AVG(monthly_order_count) AS avg_monthly_orders
FROM (
    SELECT YEAR(order_date) AS order_year,
           MONTH(order_date) AS order_month,
           COUNT(order_id) AS monthly_order_count
    FROM orders
    GROUP BY YEAR(order_date), MONTH(order_date)
) AS monthly_counts
GROUP BY order_month;
|
package com.metaring.springbootappexample.service;
import java.util.concurrent.CompletableFuture;
import com.metaring.framework.broadcast.BroadcastFunctionalitiesManager;
import com.metaring.framework.broadcast.Event;
import com.metaring.framework.broadcast.SingleCallback;
/**
 * Broadcasts an incoming message to the "message" event channel via the
 * MetaRing broadcast framework. Pre/post-condition checks are no-ops.
 */
public class MessageFunctionalityImpl extends MessageFunctionality {

    // No validation is performed before the call. NOTE(review): `end` is not
    // declared here — presumably an already-completed CompletableFuture
    // inherited from the MessageFunctionality base class; confirm against the
    // framework.
    @Override
    protected CompletableFuture<Void> preConditionCheck(String input) throws Exception {
        return end;
    }

    // Wraps the input in an Event named "message" and hands it to the
    // broadcast manager through a single-shot callback.
    @Override
    protected CompletableFuture<Void> call(String input) throws Exception {
        return BroadcastFunctionalitiesManager.contact(SingleCallback.create("message", (Event.create("message", dataRepresentationFromObject(input)))));
    }

    // No validation is performed after the call (see preConditionCheck note).
    @Override
    protected CompletableFuture<Void> postConditionCheck(String input) throws Exception {
        return end;
    }
}
|
<reponame>mohamedkhairy/dhis2-android-sdk
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.event.search;
import androidx.lifecycle.LiveData;
import androidx.paging.DataSource;
import androidx.paging.PagedList;
import org.hisp.dhis.android.core.arch.repositories.collection.ReadOnlyWithUidCollectionRepository;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.EqFilterConnector;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.ListFilterConnector;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.PeriodFilterConnector;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.ScopedFilterConnectorFactory;
import org.hisp.dhis.android.core.arch.repositories.object.ReadOnlyObjectRepository;
import org.hisp.dhis.android.core.arch.repositories.scope.RepositoryScope;
import org.hisp.dhis.android.core.common.AssignedUserMode;
import org.hisp.dhis.android.core.common.DateFilterPeriod;
import org.hisp.dhis.android.core.common.DateFilterPeriodHelper;
import org.hisp.dhis.android.core.common.State;
import org.hisp.dhis.android.core.event.Event;
import org.hisp.dhis.android.core.event.EventCollectionRepository;
import org.hisp.dhis.android.core.event.EventFilter;
import org.hisp.dhis.android.core.event.EventFilterCollectionRepository;
import org.hisp.dhis.android.core.event.EventObjectRepository;
import org.hisp.dhis.android.core.event.EventStatus;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitMode;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import dagger.Reusable;
import io.reactivex.Single;
/**
 * Read-only repository for querying Events by building up an immutable
 * {@link EventQueryRepositoryScope}: each by*/order* method returns a NEW
 * repository instance with the extended scope (the builder-style pattern
 * used by the connector factory below).
 */
@Reusable
public final class EventQueryCollectionRepository implements ReadOnlyWithUidCollectionRepository<Event> {

    private final EventCollectionRepositoryAdapter eventCollectionRepositoryAdapter;
    // Factory that wraps scope transformations into filter connectors; each
    // transformation produces a fresh repository bound to the new scope.
    private final ScopedFilterConnectorFactory<EventQueryCollectionRepository,
            EventQueryRepositoryScope> connectorFactory;
    private final EventFilterCollectionRepository eventFilterRepository;
    // Immutable query scope this repository instance is bound to.
    private final EventQueryRepositoryScope scope;

    @Inject
    EventQueryCollectionRepository(final EventCollectionRepositoryAdapter eventCollectionRepositoryAdapter,
                                   final EventFilterCollectionRepository eventFilterRepository,
                                   final EventQueryRepositoryScope scope) {
        this.eventCollectionRepositoryAdapter = eventCollectionRepositoryAdapter;
        this.eventFilterRepository = eventFilterRepository;
        this.scope = scope;
        this.connectorFactory = new ScopedFilterConnectorFactory<>(s ->
                new EventQueryCollectionRepository(eventCollectionRepositoryAdapter, eventFilterRepository, s));
    }
    // Restrict the query to an explicit list of event UIDs.
    public ListFilterConnector<EventQueryCollectionRepository, String> byUid() {
        return connectorFactory.listConnector(uidList -> scope.toBuilder().events(uidList).build());
    }

    // Date-period filters merge the incoming period with any period already
    // present in the scope (see DateFilterPeriodHelper.mergeDateFilterPeriods).
    public PeriodFilterConnector<EventQueryCollectionRepository> byLastUpdated() {
        return connectorFactory.periodConnector(filter -> {
            DateFilterPeriod merged = DateFilterPeriodHelper.mergeDateFilterPeriods(scope.lastUpdatedDate(), filter);
            return scope.toBuilder().lastUpdatedDate(merged).build();
        });
    }

    /**
     * Filter by event status.
     * <br><b>IMPORTANT:</b> this filter accepts a list of event status, but only the first one will be used for
     * the online query because the web API does not support querying by multiple status.
     *
     * @return Repository connector
     */
    public ListFilterConnector<EventQueryCollectionRepository, EventStatus> byStatus() {
        return connectorFactory.listConnector(status -> scope.toBuilder().eventStatus(status).build());
    }

    public EqFilterConnector<EventQueryCollectionRepository, String> byProgram() {
        return connectorFactory.eqConnector(program -> scope.toBuilder().program(program).build());
    }

    public EqFilterConnector<EventQueryCollectionRepository, String> byProgramStage() {
        return connectorFactory.eqConnector(programStage -> scope.toBuilder().programStage(programStage).build());
    }

    /**
     * Filter by Event organisation units.
     * <br><b>IMPORTANT:</b> this filter accepts a list of organisation units, but only the first one will be used for
     * the online query because the web API does not support querying by multiple organisation units.
     *
     * @return Repository connector
     */
    public ListFilterConnector<EventQueryCollectionRepository, String> byOrgUnits() {
        return connectorFactory.listConnector(orgunits -> scope.toBuilder().orgUnits(orgunits).build());
    }

    public EqFilterConnector<EventQueryCollectionRepository, OrganisationUnitMode> byOrgUnitMode() {
        return connectorFactory.eqConnector(mode -> scope.toBuilder().orgUnitMode(mode).build());
    }

    public PeriodFilterConnector<EventQueryCollectionRepository> byEventDate() {
        return connectorFactory.periodConnector(filter -> {
            DateFilterPeriod merged = DateFilterPeriodHelper.mergeDateFilterPeriods(scope.eventDate(), filter);
            return scope.toBuilder().eventDate(merged).build();
        });
    }

    public PeriodFilterConnector<EventQueryCollectionRepository> byCompleteDate() {
        return connectorFactory.periodConnector(filter -> {
            DateFilterPeriod merged = DateFilterPeriodHelper.mergeDateFilterPeriods(scope.completedDate(), filter);
            return scope.toBuilder().completedDate(merged).build();
        });
    }

    public PeriodFilterConnector<EventQueryCollectionRepository> byDueDate() {
        return connectorFactory.periodConnector(filter -> {
            DateFilterPeriod merged = DateFilterPeriodHelper.mergeDateFilterPeriods(scope.dueDate(), filter);
            return scope.toBuilder().dueDate(merged).build();
        });
    }

    public EqFilterConnector<EventQueryCollectionRepository, Boolean> byIncludeDeleted() {
        return connectorFactory.eqConnector(includeDeleted -> scope.toBuilder().includeDeleted(includeDeleted).build());
    }

    public EqFilterConnector<EventQueryCollectionRepository, String> byTrackedEntityInstance() {
        return connectorFactory.eqConnector(tei -> scope.toBuilder().trackedEntityInstance(tei).build());
    }

    public EqFilterConnector<EventQueryCollectionRepository, AssignedUserMode> byAssignedUser() {
        return connectorFactory.eqConnector(userMode -> scope.toBuilder().assignedUserMode(userMode).build());
    }

    // Apply a stored EventFilter (fetched by uid, with its data filters) to
    // the current scope.
    public EqFilterConnector<EventQueryCollectionRepository, String> byEventFilter() {
        return connectorFactory.eqConnector(id -> {
            EventFilter filter = eventFilterRepository.withEventDataFilters().uid(id).blockingGet();
            return EventQueryRepositoryScopeHelper.addEventFilter(scope, filter);
        });
    }
/**
* Filter by sync status.
* <br><b>IMPORTANT:</b> using this filter forces <b>offlineOnly</b> mode.
*
* @return Repository connector
*/
public ListFilterConnector<EventQueryCollectionRepository, State> byStates() {
return connectorFactory.listConnector(states -> scope.toBuilder().states(states).build());
}
public ListFilterConnector<EventQueryCollectionRepository, String> byAttributeOptionCombo() {
return connectorFactory.listConnector(aoc -> scope.toBuilder().attributeOptionCombos(aoc).build());
}
/** Order results by event date. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByEventDate() {
    return orderConnector(EventQueryScopeOrderColumn.EVENT_DATE);
}
/** Order results by due date. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByDueDate() {
    return orderConnector(EventQueryScopeOrderColumn.DUE_DATE);
}
/** Order results by completion date. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByCompleteDate() {
    return orderConnector(EventQueryScopeOrderColumn.COMPLETED_DATE);
}
/** Order results by creation timestamp. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByCreated() {
    return orderConnector(EventQueryScopeOrderColumn.CREATED);
}
/** Order results by last-updated timestamp. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByLastUpdated() {
    return orderConnector(EventQueryScopeOrderColumn.LAST_UPDATED);
}
/** Order results by organisation unit name. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByOrganisationUnitName() {
    return orderConnector(EventQueryScopeOrderColumn.ORGUNIT_NAME);
}
/** Order results by the timeline column. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByTimeline() {
    return orderConnector(EventQueryScopeOrderColumn.TIMELINE);
}
/** Order results by the value of the given data element uid. */
public EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderByDataElement(String dataElement) {
    return orderConnector(EventQueryScopeOrderColumn.dataElement(dataElement));
}
/**
 * Builds a connector that appends an order-by item for the given column
 * (with the chosen direction) to the scope's existing order list.
 */
private EqFilterConnector<EventQueryCollectionRepository,
        RepositoryScope.OrderByDirection> orderConnector(
        EventQueryScopeOrderColumn column) {
    return connectorFactory.eqConnector(direction -> {
        EventQueryScopeOrderByItem item = EventQueryScopeOrderByItem.builder()
                .column(column)
                .direction(direction)
                .build();
        // Copy first: the scope's current order list must not be mutated.
        List<EventQueryScopeOrderByItem> updatedOrder = new ArrayList<>(scope.order());
        updatedOrder.add(item);
        return scope.toBuilder().order(updatedOrder).build();
    });
}
/** Returns the query scope currently backing this repository. */
public EventQueryRepositoryScope getScope() {
    return scope;
}
// The read methods below all delegate to an EventCollectionRepository
// materialized from the current query scope; see getEventCollectionRepository().
@Override
public EventObjectRepository uid(String uid) {
    return getEventCollectionRepository().uid(uid);
}
@Override
public Single<List<String>> getUids() {
    return getEventCollectionRepository().getUids();
}
@Override
public List<String> blockingGetUids() {
    return getEventCollectionRepository().blockingGetUids();
}
@Override
public Single<List<Event>> get() {
    return getEventCollectionRepository().get();
}
@Override
public List<Event> blockingGet() {
    return getEventCollectionRepository().blockingGet();
}
@Override
public LiveData<PagedList<Event>> getPaged(int pageSize) {
    return getEventCollectionRepository().getPaged(pageSize);
}
// Exposes the paging DataSource of the underlying collection repository.
public DataSource<Event, Event> getDataSource() {
    return getEventCollectionRepository().getDataSource();
}
@Override
public Single<Integer> count() {
    return getEventCollectionRepository().count();
}
@Override
public int blockingCount() {
    return getEventCollectionRepository().blockingCount();
}
@Override
public Single<Boolean> isEmpty() {
    return getEventCollectionRepository().isEmpty();
}
@Override
public boolean blockingIsEmpty() {
    return getEventCollectionRepository().blockingIsEmpty();
}
@Override
public ReadOnlyObjectRepository<Event> one() {
    return getEventCollectionRepository().one();
}
/**
 * Materializes an EventCollectionRepository from the current scope,
 * configured to also load tracked entity data values.
 */
private EventCollectionRepository getEventCollectionRepository() {
    EventCollectionRepository scopedRepository =
            eventCollectionRepositoryAdapter.getCollectionRepository(scope);
    return scopedRepository.withTrackedEntityDataValues();
}
}
|
package dbis.piglet.backends.flink
import dbis.piglet.backends.BackendConf
import com.typesafe.config.ConfigFactory
import dbis.piglet.backends.PigletBackend
/**
* @author hage
*/
class FlinkConf extends BackendConf {

  // Default configuration, loaded from resources/application.conf.
  private val config = ConfigFactory.load()

  /** Name of this backend, as configured under backends.flink.name. */
  override def name: String = config.getString("backends.flink.name")

  /** Runner implementing the PigletBackend interface for Flink. */
  override def runnerClass: PigletBackend = new FlinkRun

  /** Path to this backend's code-generation template file. */
  override def templateFile: String = config.getString("backends.flink.template")

  /** Default connector name for this backend. */
  override def defaultConnector: String = config.getString("backends.flink.connector")

  /** This backend does not run in raw mode. */
  override def raw: Boolean = false
}
|
<gh_stars>100-1000
const config = require('@bedrockio/config');
const mongoose = require('mongoose');
const { logger } = require('@bedrockio/instrumentation');
// Use the global Promise implementation for all Mongoose async operations.
mongoose.Promise = Promise;
// Connection options passed to mongoose.connect(); kept in one place so they
// can be exported and reused (e.g. by tests or scripts).
const flags = {
  // The underlying MongoDB driver has deprecated their current connection string parser.
  useNewUrlParser: true,
  // Make Mongoose's default index build use createIndex() instead of ensureIndex()
  // to avoid deprecation warnings from the MongoDB driver
  useCreateIndex: true,
  // To opt in to using the MongoDB driver's new connection management engine.
  // https://mongoosejs.com/docs/deprecations.html#useunifiedtopology
  useUnifiedTopology: true,
  // Set to false to make findOneAndUpdate() and findOneAndRemove()
  // use native findOneAndUpdate() rather than findAndModify()
  useFindAndModify: false,
};
exports.flags = flags;
exports.initialize = async function initialize() {
await mongoose.connect(config.get('MONGO_URI'), flags);
if (config.get('MONGO_DEBUG', 'boolean')) {
mongoose.set('debug', true);
}
const db = mongoose.connection;
db.on('error', () => {
logger.error('connection error');
});
return db;
};
|
class Vector:
    """A minimal 2-D vector with the operations needed for projection tests."""

    def __init__(self, x, y):
        # Cartesian components.
        self.x = x
        self.y = y

    def subtract(self, other):
        """Return the component-wise difference ``self - other`` as a new Vector."""
        dx = self.x - other.x
        dy = self.y - other.y
        return Vector(dx, dy)

    def dot_product(self, other):
        """Return the scalar (dot) product of the two vectors."""
        return self.x * other.x + self.y * other.y

    def norm_square(self):
        """Return the squared Euclidean length (dot product with itself)."""
        return self.dot_product(self)
def vector_projection_overlap(p0, p1, p2, p3):
    """Return True if segment (p2, p3) overlaps segment (p0, p1).

    First checks whether any relevant endpoint projects onto the span of
    (p0, p1) (projection parameter in [0, 1]); otherwise falls back to a
    standard orientation-based segment intersection test.

    Fix: guard against a degenerate base segment (p0 == p1), which previously
    raised ZeroDivisionError when dividing by the squared length.
    """
    def on_segment(p, q, r):
        # Collinear point q lies within the axis-aligned bounding box of p and r.
        if (q.x <= max(p.x, r.x) and q.x >= min(p.x, r.x) and
                q.y <= max(p.y, r.y) and q.y >= min(p.y, r.y)):
            return True
        return False

    def orientation(p, q, r):
        # 0 = collinear, 1 = clockwise, 2 = counterclockwise.
        val = (q.y - p.y) * (r.x - q.x) - (q.x - p.x) * (r.y - q.y)
        if val == 0:
            return 0
        return 1 if val > 0 else 2

    def do_segments_overlap(p0, p1, p2, p3):
        # Classic orientation test, including the collinear/touching cases.
        o1 = orientation(p0, p1, p2)
        o2 = orientation(p0, p1, p3)
        o3 = orientation(p2, p3, p0)
        o4 = orientation(p2, p3, p1)
        if (o1 != o2 and o3 != o4):
            return True
        if (o1 == 0 and on_segment(p0, p2, p1)):
            return True
        if (o2 == 0 and on_segment(p0, p3, p1)):
            return True
        if (o3 == 0 and on_segment(p2, p0, p3)):
            return True
        if (o4 == 0 and on_segment(p2, p1, p3)):
            return True
        return False

    v = p1.subtract(p0)
    n_square = v.norm_square()
    if n_square == 0:
        # Degenerate base segment (p0 == p1): the projection parameter is
        # undefined, so rely solely on the orientation-based test (which
        # handles the point-on-segment case via its collinear branches).
        return do_segments_overlap(p0, p1, p2, p3)
    v0 = p2.subtract(p0)
    v1 = p3.subtract(p0)
    v2 = p2.subtract(p1)
    v3 = p3.subtract(p1)
    proj_0 = v0.dot_product(v) / n_square
    proj_1 = v1.dot_product(v) / n_square
    proj_2 = v2.dot_product(v) / n_square
    proj_3 = v3.dot_product(v) / n_square
    if (proj_0 >= 0 and proj_0 <= 1) or (proj_1 >= 0 and proj_1 <= 1) or (proj_2 >= 0 and proj_2 <= 1) or (proj_3 >= 0 and proj_3 <= 1):
        return True
    return do_segments_overlap(p0, p1, p2, p3)
|
// Print the location triple; double-quoted interpolation produces the same
// output as the original string concatenation.
echo "City, State, Country: $city, $state, $country";
|
<filename>app/request_models/change_effective_date_request.rb
# Builds the attribute hash for a change-of-effective-date request
# from a CSV-derived row.
class ChangeEffectiveDateRequest
  # csv_request: hash-like row with :policy_id, :effective_date,
  # :current_user and :transmit (the string 'yes' enables transmission).
  def self.from_csv_request(csv_request)
    transmit_requested = csv_request[:transmit] == 'yes'

    {
      policy_id: csv_request[:policy_id],
      effective_date: csv_request[:effective_date],
      current_user: csv_request[:current_user],
      transmit: transmit_requested
    }
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.