text stringlengths 1 1.05M |
|---|
package com.example.appausa.actializaciones;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.fragment.app.Fragment;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonArrayRequest;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import com.example.appausa.main.Login;
import com.example.appausa.R;
import com.example.appausa.model.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.Map;
public class CambiarEstadoCuenta extends Fragment {
public static String user;
private EditText e1;
private TextView t1, t2, t3, t4, e2;
private Spinner s2;
private Button b1, b2;
private View l;
private RequestQueue rq;
private String estados = "", userm = "";
private boolean selfdata = false;
private ArrayAdapter<String> a;
private Log log = new Log();
public static CambiarEstadoCuenta newInstance(String u) {
CambiarEstadoCuenta frag = new CambiarEstadoCuenta();
Bundle args = new Bundle();
user = u;
return frag;
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
l = inflater.inflate(R.layout.cambiarestadocuenta, container, false);
((AppCompatActivity)getActivity()).getSupportActionBar().setTitle("Cambiar Estado Cuenta");
e1 = l.findViewById(R.id.docusernamem);
e2 = l.findViewById(R.id.usernammee);
t1 = l.findViewById(R.id.docuser);
t2 = l.findViewById(R.id.ultimolog);
t3 = l.findViewById(R.id.perfilactuals);
t4 = l.findViewById(R.id.estadoact);
s2 = l.findViewById(R.id.estadomod);
b1 = l.findViewById(R.id.buscarcuenta);
b2 = l.findViewById(R.id.bcuentamod);
b2.setEnabled(false);
s2.setSelection(0);
b1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String id = e1.getText().toString();
if (!id.isEmpty()) {
buscarDatosCuenta("http://192.168.0.13:80/appausamovil/obtenerDatosCuenta.php?id=" + id);
} else {
Toast.makeText(l.getContext(), "Campo de busqueda vacio", Toast.LENGTH_SHORT).show();
}
}
});
s2.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
if (pos != 0) {
estados = String.valueOf(parent.getItemAtPosition(pos));
} else {
estados = t4.getText().toString();
}
}
public void onNothingSelected(AdapterView<?> parent) {
}
});
b2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (!e2.getText().toString().isEmpty() && !estados.isEmpty()){
if (selfdata){
AlertDialog.Builder a = new AlertDialog.Builder(l.getContext());
a.setMessage("Va a cambiar el estado de su cuenta a "+estados+", por seguridad, le pediremos que vuelva a iniciar sesión. De aceptar para continuar con la modificación o cancelar para detener el proceso");
a.setNegativeButton("Cancelar", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
clear();
dialog.cancel();
}
});
a.setPositiveButton("Aceptar", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
cambiarEstadoCuenta("http://192.168.0.13:80/appausamovil/cambiarEstadoCuenta.php", e2.getText().toString(),estados);
Intent intent = new Intent(l.getContext(), Login.class);
intent.putExtra("mensaje","Sesión finalizada");
startActivity(intent);
}
});
AlertDialog r = a.create();
r.setTitle("Alerta");
r.setIcon(R.mipmap.iconoapp);
r.show();
} else {
cambiarEstadoCuenta("http://192.168.0.13:80/appausamovil/cambiarEstadoCuenta.php", e2.getText().toString(),estados);
}
} else {
Toast.makeText(l.getContext(), "Revice que todos los campos esten llenos correctamente", Toast.LENGTH_SHORT).show();
}
}
});
return l;
}
private void cambiarEstadoCuenta(String url, final String usuario, final String estados) {
StringRequest sr = new StringRequest(Request.Method.POST, url, new Response.Listener<String>() {
@Override
public void onResponse(String response) {
Toast.makeText(l.getContext(), "Los datos se modificaron exitosamente", Toast.LENGTH_LONG).show();
log.insertlog(user, "Cuenta Modificado Exitoso", "Se modificaron los datos de la cuenta " + usuario, "http://192.168.0.13:80/appausamovil/insertarlog.php",l.getContext());
clear();
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Toast.makeText(l.getContext(), "Problemas en la modificación", Toast.LENGTH_LONG).show();
log.insertlog(user, "Cuenta Modificado Fallido", "Se trataron de modificar los datos de la cuenta " + userm+" sin exito", "http://192.168.0.13:80/appausamovil/insertarlog.php",l.getContext());
}
}) {
@Override
protected Map<String, String> getParams() throws AuthFailureError {
Map<String,String> param = new HashMap<String,String>();
param.put("cusername",usuario);
param.put("cestado",estados);
return param;
}
};
RequestQueue rq = Volley.newRequestQueue(l.getContext());
rq.add(sr);
}
private void clear() {
e1.setText("");
e2.setText("");
t1.setText("");
t2.setText("");
t3.setText("");
t4.setText("");
}
private void buscarDatosCuenta(String url) {
JsonArrayRequest js = new JsonArrayRequest(url, new Response.Listener<JSONArray>() {
@Override
public void onResponse(JSONArray response) {
JSONObject jo = null;
for (int i = 0; i < response.length(); i++) {
try {
jo = response.getJSONObject(i);
String username = jo.getString("username"),
estado = jo.getString("estado"),
doucmento = jo.getString("usuario"),
perfil = jo.getString("perfil"),
ul = jo.optString("ultimologin");
if (user.equals(username)){
selfdata = true;
}
e2.setText(username);
t1.setText(doucmento);
t2.setText(ul);
t3.setText(perfil);
t4.setText(estado);
b2.setEnabled(true);
userm = username;
} catch (JSONException je) {
Toast.makeText(l.getContext(), je.getMessage(), Toast.LENGTH_LONG).show();
}
}
log.insertlog(user, "Cuenta Buscado", "Se buscaron los datos de la cuenta " + userm, "http://192.168.0.13:80/appausamovil/insertarlog.php",l.getContext());
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Toast.makeText(l.getContext(), "El correo o la contraseña no coinciden", Toast.LENGTH_LONG).show();
}
});
rq = Volley.newRequestQueue(l.getContext());
rq.add(js);
}
}
|
<filename>src/main/java/vectorwing/farmersdelight/common/block/RiceBaleBlock.java<gh_stars>0
package vectorwing.farmersdelight.common.block;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.world.damagesource.DamageSource;
import net.minecraft.world.entity.Entity;
import net.minecraft.world.item.context.BlockPlaceContext;
import net.minecraft.world.level.BlockGetter;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Mirror;
import net.minecraft.world.level.block.Rotation;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.block.state.StateDefinition;
import net.minecraft.world.level.block.state.properties.BlockStateProperties;
import net.minecraft.world.level.block.state.properties.DirectionProperty;
@SuppressWarnings("deprecation")
public class RiceBaleBlock extends Block
{
public static final DirectionProperty FACING = BlockStateProperties.FACING;
public RiceBaleBlock(Properties properties) {
super(properties);
this.registerDefaultState(this.getStateDefinition().any().setValue(FACING, Direction.UP));
}
@Override
public void fallOn(Level worldIn, BlockState state, BlockPos pos, Entity entityIn, float fallDistance) {
entityIn.causeFallDamage(fallDistance, 0.2F, DamageSource.FALL);
}
@Override
public BlockState getStateForPlacement(BlockPlaceContext context) {
return this.defaultBlockState().setValue(FACING, context.getClickedFace());
}
@Override
public int getFireSpreadSpeed(BlockState state, BlockGetter world, BlockPos pos, Direction face) {
return 60;
}
@Override
public int getFlammability(BlockState state, BlockGetter world, BlockPos pos, Direction face) {
return 20;
}
@Override
protected void createBlockStateDefinition(StateDefinition.Builder<Block, BlockState> builder) {
builder.add(FACING);
}
@Override
public BlockState rotate(BlockState state, Rotation rot) {
return state.setValue(FACING, rot.rotate(state.getValue(FACING)));
}
@Override
public BlockState mirror(BlockState state, Mirror mirrorIn) {
return state.rotate(mirrorIn.getRotation(state.getValue(FACING)));
}
}
|
import BaseValidator from '../validators/-base';
/**
 * Validator decorator that inverts the result of a wrapped validator.
 */
export class NotValidator extends BaseValidator {
  constructor(validator) {
    super();
    // The validator whose verdict is negated.
    this.validator = validator;
  }

  /**
   * @param {*} value - value to validate.
   * @returns {boolean} true iff the wrapped validator rejects the value.
   */
  check(value) {
    const innerVerdict = this.validator.check(value);
    return !innerVerdict;
  }
}
/**
 * Convenience factory: wraps a validator so that its result is negated.
 *
 * @param {BaseValidator} validator - validator to invert.
 * @returns {NotValidator} negating wrapper around the given validator.
 */
export default function not(validator) {
  const negated = new NotValidator(validator);
  return negated;
}
|
def print_pattern(N):
    """Print a left-aligned star triangle of N rows.

    Row k (1-based) contains k asterisks, each followed by a single
    space, and ends with a newline.
    """
    for row_length in range(1, N + 1):
        print("* " * row_length)


print_pattern(5)
<filename>quadrupedal/inverse_dynamics.py
import numpy as np
import sympy as sy
import scipy.linalg as la
#3DoF Inverse Dynamics
class InverseDynamics:
def __init__(self,jointVectorList,linkInertiaList, comVectorList,linkMassList, positionGain=900,velocityGain=5):
#set to matrix
self.b1=jointVectorList[0]
self.b2=jointVectorList[1]
self.b3=jointVectorList[2]
self.b4=jointVectorList[3]
self.m = linkMassList
self.S = comVectorList
self.Hhat=[]
self._g = np.matrix([0.,0.,-9.8,0.]).T
for i in range(3):
self.Hhat.append(self.HhatMatrix(Ihat=linkInertiaList[i],S=comVectorList[i],m=linkMassList[i]))
self.Kp = positionGain
self.Kv = velocityGain
def forward(self,jointPosition,jointAcceleration, jointVelocity):
M = self.M(jointPosition)
h = self.h(jointPosition, jointVelocity)
g = self.g(theta)
return M*jointAcceleration + h + g
def solve(self,acceleration_ref,position,position_ref,velocity_ref, jointVelocity,jointPosition,jacobian,diffJacobian): #全部リスト
M = self.M(jointPosition)
h = self.h(jointPosition, jointVelocity)
g = self.g(jointPosition)
x = np.matrix(position).T
xd = np.matrix(position_ref).T
vd = np.matrix(velocity_ref).T
a_ref = np.matrix(acceleration_ref).T
dtheta = np.matrix(jointVelocity).T
v= jacobian*dtheta
#u = a_ref + Kv*(vd-v)+Kp*(xd-x)
#tau = M * la.inv(jacobian)*(u-diffJacobian*dtheta)+h+g
feedForwardTau = M * la.inv(jacobian)*(a_ref-diffJacobian*dtheta)+h+g
feedBackTau = jacobian.T*(self.Kv*(vd-v)+self.Kp*(xd-x))
tau = feedBackTau + feedForwardTau
return [tau[0,0],tau[1,0],tau[2,0]]
def M(self,theta):
M = np.matrix(np.zeros((3,3)))
q1 = theta[0]
q2 = theta[1]
q3 = theta[2]
dT01dq1 = np.matrix(np.zeros((4,4)))
dT01dq1[1,1] = -np.sin(q1)
dT01dq1[1,2] = -np.cos(q1)
dT01dq1[2,1] = np.cos(q1)
dT01dq1[2,2] = -np.sin(q1)
dT02dq1 = np.matrix(np.zeros((4,4)))
dT02dq1[1,0] = np.cos(q1)*np.sin(q2)
dT02dq1[1,1] = -np.sin(q1)
dT02dq1[1,2] = -np.cos(q1)*np.cos(q2)
dT02dq1[1,3] = -self.b2[2]*np.cos(q1)-self.b2[1]*np.sin(q1)
dT02dq1[2,0] = np.sin(q1)*np.sin(q2)
dT02dq1[2,1] = np.cos(q1)
dT02dq1[2,2] = -np.cos(q2)*np.sin(q1)
dT02dq1[2,3] = self.b2[1]*np.cos(q1)-self.b2[2]*np.sin(q1)
dT02dq2 = np.matrix(np.zeros((4,4)))
dT02dq2[0,0] = -np.sin(q2)
dT02dq2[0,2] = np.cos(q2)
dT02dq2[1,0] = np.cos(q2)*np.sin(q1)
dT02dq2[1,2] = np.sin(q1)*np.sin(q2)
dT02dq2[2,0] = -np.cos(q1)*np.cos(q2)
dT02dq2[2,2] = -np.cos(q1)*np.sin(q2)
dT03dq1 = np.matrix(np.zeros((4,4)))
dT03dq1[1,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq1[1,1] = -np.sin(q1)
dT03dq1[1,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq1[1,3] = self.b3[0]*np.cos(q1)*np.sin(q2) - self.b2[1]*np.sin(q1) - self.b3[1]*np.sin(q1) - self.b3[2]*np.cos(q1)*np.cos(q2) - self.b2[2]*np.cos(q1)
dT03dq1[2,0] = np.sin(q2+q3)*np.sin(q1)
dT03dq1[2,1] = np.cos(q1)
dT03dq1[2,2] = -np.cos(q2+q3)*np.sin(q1)
dT03dq1[2,3] = self.b2[1]*np.cos(q1) - self.b3[1]*np.cos(q1) - self.b2[2]*np.sin(q1) - self.b3[2]*np.cos(q2)*np.sin(q1) + self.b3[0]*np.sin(q1)*np.sin(q2)
dT03dq2 = np.matrix(np.zeros((4,4)))
dT03dq2[0,0] = -np.sin(q2+q3)
dT03dq2[0,2] = np.cos(q2+q3)
dT03dq2[0,3] = self.b3[2]*np.cos(q2)-self.b3[0]*np.sin(q2)
dT03dq2[1,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq2[1,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq2[1,3] = np.sin(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq2[2,0] = -np.cos(q2+q3)*np.cos(q1)
dT03dq2[2,2] = -np.sin(q2+q3)*np.cos(q1)
dT03dq2[2,3] = -np.cos(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq3 = np.matrix(np.zeros((4,4)))
dT03dq3[0,0] = -np.sin(q2+q3)
dT03dq3[0,2] = np.cos(q2+q3)
dT03dq3[1,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq3[1,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq3[2,0] = -np.cos(q2+q3)*np.cos(q1)
dT03dq3[2,2] = -np.sin(q2+q3)*np.cos(q1)
M[0,0] = np.trace(dT01dq1 * self.Hhat[0] * dT01dq1.T) + np.trace(dT02dq1 * self.Hhat[1] * dT02dq1.T) + np.trace(dT03dq1 * self.Hhat[2] * dT03dq1.T) #k=1 i=1,j=1
M[1,1] = np.trace(dT02dq2 * self.Hhat[1] * dT02dq2.T) + np.trace(dT03dq2 * self.Hhat[2] * dT03dq2.T)
M[2,2] = np.trace(dT03dq3 * self.Hhat[2] * dT03dq3.T)
M[0,1] = M[1,0] = np.trace(dT02dq2 * self.Hhat[1] * dT02dq1.T) + np.trace(dT03dq2 * self.Hhat[2] * dT03dq1.T)
M[0,2] = M[2,0] = np.trace(dT03dq3 * self.Hhat[2] * dT03dq1.T)
M[1,2] = M[2,1] = np.trace(dT03dq3 * self.Hhat[2] * dT03dq2.T)
return M
def h(self,theta,dtheta):
h = np.array([0.,0.,0.])
q1 = theta[0]
q2 = theta[1]
q3 = theta[2]
dT01dq1 = np.matrix(np.zeros((4,4)))
dT01dq1[1,1] = -np.sin(q1)
dT01dq1[1,2] = -np.cos(q1)
dT01dq1[2,1] = np.cos(q1)
dT01dq1[2,2] = -np.sin(q1)
dT02dq1 = np.matrix(np.zeros((4,4)))
dT02dq1[1,0] = np.cos(q1)*np.sin(q2)
dT02dq1[1,1] = -np.sin(q1)
dT02dq1[1,2] = -np.cos(q1)*np.cos(q2)
dT02dq1[1,3] = -self.b2[2]*np.cos(q1)-self.b2[1]*np.sin(q1)
dT02dq1[2,0] = np.sin(q1)*np.sin(q2)
dT02dq1[2,1] = np.cos(q1)
dT02dq1[2,2] = -np.cos(q2)*np.sin(q1)
dT02dq1[2,3] = self.b2[1]*np.cos(q1)-self.b2[2]*np.sin(q1)
dT02dq2 = np.matrix(np.zeros((4,4)))
dT02dq2[0,0] = -np.sin(q2)
dT02dq2[0,2] = np.cos(q2)
dT02dq2[1,0] = np.cos(q2)*np.sin(q1)
dT02dq2[1,2] = np.sin(q1)*np.sin(q2)
dT02dq2[2,0] = -np.cos(q1)*np.cos(q2)
dT02dq2[2,2] = -np.cos(q1)*np.sin(q2)
dT03dq1 = np.matrix(np.zeros((4,4)))
dT03dq1[1,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq1[1,1] = -np.sin(q1)
dT03dq1[1,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq1[1,3] = self.b3[0]*np.cos(q1)*np.sin(q2) - self.b2[1]*np.sin(q1) - self.b3[1]*np.sin(q1) - self.b3[2]*np.cos(q1)*np.cos(q2) - self.b2[2]*np.cos(q1)
dT03dq1[2,0] = np.sin(q2+q3)*np.sin(q1)
dT03dq1[2,1] = np.cos(q1)
dT03dq1[2,2] = -np.cos(q2+q3)*np.sin(q1)
dT03dq1[2,3] = self.b2[1]*np.cos(q1) - self.b3[1]*np.cos(q1) - self.b2[2]*np.sin(q1) - self.b3[2]*np.cos(q2)*np.sin(q1) + self.b3[0]*np.sin(q1)*np.sin(q2)
dT03dq2 = np.matrix(np.zeros((4,4)))
dT03dq2[0,0] = -np.sin(q2+q3)
dT03dq2[0,2] = np.cos(q2+q3)
dT03dq2[0,3] = self.b3[2]*np.cos(q2)-self.b3[0]*np.sin(q2)
dT03dq2[1,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq2[1,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq2[1,3] = np.sin(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq2[2,0] = -np.cos(q2+q3)*np.cos(q1)
dT03dq2[2,2] = -np.sin(q2+q3)*np.cos(q1)
dT03dq2[2,3] = -np.cos(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq3 = np.matrix(np.zeros((4,4)))
dT03dq3[0,0] = -np.sin(q2+q3)
dT03dq3[0,2] = np.cos(q2+q3)
dT03dq3[1,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq3[1,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq3[2,0] = -np.cos(q2+q3)*np.cos(q1)
dT03dq3[2,2] = -np.sin(q2+q3)*np.cos(q1)
dT01dq1q1 = np.matrix(np.zeros((4,4)))
dT01dq1[1,1] = -np.cos(q1)
dT01dq1[1,2] = np.sin(q1)
dT01dq1[2,1] = -np.sin(q1)
dT01dq1[2,2] = -np.cos(q1)
dT02dq1q1 = np.matrix(np.zeros((4,4)))
dT02dq1[1,0] = -np.sin(q1)*np.sin(q2)
dT02dq1[1,1] = -np.cos(q1)
dT02dq1[1,2] = np.cos(q2)*np.sin(q1)
dT02dq1[1,3] = self.b2[2]*np.sin(q1)-self.b2[1]*np.cos(q1)
dT02dq1[2,0] = np.cos(q1)*np.sin(q2)
dT02dq1[2,1] = -np.sin(q1)
dT02dq1[2,2] = -np.cos(q1)*np.cos(q2)
dT02dq1[2,3] = -self.b2[2]*np.cos(q1)-self.b2[1]*np.sin(q1)
dT02dq1q2 = dt02dq2q1 =np.matrix(np.zeros((4,4)))
dT02dq1q2[1,0] = np.cos(q1)*np.cos(q2)
dT02dq1q2[1,2] = np.cos(q1)*np.sin(q2)
dT02dq1q2[2,0] = np.cos(q2)*np.sin(q1)
dT02dq1q2[2,2] = np.sin(q1)*np.sin(q2)
dT02dq2q2 = np.matrix(np.zeros((4,4)))
dT02dq2q2[0,0] = -np.cos(q2)
dT02dq2q2[0,2] = -np.sin(q2)
dT02dq2q2[1,0] = -np.sin(q1)*np.sin(q2)
dT02dq2q2[1,2] = np.cos(q2)*np.sin(q1)
dT02dq2q2[2,0] = np.cos(q1)*np.sin(q2)
dT02dq2q2[2,2] = -np.cos(q1)*np.cos(q2)
dT03dq1q1 = np.matrix(np.zeros((4,4)))
dT03dq1[1,0] = -np.sin(q2+q3)*np.sin(q1)
dT03dq1[1,1] = -np.cos(q1)
dT03dq1[1,2] = np.cos(q2+q3)*np.sin(q1)
dT03dq1[1,3] = self.b2[2]*np.sin(q1) - self.b3[1]*np.cos(q1) - self.b2[1]*np.cos(q1) + self.b3[2]*np.cos(q2)*np.sin(q1) - self.b3[0]*np.sin(q1)*np.sin(q2)
dT03dq1[2,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq1[2,1] = -np.sin(q1)
dT03dq1[2,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq1[2,3] = self.b3[0]*np.cos(q1)*np.sin(q2) - self.b2[1]*np.sin(q1) - self.b3[1]*np.sin(q1) - self.b3[2]*np.cos(q1)*np.cos(q2) - self.b2[2]*np.cos(q1)
dT03dq1q2 = dt03dq2q1 = np.matrix(np.zeros((4,4)))
dT03dq1q2[1,0] = np.cos(q2+q3)*np.cos(q1)
dT03dq1q2[1,2] = np.sin(q2+q3)*np.cos(q1)
dT03dq1q2[1,3] = np.cos(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq1q2[2,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq1q2[2,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq1q2[2,3] = np.sin(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq2q2 = np.matrix(np.zeros((4,4)))
dT03dq2q2[0,0] = -np.cos(q2+q3)
dT03dq2q2[0,2] = -np.sin(q2+q3)
dT03dq2q2[0,3] = -self.b3[0]*np.cos(q2) - self.b3[2]*np.sin(q2)
dT03dq2q2[1,0] = -np.sin(q2+q3)*np.sin(q1)
dT03dq2q2[1,2] = np.cos(q2+q3)*np.sin(q1)
dT03dq2q2[1,3] = np.sin(q1)*(self.b3[2]*np.cos(q2)-self.b3[0]*np.sin(q2))
dT03dq2q2[2,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq2q2[2,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq2q2[2,3] = -np.cos(q1)*(self.b3[2]*np.cos(q2)-self.b3[0]*np.sin(q2))
dT03dq3q2 = dt03dq2q3 = np.matrix(np.zeros((4,4)))
dT03dq3q2[0,0] = -np.cos(q2+q3) #un
dT03dq3q2[0,2] = -np.sin(q2+q3)
dT03dq3q2[1,0] = -np.sin(q2+q3)*np.sin(q1)
dT03dq3q2[1,2] = np.cos(q2+q3)*np.sin(q1)
dT03dq3q2[2,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq3q2[2,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq3q3 = np.matrix(np.zeros((4,4)))
dT03dq3q3[0,0] = -np.cos(q2+q3)
dT03dq3q3[0,2] = -np.sin(q2+q3)
dT03dq3q3[1,0] = -np.sin(q2+q3)*np.sin(q1)
dT03dq3q3[1,2] = np.cos(q2+q3)*np.sin(q1)
dT03dq3q3[2,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq3q3[2,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq3q1 = dt03dq1q3 = np.matrix(np.zeros((4,4)))
dT03dq3q3[1,0] = np.cos(q2+q3)*np.cos(q1)
dT03dq3q3[1,2] = np.sin(q2+q3)*np.cos(q1)
dT03dq3q3[2,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq3q3[2,2] = np.sin(q2+q3)*np.sin(q1)
h[0] = np.trace(dT01dq1q1 * self.Hhat[0] * dT01dq1.T)*dtheta[0]*dtheta[0] \
+np.trace(dT02dq1q1 * self.Hhat[1] * dT02dq1.T)*dtheta[0]*dtheta[0] \
+np.trace(dT03dq1q1 * self.Hhat[2] * dT03dq1.T)*dtheta[0]*dtheta[0] \
+np.trace(dT02dq1q2 * self.Hhat[1] * dT02dq1.T) * dtheta[0]*dtheta[1] \
+np.trace(dT03dq1q2 * self.Hhat[2] * dT03dq1.T) * dtheta[0]*dtheta[1] \
+np.trace(dt03dq1q3 * self.Hhat[2] * dT03dq1.T) * dtheta[0]*dtheta[2] \
+np.trace(dt02dq2q1 * self.Hhat[1] * dT02dq1.T)*dtheta[1]*dtheta[0] \
+np.trace(dt03dq2q1 * self.Hhat[2] * dT03dq1.T)*dtheta[1]*dtheta[0] \
+np.trace(dT02dq2q2 * self.Hhat[1] * dT02dq1.T)*dtheta[1]*dtheta[1] \
+np.trace(dT03dq2q2 * self.Hhat[2] * dT03dq1.T)*dtheta[1]*dtheta[1] \
+np.trace(dt03dq2q3 * self.Hhat[2] * dT03dq1.T) * dtheta[1]*dtheta[2] \
+np.trace(dT03dq3q1 * self.Hhat[2] * dT03dq1.T)*dtheta[2]*dtheta[0] \
+np.trace(dT03dq3q2 * self.Hhat[2] * dT03dq1.T)*dtheta[2]*dtheta[1] \
+np.trace(dT03dq3q3 * self.Hhat[2] * dT03dq1.T)*dtheta[2]*dtheta[2]
h[1] = +np.trace(dT02dq1q1 * self.Hhat[1] * dT02dq2.T)*dtheta[0]*dtheta[0] \
+np.trace(dT03dq1q1 * self.Hhat[2] * dT03dq2.T)*dtheta[0]*dtheta[0] \
+np.trace(dT02dq1q2 * self.Hhat[1] * dT02dq2.T) * dtheta[0]*dtheta[1] \
+np.trace(dT03dq1q2 * self.Hhat[2] * dT03dq2.T) * dtheta[0]*dtheta[1] \
+np.trace(dt03dq1q3 * self.Hhat[2] * dT03dq2.T) * dtheta[0]*dtheta[2] \
+np.trace(dt02dq2q1 * self.Hhat[1] * dT02dq2.T)*dtheta[1]*dtheta[0] \
+np.trace(dt03dq2q1 * self.Hhat[2] * dT03dq2.T)*dtheta[1]*dtheta[0] \
+np.trace(dT02dq2q2 * self.Hhat[1] * dT02dq2.T)*dtheta[1]*dtheta[1] \
+np.trace(dT03dq2q2 * self.Hhat[2] * dT03dq2.T)*dtheta[1]*dtheta[1] \
+np.trace(dt03dq2q3 * self.Hhat[2] * dT03dq2.T) * dtheta[1]*dtheta[2] \
+np.trace(dT03dq3q1 * self.Hhat[2] * dT03dq2.T)*dtheta[2]*dtheta[0] \
+np.trace(dT03dq3q2 * self.Hhat[2] * dT03dq2.T)*dtheta[2]*dtheta[1] \
+np.trace(dT03dq3q3 * self.Hhat[2] * dT03dq2.T)*dtheta[2]*dtheta[2]
h[2] = np.trace(dT03dq1q1 * self.Hhat[2] * dT03dq3.T)*dtheta[0]*dtheta[0] \
+np.trace(dT03dq1q2 * self.Hhat[2] * dT03dq3.T) * dtheta[0]*dtheta[1] \
+np.trace(dt03dq1q3 * self.Hhat[2] * dT03dq3.T) * dtheta[0]*dtheta[2] \
+np.trace(dt03dq2q1 * self.Hhat[2] * dT03dq3.T)*dtheta[1]*dtheta[0] \
+np.trace(dT03dq2q2 * self.Hhat[2] * dT03dq3.T)*dtheta[1]*dtheta[1] \
+np.trace(dt03dq2q3 * self.Hhat[2] * dT03dq3.T) * dtheta[1]*dtheta[2] \
+np.trace(dT03dq3q1 * self.Hhat[2] * dT03dq3.T)*dtheta[2]*dtheta[0] \
+np.trace(dT03dq3q2 * self.Hhat[2] * dT03dq3.T)*dtheta[2]*dtheta[1] \
+np.trace(dT03dq3q3 * self.Hhat[2] * dT03dq3.T)*dtheta[2]*dtheta[2]
return np.matrix(h).T
def g(self,theta):
q1 = theta[0]
q2 = theta[1]
q3 = theta[2]
dT01dq1 = np.matrix(np.zeros((4,4)))
dT01dq1[1,1] = -np.sin(q1)
dT01dq1[1,2] = -np.cos(q1)
dT01dq1[2,1] = np.cos(q1)
dT01dq1[2,2] = -np.sin(q1)
dT02dq1 = np.matrix(np.zeros((4,4)))
dT02dq1[1,0] = np.cos(q1)*np.sin(q2)
dT02dq1[1,1] = -np.sin(q1)
dT02dq1[1,2] = -np.cos(q1)*np.cos(q2)
dT02dq1[1,3] = -self.b2[2]*np.cos(q1)-self.b2[1]*np.sin(q1)
dT02dq1[2,0] = np.sin(q1)*np.sin(q2)
dT02dq1[2,1] = np.cos(q1)
dT02dq1[2,2] = -np.cos(q2)*np.sin(q1)
dT02dq1[2,3] = self.b2[1]*np.cos(q1)-self.b2[2]*np.sin(q1)
dT02dq2 = np.matrix(np.zeros((4,4)))
dT02dq2[0,0] = -np.sin(q2)
dT02dq2[0,2] = np.cos(q2)
dT02dq2[1,0] = np.cos(q2)*np.sin(q1)
dT02dq2[1,2] = np.sin(q1)*np.sin(q2)
dT02dq2[2,0] = -np.cos(q1)*np.cos(q2)
dT02dq2[2,2] = -np.cos(q1)*np.sin(q2)
dT03dq1 = np.matrix(np.zeros((4,4)))
dT03dq1[1,0] = np.sin(q2+q3)*np.cos(q1)
dT03dq1[1,1] = -np.sin(q1)
dT03dq1[1,2] = -np.cos(q2+q3)*np.cos(q1)
dT03dq1[1,3] = self.b3[0]*np.cos(q1)*np.sin(q2) - self.b2[1]*np.sin(q1) - self.b3[1]*np.sin(q1) - self.b3[2]*np.cos(q1)*np.cos(q2) - self.b2[2]*np.cos(q1)
dT03dq1[2,0] = np.sin(q2+q3)*np.sin(q1)
dT03dq1[2,1] = np.cos(q1)
dT03dq1[2,2] = -np.cos(q2+q3)*np.sin(q1)
dT03dq1[2,3] = self.b2[1]*np.cos(q1) - self.b3[1]*np.cos(q1) - self.b2[2]*np.sin(q1) - self.b3[2]*np.cos(q2)*np.sin(q1) + self.b3[0]*np.sin(q1)*np.sin(q2)
dT03dq2 = np.matrix(np.zeros((4,4)))
dT03dq2[0,0] = -np.sin(q2+q3)
dT03dq2[0,2] = np.cos(q2+q3)
dT03dq2[0,3] = self.b3[2]*np.cos(q2)-self.b3[0]*np.sin(q2)
dT03dq2[1,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq2[1,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq2[1,3] = np.sin(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq2[2,0] = -np.cos(q2+q3)*np.cos(q1)
dT03dq2[2,2] = -np.sin(q2+q3)*np.cos(q1)
dT03dq2[2,3] = -np.cos(q1)*(self.b3[0]*np.cos(q2)+self.b3[2]*np.sin(q2))
dT03dq3 = np.matrix(np.zeros((4,4)))
dT03dq3[0,0] = -np.sin(q2+q3)
dT03dq3[0,2] = np.cos(q2+q3)
dT03dq3[1,0] = np.cos(q2+q3)*np.sin(q1)
dT03dq3[1,2] = np.sin(q2+q3)*np.sin(q1)
dT03dq3[2,0] = -np.cos(q2+q3)*np.cos(q1)
dT03dq3[2,2] = -np.sin(q2+q3)*np.cos(q1)
g1 = -self.m[0] * self._g.T * dT01dq1 * np.matrix(np.hstack((self.S[0],1.))).T -self.m[1] * self._g.T * dT02dq1 * np.matrix(np.hstack((self.S[1],1.))).T -self.m[2] * self._g.T * dT03dq1 * np.matrix(np.hstack((self.S[2],1.))).T
g2 = -self.m[1] * self._g.T * dT02dq2 * np.matrix(np.hstack((self.S[1],1.))).T -self.m[2] * self._g.T * dT03dq2 * np.matrix(np.hstack((self.S[2],1.))).T
g3 = -self.m[2] * self._g.T * dT03dq3 * np.matrix(np.hstack((self.S[2],1.))).T
return np.matrix(np.array([g1,g2,g3])).T
def HhatMatrix(self,Ihat, S, m):
Hhat = np.matrix(np.zeros((4,4)))
Hhat[0:3,0:3] = -Ihat
Hhat[0,0] = (-Ihat[0,0]+Ihat[1,1]+Ihat[2,2])/2
Hhat[1,1] = (Ihat[0,0]-Ihat[1,1]+Ihat[2,2])/2
Hhat[2,2] = (Ihat[0,0]+Ihat[1,1]-Ihat[2,2])/2
Hhat[0,3] = Hhat[3,0] = m * S[0]
Hhat[1,3] = Hhat[3,1] = m * S[1]
Hhat[2,3] = Hhat[3,2] = m * S[2]
Hhat[3,3] = m
return Hhat
|
#!/usr/bin/env bash
# set -x
function set_power() {
iface=$1 # interface
co=$2 # country
pwr=$3 # power
iw reg set $co # BO/GY
sleep 3
iwconfig $iface txpower $pwr
sleep 3
iw reg get
}
function run_mdk() {
iface=$1
cnt=$2
bssid=$3
essid=$4
pckts=$5
for ((i=0; i<$cnt; i++))
do
iw $iface interface add mon$i type monitor
macchanger -r mon$i
xterm -hold -e "mdk3 mon$i x 0 -t $bssid -n $essid -s $pckts" &
done
}
main() {
[ -z $1 ] && { echo "No param: wlan interface..."; exit 1; }
iface=$1
count=3
country="GY"
power=30
bssid="DE:AD:BE:EF:D0:11"
essid="the-neighbour-you-hate"
pckts=50000
set_power $iface $country $power
run_mdk $iface $count $bssid $essid $pckts
}
main
|
#!/bin/sh
# Runs the AsteroidDetectorTester over the training set, writing trained data
# to OUTPUT_FOLDER, then copies the known-NEO ground-truth files next to it.

TRAIN_DATA_DIR=../../data/train/
TRAIN_FILE=../../data/traindata-5.txt
DETECTOR_BIN=../algo/dist/detector
OUTPUT_FOLDER=../../data/trained-data/
TRAINER_JAR=./target/tester-1.0-SNAPSHOT.jar

# Bug fix: quote every expansion so paths containing spaces don't split.
java -cp "$TRAINER_JAR" gov.nasa.asteroid.tester.AsteroidDetectorTester \
    -folder "$TRAIN_DATA_DIR/" \
    -train "$TRAIN_FILE" \
    -exec "$DETECTOR_BIN --mode train" \
    -output "$OUTPUT_FOLDER/"

# Copy the known NEOs files alongside the trained output.
cp "$TRAIN_DATA_DIR"/*.txt "$OUTPUT_FOLDER/"
|
#! /bin/sh
# koolshare shellinabox service control script; $ACTION selects the operation.
export KSROOT=/jffs/koolshare

# Ensure the init.d symlink exists so the service is started on boot.
if [ ! -L "$KSROOT/init.d/S99Shellinabox.sh" ]; then
    ln -sf "$KSROOT/shellinabox/shellinabox_start.sh" "$KSROOT/init.d/S99Shellinabox.sh"
fi

case "$ACTION" in
stop)
    killall shellinaboxd
    ;;
start|*)
    # Restart semantics (duplicated start/default branches merged): kill any
    # running daemon, then relaunch it in the background (-b).
    killall shellinaboxd
    "$KSROOT/shellinabox/shellinaboxd" --css=/jffs/koolshare/shellinabox/white-on-black.css -b
    ;;
esac
|
"""
OSC input/output utility.
Server implementation to capture OSC.
Function implementation to replay OSC.
"""
import time
import logging
import numpy as np
import liblo
from .base import TimeSeriesBundle
from .collector import Collector
# Module-level logging: emit to stderr with timestamp, logger name and level.
log = logging.getLogger(__name__)
formatter = logging.Formatter(
    '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
log.addHandler(handler)
# *********************************SERVER CLASS**********************************
class CaptureOSCServer(liblo.ServerThread):
    """OSC server to capture OSC data and store.

    Runs as a liblo server thread; every incoming message is timestamped
    with liblo.time() and accumulated in a Collector.

    Args:
        port (int): Port to listen on.

    Keyword Args:
        debug (bool): Set to True to log captures.
    """
    def __init__(self, port, debug=False):
        super().__init__(port)
        # Debug mode logs every captured message; otherwise only lifecycle info.
        if debug:
            log.setLevel(logging.DEBUG)
        else:
            log.setLevel(logging.INFO)
        log.info("Server started on port %d", port)
        self.collector = Collector()
        # Flipped on the first capture; lets callers poll for activity.
        self.first_message_captured = False

    @liblo.make_method(None, None)  # (None, None) => match every path and typespec
    def catch_all_callback(self, path, args):
        """Executes when capturing any OSC message.

        Uses Collector to format and store.

        Args:
            path (string): OSC path captured on.
            args (list): list of values to capture.
        """
        log.debug("%s, %s", path, str(args))
        if not self.first_message_captured:
            self.first_message_captured = True
        self.collector.collect_samples(path, liblo.time(), args)

    def get_time_series(self):
        """Converts captured OSC messages to time series bundle format.

        Returns:
            (TimeSeriesBundle): All outputs are TimeSeriesBundles.
        """
        log.info("Converting Data...")
        return self.collector.to_time_series_bundle()
def replay_osc(data, address='127.0.0.1', port=9000, loop=False, debug=False):
    """Replay TimeSeriesBundle data via OSC by waiting.

    Calculates the wait time between consecutive OSC messages and sleeps
    before sending each batch.

    Args:
        data (TimeSeriesBundle): TimeSeriesBundle data.

    Keyword Args:
        address (str): IP address to send to.
        port (int): port to send to.
        loop (bool): set to True to repeat infinitely.
        debug (bool): set to True to print messages.

    Raises:
        TypeError: if data is not a TimeSeriesBundle.
        liblo.AddressError: if the target address cannot be resolved.
    """
    # Bug fix: this used `raise "Data type not supported."`, which raises a
    # confusing "exceptions must derive from BaseException" TypeError in
    # Python 3. Validate the argument first, before opening the OSC address.
    if not isinstance(data, TimeSeriesBundle):
        raise TypeError("Data type not supported.")
    try:
        output_thread = liblo.Address(address, port)
        print(f'Replaying OSC to {address}:{port}')
    except liblo.AddressError as err:
        # Bug fix: previously only printed and fell through, then crashed with
        # a NameError on the unbound `output_thread`. Re-raise instead.
        print(str(err))
        raise
    ts_dict = data.to_timestamped_dict()
    timestamps = sorted(ts_dict.keys())
    # append=0 keeps len(wait_times) == len(timestamps); the final delta is
    # negative (0 - last timestamp) and is rejected by the guard below.
    wait_times = np.diff(timestamps, append=0)
    run_once = True  # guarantee the replay runs at least once
    while run_once or loop:  # if loop selected, repeat the full replay
        run_once = False
        overhead_time_start = time.time()
        for i, timestamp in enumerate(timestamps):
            msg_list = list()
            for info in ts_dict[timestamp]:
                name = info[0]
                samples = info[1]
                if debug:
                    print("%s:%d -- %f %s %s" %
                          (address, port, timestamp, name, str(samples)))
                msg_list.append(liblo.Message(name, *samples))
            liblo.send(output_thread, *msg_list)
            overhead_time = time.time() - overhead_time_start
            # Avoids discontinuous jumps in timestamps: only sleep for sane,
            # positive deltas shorter than 50 s.
            if wait_times[i] > overhead_time and wait_times[i] < 50:
                # NOTE: the measured wait was still too slow in practice,
                # hence the manual 0.8 scale-down; see the corresponding
                # jupyter notebook.
                time.sleep((wait_times[i] - overhead_time) * 0.8)
            # Start measuring overhead immediately after the last send.
            overhead_time_start = time.time()
def replay_osc_time_bundle(data, address='127.0.0.1', port=9000, loop=False, debug=False):
    """Replay TimeSeriesBundle data via OSC time-tagged bundles. Doesn't wait.

    Sends all packets at once with the appropriate offsets added to each OSC
    time tag. Servers implementing the newer OSC protocol hold each bundle
    until its tagged time before dispatching the callbacks.

    Args:
        data (TimeSeriesBundle): TimeSeriesBundle data.

    Keyword Args:
        address (str): IP address to send to.
        port (int): port to send to.
        loop (bool): set to True to repeat infinitely.
        debug (bool): set to True to print messages.

    Raises:
        TypeError: if data is not a TimeSeriesBundle.
        liblo.AddressError: if the target address cannot be resolved.
    """
    # Bug fix: was `raise "Data type not supported."` (string raise, itself a
    # TypeError in Python 3). Validate before opening the OSC address.
    if not isinstance(data, TimeSeriesBundle):
        raise TypeError("Data type not supported.")
    try:
        output_thread = liblo.Address(address, port)
        print(f'Replaying OSC to {address}:{port}')
    except liblo.AddressError as err:
        # Bug fix: re-raise instead of continuing with `output_thread` unbound.
        print(str(err))
        raise
    ts_dict = data.to_timestamped_dict()
    timestamps = sorted(ts_dict.keys())
    if not timestamps:
        return  # nothing to replay (previously raised IndexError on [0])
    start_timestamp = timestamps[0]
    run_once = True  # guarantee the replay runs at least once
    while run_once or loop:  # if loop selected, repeat the full replay
        run_once = False
        bundle_list = list()
        start_time = liblo.time()
        for timestamp in timestamps:
            msg_list = list()
            for info in ts_dict[timestamp]:
                name = info[0]
                samples = info[1]
                if debug:
                    print("%s:%d -- %f %s %s" %
                          (address, port, timestamp, name, str(samples)))
                msg_list.append(liblo.Message(name, *samples))
            # Tag each bundle with "now + offset from the first timestamp".
            wait_time = timestamp - start_timestamp
            bundle = liblo.Bundle(start_time + wait_time, *msg_list)
            bundle_list.append(bundle)
        liblo.send(output_thread, *bundle_list)
|
package dev.webfx.kit.mapper.peers.javafxgraphics.gwt.html;
import elemental2.dom.CSSProperties;
import elemental2.dom.HTMLElement;
import dev.webfx.kit.mapper.peers.javafxgraphics.gwt.util.HtmlUtil;
import javafx.scene.shape.Rectangle;
import dev.webfx.kit.mapper.peers.javafxgraphics.base.RectanglePeerBase;
import dev.webfx.kit.mapper.peers.javafxgraphics.base.RectanglePeerMixin;
/**
* @author <NAME>
*/
public final class HtmlRectanglePeer
        <N extends Rectangle, NB extends RectanglePeerBase<N, NB, NM>, NM extends RectanglePeerMixin<N, NB, NM>>
        extends HtmlShapePeer<N, NB, NM>
        implements RectanglePeerMixin<N, NB, NM> {

    public HtmlRectanglePeer() {
        this((NB) new RectanglePeerBase(), HtmlUtil.createElement("fx-rectangle"));
    }

    public HtmlRectanglePeer(NB base, HTMLElement element) {
        super(base, element);
    }

    /**
     * Builds the CSS clip-path for this rectangle as a polygon over its four
     * corners. Raw double-to-string px values are used (rather than rounded
     * px) to preserve sub-pixel precision when the scale is not 1.
     */
    @Override
    protected String computeClipPath() {
        Rectangle rect = getNode();
        double w = rect.getWidth();
        double h = rect.getHeight();
        // Temporary hack: empty rectangles are likely the result of a wrong
        // computation and would hide the node entirely (observed with Label),
        // so skip them. TODO: fix the underlying computation.
        if (w == 0 && h == 0)
            return null;
        double x0 = rect.getX();
        double y0 = rect.getY();
        double x1 = x0 + w;
        double y1 = y0 + h;
        // Corner radii (arcWidth/arcHeight) are intentionally not applied
        // here; a CSS inset(... round ...) variant could honour them.
        return "polygon(" + vertex(x0, y0) + ", " + vertex(x1, y0) + ", " + vertex(x1, y1) + ", " + vertex(x0, y1) + ")";
    }

    // One "Xpx Ypx" polygon vertex; plain concatenation keeps full precision.
    private static String vertex(double x, double y) {
        return x + "px " + y + "px";
    }

    @Override
    public void updateX(Double x) {
        if (isClip()) {
            applyClipPathToClipNodes();
            return;
        }
        getElement().style.left = toPx(x);
    }

    @Override
    public void updateY(Double y) {
        if (isClip()) {
            applyClipPathToClipNodes();
            return;
        }
        getElement().style.top = toPx(y);
    }

    @Override
    public void updateWidth(Double width) {
        if (isClip()) {
            applyClipPathToClipNodes();
            return;
        }
        getElement().style.width = CSSProperties.WidthUnionType.of(toPx(width));
    }

    @Override
    public void updateHeight(Double height) {
        if (isClip()) {
            applyClipPathToClipNodes();
            return;
        }
        getElement().style.height = CSSProperties.HeightUnionType.of(toPx(height));
    }

    @Override
    public void updateArcWidth(Double arcWidth) {
        updateBorderRadius();
    }

    @Override
    public void updateArcHeight(Double arcHeight) {
        updateBorderRadius();
    }

    /** Maps the JavaFX arc sizes (diameters) onto CSS border radii (radii). */
    private void updateBorderRadius() {
        if (isClip()) {
            applyClipPathToClipNodes();
            return;
        }
        Rectangle r = getNode();
        getElement().style.borderRadius = CSSProperties.BorderRadiusUnionType.of(toPx(r.getArcWidth()/2) + " " + toPx(r.getArcHeight()/2));
    }
}
|
import java.util.Stack;
/**
 * A stack that additionally reports its current minimum and maximum in O(1),
 * using two auxiliary stacks that track running extrema.
 *
 * <p>Fix: {@code pop()} previously compared against the helper-stack tops with
 * {@code equals()}, while {@code push()} decides with {@code compareTo()}. For
 * types whose {@code equals} is inconsistent with {@code compareTo} (e.g.
 * {@code BigDecimal("1.0")} vs {@code BigDecimal("1.00")}) the helper stacks
 * could drift out of sync; both sides now use {@code compareTo() == 0}.
 */
public class MinMaxStack<T extends Comparable<T>> {

    /** All elements in push order. */
    private final Stack<T> stack;
    /** Running minimums; the top is always the current minimum. Duplicates are kept so pops stay in sync. */
    private final Stack<T> minStack;
    /** Running maximums; the top is always the current maximum. */
    private final Stack<T> maxStack;

    public MinMaxStack() {
        stack = new Stack<>();
        minStack = new Stack<>();
        maxStack = new Stack<>();
    }

    /**
     * Pushes an element, updating the min/max trackers.
     *
     * @param element the element to push (must not be null)
     */
    public void push(T element) {
        stack.push(element);
        // <= / >= (not < / >) so equal extrema are duplicated and pop() stays balanced.
        if (minStack.isEmpty() || element.compareTo(minStack.peek()) <= 0) {
            minStack.push(element);
        }
        if (maxStack.isEmpty() || element.compareTo(maxStack.peek()) >= 0) {
            maxStack.push(element);
        }
    }

    /**
     * Removes and returns the top element, or null when the stack is empty.
     */
    public T pop() {
        if (stack.isEmpty()) {
            return null;
        }
        T popped = stack.pop();
        // Compare with compareTo()==0 for consistency with the ordering used by push().
        if (!minStack.isEmpty() && popped.compareTo(minStack.peek()) == 0) {
            minStack.pop();
        }
        if (!maxStack.isEmpty() && popped.compareTo(maxStack.peek()) == 0) {
            maxStack.pop();
        }
        return popped;
    }

    /** @return the current minimum, or null when empty */
    public T getMin() {
        return minStack.isEmpty() ? null : minStack.peek();
    }

    /** @return the current maximum, or null when empty */
    public T getMax() {
        return maxStack.isEmpty() ? null : maxStack.peek();
    }
}
class FotaUpdater:
    # ... (other methods remain unchanged)

    def parse_tool_options(self):
        """Return the tool options extracted from the parsed OTA element.

        Raises:
            FotaError: if no OTA element has been parsed yet.
        """
        if self._ota_element is None:
            raise FotaError("missing ota_element")
        # The OTA element knows how to extract its own tool options.
        return self._ota_element.get_tool_options()
def reverse(string):
    """Return the characters of ``string`` joined in reverse order.

    Accepts any sequence of strings (a ``str``, a list of characters, ...)
    and always returns a single ``str``, matching the original behavior.
    """
    # join(reversed(...)) is O(n); the original prepend loop rebuilt the
    # accumulator on every iteration, which is O(n^2).
    return "".join(reversed(string))
#!/bin/bash
# Record the currently playing track as disliked: make the bundled mpd
# module importable, then append the script's output to the dislike log.
script_dir="$(dirname "$0")"
export PYTHONPATH="${script_dir}/mpd"
python3 "${HOME}/git/mpd-script/dislike.py" >> "${HOME}/.mpd/dislike"
class PackageInfo:
    """Mutable record of a package's metadata with setter/getter accessors."""

    def __init__(self):
        # Revision is fixed at 1 (no setter); collections default to empty tuples.
        self.revision = 1
        self.sources = None
        self.patches = ()
        self.dependencies = ()
        self.homepage = None
        self.envvars = None
        self.build_envvars = None

    def set_sources(self, sources):
        """Record where the package sources come from."""
        self.sources = sources

    def set_patches(self, patches):
        """Record the patches applied on top of the sources."""
        self.patches = patches

    def set_dependencies(self, dependencies):
        """Record the packages this one depends on."""
        self.dependencies = dependencies

    def set_homepage(self, homepage):
        """Record the package homepage URL."""
        self.homepage = homepage

    def set_envvars(self, envvars):
        """Record runtime environment variables."""
        self.envvars = envvars

    def set_build_envvars(self, build_envvars):
        """Record build-time environment variables."""
        self.build_envvars = build_envvars

    def get_revision(self):
        """Return the package revision (always 1)."""
        return self.revision

    def get_sources(self):
        """Return the sources, or None when unset."""
        return self.sources

    def get_patches(self):
        """Return the patches tuple (possibly empty)."""
        return self.patches

    def get_dependencies(self):
        """Return the dependencies tuple (possibly empty)."""
        return self.dependencies

    def get_homepage(self):
        """Return the homepage, or None when unset."""
        return self.homepage

    def get_envvars(self):
        """Return the runtime environment variables, or None when unset."""
        return self.envvars

    def get_build_envvars(self):
        """Return the build environment variables, or None when unset."""
        return self.build_envvars

    def display_summary(self):
        """Print a human-readable dump of every field; unset/empty values print as 'None'."""
        lines = [
            "Package Information:",
            f"Revision: {self.revision}",
            f"Sources: {self.sources if self.sources is not None else 'None'}",
            f"Patches: {self.patches if self.patches else 'None'}",
            f"Dependencies: {self.dependencies if self.dependencies else 'None'}",
            f"Homepage: {self.homepage if self.homepage is not None else 'None'}",
            f"Environment Variables: {self.envvars if self.envvars is not None else 'None'}",
            f"Build Environment Variables: {self.build_envvars if self.build_envvars is not None else 'None'}",
        ]
        for line in lines:
            print(line)
# Example usage: populate a PackageInfo instance and show its summary.
example_package = PackageInfo()
example_package.set_sources("https://example.com/source")
example_package.set_patches(("patch1", "patch2"))
example_package.set_dependencies(("dep1", "dep2"))
example_package.set_homepage("https://example.com/package")
example_package.set_envvars({"VAR1": "value1", "VAR2": "value2"})
example_package.set_build_envvars({"BUILD_VAR": "build_value"})
example_package.display_summary()
# Prints the 1-based position of the lowest value that occurs exactly once in
# `array` (an array of numeric strings), or 0 when no value is unique.
#
# Fix/generalization: the original used a fixed 9-slot bucket array, so it only
# worked for values 1..9 — a value of 0 wrapped to index -1 (the last bucket)
# and values above 9 raised/overflowed. A Hash tally handles any integers while
# producing identical output for 1..9 inputs.
def lowest_unique_numbers(array)
  values = array.map { |token| token.to_i }
  # Count occurrences of each value.
  counts = Hash.new(0)
  values.each { |value| counts[value] += 1 }
  # Lowest value appearing exactly once; nil when there is none.
  lowest_unique = counts.select { |_value, count| count == 1 }.keys.min
  # 0 signals "no unique value", matching the original behavior.
  position = lowest_unique.nil? ? 0 : values.index(lowest_unique) + 1
  puts(position)
end
# Run the puzzle for each whitespace-separated line of testcases.txt.
# Fix: File.open(...).each_line never closed the file handle; File.foreach
# iterates lines and closes the file automatically.
File.foreach('testcases.txt') do |line|
  tokens = line.strip.split(" ")
  lowest_unique_numbers(tokens)
end
const session = require("express-session");
const FileStore = require("session-file-store")(session);
const mongoose = require("mongoose");
const User = require("../models/user");
// Drops the stale session cookie when the browser still presents `user_sid`
// but there is no authenticated user behind the server-side session.
const cookieCleaner = (req, res, next) => {
  const hasStaleCookie = Boolean(req.cookies?.user_sid) && !req.session.email;
  if (hasStaleCookie) {
    res.clearCookie("user_sid");
  }
  next();
};
// Installs file-backed session middleware plus the stale-cookie cleaner.
module.exports.sessionConfig = (application) => {
  application.use(
    session({
      store: new FileStore(),
      key: "user_sid",
      // NOTE(review): hardcoded secret — move to an environment variable.
      secret: "anything here",
      resave: true,
      saveUninitialized: false,
      cookie: {
        // Bug fix: `expires` must be a Date; a lifetime in milliseconds
        // belongs in `maxAge` (express-session derives Expires from it).
        maxAge: 600000000,
      },
    })
  );
  application.use(cookieCleaner);
};
module.exports.sessionChecker = (req, res, next) => {
if (req.session.email) {
res.redirect("/");
} else {
next();
}
};
// Populates res.locals with login state for the view layer.
// Fixes: (1) a session referencing a deleted user crashed on
// `user.statusCourier` (user was null); (2) a rejected `await` produced an
// unhandled rejection instead of reaching the error middleware.
module.exports.sessionVariables = async (req, res, next) => {
  if (!req.session?.email) {
    next();
    return;
  }
  res.locals.login = true;
  res.locals.userName = req.session.email;
  res.locals.name = req.session.name;
  try {
    const user = await User.findOne({ email: req.session.email });
    // Guard: the session may outlive the user record.
    if (user) {
      if (user.statusCourier) {
        res.locals.courier = true;
      } else {
        res.locals.customer = true;
      }
    }
    next();
  } catch (err) {
    next(err);
  }
};
|
#!/bin/bash
set -e

# All compose files that together define the sharded cluster; kept in one
# array so the list is not repeated in every command.
COMPOSE_FILES=(-f docker-compose.1.yml -f docker-compose.2.yml -f docker-compose.cnf.yml -f docker-compose.shard.yml)

# Start all containers in the background.
function start() {
    docker-compose "${COMPOSE_FILES[@]}" up -d
}

# Follow the aggregated container logs.
function logs() {
    docker-compose "${COMPOSE_FILES[@]}" logs -f
}

# Stop the running containers without removing them.
function stop() {
    docker-compose "${COMPOSE_FILES[@]}" stop
}

# Remove stopped containers. Named "remove" so the external `rm` command is
# not shadowed inside this script; the CLI argument is still "rm".
function remove() {
    docker-compose "${COMPOSE_FILES[@]}" rm -f
}

case "$1" in
    start) start ;;
    logs)  logs ;;
    stop)  stop ;;
    rm)    remove ;;
    *)     echo "Error: start, logs, stop or rm!" ;;
esac
|
module.exports = application => {
application.get('/', (req, res) => {
application.src.controllers.index.home(application, req, res)
})
} |
# frozen_string_literal: true
require_relative "../support/command_testing"
using CommandTesting
# Runs fixture scripts in a fresh subprocess to check that requiring the
# "proposed"/"edge" fixture files activates the corresponding Ruby Next
# feature set even when the RUBY_NEXT_* env vars are explicitly "0".
describe "edge/proposed features via require" do
  it "proposed features" do
    # Load the library under test via -I and the fixture via -r, then call
    # the fixture's `main` with and without a status key.
    cmd = <<~CMD
      ruby -rbundler/setup -I#{File.join(__dir__, "../../../lib")} -r #{File.join(__dir__, "fixtures", "proposed.rb")} \
      -e "p main({}.to_json); p main({status: :ok}.to_json)"
    CMD
    # Set env var to 0 to make sure we do not shadow it
    run(cmd, env: {"RUBY_NEXT_PROPOSED" => "0"}) do |_status, output, _err|
      output.should include("\"status: \"\n")
      output.should include("\"status: ok\"\n")
    end
  end

  it "edge features" do
    cmd = <<~CMD
      ruby -rbundler/setup -I#{File.join(__dir__, "../../../lib")} -r #{File.join(__dir__, "fixtures", "edge.rb")} \
      -e "p greet(hello: 'Human'); p greet(hello: 'martian')"
    CMD
    # Set env var to 0 to make sure we do not shadow it
    run(cmd, env: {"RUBY_NEXT_EDGE" => "0"}) do |_status, output, _err|
      output.should include("human\n")
      output.should include("alien\n")
    end
  end
end
|
package com.inner.lovetao.index.activity;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.alibaba.android.arouter.launcher.ARouter;
import com.inner.lovetao.R;
import com.inner.lovetao.config.ArouterConfig;
import com.inner.lovetao.config.ConfigInfo;
import com.inner.lovetao.index.mvp.SplashContract;
import com.inner.lovetao.index.mvp.SplashPresenter;
import com.jess.arms.base.BaseActivity;
import com.jess.arms.di.component.AppComponent;
import com.jess.arms.utils.ArmsUtils;
import com.jess.arms.utils.DataHelper;
/**
* desc:启动页
* Created by xcz
* on 2019/1/10.
*/
public class SplashActivity extends BaseActivity<SplashPresenter> implements SplashContract.View, SplashContract.Model {

    /** How long the splash screen stays visible, in milliseconds. */
    private static final long SPLASH_DELAY_MS = 2000;

    /**
     * Main-thread handler for the delayed navigation. Fix: the original created an
     * anonymous {@code new Handler() {}} subclass with no explicit Looper and never
     * removed its pending callback, leaking the Activity when it was destroyed
     * before the delay elapsed.
     */
    private final Handler splashHandler = new Handler(Looper.getMainLooper());

    @Override
    public void setupActivityComponent(@NonNull AppComponent appComponent) {
        // No dependency injection required for this screen.
    }

    @Override
    public int initView(@Nullable Bundle savedInstanceState) {
        return R.layout.ac_splash;
    }

    @Override
    public void initData(@Nullable Bundle savedInstanceState) {
        splashHandler.postDelayed(() -> {
            // FIRST_INSTALL is empty on a fresh install -> show the guide once.
            if (TextUtils.isEmpty(DataHelper.getStringSF(this, ConfigInfo.FIRST_INSTALL))) {
                toGuild();
            } else {
                toMain();
            }
            finish();
        }, SPLASH_DELAY_MS);
    }

    /** Navigate to the guide page. */
    @Override
    public void toGuild() {
        ARouter.getInstance().build(ArouterConfig.AC_GUILD).navigation(this);
    }

    /** Navigate to the main page. */
    @Override
    public void toMain() {
        ARouter.getInstance().build(ArouterConfig.AC_MAIN).navigation(this);
    }

    @Override
    public void onDestroy() {
        // Cancel any pending navigation so it cannot fire after the Activity is gone.
        splashHandler.removeCallbacksAndMessages(null);
        super.onDestroy();
    }

    @Override
    public void showMessage(@NonNull String message) {
        ArmsUtils.makeText(this, message);
    }

    @Override
    public boolean useEventBus() {
        return false;
    }

    @Override
    public boolean useFragment() {
        return false;
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# shellcheck source=scripts/in_container/_in_container_script_init.sh
. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
# Smoke-test that every provider class can be imported (a separate log
# group is opened so CI output stays foldable).
function import_all_provider_classes() {
    group_start "Importing all classes"
    python3 "${AIRFLOW_SOURCES}/dev/import_all_classes.py" --path "airflow/providers"
    group_end
}
# Verify that provider classes follow the expected naming conventions,
# honoring the optional backport flag.
function verify_provider_packages_named_properly() {
    python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \
        "${OPTIONAL_BACKPORT_FLAG[@]}" \
        verify-provider-classes
}
# Generates package documentation for every package in PROVIDER_PACKAGES,
# collecting successes, skips (exit code 64) and errors; exits non-zero if
# any package errored.
#
# Fixes: the "Skipping" echo was unreachable (placed after `continue`), and
# `set -e` was only restored at the end of the loop body, so a skip/error on
# the last iteration left exit-on-error permanently disabled.
function run_prepare_documentation() {
    local prepared_documentation=()
    local skipped_documentation=()
    local error_documentation=()
    local provider_package
    for provider_package in "${PROVIDER_PACKAGES[@]}"
    do
        # Disable exit-on-error only around the command whose status we inspect.
        set +e
        local res
        # There is a separate group created in logs for each provider package
        python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \
            --version-suffix "${TARGET_VERSION_SUFFIX}" \
            "${OPTIONAL_BACKPORT_FLAG[@]}" \
            "${OPTIONAL_RELEASE_VERSION_ARGUMENT[@]}" \
            update-package-documentation \
            "${provider_package}"
        res=$?
        # Restore exit-on-error immediately so `continue` paths cannot leave it off.
        set -e
        if [[ ${res} == "64" ]]; then
            echo "${COLOR_YELLOW}Skipping provider package '${provider_package}'${COLOR_RESET}"
            skipped_documentation+=("${provider_package}")
            continue
        fi
        if [[ ${res} != "0" ]]; then
            echo "${COLOR_RED}Error when generating provider package '${provider_package}'${COLOR_RESET}"
            error_documentation+=("${provider_package}")
            continue
        fi
        prepared_documentation+=("${provider_package}")
    done
    echo "${COLOR_BLUE}===================================================================================${COLOR_RESET}"
    echo
    echo "Summary of prepared documentations:"
    echo
    if [[ "${#prepared_documentation[@]}" != "0" ]]; then
        echo "${COLOR_GREEN}   Success:${COLOR_RESET}"
        echo "${prepared_documentation[@]}" | fold -w 100
    fi
    if [[ "${#skipped_documentation[@]}" != "0" ]]; then
        echo "${COLOR_YELLOW}   Skipped:${COLOR_RESET}"
        echo "${skipped_documentation[@]}" | fold -w 100
    fi
    if [[ "${#error_documentation[@]}" != "0" ]]; then
        echo "${COLOR_RED}   Errors:${COLOR_RESET}"
        echo "${error_documentation[@]}" | fold -w 100
    fi
    echo
    echo "${COLOR_BLUE}===================================================================================${COLOR_RESET}"
    if [[ ${#error_documentation[@]} != "0" ]]; then
        echo
        echo "${COLOR_RED}There were errors when preparing documentation. Exiting! ${COLOR_RESET}"
        exit 1
    fi
}
# Resolve package flavour (regular vs backport) and global option arrays.
setup_provider_packages
cd "${AIRFLOW_SOURCES}" || exit 1
export PYTHONPATH="${AIRFLOW_SOURCES}"
verify_suffix_versions_for_package_preparation
install_supported_pip_version
# install extra packages missing in devel_ci
# TODO: remove it when devel_all == devel_ci
install_remaining_dependencies
reinstall_azure_storage_blob
if [[ ${BACKPORT_PACKAGES} != "true" ]]; then
    import_all_provider_classes
    verify_provider_packages_named_properly
fi
# We will be able to remove it when we get rid of BACKPORT_PACKAGES
OPTIONAL_RELEASE_VERSION_ARGUMENT=()
# A leading YYYY.MM.DD argument is interpreted as the backport release version.
if [[ $# != "0" && ${1} =~ ^[0-9][0-9][0-9][0-9]\.[0-9][0-9]\.[0-9][0-9]$ ]]; then
    OPTIONAL_RELEASE_VERSION_ARGUMENT+=("--release-version" "${1}")
    shift
fi
# Remaining arguments are the provider packages to process.
PROVIDER_PACKAGES=("${@}")
get_providers_to_act_on "${@}"
run_prepare_documentation
echo
echo "${COLOR_GREEN}All good! Airflow Provider's documentation generated!${COLOR_RESET}"
echo
|
import json
import os
import time
import pytest
from src import client
@pytest.mark.run(order=4)
class TestStockClient:
    """Integration tests for client.TaiwanStockClient against recorded JSON fixtures."""

    # Directory holding the recorded fixture files (one per trading day).
    PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "testdata")

    def __assert(self, rawdata, testdata):
        """Check that rawdata matches testdata row-by-row (keyed by sid)."""
        # None means the exchange had no data for that day; nothing to verify.
        if rawdata is None:
            return
        for row in rawdata:
            sid = row["sid"]
            assert testdata.pop(sid) == row
        # Every fixture row must have been matched exactly once.
        assert len(testdata) == 0

    def test_version(self):
        assert client.TaiwanStockClient(version="test0.0.1", sleep_second=5)._version == "test0.0.1"

    def test_fetch_rawdata(self):
        print("")
        # Daily after-market quotes, institutional investors' net buy/sell,
        # and margin trading balances.
        date = [
            (2018, 1, 15),
            (2017, 12, 18),
            (2014, 12, 1),
            (2012, 5, 2),
        ]
        # Renamed from `object`, which shadowed the builtin of the same name.
        stock_client = client.TaiwanStockClient(
            enable_fetch_institutional_investors=True, enable_fetch_credit_transactions_securities=True
        )
        for year, month, day in date:
            with open(os.path.join(self.PATH, f"{year}{month:0>2}{day:0>2}.json")) as f:
                time.sleep(5)  # throttle requests to the exchange
                self.__assert(stock_client.fetch(year, month, day), json.load(f))
        # Daily after-market quotes and margin trading balances only.
        date = [
            (2007, 7, 1),
            (2007, 7, 2),
            (2007, 1, 1),
            (2007, 1, 2),
        ]
        stock_client = client.TaiwanStockClient(
            enable_fetch_institutional_investors=False, enable_fetch_credit_transactions_securities=True
        )
        for year, month, day in date:
            time.sleep(5)
            rawdata = stock_client.fetch(year, month, day)
            if rawdata:
                with open(os.path.join(self.PATH, f"{year}{month:0>2}{day:0>2}.json")) as f:
                    testdata = json.load(f)
                    self.__assert(rawdata, testdata)

    # def test_fetch_sql(self):
    #     NotImplementedError
|
#!/usr/bin/env bash
# Release build script: cross-compiles Astral Core for Linux (64/32/ARM/aarch64),
# Windows (64/32) and OSX, collecting artifacts under ~/release and ~/sign.
# Set DISTNAME, BRANCH and MAKEOPTS to the desired settings
DISTNAME=astral-2.0.3
MAKEOPTS="-j4"
BRANCH=master
clear
# Must be root: apt and update-alternatives are used below.
if [[ $EUID -ne 0 ]]; then
    echo "This script must be run with sudo"
    exit 1
fi
# All build trees are created relative to ~, so insist on running from there.
if [[ $PWD != $HOME ]]; then
    echo "This script must be run from ~/"
    exit 1
fi
# The OSX cross build needs the MacOSX 10.11 SDK tarball supplied out-of-band.
if [ ! -f ~/MacOSX10.11.sdk.tar.gz ]
then
    echo "Before executing script.sh transfer MacOSX10.11.sdk.tar.gz to ~/"
    exit 1
fi
# Remember the original PATH so each target build can restore it afterwards.
export PATH_orig=$PATH
echo @@@
echo @@@"Installing Dependecies"
echo @@@
apt install -y curl g++-aarch64-linux-gnu g++-7-aarch64-linux-gnu gcc-7-aarch64-linux-gnu binutils-aarch64-linux-gnu g++-arm-linux-gnueabihf g++-7-arm-linux-gnueabihf gcc-7-arm-linux-gnueabihf binutils-arm-linux-gnueabihf g++-7-multilib gcc-7-multilib binutils-gold git pkg-config autoconf libtool automake bsdmainutils ca-certificates python g++ mingw-w64 g++-mingw-w64 nsis zip rename librsvg2-bin libtiff-tools cmake imagemagick libcap-dev libz-dev libbz2-dev python-dev python-setuptools fonts-tuffy
cd ~/
# Removes any existing builds and starts clean WARNING
rm -rf ~/astralcoin ~/sign ~/release
git clone https://github.com/astralproject/astralcoin
cd ~/astralcoin
git checkout $BRANCH
echo @@@
echo @@@"Building linux 64 binaries"
echo @@@
mkdir -p ~/release
# Build the self-contained dependency chain for the x86_64 host triplet.
cd ~/astralcoin/depends
make HOST=x86_64-linux-gnu $MAKEOPTS
cd ~/astralcoin
export PATH=$PWD/depends/x86_64-linux-gnu/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/x86_64-linux-gnu/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests CFLAGS="-O2 -g" CXXFLAGS="-O2 -g" LDFLAGS="-static-libstdc++"
make $MAKEOPTS
make -C src check-security
make -C src check-symbols
mkdir ~/linux64
make install DESTDIR=~/linux64/$DISTNAME
cd ~/linux64
# Strip libtool/static-archive clutter from the install tree.
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
# Split debug info into .dbg side files, then tar deterministically
# (sorted, fixed modes and ownership) for reproducible artifacts.
find ${DISTNAME}/bin -type f -executable -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find ${DISTNAME}/lib -type f -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find $DISTNAME/ -not -name "*.dbg" | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/release/$DISTNAME-x86_64-linux-gnu.tar.gz
cd ~/astralcoin
rm -rf ~/linux64
make clean
export PATH=$PATH_orig
echo @@@
echo @@@"Building general sourcecode"
echo @@@
cd ~/astralcoin
export PATH=$PWD/depends/x86_64-linux-gnu/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/x86_64-linux-gnu/share/config.site ./configure --prefix=/
make dist
SOURCEDIST=`echo astral-*.tar.gz`
# Repack the source tarball deterministically (stable order/permissions/ownership).
mkdir -p ~/astralcoin/temp
cd ~/astralcoin/temp
tar xf ../$SOURCEDIST
find astral-* | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ../$SOURCEDIST
cd ~/astralcoin
mv $SOURCEDIST ~/release
rm -rf temp
make clean
export PATH=$PATH_orig
echo @@@
echo @@@"Building linux 32 binaries"
echo @@@
cd ~/
# The 32-bit build wraps gcc/g++ in scripts that inject the i686 include
# path whenever -m32 is passed; heredoc content must stay flush-left because
# it is written verbatim into the wrapper scripts.
mkdir -p ~/wrapped/extra_includes/i686-pc-linux-gnu
ln -s /usr/include/x86_64-linux-gnu/asm ~/wrapped/extra_includes/i686-pc-linux-gnu/asm
for prog in gcc g++; do
rm -f ~/wrapped/${prog}
cat << EOF > ~/wrapped/${prog}
#!/usr/bin/env bash
REAL="`which -a ${prog} | grep -v $PWD/wrapped/${prog} | head -1`"
for var in "\$@"
do
if [ "\$var" = "-m32" ]; then
export C_INCLUDE_PATH="$PWD/wrapped/extra_includes/i686-pc-linux-gnu"
export CPLUS_INCLUDE_PATH="$PWD/wrapped/extra_includes/i686-pc-linux-gnu"
break
fi
done
\$REAL \$@
EOF
chmod +x ~/wrapped/${prog}
done
export PATH=$PWD/wrapped:$PATH
export HOST_ID_SALT="$PWD/wrapped/extra_includes/i386-linux-gnu"
cd ~/astralcoin/depends
make HOST=i686-pc-linux-gnu $MAKEOPTS
unset HOST_ID_SALT
cd ~/astralcoin
export PATH=$PWD/depends/i686-pc-linux-gnu/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/i686-pc-linux-gnu/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests CFLAGS="-O2 -g" CXXFLAGS="-O2 -g" LDFLAGS="-static-libstdc++"
make $MAKEOPTS
make -C src check-security
make -C src check-symbols
mkdir -p ~/linux32
make install DESTDIR=~/linux32/$DISTNAME
cd ~/linux32
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
find ${DISTNAME}/bin -type f -executable -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find ${DISTNAME}/lib -type f -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find $DISTNAME/ -not -name "*.dbg" | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/release/$DISTNAME-i686-pc-linux-gnu.tar.gz
cd ~/astralcoin
rm -rf ~/linux32
rm -rf ~/wrapped
make clean
export PATH=$PATH_orig
echo @@@
echo @@@ "Building linux ARM binaries"
echo @@@
cd ~/astralcoin/depends
make HOST=arm-linux-gnueabihf $MAKEOPTS
cd ~/astralcoin
export PATH=$PWD/depends/arm-linux-gnueabihf/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/arm-linux-gnueabihf/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests CFLAGS="-O2 -g" CXXFLAGS="-O2 -g" LDFLAGS="-static-libstdc++"
make $MAKEOPTS
make -C src check-security
mkdir -p ~/linuxARM
make install DESTDIR=~/linuxARM/$DISTNAME
cd ~/linuxARM
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
find ${DISTNAME}/bin -type f -executable -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find ${DISTNAME}/lib -type f -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find $DISTNAME/ -not -name "*.dbg" | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/release/$DISTNAME-arm-linux-gnueabihf.tar.gz
cd ~/astralcoin
rm -rf ~/linuxARM
make clean
export PATH=$PATH_orig
echo @@@
echo @@@ "Building linux aarch64 binaries"
echo @@@
cd ~/astralcoin/depends
make HOST=aarch64-linux-gnu $MAKEOPTS
cd ~/astralcoin
export PATH=$PWD/depends/aarch64-linux-gnu/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/aarch64-linux-gnu/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests CFLAGS="-O2 -g" CXXFLAGS="-O2 -g" LDFLAGS="-static-libstdc++"
make $MAKEOPTS
make -C src check-security
mkdir -p ~/linuxaarch64
make install DESTDIR=~/linuxaarch64/$DISTNAME
cd ~/linuxaarch64
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
find ${DISTNAME}/bin -type f -executable -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find ${DISTNAME}/lib -type f -exec ../astralcoin/contrib/devtools/split-debug.sh {} {} {}.dbg \;
find $DISTNAME/ -not -name "*.dbg" | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/release/$DISTNAME-aarch64-linux-gnu.tar.gz
cd ~/astralcoin
rm -rf ~/linuxaarch64
make clean
export PATH=$PATH_orig
echo @@@
echo @@@ "Building windows 64 binaries"
echo @@@
# Use the POSIX-threads flavour of mingw (needed for C++11 threading support).
update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix
mkdir -p ~/release/unsigned/
mkdir -p ~/sign/win64
PATH=$(echo "$PATH" | sed -e 's/:\/mnt.*//g') # strip out problematic Windows %PATH% imported var
cd ~/astralcoin/depends
make HOST=x86_64-w64-mingw32 $MAKEOPTS
cd ~/astralcoin
export PATH=$PWD/depends/x86_64-w64-mingw32/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/x86_64-w64-mingw32/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-reduce-exports --disable-bench --disable-gui-tests CFLAGS="-O2 -g" CXXFLAGS="-O2 -g"
make $MAKEOPTS
make -C src check-security
make deploy
# The NSIS installer stays "unsigned" until the detached signature is applied.
rename 's/-setup\.exe$/-setup-unsigned.exe/' *-setup.exe
cp -f astral-*setup*.exe ~/release/unsigned/
mkdir -p ~/win64
make install DESTDIR=~/win64/$DISTNAME
cd ~/win64
mv ~/win64/$DISTNAME/bin/*.dll ~/win64/$DISTNAME/lib/
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
# Detach debug info, strip the binaries, and link the .dbg files back in.
find $DISTNAME/bin -type f -executable -exec x86_64-w64-mingw32-objcopy --only-keep-debug {} {}.dbg \; -exec x86_64-w64-mingw32-strip -s {} \; -exec x86_64-w64-mingw32-objcopy --add-gnu-debuglink={}.dbg {} \;
find ./$DISTNAME -not -name "*.dbg" -type f | sort | zip -X@ ./$DISTNAME-x86_64-w64-mingw32.zip
mv ./$DISTNAME-x86_64-*.zip ~/release/$DISTNAME-win64.zip
cd ~/
rm -rf win64
# Package the windeploy tree plus the unsigned installer for offline signing.
cp -rf astralcoin/contrib/windeploy ~/sign/win64
cd ~/sign/win64/windeploy
mkdir -p unsigned
mv ~/astralcoin/astral-*setup-unsigned.exe unsigned/
find . | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/sign/$DISTNAME-win64-unsigned.tar.gz
cd ~/sign
rm -rf win64
cd ~/astralcoin
rm -rf release
make clean
export PATH=$PATH_orig
echo @@@
echo @@@ "Building windows 32 binaries"
echo @@@
update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix
mkdir -p ~/sign/win32
PATH=$(echo "$PATH" | sed -e 's/:\/mnt.*//g')
cd ~/astralcoin/depends
make HOST=i686-w64-mingw32 $MAKEOPTS
cd ~/astralcoin
export PATH=$PWD/depends/i686-w64-mingw32/native/bin:$PATH
./autogen.sh
CONFIG_SITE=$PWD/depends/i686-w64-mingw32/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-reduce-exports --disable-bench --disable-gui-tests CFLAGS="-O2 -g" CXXFLAGS="-O2 -g"
make $MAKEOPTS
make -C src check-security
make deploy
rename 's/-setup\.exe$/-setup-unsigned.exe/' *-setup.exe
cp -f astral-*setup*.exe ~/release/unsigned/
mkdir -p ~/win32
make install DESTDIR=~/win32/$DISTNAME
cd ~/win32
mv ~/win32/$DISTNAME/bin/*.dll ~/win32/$DISTNAME/lib/
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
find $DISTNAME/bin -type f -executable -exec i686-w64-mingw32-objcopy --only-keep-debug {} {}.dbg \; -exec i686-w64-mingw32-strip -s {} \; -exec i686-w64-mingw32-objcopy --add-gnu-debuglink={}.dbg {} \;
find ./$DISTNAME -not -name "*.dbg" -type f | sort | zip -X@ ./$DISTNAME-i686-w64-mingw32.zip
mv ./$DISTNAME-i686-w64-*.zip ~/release/$DISTNAME-win32.zip
cd ~/
rm -rf win32
cp -rf astralcoin/contrib/windeploy ~/sign/win32
cd ~/sign/win32/windeploy
mkdir -p unsigned
mv ~/astralcoin/astral-*setup-unsigned.exe unsigned/
find . | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/sign/$DISTNAME-win32-unsigned.tar.gz
cd ~/sign
rm -rf win32
cd ~/astralcoin
rm -rf release
make clean
export PATH=$PATH_orig
echo @@@
echo @@@ "Building OSX binaries"
echo @@@
# The darwin cross toolchain expects the 10.11 SDK unpacked inside depends/SDKs.
mkdir -p ~/astralcoin/depends/SDKs
cp ~/MacOSX10.11.sdk.tar.gz ~/astralcoin/depends/SDKs/MacOSX10.11.sdk.tar.gz
cd ~/astralcoin/depends/SDKs && tar -xf MacOSX10.11.sdk.tar.gz
rm -rf MacOSX10.11.sdk.tar.gz
cd ~/astralcoin/depends
make $MAKEOPTS HOST="x86_64-apple-darwin14"
cd ~/astralcoin
./autogen.sh
CONFIG_SITE=$PWD/depends/x86_64-apple-darwin14/share/config.site ./configure --prefix=/ --disable-ccache --disable-maintainer-mode --disable-dependency-tracking --enable-reduce-exports --disable-bench --disable-gui-tests GENISOIMAGE=$PWD/depends/x86_64-apple-darwin14/native/bin/genisoimage
make $MAKEOPTS
mkdir -p ~/OSX
export PATH=$PWD/depends/x86_64-apple-darwin14/native/bin:$PATH
make install-strip DESTDIR=~/OSX/$DISTNAME
make osx_volname
make deploydir
# Bundle everything the offline signer needs to rebuild a signed .app/.dmg.
mkdir -p unsigned-app-$DISTNAME
cp osx_volname unsigned-app-$DISTNAME/
cp contrib/macdeploy/detached-sig-apply.sh unsigned-app-$DISTNAME
cp contrib/macdeploy/detached-sig-create.sh unsigned-app-$DISTNAME
cp $PWD/depends/x86_64-apple-darwin14/native/bin/dmg $PWD/depends/x86_64-apple-darwin14/native/bin/genisoimage unsigned-app-$DISTNAME
cp $PWD/depends/x86_64-apple-darwin14/native/bin/x86_64-apple-darwin14-codesign_allocate unsigned-app-$DISTNAME/codesign_allocate
cp $PWD/depends/x86_64-apple-darwin14/native/bin/x86_64-apple-darwin14-pagestuff unsigned-app-$DISTNAME/pagestuff
mv dist unsigned-app-$DISTNAME
cd unsigned-app-$DISTNAME
find . | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/sign/$DISTNAME-osx-unsigned.tar.gz
cd ~/astralcoin
make deploy
$PWD/depends/x86_64-apple-darwin14/native/bin/dmg dmg "Astral-Core.dmg" ~/release/unsigned/$DISTNAME-osx-unsigned.dmg
rm -rf unsigned-app-$DISTNAME dist osx_volname dpi36.background.tiff dpi72.background.tiff
cd ~/OSX
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
rm -rf $DISTNAME/lib/pkgconfig
find $DISTNAME | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ~/release/$DISTNAME-osx64.tar.gz
cd ~/astralcoin
rm -rf ~/OSX
make clean
export PATH=$PATH_orig
|
#!/usr/bin/env bash
# Reset the working tree and NuGet state before a fresh build.
set -e
# Remove every untracked and ignored file so the build starts pristine.
git clean -xfd
dotnet clean --configuration Release
dotnet restore
1293;
1207;
1623;
1675;
1842;
1410;
85;
1108;
557;
1217;
1506;
1956;
1579;
1614;
1360;
1544;
1946;
1666;
1972;
1814;
1699;
1778;
1529;
2002;
1768;
1173;
1407;
1201;
1264;
1739;
1774;
1951;
1980;
1428;
1381;
1714;
884;
1939;
1295;
1694;
1168;
1971;
1352;
1462;
1828;
1402;
1433;
1542;
1144;
1331;
1427;
1261;
1663;
1820;
1570;
1874;
1486;
1613;
1769;
1721;
1753;
1142;
1677;
2010;
1640;
1465;
1171;
534;
1790;
2005;
1604;
1891;
1247;
1281;
1867;
1403;
2004;
1668;
1416;
2001;
1359;
686;
1965;
1728;
1551;
1565;
1128;
1832;
1757;
1350;
1808;
1711;
1799;
1590;
1989;
1547;
1140;
1905;
1368;
1179;
1902;
1473;
1908;
1859;
1257;
1394;
1244;
1800;
1695;
1731;
1474;
1781;
1885;
1154;
1990;
1929;
1193;
1302;
1831;
1226;
1418;
1400;
1435;
1645;
1655;
1843;
1227;
1481;
1754;
1290;
1685;
1498;
71;
1286;
1137;
1288;
1758;
1987;
1471;
1839;
1545;
1682;
1615;
1475;
1849;
1985;
1568;
1795;
1184;
1863;
1362;
1271;
1802;
1944;
1821;
1880;
1788;
1733;
1150;
1314;
1727;
1434;
1833;
1312;
1457;
160;
1629;
1967;
1505;
1239;
1266;
1838;
1687;
1630;
1591;
1893;
1450;
1234;
1755;
1523;
1533;
1499;
1865;
1725;
1444;
1517;
1167;
1738;
1519;
1263;
1901;
1627;
1644;
1771;
1812;
1270;
1497;
1707;
1708;
1396;
|
// <reponame>shin-kinoshita/dbflute-core
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.logic.generate.language.pkgstyle;
import org.dbflute.DfBuildProperties;
import org.dbflute.properties.DfBasicProperties;
import org.dbflute.properties.DfOutsideSqlProperties;
import org.dbflute.util.DfStringUtil;
import org.dbflute.util.Srl;
import org.dbflute.util.Srl.ScopeInfo;
/**
* @author jflute
* @since 0.8.6 (2008/11/21 Friday)
*/
public abstract class DfLanguagePropertyPackageResolver {
// ===================================================================================
// Definition
// ==========
public static final String VAR_CDEF = "$$CDef$$";
public static final String VAR_DOMAIN = "$$Domain$$";
public static final String VAR_CUSTOMIZE = "$$Customize$$";
public static final String VAR_PMB = "$$Pmb$$";
// ===================================================================================
// Constructor
// ===========
    /** Default constructor (no state to initialize). */
    public DfLanguagePropertyPackageResolver() {
    }
// ===================================================================================
// Resolver
// ========
    /**
     * Resolve package variables (e.g. $$CDef$$, $$Domain$$) and language types
     * in the type name, including utility types.
     * @param typeName The type name possibly containing package variables. (NullAllowed)
     * @return The resolved type name, or null when typeName is null.
     */
    public String resolvePackageName(String typeName) {
        return doResolvePackageName(typeName, false);
    }
    /**
     * Resolve package variables in the type name, excluding utility types from
     * language-type processing.
     * @param typeName The type name possibly containing package variables. (NullAllowed)
     * @return The resolved type name, or null when typeName is null.
     */
    public String resolvePackageNameExceptUtil(String typeName) {
        return doResolvePackageName(typeName, true);
    }
protected String doResolvePackageName(String typeName, boolean exceptUtil) {
if (typeName == null) {
return typeName;
}
final String processed = processLanguageType(typeName, exceptUtil);
if (processed != null) {
return processed;
}
if (typeName.contains(VAR_CDEF)) {
final DfBasicProperties prop = getBasicProperties();
final String pkg = prop.getBaseCommonPackage();
typeName = DfStringUtil.replace(typeName, VAR_CDEF, pkg + "." + prop.getCDefPureName());
}
if (typeName.contains(VAR_DOMAIN + ".")) { // as domain entity
final String pkg = getBasicProperties().getExtendedEntityPackage();
typeName = Srl.replace(typeName, VAR_DOMAIN + ".", pkg + ".");
}
if (typeName.contains(VAR_CUSTOMIZE + ".")) { // as customize entity
final String pkg = getOutsideSqlProperties().getExtendedEntityPackage();
typeName = Srl.replace(typeName, VAR_CUSTOMIZE + ".", pkg + ".");
}
if (typeName.contains(VAR_PMB + ".")) { // as parameter-bean
final String pkg = getOutsideSqlProperties().getExtendedParameterBeanPackage();
typeName = Srl.replace(typeName, VAR_PMB + ".", pkg + ".");
}
return typeName;
}
protected abstract String processLanguageType(String typeName, boolean exceptUtil);
protected String processListType(String typeName, boolean exceptUtil, String listPkg, String listName) {
final String listBegin = listName + "<";
final String listEnd = ">";
if (typeName.startsWith(listBegin) && typeName.endsWith(listEnd)) {
final ScopeInfo scope = Srl.extractScopeWide(typeName, listBegin, listEnd);
final String content = scope.getContent();
final String resolvedContent = doResolvePackageName(content, exceptUtil);
return listPkg + "." + listBegin + resolvedContent + listEnd;
} else {
return null;
}
}
protected String processMapType(String typeName, boolean exceptUtil, String mapPkg, String mapName) {
final String mapBegin = mapName + "<";
final String mapEnd = ">";
if (typeName.startsWith(mapBegin) && typeName.endsWith(mapEnd)) {
final ScopeInfo scope = Srl.extractScopeWide(typeName, mapBegin, mapEnd);
final String content = scope.getContent();
final String keyType = Srl.substringFirstFront(content, ",").trim();
final String valueType = Srl.substringFirstRear(content, ",").trim();
final String resolvedValueType = doResolvePackageName(valueType, exceptUtil);
return mapPkg + "." + mapBegin + keyType + ", " + resolvedValueType + mapEnd;
} else {
return null;
}
}
protected DfBasicProperties getBasicProperties() {
return DfBuildProperties.getInstance().getBasicProperties();
}
protected DfOutsideSqlProperties getOutsideSqlProperties() {
return DfBuildProperties.getInstance().getOutsideSqlProperties();
}
}
|
'''
Created on Jul 30, 2015
@author: Mikhail
'''
import unittest
import re
from json_file_generator import MyOwnJSONProcessing as json_processing
from json_file_generator import __version__ as json_file_generator_version
from unittest.case import skip, skipIf
class GenerateAndLoadJSONTestUpdateFour(unittest.TestCase):
    """Round-trip tests: dump generated data to a JSON file, reload it, compare.

    NOTE(review): this module targets Python 2 (print statements, dict.has_key);
    it will not run under Python 3 without conversion — confirm the intended
    interpreter before modernizing.
    """

    # Shared fixture: the data generated once for the whole class in setUpClass.
    expected_data = {}

    @classmethod
    def setUpClass(cls):
        print "{} for {} has been called".format(cls.setUpClass.__name__, cls.__name__)
        cls.expected_data = json_processing.generate_data_for_json_obj()

    def setUp(self):
        print "{} for {} has been called".format(self.setUp.__name__, self._testMethodName)
        # original_name is the path actually written by the generator (may differ
        # from the requested file_name).
        self.file_name = "generate_and_load_unittest.json"
        self.original_name = json_processing.generate_json_file_with_data(self.file_name, self.expected_data)

    def tearDown(self):
        print "{} for {} has been called".format(self.tearDown.__name__, self._testMethodName)

    @classmethod
    def tearDownClass(cls):
        print "{} for {} has been called".format(cls.tearDownClass.__name__, cls.__name__)
        # Remove any files produced by the generator.
        json_processing.clean_up()

    def testGenerateAndLoadJSONValidKeys(self):
        # Keys must match in both directions: nothing missing, nothing extra.
        print "Processing file {}".format(self.original_name)
        actual_data = json_processing.load_data_from_json_file(self.original_name)
        for exp_key in self.expected_data.keys():
            self.assertTrue(actual_data.has_key(exp_key), "Expected key '{}' has not been found in loaded json".format(exp_key))
        for act_key in actual_data.keys():
            self.assertTrue(self.expected_data.has_key(act_key), "Loaded key '{}' has not been found in dumped json".format(act_key))

    # General version of skip
    @skip("old functionality")
    def testGenerateAndLoadJSONValidKeysHasOnlyLetters1(self):
        print "Processing file {}".format(self.original_name)
        actual_data = json_processing.load_data_from_json_file(self.original_name)
        for act_key in actual_data.keys():
            self.assertTrue(re.match("[^a-zA-Z]", act_key) is None, "Key should contains only alpha symbols: {}".format(act_key))

    # Version of skip that check version of our json_file_generator
    @skipIf(json_file_generator_version > 1, "This functionality is not supported in this version on the json file generator")
    def testGenerateAndLoadJSONValidKeysHasOnlyLetters2(self):
        print "Processing file {}".format(self.original_name)
        actual_data = json_processing.load_data_from_json_file(self.original_name)
        for act_key in actual_data.keys():
            self.assertIsNone(re.match("[^a-zA-Z]", act_key), "Key should contains only alpha symbols: {}".format(act_key))

    def testGenerateAndLoadJSONValidValues(self):
        # Values must match for every key, checked in both directions.
        print "Processing file {}".format(self.original_name)
        actual_data = json_processing.load_data_from_json_file(self.original_name)
        for exp_key, exp_value in self.expected_data.items():
            self.assertEquals(exp_value, actual_data.get(exp_key), "Dictionaries have different values '{}' for first and '{}' for second for the same key".format(exp_value, actual_data.get(exp_key)))
        for act_key, act_value in actual_data.items():
            self.assertEquals(act_value, self.expected_data.get(act_key), "Dictionaries have different values '{}' for first and '{}' for second for the same key".format(act_value, self.expected_data.get(act_key)))

    def testGenerateAndLoadJSONForInvalidFile(self):
        """
        This test checks that valid exception will be raised if required file will not be found
        """
        invalid_name = "invalid_" + self.original_name
        print "Processing file {}".format(invalid_name)
        with self.assertRaises(IOError) as io_exception:
            # attempt to read file that doesn't exist
            json_processing.load_data_from_json_file(invalid_name)
        # NOTE(review): these assertions must sit OUTSIDE the `with` block to
        # execute (the load call raises); the original indentation was
        # ambiguous — confirm.
        self.assertEqual(io_exception.exception.errno, 2)
        self.assertEqual(io_exception.exception.strerror, 'No such file or directory')
if __name__ == "__main__":
unittest.main(verbosity=2) |
# Switch to the "slow" Makefile variant, then build with it.
# NOTE(review): assumes Makefile.slow exists in the current directory — confirm.
cp Makefile.slow Makefile
make
#!/usr/bin/env bash
# Pull latest sources, rebuild, and launch the application with debug/JMX flags.
git pull
# NOTE(review): mvnw normally carries a bash shebang; running it via `sh` was
# kept from the original — confirm it is POSIX-compatible on this host.
sh ./mvnw clean install -U
# NOTE(review): -XX:PermSize/-XX:MaxPermSize are ignored/removed on Java 8+ —
# confirm the target JVM version.
export JAVA_OPTS="-server -Xms1024M -Xmx1024M -Xss512k -XX:PermSize=256M -XX:MaxPermSize=512M -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote.port=5006 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"
# FIX: JAVA_OPTS was exported but never used — `java` does not read JAVA_OPTS
# from the environment, so none of the memory/debug/JMX flags were applied.
java $JAVA_OPTS -classpath ./${project}/target/dependency/*:./${project}/target/classes ${application}
<filename>MfgToolLib/CString.h<gh_stars>0
/*
* Copyright 2016 Freescale Semiconductor, Inc.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of the Freescale Semiconductor nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "stdafx.h"
#include <string>
#include <stdlib.h>
#include <cstdarg>
#pragma once
#ifndef __linux__
#define tstring std::basic_string<T, std::char_traits<T>, std::allocator<T> >
// Minimal MFC-style CString facade built on std::basic_string.
template<class T>
class CTString: public std::basic_string<T, std::char_traits<T>, std::allocator<T> >
#else
#define tstring std::string
#define TCHAR char
template<class T>
//#define T char
class CTString :public std::string
#endif
{
#ifdef __linux__
#define _vsntprintf vsnprintf
#define _tcsnicmp strncasecmp
#define _tcslen strlen
#define _T(x) x
#endif
private:
    // Shared printf engine for Format/AppendFormat. FIXES over the original:
    //  * va_copy is used so the caller's va_list is not consumed twice (UB before),
    //  * each retry passes the real buffer size (the original passed the failed
    //    return value -1 as the size),
    //  * the buffer actually grows between attempts (the retry counter was never
    //    incremented, so the loop made no progress).
    tstring FormatCore(const T *fmt, va_list ap){
        std::size_t buffLen = 512;
        for (int attempt = 0; attempt < 10; ++attempt){
            T *buff = new T[buffLen];
            va_list apCopy;
            va_copy(apCopy, ap);
            int written = _vsntprintf(buff, buffLen, fmt, apCopy);
            va_end(apCopy);
            if (written >= 0 && (std::size_t)written < buffLen){
                tstring result(buff, written);
                delete[] buff;
                return result;
            }
            delete[] buff;
            // glibc returns the required length; MSVC returns -1: grow either way.
            buffLen = (written > 0) ? (std::size_t)written + 1 : buffLen * 2;
        }
        return tstring(); // give up after bounded retries
    }

public:
    // --- construction -------------------------------------------------------
    CTString() : tstring() { }
    CTString(const tstring& s) : tstring(s) { }
    CTString(const tstring& s, std::size_t n) : tstring(s, n) { }
    CTString(const T * s, std::size_t n) : tstring(s, n) { }
    // A null pointer is mapped to " " to keep the original defensive behavior.
    CTString(const T * s) : tstring(s?s:(const T *)_T(" ")) { }
    CTString(std::size_t n, T c) : tstring(n, c) { }

    // --- implicit raw-pointer conversions (MFC compatibility) ---------------
    operator const T * () { return this->c_str(); }
    operator const T * () const { return this->c_str(); }
    operator T * () { return (T*) this->c_str(); }
    // FIX: the original returned a (const T*) expression from a T*-returning
    // conversion operator, which fails to compile when instantiated.
    operator T * () const { return const_cast<T*>(this->c_str()); }

    void operator=(T * buff){ this->assign(buff); }

    // --- simple accessors ---------------------------------------------------
    void Empty(){ this->clear(); }
    const bool IsEmpty() { return this->empty(); }
    int GetLength() { return (int)this->length(); }
    const T * GetBuffer() { return this->c_str(); }
    int Compare(const T* str) const{ return this->compare(str); }
    // Case-insensitive compare over the longer of the two lengths.
    // FIX: use _tcslen instead of strlen so wide-char instantiations work.
    int CompareNoCase(const T* str) { return _tcsnicmp(this->c_str(), str, this->length() > _tcslen(str) ? this->length() : _tcslen(str)); }

    // Find family: returns the index, or npos truncated to -1 (MFC convention).
    int Find(T ch) const { return (int)this->find(ch); }
    int Find(const T * lpszSub) const{ return (int)this->find(lpszSub); }
    int Find(T ch, int nStart) const{ return (int)this->find(ch, nStart); }
    int Find(const T * pstr, int nStart) const { return (int)this->find(pstr, nStart); }
    T GetAt(int nIndex) const{ return this->at(nIndex); }

    // --- formatting ---------------------------------------------------------
    // Replace the contents with the printf-formatted text.
    void Format(const T *fmt, ...){
        va_list ap;
        va_start(ap, fmt);
        tstring formatted = FormatCore(fmt, ap);
        va_end(ap);
        this->assign(formatted);
    }
    // Append printf-formatted text to the current contents.
    void AppendFormat(const T *fmt, ...){
        va_list ap;
        va_start(ap, fmt);
        tstring formatted = FormatCore(fmt, ap);
        va_end(ap);
        this->append(formatted);
    }

    // Resize and hand out a writable pointer to the internal buffer.
    T * GetBufferSetLength(int length){
        this->resize(length);
        return (T*) this->data();
    }

    // --- trimming -----------------------------------------------------------
    // FIX (whole family): guard against an initially-empty string; the original
    // called at(0)/back() before checking emptiness (throw / UB).
    void TrimLeft(){
        while (!this->empty() && (*this->begin() == _T('\t') || *this->begin() == _T('\n') || *this->begin() == _T('\r') || *this->begin() == _T(' '))){
            this->erase(this->begin());
        }
    }
    void TrimRight(){
        while (!this->empty() && (this->back() == _T('\t') || this->back() == _T('\n') || this->back() == _T('\r') || this->back() == _T(' '))){
            this->erase(this->length()-1);
        }
    }
    void TrimLeft(T chr){
        while (!this->empty() && this->at(0) == chr){
            this->erase(this->begin());
        }
    }
    void TrimRight(T chr){
        while (!this->empty() && this->back() == chr){
            this->erase(this->length()-1);
        }
    }
    // Strip any leading character contained in the set `chr`.
    void TrimLeft(const T * chr){
        bool stripped = true;
        while (stripped && !this->empty()){
            stripped = false;
            for (std::size_t i = 0; i < _tcslen(chr); i++){
                if (this->at(0) == chr[i]){
                    stripped = true;
                    this->erase(this->begin());
                    break;
                }
            }
        }
    }
    // Strip any trailing character contained in the set `chr`.
    // FIX: the original erased with the invalid iterator end(); erase the
    // last character instead.
    void TrimRight(const T *chr){
        bool stripped = true;
        while (stripped && !this->empty()){
            stripped = false;
            for (std::size_t i = 0; i < _tcslen(chr); i++){
                if (this->at(this->length()-1) == chr[i]){
                    stripped = true;
                    this->erase(this->length()-1);
                    break;
                }
            }
        }
    }

    // --- replacement --------------------------------------------------------
    // Replace every occurrence of chOld with chNew; returns the count.
    int Replace(T chOld, T chNew){
        int count = 0;
        for (std::size_t i = 0; i < this->length(); i++){
            if (this->at(i) == chOld){
                this->at(i) = chNew;
                count++;
            }
        }
        return count;
    }
    // Replace every occurrence of lpszOld with lpszNew; returns the count.
    // FIX: the search resumes after the inserted text, so a replacement whose
    // result still contains the needle (e.g. "a" -> "aa") no longer loops forever.
    int Replace(const T * lpszOld, const T * lpszNew){
        const std::size_t oldLen = _tcslen(lpszOld);
        const std::size_t newLen = _tcslen(lpszNew);
        int count = 0;
        if (oldLen == 0)
            return 0;
        std::size_t pos = this->find(lpszOld);
        while (pos != tstring::npos){
            this->erase(pos, oldLen);
            this->insert(pos, lpszNew);
            count++;
            pos = this->find(lpszOld, pos + newLen);
        }
        return count;
    }

    // Upper-case the string in place; returns a copy (MFC convention).
    CTString<T> MakeUpper(){
        for (std::size_t i = 0; i < this->length(); i++){
            this->at(i) = toupper(this->at(i));
        }
        return *this;
    }

    void ReleaseBuffer(int nlength = -1){
        if (nlength == -1){
            this->assign(this->c_str()); // re-sync length with the NUL terminator
        }
        else{
            this->resize(nlength);
        }
    }

    // --- substrings ---------------------------------------------------------
    CTString<T> Mid(int nFirst) const{
        return this->substr(nFirst, this->length() - nFirst);
    }
    CTString<T> Mid(int nFirst, int nCount) const{
        return this->substr(nFirst, nCount);
    }
    CTString<T> Left(int nCount) const{
        return this->substr(0, nCount);
    }
    CTString<T> Right(int nCount) const{
        if ((std::size_t)nCount > this->length())
            return *this;
        return this->substr(this->length()-nCount, nCount);
    }

    // TODO: not implemented in the original either; returns the whole string.
    CTString<T> Tokenize(T *token, int &iStart)
    {
        return *this;
    }

    int ReverseFind(T ch) const{
        return (int)this->rfind(ch);
    }
};
#ifndef __linux__
typedef CTString<TCHAR> CString;
typedef CTString<char> CAnsiString;
#else
typedef CTString<char> CString;
#endif
|
import numpy as np
class LaneDetector:
    """Tracks polynomial lane-line fits across successive video frames."""

    def __init__(self):
        # was the line detected in the last iteration?
        self.detected = False
        # polynomial coefficients of the previous accepted fit ([] until first update)
        self.recent_fit = []
        # polynomial coefficients averaged over the last n iterations
        self.best_fit = None
        # polynomial coefficients for the most recent fit
        self.current_fit = [np.array([False])]
        # difference in fit coefficients between last and new fits
        self.diffs = np.array([0, 0, 0], dtype='float')
        # x values for detected line pixels
        self.allx = None
        # y values for detected line pixels (was documented but never initialized)
        self.ally = None

    def update_lane_info(self, new_fit_coeffs, n):
        """Fold a new polynomial fit into the tracked state.

        Fixes over the original:
        * diffs are computed against the PREVIOUS fit; the original overwrote
          recent_fit first, so the difference was always new - new = 0.
        * the previous-fit check no longer relies on the truthiness of a numpy
          array (raises ValueError for arrays with more than one element).
        * a None fit marks the lane as not detected and leaves state untouched
          (the original would crash on the subsequent arithmetic).
        """
        self.detected = new_fit_coeffs is not None
        if not self.detected:
            return
        # Difference vs the previous fit, computed BEFORE overwriting it.
        previous_fit = self.recent_fit
        if previous_fit is not None and len(previous_fit) == len(new_fit_coeffs):
            self.diffs = np.subtract(new_fit_coeffs, previous_fit)
        self.recent_fit = new_fit_coeffs
        # Running average over the last n iterations.
        if self.best_fit is not None:
            self.best_fit = (self.best_fit * n + new_fit_coeffs) / (n + 1)
        else:
            self.best_fit = new_fit_coeffs
        self.current_fit = new_fit_coeffs
use std::path::Path;
/// A single data source; `load_url_with_config_dir` builds the connection URL
/// from a config directory path and a "read from env" flag.
/// Stored as a plain `fn` pointer, so only non-capturing closures fit.
struct DataSource {
    load_url_with_config_dir: fn(&Path, bool) -> String,
}

/// Top-level configuration wrapper.
struct Config {
    subject: SubjectConfig,
}

/// The `subject` section: an ordered list of data sources (first one wins).
struct SubjectConfig {
    datasources: Vec<DataSource>,
}
/// Build the URL from the FIRST configured data source, or a fallback
/// message when the configuration holds no data sources at all.
fn construct_url(config: &Config, config_dir_path: &Path, from_env: bool) -> String {
    match config.subject.datasources.first() {
        Some(datasource) => (datasource.load_url_with_config_dir)(config_dir_path, from_env),
        // Handle the case when no data sources are present
        None => String::from("No data sources found"),
    }
}
/// Demonstrates `construct_url` with a single closure-backed data source.
fn main() {
    let config = Config {
        subject: SubjectConfig {
            datasources: vec![
                DataSource {
                    // Coerces to a `fn` pointer: the closure must not capture.
                    load_url_with_config_dir: |path, from_env| {
                        format!("https://example.com/data?path={}&from_env={}", path.display(), from_env)
                    },
                },
                // Additional DataSource objects may be present
            ],
        },
    };
    let url = construct_url(&config, Path::new("/path/to/prisma"), true);
    println!("{}", url);
}
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_ACTIONSARRAY_STATIC_H_
#define _FA_ACTIONSARRAY_STATIC_H_
#include "FAConfig.h"
#include "FAActionsArrayA.h"
namespace BlingFire
{
///
/// Implementation based on static array.
///
// Stage container backed by a caller-supplied static array.
// NOTE(review): the array appears to be borrowed, not copied — the caller
// must keep pActsArr alive for this object's lifetime (confirm in the .cpp).
class FAActionsArray_static : public FAActionsArrayA {

public:
    // pActsArr: array of Count stage pointers; Count: number of stages.
    FAActionsArray_static (const FAActionsA ** pActsArr, const int Count);
    virtual ~FAActionsArray_static ();

public:
    // Number of stages held.
    const int GetStageCount () const;
    // Stage at index Num.
    const FAActionsA * GetStage (const int Num) const;

private:
    // Borrowed pointer to the stage array (not owned).
    const FAActionsA ** m_pActsArr;
    // Number of entries in m_pActsArr.
    const int m_Count;
};
}
#endif
|
const express = require('express');
const router = express.Router();
// Get list of all tasks
// REST routes for the task resource. All handlers are stubs awaiting
// implementation; the paths below document the intended API surface.

// Get list of all tasks
router.get('/', (req, res) => {
  // Code for fetching tasks
});

// Get a single task by id
router.get('/:id', (req, res) => {
  // Code for fetching a single task
});

// Create a new task
router.post('/', (req, res) => {
  // Code for creating a new task
});

// Update a task by id
router.put('/:id', (req, res) => {
  // Code for updating a task
});

// Delete a task by id
router.delete('/:id', (req, res) => {
  // Code for deleting a task
});

module.exports = router;
<filename>src/components/danekalendarz.js<gh_stars>0
import React from 'react';
import { useStaticQuery, graphql, StaticQuery } from "gatsby";
import styled from 'styled-components'
// Full-width wrapper for the calendar's two month cards (desktop sizing only).
const Kalendarzdane = styled.div`
  @media (min-width: 1200px){
    width: 100vw;
    min-height: 35vw;
  }
`

// A single month card: floated half-width on desktop, full-width stacked on mobile.
const Miesiac = styled.div`
  @media (min-width: 1200px){
    width: 44vw;
    float: left;
    box-sizing: border-box;
    border: 0.5vw solid grey;
    margin-right:2.5vw;
    margin-left: 2.5vw;
    margin-top: 2.5vw;
    min-height: 50vh;
  }
  @media (max-width: 1199px){
    clear: both;
    box-sizing: border-box;
    border: 0.5vw solid grey;
    margin-right:2.5vw;
    margin-left: 2.5vw;
    width: 94vw;
    min-height: 50vh;
    margin-bottom: 2.5vw;
  }
`

// Month title heading.
const Tytul = styled.h2`
  @media (min-width: 1200px){
    color: black;
    text-align: center;
    font-size: 2vw;
  }
  @media (max-width: 1199px){
    text-align: center;
  }
`

// A single event line inside a month card.
const H3 = styled.h3`
  @media (min-width: 1200px){
    font-size: 1vw;
  }
`
const Kalendarz = () => (
<StaticQuery
query={graphql`
query{
post{
kalendarzs {
tytul
wydarzenie
wydarzeniedwa
wydarzenietrzy
wydarzeniecztery
wydarzeniepiec
wydarzenieszesc
wydarzeniesiedem
wydarzenieosiem
tytuldwa
wydarzeniedziewiec
wydarzeniedziesiec
wydarzeniejedenascie
wydarzeniedwanascie
wydarzenietrzynascie
wydarzenieczternascie
wydarzeniepietnascie
wydarzenieszesnascie
}
}
}
`}
render={({post: {kalendarzs}}) => (
<div className="post">
{ kalendarzs.map(kalendarz => (
<div key={kalendarz.id}>
<Kalendarzdane>
<Miesiac>
<Tytul>{kalendarz.tytul}</Tytul>
<H3>{kalendarz.wydarzenie}</H3>
<H3>{kalendarz.wydarzeniedwa}</H3>
<H3>{kalendarz.wydarzenietrzy}</H3>
<H3>{kalendarz.wydarzeniecztery}</H3>
<H3>{kalendarz.wydarzeniepiec}</H3>
<H3>{kalendarz.wydarzenieszesc}</H3>
<H3>{kalendarz.wydarzeniesiedem}</H3>
<H3>{kalendarz.wydarzenieosiem}</H3>
</Miesiac>
<Miesiac>
<Tytul>{kalendarz.tytuldwa}</Tytul>
<H3>{kalendarz.wydarzeniedziewiec}</H3>
<H3>{kalendarz.wydarzeniedziesiec}</H3>
<H3>{kalendarz.wydarzeniejedenascie}</H3>
<H3>{kalendarz.wydarzeniedwanascie}</H3>
<H3>{kalendarz.wydarzenietrzynascie}</H3>
<H3>{kalendarz.wydarzenieczternascie}</H3>
<H3>{kalendarz.wydarzeniepietnascie}</H3>
<H3>{kalendarz.wydarzenieszesnascie}</H3>
</Miesiac>
</Kalendarzdane>
</div>
) ) }
</div>
)}
/>
)
export default Kalendarz;
|
package resolvers
import (
"errors"
"github.com/bradpurchase/grocerytime-backend/internal/pkg/auth"
"github.com/bradpurchase/grocerytime-backend/internal/pkg/notifications"
"github.com/graphql-go/graphql"
)
// NotifyTripUpdatedItemsAddedResolver resolves the notifyTripUpdatedItemsAdded mutation
// NotifyTripUpdatedItemsAddedResolver resolves the notifyTripUpdatedItemsAdded mutation.
//
// It authenticates the caller from the Authorization header, then fires an
// "items added" push notification in the background. The app scheme
// (i.e. Debug, Beta, Release) is required so the notifications package can
// use the proper apns certificate.
func NotifyTripUpdatedItemsAddedResolver(p graphql.ResolveParams) (interface{}, error) {
	rootValue := p.Info.RootValue.(map[string]interface{})
	// FIX: the original asserted header.(string) unconditionally, which
	// panics when the Authorization entry is missing or not a string.
	header, ok := rootValue["Authorization"].(string)
	if !ok {
		return false, errors.New("no authorization header provided")
	}
	user, err := auth.FetchAuthenticatedUser(header)
	if err != nil {
		return false, err
	}
	appScheme, ok := rootValue["App-Scheme"].(string)
	if !ok {
		return false, errors.New("no app scheme provided")
	}
	// FIX: a non-int numItemsAdded previously panicked; report it instead.
	numItemsAdded, ok := p.Args["numItemsAdded"].(int)
	if !ok {
		return false, errors.New("numItemsAdded must be an integer")
	}
	storeID := p.Args["storeId"]
	go notifications.ItemsAdded(user.ID, storeID, numItemsAdded, appScheme)
	return true, nil
}
|
#!/bin/bash
#
# MIT License
#
# (C) Copyright 2021-2022 Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# Enable and configure the Ceph dashboard, create the admin user, and wire
# the dashboard to radosgw using a freshly created system user's keys.
function install_dashboard () {
    echo "Enabling the Ceph Dashboard"
    # Keep enabling the module until an mgr on a storage node reports it.
    until [[ "$(ceph mgr services|jq .dashboard)" =~ "ncn-s00" ]]
    do
        ceph mgr module enable dashboard
    done
    echo "Copying or Creating certificates"
    ceph dashboard create-self-signed-cert
    echo "Checking port info"
    # FIX: the original `if $(ceph config get ...) != 8443` executed the
    # command output as a command instead of comparing it, and the matching
    # `config set` was missing the `mgr` target used by `config get`.
    if [[ "$(ceph config get mgr mgr/dashboard/server_port)" != "8443" ]]
    then
        ceph config set mgr mgr/dashboard/server_port 8443
    fi
    read -s -p "Enter a password for the initial deployment" passwd
    read -s -p "Confirm passwd" passwd2
    # FIX: the original `if [[ ... ]] then` was missing the `;` before `then`
    # (a syntax error); also quote both sides of the comparison.
    if [[ "$passwd" == "$passwd2" ]]; then
        echo "Creating cray_cephadm dashboard user"
        ceph dashboard ac-user-create cray_cephadm "$passwd" administrator
        echo "Setting up dashboard access to radosgw"
        radosgw-admin user create --uid=cray_cephadm --display-name=cray_cephadm --system
        # FIX: use `jq -r` so the keys are raw strings, not quote-wrapped JSON.
        access_key=$(radosgw-admin user info --uid cray_cephadm|jq -r '.keys[0].access_key')
        secret_key=$(radosgw-admin user info --uid cray_cephadm|jq -r '.keys[0].secret_key')
        ceph dashboard set-rgw-api-access-key -i "$access_key"
        ceph dashboard set-rgw-api-secret-key -i "$secret_key"
        # Leaving a place where we can set the rgw-vip address
        # ceph dashboard set-rgw-api-host <host>
        ceph dashboard set-rgw-api-port 8080
        # Putting option in case we need to enable/disable https
        # ceph dashboard set-rgw-api-scheme <scheme> # http or https
        # Need to investigate the below
        # ceph dashboard set-rgw-api-admin-resource <admin_resource>
        echo "Disable ssl_verify until we are on signed certs"
        ceph dashboard set-rgw-api-ssl-verify False
        # Add checks for verifying the dashboard is up and functional
    else
        echo "passwords did not match, please re-run the install"
    fi
}
|
<filename>min-triangle-sum/triangle-sum-day06/src/main/java/ua/kata/MinTriangleSum.java
package ua.kata;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class MinTriangleSum {

    // Rows of the triangle, top row first; row i has i + 1 entries.
    private final List<List<Integer>> triangle;

    MinTriangleSum(List<List<Integer>> triangle) {
        this.triangle = triangle;
    }

    /**
     * Computes the minimal top-to-bottom path sum using bottom-up dynamic
     * programming: starting from the last row, each cell is folded with the
     * cheaper of its two children until only the apex value remains.
     */
    int compute() {
        List<Integer> bottomRow = triangle.get(triangle.size() - 1);
        int[] best = new int[bottomRow.size()];
        for (int j = 0; j < bottomRow.size(); j++) {
            best[j] = bottomRow.get(j);
        }
        for (int row = triangle.size() - 2; row >= 0; row--) {
            List<Integer> current = triangle.get(row);
            for (int j = 0; j < current.size(); j++) {
                // Cheapest continuation is the smaller of the two cells below.
                best[j] = Math.min(best[j], best[j + 1]) + current.get(j);
            }
        }
        return best[0];
    }
}
|
#!/bin/bash
# Entrypoint for the Staytus container: optionally writes config/database.yml
# from environment variables, then builds/upgrades and starts the app.

# Connection settings with sane defaults; override via the container environment.
AUTO_CONF="${AUTO_CONF:-true}"
DB_ADAPTER="${DB_ADAPTER:-mysql2}"
DB_POOL="${DB_POOL:-5}"
DB_HOST="${DB_HOST:-database}"
DB_USER="${DB_USER:-staytus}"
DB_PASSWORD="${DB_PASSWORD:-staytus}"
DB_DATABASE="${DB_DATABASE:-staytus}"

cd /opt/staytus/staytus || { echo "staytus directory not found."; exit 1; }

if [ "$AUTO_CONF" = "true" ] ; then
    # Seed database.yml from the shipped example on first run only.
    if [ ! -f "/opt/staytus/staytus/config/database.yml" ]; then
        cp -f /opt/staytus/staytus/config/database.example.yml /opt/staytus/staytus/config/database.yml
    fi
    # Best-effort database creation; "already exists" errors are expected on restart.
    echo "CREATE DATABASE staytus CHARSET utf8 COLLATE utf8_unicode_ci;" | mysql -h "$DB_HOST" -u "$DB_USER" -p"$DB_PASSWORD" || { echo "=> Issues creating database staytus, can be ignored when the database already exists."; true; }
    # Rewrite each connection setting in database.yml in place.
    sed -i "s|adapter:.*|adapter: \"$DB_ADAPTER\"|" /opt/staytus/staytus/config/database.yml
    sed -i "s|pool:.*|pool: $DB_POOL|" /opt/staytus/staytus/config/database.yml
    sed -i "s|host:.*|host: \"$DB_HOST\"|" /opt/staytus/staytus/config/database.yml
    sed -i "s|username:.*|username: \"$DB_USER\"|" /opt/staytus/staytus/config/database.yml
    sed -i "s|password:.*|password: \"$DB_PASSWORD\"|" /opt/staytus/staytus/config/database.yml
    sed -i "s|database:.*|database: \"$DB_DATABASE\"|" /opt/staytus/staytus/config/database.yml
fi

# Fail fast and echo commands from here on.
set -ex
bundle exec rake staytus:build staytus:upgrade
# Replace the shell with the process supervisor (becomes PID 1).
exec procodile start -f
#!/bin/sh
# Container entrypoint: substitute frontend branding placeholders in the
# compiled bundle, then hand off to the given command.
# ${VAR?UNKNOWN} aborts the script with an error when the variable is unset.
sed -i "s/{FRONTEND_TITLE}/${FRONTEND_TITLE?UNKNOWN}/g" /usr/share/nginx/html/frontend/main.js
sed -i "s/{FRONTEND_DESCRIPTION}/${FRONTEND_DESCRIPTION?UNKNOWN}/g" /usr/share/nginx/html/frontend/main.js
# Run the container's CMD in place of this shell.
exec "$@"
|
import React, { ReactNode, createContext, useContext } from 'react';
// The cycle of ordered-list marker styles, outermost list first.
const LIST_TYPES = ['number', 'alpha', 'roman'] as const;
const DEFAULT_LIST_TYPE = LIST_TYPES[0];

type ListType = typeof LIST_TYPES[number];

// Advance to the next marker style; indexOf+1 walks off the tuple end for the
// last entry, so `??` wraps back to the default.
const nextListType = (type: ListType): ListType =>
  LIST_TYPES[LIST_TYPES.indexOf(type) + 1] ?? DEFAULT_LIST_TYPE;

const ListContext = createContext<ListType>(DEFAULT_LIST_TYPE);

interface OrderedListProviderProps {
  children: ReactNode;
}

// Exposes the current marker type plus a provider that nests children one
// level deeper in the cycle.
// NOTE(review): NextOrderedListProvider is a new component identity on every
// render, which remounts its subtree — confirm this is intended.
export const useOrderedListContext = () => {
  const type = useContext(ListContext);
  return {
    NextOrderedListProvider: ({ children }: OrderedListProviderProps) => (
      <ListContext.Provider value={nextListType(type)}>
        {children}
      </ListContext.Provider>
    ),
    type,
  };
};
|
<reponame>zangiboy/learning_testing<filename>spec/triangle-spec.js<gh_stars>0
import { Triangle } from './../src/project.js';
// Specs for Triangle.checkType(): classification of three side lengths.
describe('Triangle', function() {
  it('should return equilateral for a triangle which has three equal sides', function() {
    let equal = new Triangle(3, 3, 3);
    expect(equal.checkType()).toEqual("equilateral");
  })
  it('should correctly determine whether three lengths can be made into a triangle', function() {
    // 3 + 4 < 29 violates the triangle inequality.
    let testTriangle = new Triangle(3, 4, 29);
    expect(testTriangle.checkType()).toEqual("not a triangle");
  });
  it('should return isosceles for a triangle which has two sides of equal length', function () {
    let testTriangle = new Triangle(3, 3, 5);
    expect(testTriangle.checkType()).toEqual('isosceles');
  });
  it('should return scalene for a triangle the sides of which are all different lengths', function (){
    let testTriangle = new Triangle(3, 4, 5);
    expect(testTriangle.checkType()).toEqual('scalene');
  });
});
<reponame>INSO-TUWien/EffortBurst
/**
 * Node types used to classify items in the effort hierarchy.
 * The values are the string identifiers used throughout the app.
 */
export const ItemNodeType = {
  EPIC: 'epic',
  ISSUE: 'issue',
  PROJECT: 'project',
  SUB_TASK: 'subTask'
};
<gh_stars>0
from werkzeug.wrappers import Response
import frappe
from frappe import _
from frappe.contacts.doctype.contact.contact import get_contact_with_phone_number
from .twilio_handler import Twilio, IncomingCall, TwilioCallDetails
@frappe.whitelist()
def get_twilio_phone_numbers():
    """List the Twilio phone numbers available to this account.

    Returns an empty list when Twilio is not configured or the account
    has no numbers.
    """
    client = Twilio.connect()
    if not client:
        return []
    return client.get_phone_numbers() or []
@frappe.whitelist()
def generate_access_token():
    """Returns access token that is required to authenticate Twilio Client SDK.

    The token is bound to the caller's mapped Twilio number and session
    identity; an empty dict is returned when Twilio is not set up.
    """
    client = Twilio.connect()
    if not client:
        return {}
    caller_number = frappe.db.get_value('Voice Call Settings', frappe.session.user, 'twilio_number')
    if not caller_number:
        # The caller cannot place calls without a mapped outbound number.
        return {
            "ok": False,
            "error": "caller_phone_identity_missing",
            "detail": "Phone number is not mapped to the caller"
        }
    access_token = client.generate_voice_access_token(
        from_number=caller_number, identity=frappe.session.user)
    return {
        'token': access_token.decode('utf-8')
    }
@frappe.whitelist(allow_guest=True)
def voice(**kwargs):
    """This is a webhook called by twilio to get instructions when the voice call request comes to twilio server."""
    def _get_caller_number(caller):
        # Twilio Client callers arrive as 'client:<identity>'; map that
        # identity back to the user's configured outbound Twilio number.
        identity = caller.replace('client:', '').strip()
        user = Twilio.emailid_from_identity(identity)
        return frappe.db.get_value('Voice Call Settings', user, 'twilio_number')

    args = frappe._dict(kwargs)
    twilio = Twilio.connect()
    if not twilio:
        return
    # NOTE(review): `assert` statements are stripped under `python -O`, and
    # these are the only request-validation checks here — confirm whether a
    # hard check with an explicit error response is needed instead.
    assert args.AccountSid == twilio.account_sid
    assert args.ApplicationSid == twilio.application_sid
    # Generate TwiML instructions to make a call
    from_number = _get_caller_number(args.Caller)
    resp = twilio.generate_twilio_dial_response(from_number, args.To)
    call_details = TwilioCallDetails(args, call_from=from_number)
    create_call_log(call_details)
    return Response(resp.to_xml(), mimetype='text/xml')
@frappe.whitelist(allow_guest=True)
def twilio_incoming_call_handler(**kwargs):
    """Webhook for inbound calls: log the call, then return TwiML instructions."""
    call_args = frappe._dict(kwargs)
    create_call_log(TwilioCallDetails(call_args))
    twiml = IncomingCall(call_args.From, call_args.To).process()
    return Response(twiml.to_xml(), mimetype='text/xml')
@frappe.whitelist()
def create_call_log(call_details: TwilioCallDetails):
    """Persist a Twilio call as a 'Call Log' document."""
    doc_fields = dict(call_details.to_dict())
    doc_fields.update({'doctype': 'Call Log', 'medium': 'Twilio'})
    call_log = frappe.get_doc(doc_fields)
    # System-generated record: bypass the current user's permissions.
    call_log.flags.ignore_permissions = True
    call_log.save()
    frappe.db.commit()
@frappe.whitelist()
def update_call_log(call_sid, status=None):
    """Update call log status.

    Refreshes status and duration from Twilio; no-op when Twilio is not
    configured or the log does not exist.
    """
    client = Twilio.connect()
    if not client:
        return
    if not frappe.db.exists("Call Log", call_sid):
        return
    call_info = client.get_call_info(call_sid)
    log_doc = frappe.get_doc("Call Log", call_sid)
    log_doc.status = status or TwilioCallDetails.get_call_status(call_info.status)
    log_doc.duration = call_info.duration
    # System-driven update: bypass the current user's permissions.
    log_doc.flags.ignore_permissions = True
    log_doc.save()
    frappe.db.commit()
@frappe.whitelist(allow_guest=True)
def update_recording_info(**kwargs):
    """Webhook: attach the recording URL to the call's 'Call Log'.

    Errors are logged rather than raised so the webhook responds cleanly.
    """
    try:
        args = frappe._dict(kwargs)
        recording_url = args.RecordingUrl
        call_sid = args.CallSid
        update_call_log(call_sid)
        frappe.db.set_value("Call Log", call_sid, "recording_url", recording_url)
    except Exception:
        # FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # catch Exception and keep the best-effort error logging.
        frappe.log_error(title=_("Failed to capture Twilio recording"))
@frappe.whitelist()
def get_contact_details(phone):
    """Get information about existing contact in the system.

    Returns a dict with first_name/email_id/phone_number, or None when no
    contact matches the (whitespace-stripped) phone number.
    """
    contact = get_contact_with_phone_number(phone.strip())
    if not contact: return
    contact_doc = frappe.get_doc('Contact', contact)
    # NOTE(review): assumes first_name is always set — .title() raises
    # AttributeError for a contact without a first name; confirm upstream.
    return contact_doc and {
        'first_name': contact_doc.first_name.title(),
        'email_id': contact_doc.email_id,
        'phone_number': contact_doc.phone
    }
|
/**
 * Returns true when the string contains all five vowels (a, e, i, o, u),
 * case-insensitively.
 *
 * FIX: the original counted vowel OCCURRENCES and tested count == 5, so a
 * string like "aaaaa" wrongly reported all vowels present while the stated
 * intent (per the original comments) was distinct-vowel coverage. It also
 * ignored upper-case vowels.
 */
public static boolean isVowelPresent(String str) {
    // seen[k] marks whether the k-th vowel of "aeiou" has appeared.
    boolean[] seen = new boolean[5];
    String vowels = "aeiou";
    for (int i = 0; i < str.length(); i++) {
        int idx = vowels.indexOf(Character.toLowerCase(str.charAt(i)));
        if (idx >= 0) {
            seen[idx] = true;
        }
    }
    for (boolean present : seen) {
        if (!present) {
            return false;
        }
    }
    return true;
}
// Example usage: "Hello World!" contains only 'e' and 'o', so this prints "False".
String s = "Hello World!";
if (isVowelPresent(s))
    System.out.println("True");
else
    System.out.println("False");
#include <iostream>
using namespace std;
// Returns true when n is a prime number (trial division up to n/2).
// FIX: the original returned true for 0 and 1, which are not prime,
// so the program printed them as primes.
bool isPrime(int n) {
    if (n < 2)
        return false;
    for (int i = 2; i <= n / 2; ++i) {
        if (n % i == 0)
            return false;
    }
    return true;
}
// Prints every prime in [0, 100], comma-separated (with a trailing comma,
// matching the original output format).
int main()
{
    cout << "Prime numbers from 0 to 100 are: ";
    for (int i = 0; i <= 100; ++i) {
        if (isPrime(i))
            cout << i << ", ";
    }
    return 0;
}
def fibonacci_series(n):
    """Return the n-th Fibonacci number (F(0)=0, F(1)=1).

    Mirrors the original behavior exactly, including returning None for
    negative n.
    """
    if n < 0:
        return None
    if n == 0:
        return 0
    if n == 1:
        return 1
    # Iterate with a tuple swap instead of a temporary, and avoid the
    # original's shadowing of the function name by a local variable.
    prev, curr = 0, 1
    for _ in range(2, n + 1):
        prev, curr = curr, prev + curr
    return curr
<reponame>tcmRyan/OpenOLAT<filename>src/main/java/org/olat/repository/manager/RepositoryEntryStatisticsDAO.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.repository.manager;
import java.util.Calendar;
import java.util.Date;
import javax.annotation.PostConstruct;
import javax.persistence.LockModeType;
import org.olat.core.commons.persistence.DB;
import org.olat.core.commons.services.commentAndRating.UserCommentsDelegate;
import org.olat.core.commons.services.commentAndRating.UserRatingsDelegate;
import org.olat.core.commons.services.commentAndRating.manager.UserCommentsDAO;
import org.olat.core.commons.services.commentAndRating.manager.UserRatingsDAO;
import org.olat.core.id.OLATResourceable;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.model.RepositoryEntryStatistics;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* Initial date: 20.02.2014<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
@Service
// DAO for RepositoryEntryStatistics rows. Counters are bumped with bulk
// JPQL updates (no entity load); the class also registers itself as a
// ratings/comments delegate so aggregate values are mirrored into the
// statistics table when they change.
public class RepositoryEntryStatisticsDAO implements UserRatingsDelegate, UserCommentsDelegate {
@Autowired
private DB dbInstance;
@Autowired
private UserRatingsDAO userRatingsDao;
@Autowired
private UserCommentsDAO userCommentsDao;
@PostConstruct
public void init() {
// Register this DAO to receive rating and comment update callbacks.
// NOTE(review): "addDegelate" looks like a typo for "addDelegate", but
// the call must match the method actually declared on UserRatingsDAO —
// confirm against that class before renaming anything.
userRatingsDao.addDegelate(this);
userCommentsDao.addDelegate(this);
}
/**
 * Increment the launch counter and touch lastUsage via a bulk JPQL
 * update (avoids loading the statistics entity).
 * @param re the repository entry whose statistics row is updated
 */
public void incrementLaunchCounter(RepositoryEntry re) {
String updateQuery = "update repoentrystats set launchCounter=launchCounter+1, lastUsage=:now where key=:statsKey";
int updated = dbInstance.getCurrentEntityManager().createQuery(updateQuery)
.setParameter("statsKey", re.getStatistics().getKey())
.setParameter("now", new Date())
.executeUpdate();
if(updated > 0) {
dbInstance.commit();//big performance improvement
}
}
/**
 * Increment the download counter and touch lastUsage via a bulk JPQL
 * update (avoids loading the statistics entity).
 * @param re the repository entry whose statistics row is updated
 */
public void incrementDownloadCounter(RepositoryEntry re) {
String updateQuery = "update repoentrystats set downloadCounter=downloadCounter+1, lastUsage=:now where key=:statsKey";
int updated = dbInstance.getCurrentEntityManager().createQuery(updateQuery)
.setParameter("statsKey", re.getStatistics().getKey())
.setParameter("now", new Date())
.executeUpdate();
if(updated > 0) {
dbInstance.commit();//big performance improvement
}
}
/**
 * Set last-usage date to now for the specified repository entry
 * with a granularity of 1 minute: the row is only written when the
 * cached lastUsage is more than 60s old, and the WHERE clause guards
 * against concurrent writers with a fresher timestamp.
 * @param re the repository entry to touch (null is a no-op)
 */
public void setLastUsageNowFor(RepositoryEntry re) {
if (re == null) return;
Date newUsage = new Date();
if(re.getStatistics().getLastUsage().getTime() + 60000 < newUsage.getTime()) {
Calendar cal = Calendar.getInstance();
cal.add(Calendar.MINUTE, -1);
Date limit = cal.getTime();
String updateQuery = "update repoentrystats set lastUsage=:now where key=:statsKey and lastUsage<:limit";
int updated = dbInstance.getCurrentEntityManager().createQuery(updateQuery)
.setParameter("statsKey", re.getStatistics().getKey())
.setParameter("now", newUsage)
.setParameter("limit", limit)
.executeUpdate();
if(updated > 0) {
dbInstance.commit();//big performance improvement
}
}
}
// Loads the statistics entity for a repository entry identified by its
// resourceable id (no lock).
protected RepositoryEntryStatistics loadStatistics(OLATResourceable repositoryEntryRes) {
StringBuilder sb = new StringBuilder();
sb.append("select v.statistics from ").append(RepositoryEntry.class.getName()).append(" as v")
.append(" where v.key=:key");
return dbInstance.getCurrentEntityManager()
.createQuery(sb.toString(), RepositoryEntryStatistics.class)
.setParameter("key", repositoryEntryRes.getResourceableId())
.getSingleResult();
}
// Loads the statistics row under a PESSIMISTIC_WRITE lock for update.
// The entry and its statistics are detached first so the locked, fresh
// copy is not shadowed by a stale instance in the persistence context.
private RepositoryEntryStatistics loadStatisticsForUpdate(OLATResourceable repositoryEntryRes) {
if(repositoryEntryRes instanceof RepositoryEntry) {
RepositoryEntry re = (RepositoryEntry)repositoryEntryRes;
dbInstance.getCurrentEntityManager().detach(re);
dbInstance.getCurrentEntityManager().detach(re.getStatistics());
}
StringBuilder sb = new StringBuilder();
sb.append("select stats from ").append(RepositoryEntryStatistics.class.getName()).append(" as stats")
.append(" where stats.key in (select v.statistics.key from ").append(RepositoryEntry.class.getName()).append(" as v where v.key=:key)");
return dbInstance.getCurrentEntityManager()
.createQuery(sb.toString(), RepositoryEntryStatistics.class)
.setParameter("key", repositoryEntryRes.getResourceableId())
.setLockMode(LockModeType.PESSIMISTIC_WRITE)
.getSingleResult();
}
// Delegate filter: this DAO only handles ratings/comments attached
// directly to a RepositoryEntry (no sub-path).
@Override
public boolean accept(OLATResourceable ores, String resSubPath) {
if("RepositoryEntry".equals(ores.getResourceableTypeName()) && resSubPath == null) {
return true;
}
return false;
}
// Ratings delegate callback: mirror the new average rating and count
// into the locked statistics row.
@Override
public boolean update(OLATResourceable ores, String resSubPath, double newAverageRating, long numOfRatings) {
RepositoryEntryStatistics statistics = loadStatisticsForUpdate(ores);
if(statistics != null) {
statistics.setRating(newAverageRating);
statistics.setNumOfRatings(numOfRatings);
statistics.setLastModified(new Date());
dbInstance.getCurrentEntityManager().merge(statistics);
return true;
}
return false;
}
// Comments delegate callback: mirror the new comment count into the
// locked statistics row.
@Override
public boolean update(OLATResourceable ores, String resSubPath, int numOfComments) {
RepositoryEntryStatistics statistics = loadStatisticsForUpdate(ores);
if(statistics != null) {
statistics.setNumOfComments(numOfComments);
statistics.setLastModified(new Date());
dbInstance.getCurrentEntityManager().merge(statistics);
return true;
}
return false;
}
}
<reponame>m-wrona/hevicado<filename>fe/test/unit/modules/commons/spinner/http-progress-watcher-spec.js
'use strict';
// Jasmine spec for the HttpProgressWatcher $http interceptor: it should
// increment a spinner counter on outgoing requests and decrement it on
// responses/errors, rejecting failed operations via $q.
describe('http-progress-watcher-spec:', function () {
//prepare module for testing
beforeEach(angular.mock.module('commons.spinner'));
describe('HttpProgressWatcher-spec:', function () {
var watcher;
var mockCounter, mockQ;
beforeEach(angular.mock.module(function ($provide) {
//mock dependencies
// SpinnerCounter is replaced with increment/decrement spies; $q is
// replaced with a stub whose reject() just sets a flag we assert on.
mockCounter = jasmine.createSpyObj('SpinnerCounter', ['increment', 'decrement']);
$provide.value('SpinnerCounter', mockCounter);
mockQ = {
reject: function () {
mockQ.rejected = true;
}
};
$provide.value('$q', mockQ);
}));
beforeEach(inject(function ($injector) {
// Resolve the interceptor under test after the mocks are registered.
watcher = $injector.get('HttpProgressWatcher');
}));
it('should increment counter when HTTP request is sent', function () {
//given watcher is active
expect(watcher).toBeDefined();
//when request is sent
watcher.request();
//then operation is marked as started
expect(mockCounter.increment).toHaveBeenCalled();
});
it('should decrement counter when HTTP response is received', function () {
//given watcher is active
expect(watcher).toBeDefined();
//when response is received
watcher.response();
//then operation is marked as completed
expect(mockCounter.decrement).toHaveBeenCalled();
});
it('should decrement counter when HTTP response failure is received', function () {
//given watcher is active
expect(watcher).toBeDefined();
//when response is received
watcher.responseError();
//then operation is marked as completed
expect(mockCounter.decrement).toHaveBeenCalled();
//and operation is rejected
expect(mockQ.rejected).toBe(true);
});
it('should decrement counter when HTTP request failure is received', function () {
//given watcher is active
expect(watcher).toBeDefined();
//when response is received
watcher.requestError();
//then operation is marked as completed
expect(mockCounter.decrement).toHaveBeenCalled();
//and operation is rejected
expect(mockQ.rejected).toBe(true);
});
});
});
# Reports install progress (0 then 100) into a progress file, presumably
# polled by a UI. An optional first argument overrides the default path.
PROGRESS_FILE=/tmp/dependancy_camera_in_progress
# Fix: quote "$1" so an argument containing spaces or glob characters
# does not break the test ([ ! -z $1 ] word-splits); -n is the idiomatic
# equivalent of ! -z.
if [ -n "$1" ]; then
    PROGRESS_FILE=$1
fi
# Quote every expansion of the (possibly user-supplied) path.
touch "${PROGRESS_FILE}"
echo 0 > "${PROGRESS_FILE}"
echo "Launch install of MusicCast dependancy"
echo 100 > "${PROGRESS_FILE}"
echo "Everything is successfully installed!"
rm "${PROGRESS_FILE}"
|
# Build and install the project into the local Maven repository,
# skipping test compilation and execution entirely.
mvn install -Dmaven.test.skip=true
# Session idle timeout: automatically log the user out after 300 seconds
# (5 minutes) of inactivity, so idle boxes don't sit around. TMOUT is
# exported for child shells and made read-only so the user cannot unset
# or change it.
TMOUT=300
export TMOUT
readonly TMOUT
echo "*********************************************************"
echo "* Welcome to the Online Learning Environment *"
echo "* *"
echo "* This shell has a 5 minute timeout. *"
echo "* You will be logged out after 5 minutes of inactivity. *"
echo "*********************************************************"
|
#!/usr/bin/env bash
# Copyright 2018 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script runs the end-to-end tests against Knative Serving built from source.
# It is started by prow for each PR. For convenience, it can also be executed manually.
# If you already have the *_OVERRIDE environment variables set, call
# this script with the --run-tests arguments and it will start knative in
# the cluster and run the tests.
# Calling this script without arguments will create a new cluster in
# project $PROJECT_ID, start knative in it, run the tests and delete the
# cluster.
# Shared helpers: header, initialize, install_knative_serving,
# go_test_e2e, fail_test, success, use_resolvable_domain, ...
source $(dirname $0)/e2e-common.sh
# Latest serving release. This is intentionally hardcoded so that we can test
# upgrade/downgrade on release branches (or even arbitrary commits).
#
# Unfortunately, that means we'll need to manually bump this version when we
# make new releases.
#
# Fortunately, that's not *too* terrible, because forgetting to bump this
# version will make tests either:
# 1. Still pass, meaning we can upgrade from earlier than latest release (good).
# 2. Fail, which might be remedied by bumping this version.
readonly LATEST_SERVING_RELEASE_VERSION=0.5.0
# Install the pinned public release from its GitHub release artifacts.
function install_latest_release() {
header "Installing Knative latest public release"
local url="https://github.com/knative/serving/releases/download/v${LATEST_SERVING_RELEASE_VERSION}"
# TODO: should this test install istio and build at all, or only serving?
install_knative_serving \
"${url}/serving.yaml" \
|| fail_test "Knative latest release installation failed"
}
# Install serving built from the current checkout (HEAD).
function install_head() {
header "Installing Knative head release"
install_knative_serving || fail_test "Knative head release installation failed"
}
# Cluster setup hook invoked by initialize: build once, then start from
# the latest public release so the preupgrade tests run against it.
function knative_setup() {
# Build Knative to generate Istio manifests from HEAD for install_latest_release
# We do it here because it's a one-time setup
build_knative_from_source
install_latest_release
}
# Script entry point.
initialize $@
# TODO(#2656): Reduce the timeout after we get this test to consistently passing.
TIMEOUT=10m
# Flow: preupgrade tests on the release -> upgrade to HEAD ->
# postupgrade tests -> downgrade back to the release.
header "Running preupgrade tests"
go_test_e2e -tags=preupgrade -timeout=${TIMEOUT} ./test/upgrade \
--resolvabledomain=$(use_resolvable_domain) || fail_test
install_head
header "Running postupgrade tests"
go_test_e2e -tags=postupgrade -timeout=${TIMEOUT} ./test/upgrade \
--resolvabledomain=$(use_resolvable_domain) || fail_test
install_latest_release
success
|
def maximumOfThree(num1, num2, num3):
    """Return the largest of the three arguments.

    Fix: the original hand-rolled the comparison and shadowed the
    built-in ``max`` with a local variable; delegating to the built-in
    is equivalent and idiomatic. Interface and results are unchanged.
    """
    return max(num1, num2, num3)
<gh_stars>1-10
import argonaut._
import Argonaut._
// Writes manual-check.tsv from two inputs: accepted.tsv (wiki id <->
// WordNet id pairs) and wordnet.json (WordNet entries). First it lists
// accepted pairs whose wiki id does not obviously match the WordNet
// lemmas/definition, then every WordNet instance entry with no accepted
// mapping at all.
object FindNonaccepted {
// "Surname, context" form, e.g. "Paris, France".
val commaSep = "(.*), (.*)".r
// "Title (disambiguator)" form, e.g. "Mercury (element)".
val disambig = "(.*) \\((.*)\\)".r
def main(args : Array[String]) {
// Decode the whole WordNet dump via argonaut; .get assumes the file
// parses — the program fails fast otherwise.
val wn = io.Source.fromFile("wordnet.json").mkString("").
decodeOption[Map[String, WordNetEntry]].get
// accepted.tsv: column 0 = wiki id, column 1 = WordNet id.
val accepted = io.Source.fromFile("accepted.tsv").getLines.map({ line =>
val elems = line.split("\t")
(elems(0), elems(1))
}).toSet
val acceptedWn = accepted.map(_._2)
val out = new java.io.PrintWriter("manual-check.tsv")
out.println("WordNet ID\tLemmas\tDefinition\tWikiID\tType")
// Keep only accepted pairs that need a manual look: the wiki id is not
// a lemma, and neither comma/disambiguation form matches lemma+definition.
accepted.filter({
case (wikiId, wnId) => {
val lemmas = wn(wnId).lemmas.map(_.toLowerCase)
val defn = wn(wnId).defn.toLowerCase
wikiId.toLowerCase match {
case x if lemmas.contains(x) => false
case commaSep(x,y) if lemmas.contains(x) && defn.contains(y) => false
case disambig(x,y) if lemmas.contains(x) && defn.contains(y) => false
case _ => true
}
}
}).foreach({
case (wikiId, wnId) => {
val wne = wn(wnId)
out.println(s"$wnId\t${wne.lemmas.mkString(",")}\t${wne.defn.replaceAll("\t", " ")}\t$wikiId")
}
})
// Second pass: WordNet instances that were never accepted at all.
wn.filter({
case (id, wne) => wne.isInstance && !acceptedWn.contains(id)
}).foreach({
case (id, wne) =>
out.println(s"""$id\t${wne.lemmas.mkString(",")}\t${wne.defn}""")
})
out.close
}
}
|
package aserg.gtf.dao.authorship;
import java.util.List;
import javax.persistence.Query;
import aserg.gtf.dao.GenericDAO;
import aserg.gtf.dao.PersistThread;
import aserg.gtf.model.authorship.Developer;
/**
 * DAO for {@link Developer} entities of the authorship model.
 *
 * <p>Security fix: the lookup queries previously built JPQL/native SQL by
 * concatenating caller-supplied strings (repository name, user name) into
 * the query text, with ad-hoc single-quote escaping — an injection vector.
 * All queries now use bound parameters; the manual escaping becomes
 * unnecessary and result semantics are unchanged.
 */
public class DeveloperDAO extends GenericDAO<Developer> {

    @Override
    public void persist(Developer o) {
        // Skip persisting when an entity with this id is already stored.
        if (o.getId() != null) {
            Developer persistedDeveloper = this.em.find(Developer.class, o.getId());
            if (persistedDeveloper != null)
                return;
        }
        super.persist(o);
    }

    @Override
    public Developer find(Object id) {
        return this.em.find(Developer.class, id);
    }

    /**
     * Returns the non-removed developers of the given repository whose
     * normalized user name equals {@code newusername}.
     */
    public List<Developer> getDevelopers(String repositoryName, String newusername) {
        // Bound parameters instead of string concatenation (JPQL injection
        // fix); the former replace("'", "''") escaping is no longer needed
        // because the value is never spliced into the query text.
        String hql = "SELECT d FROM Repository r "
                + "JOIN r.developers as d "
                + "WHERE r.fullName = :repositoryName AND d.removed = 'FALSE' AND d.newUserName = :newUserName";
        Query q = em.createQuery(hql);
        q.setParameter("repositoryName", repositoryName);
        q.setParameter("newUserName", newusername);
        return q.getResultList();
    }

    @Override
    public List<Developer> findAll(Class clazz) {
        // Always lists Developer entities regardless of the class argument;
        // signature kept for compatibility with GenericDAO.
        return super.findAll(Developer.class);
    }

    @Override
    public void merge(Developer o) {
        super.merge(o);
    }

    /** Returns all non-removed developers of the given repository. */
    public List<Developer> getAllDevelopers(String repositoryName) {
        // Parameterized for the same injection-safety reason as above.
        String hql = "SELECT d FROM Repository r "
                + "JOIN r.developers as d "
                + "WHERE r.fullName = :repositoryName AND d.removed = 'FALSE' ";
        Query q = em.createQuery(hql);
        q.setParameter("repositoryName", repositoryName);
        return q.getResultList();
    }

    @Override
    public boolean exist(Developer entity) {
        return this.find(entity.getId()) != null;
    }

    // Background worker used by persistAll(); at most one batch thread is
    // alive at a time.
    PersistThread<Developer> thread = null;

    /**
     * Persists the given developers on a background thread, first waiting
     * for any previously started batch to finish.
     */
    public void persistAll(List<Developer> developers) {
        if (thread == null)
            thread = new PersistThread<Developer>(developers, this);
        else {
            try {
                if (thread.isAlive())
                    thread.join();
                thread = new PersistThread<Developer>(developers, this);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe it
                // (the original swallowed it after printing).
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
        }
        thread.start();
    }

    /**
     * Copies the mutable state of {@code o} onto the already-persisted
     * developer with the same id; no-op when the id is not in the store.
     */
    public void update(Developer o) {
        Developer persistedDeveloper = this.em.find(Developer.class, o.getId());
        if (persistedDeveloper != null) {
            persistedDeveloper.setAuthorshipInfos(o.getAuthorshipInfos());
            persistedDeveloper.setEmail(o.getEmail());
            persistedDeveloper.setName(o.getName());
            // persistedDeveloper.setNewUserName(o.getNewUserName());
            if (o.isRemoved())
                persistedDeveloper.setAsRemoved();
            persistedDeveloper.setStatus(o.getStatus());
            persistedDeveloper.setOrigemDevelopers(o.getOrigemDevelopers());
            super.merge(persistedDeveloper);
        }
    }

    /**
     * Returns normalized user names that map to more than one distinct raw
     * user name within the repository (native SQL query).
     */
    public List<String> getDuplicatedUsernames(String repositoryName) {
        // Positional parameter instead of a concatenated literal (SQL
        // injection fix).
        String hql = "SELECT newusername FROM repository r "
                + "JOIN repository_developer rd ON rd.repository_id = r.id "
                + "JOIN developer d ON rd.developers_id = d.id "
                + " WHERE r.fullname = ?1 AND removed = 'FALSE' "
                + "GROUP BY newusername "
                + "HAVING COUNT(DISTINCT username)>1 "
                + "ORDER BY newusername";
        Query q = em.createNativeQuery(hql);
        q.setParameter(1, repositoryName);
        return q.getResultList();
    }
}
|
<reponame>jschoolcraft/urlagg<gh_stars>1-10
# Cucumber support file: load email_spec matchers plus its cucumber
# steps, and FactoryGirl. Load order matters — email_spec must come
# before its cucumber integration.
require 'email_spec'
require 'email_spec/cucumber'
require 'factory_girl'
# Reuse every factory definition from the RSpec suite.
Dir[File.expand_path(File.dirname(__FILE__) + "/../../spec/factories/*.rb")].each {|f| require f}
# Debug aid: after any scenario tagged @show-page that failed, open the
# last rendered page in a browser (Capybara's save_and_open_page).
After("@show-page") do |scenario|
if scenario.failed?
save_and_open_page
end
end
package com.abubusoft.kripton.samples.paging2.com.abubusoft.kripton.widgetx;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import com.abubusoft.kripton.android.KriptonLibrary;
import com.abubusoft.kripton.android.Logger;
import com.abubusoft.kripton.android.PageChunk;
import com.abubusoft.kripton.androidx.livedata.PagedLiveData;
import com.abubusoft.kripton.androidx.widgets.CustomDiffCallback;
import com.abubusoft.kripton.androidx.widgets.MaxSizeHashMap;
import android.os.Handler;
import android.os.Looper;
import android.view.ViewGroup;
import androidx.lifecycle.LifecycleOwner;
import androidx.recyclerview.widget.ListUpdateCallback;
import androidx.recyclerview.widget.RecyclerView.Adapter;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subjects.PublishSubject;
// RecyclerView adapter backed by a paged data source. Pages ("chunks")
// are fetched asynchronously through an RxJava pipeline and cached in a
// bounded map; items not yet loaded return null and trigger a deferred
// notifyItemRangeChanged once their chunk arrives.
public class KriptonRecyclerViewAdapter<T, VH extends KriptonViewHolder<T>> extends Adapter<VH> {
// Callback reporting page-load start/end. NOTE(review): "lenght" is a
// misspelled parameter name, but renaming would touch implementors.
public interface OnLoadingListener {
void onChangeStatus(boolean loading, int position, int lenght, int total);
}
private OnLoadingListener loadingListener;
// protected PagedLiveData<List<T>> pagedResult;
public KriptonRecyclerViewAdapter(LifecycleOwner context, PagedLiveData<List<T>> pagedResult, CustomDiffCallback<T> diff, OnLoadingListener loadingListener) {
// this.pagedResult = pagedResult;
this.viewBuffer = new ViewBuffer<T>(context, pagedResult, diff);
this.loadingListener = loadingListener;
// this.registerAdapterDataObserver(ca);
}
// Page cache + prefetch logic. Loads run off the main thread via the
// Kripton executor; UI notifications are posted back to the main looper.
public class ViewBuffer<E> {
private final Handler mainHandler;
private Map<Integer, PageChunk<E>> chunks;
private int pageSize;
// Prefetch thresholds inside a page: below lowerLimit the previous
// page is requested, above upperLimit the next one.
private final int lowerLimit;
private final int upperLimit;
private PublishSubject<Integer> subject;
// Range of positions requested while their chunk was missing; notified
// as changed once the chunk lands.
private int positionMinToNotify;
private int positionMaxToNotify;
private AtomicBoolean loading = new AtomicBoolean(false);
// private ReentrantLock lock = new ReentrantLock();
private PagedLiveData<List<E>> pagedResult;
private int lastPositionInvoked;
public ViewBuffer(LifecycleOwner context, PagedLiveData<List<E>> pagedResult, CustomDiffCallback<E> diff) {
// Get a handler that can be used to post to the main thread
mainHandler = new Handler(Looper.getMainLooper());
this.pagedResult = pagedResult;
// Bounded cache: only the 4 most recently loaded chunks are kept.
this.chunks = new MaxSizeHashMap<>(4);
//this.chunks=new HashMap<>();
this.pageSize = pagedResult.getPageSize();
this.loading.set(true);
lowerLimit = Math.round(pageSize * 0.3333f);
upperLimit = Math.round(pageSize * 0.6666f);
//@formatter:off
// Pipeline: page number -> (skip already-cached) -> notify listener ->
// fetch page on the executor -> cache + notify pending item range.
this.subject = PublishSubject.create();
subject
.observeOn(Schedulers.from(KriptonLibrary.getExecutorService()))
//.doOnNext(System.out::println)
.filter(page -> {
boolean f = !chunks.containsKey(page);
Logger.info(" ++++++++++++++++observable " + Thread.currentThread().getName());
//System.out.println("================= "+f);
return f;
}).doOnNext(page -> {
if (loadingListener != null) {
// This is your code
Runnable myRunnable = () -> loadingListener.onChangeStatus(true, page, -1, pagedResult.getTotalElements());
mainHandler.post(myRunnable);
}
})
.map(page -> new PageChunk<>(page, pagedResult.getExecutor().execute(page, pagedResult.getPageSize())))
.subscribeOn(Schedulers.from(KriptonLibrary.getExecutorService()))
.subscribe(chunk -> {
Logger.info("++++++++++++++++subscriber " + Thread.currentThread().getName());
chunks.put(chunk.getPageNumber(), chunk);
loading.set(false);
//notifyItemRangeChanged(chunk.getPageNumber() * pageSize, chunk.getData().size());
if (positionMinToNotify <= positionMaxToNotify) {
// This is your code
Runnable myRunnable = () -> {
Logger.info("++++++++++++++++change %s %s", positionMinToNotify, positionMaxToNotify - positionMinToNotify + 1);
notifyItemRangeChanged(positionMinToNotify, positionMaxToNotify - positionMinToNotify + 1);
positionMinToNotify = pagedResult.getTotalElements();
positionMaxToNotify = 0;
};
mainHandler.post(myRunnable);
}
if (loadingListener != null) {
// This is your code
Runnable myRunnable = () -> loadingListener.onChangeStatus(false, chunk.getPageNumber(), chunk.getData().size(), pagedResult.getTotalElements());
mainHandler.post(myRunnable);
}
});
//@formatter:on
// When the backing LiveData emits, reset the cache to page 0 and
// re-request the last position asked for. The two branches differ only
// in lifecycle handling (observeForever vs lifecycle-aware observe).
if (context == null) {
pagedResult.observeForever(data -> {
chunks.clear();
chunks.put(0, new PageChunk<E>(0, data));
//notifyItemRangeChanged(0, data.size());
notifyDataSetChanged();
if (loadingListener != null) {
loadingListener.onChangeStatus(false, 0, data.size(), pagedResult.getTotalElements());
}
System.out.println(String.format(" ::::::::: observable %s total %s", data.size(), pagedResult.getTotalElements()));
this.loading.set(false);
if (lastPositionInvoked != 0) {
loadAround(lastPositionInvoked);
}
});
} else {
pagedResult.observe(context, data -> {
chunks.clear();
chunks.put(0, new PageChunk<E>(0, data));
//notifyItemRangeChanged(0, data.size());
notifyDataSetChanged();
if (loadingListener != null) {
loadingListener.onChangeStatus(false, 0, data.size(), pagedResult.getTotalElements());
}
System.out.println(String.format(" ::::::::: observable %s total %s", data.size(), pagedResult.getTotalElements()));
this.loading.set(false);
if (lastPositionInvoked != 0) {
loadAround(lastPositionInvoked);
}
});
}
}
// Page index containing the given absolute item index.
int getPage(int index) {
return index / pageSize;
}
int nextPage(int index) {
return normalizePage(getPage(index) + 1);
}
int previousPage(int index) {
return normalizePage(getPage(index) - 1);
}
// Clamp a page index to [0, totalPages - 1].
int normalizePage(int pageIndex) {
if (pageIndex < 0)
return 0;
if (pageIndex >= pagedResult.getTotalPages())
return pagedResult.getTotalPages() - 1;
return pageIndex;
}
public E get(int position) {
return loadAround(position);
}
public int getTotalSize() {
return pagedResult.getTotalElements();
}
// Returns the item if its chunk is cached, otherwise null (the position
// is remembered so it is notified once the chunk loads). Also triggers
// prefetch of the neighbouring page when near a page boundary.
protected E loadAround(int position) {
lastPositionInvoked = position;
if (!loading.get()) {
int startPosition = position - (position % pageSize);
int lower = startPosition + lowerLimit;
int upper = startPosition + upperLimit;
System.out.println(String.format("Position %s, lower: %s, upper: %s", position, lower, upper));
if (position >= upper) {
int page = nextPage(position);
loadPage(position, page);
} else if (position <= lower) {
int page = previousPage(position);
loadPage(position, page);
} else {
System.out.println(String.format("Nothing to load for position: %s", position));
}
}
int page = getPage(position);
PageChunk<E> currentChunk = chunks.get(page);
System.out.println(String.format("Position: %s, Index: %s, Chunk: %s", position, position - page * pageSize, page));
if (currentChunk != null) {
return currentChunk.getData().get(position - page * pageSize);
} else {
positionMinToNotify = Math.min(positionMinToNotify, position);
positionMaxToNotify = Math.max(positionMaxToNotify, position);
return null;
}
}
// Pushes a page number into the Rx pipeline if it is not the current
// page and not already cached.
protected void loadPage(int position, int page) {
// load only different page
if (getPage(position) != page && !this.chunks.containsKey(page)) {
// we need to load it
loading.set(true);
if (loadingListener != null) {
loadingListener.onChangeStatus(true, page, -1, pagedResult.getTotalElements());
}
System.out.println("==> preload " + page);
subject.onNext(page);
}
}
}
// Logging-only ListUpdateCallback; currently not registered (see the
// commented-out registerAdapterDataObserver call in the constructor).
final ListUpdateCallback ca = new ListUpdateCallback() {
@Override
public void onChanged(int position, int count, Object payload) {
Logger.info("@@@@@ onChanged " + position + " count " + count);
}
@Override
public void onInserted(int position, int count) {
Logger.info("@@@@@ onInserted " + position + " count " + count);
}
@Override
public void onMoved(int fromPosition, int toPosition) {
Logger.info("@@@@@ onMoved " + fromPosition + " to " + toPosition);
}
@Override
public void onRemoved(int position, int count) {
Logger.info("@@@@@ onRemoved " + position + " count " + count);
}
};
protected ViewBuffer<T> viewBuffer;
@Override
public int getItemCount() {
return viewBuffer.getTotalSize();
}
public T getItem(int position) {
return viewBuffer.get(position);
}
// NOTE(review): onBindViewHolder is empty and onCreateViewHolder returns
// null — this looks like an abstract-base sketch; confirm subclasses
// override both before production use.
@Override
public void onBindViewHolder(VH holder, int position) {
}
@Override
public VH onCreateViewHolder(ViewGroup parent, int viewType) {
// TODO Auto-generated method stub
return null;
}
}
|
#!/bin/bash
# Run dieharder with test id 208 (-d), generator id 208 (-g) and a fixed
# seed (-S) so the run is reproducible. NOTE(review): confirm the ids
# against `dieharder -l`/`-g -1` listings for the installed version.
dieharder -d 208 -g 208 -S 2140661553
|
/* eslint arrow-body-style: ["error", "as-needed"] */
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { getHtmlLedetekst, getLedetekst } from '@navikt/digisyfo-npm';
import { Hovedknapp } from 'nav-frontend-knapper';
import Alertstripe from 'nav-frontend-alertstriper';
import { soknadPt } from '../../../propTypes/index';
import { ettersendSoknadTilNav } from '../../data/ettersending/ettersendingNav';
import { ettersendSoknadTilArbeidsgiver } from '../../data/ettersending/ettersendingArbeidsgiver';
import Feilstripe from '../../../components/Feilstripe';
import {
ETTERSEND_SOKNAD_FEILET,
ETTERSEND_SOKNAD_SENDER,
ALLEREDE_ETTERSENDT,
selectEttersendSoknadStatus,
} from '../../data/ettersending/ettersendingSelectors';
import logger from '../../../logging';
// Field names on the sykepengesoknad object that record when it was
// sent to NAV / to the employer; used as the "manglendeDato" prop value.
const sendtTilNAVDato = 'sendtTilNAVDato';
const sendtTilArbeidsgiverDato = 'sendtTilArbeidsgiverDato';
// Prop validators: allowed text-key suffixes and missing-date values.
const ledetekstKeySuffixPt = PropTypes.oneOf(['send-til-nav', 'send-til-arbeidsgiver']);
const manglendeDatoPt = PropTypes.oneOf([sendtTilNAVDato, sendtTilArbeidsgiverDato]);
// Dialog for re-submitting (ettersende) a sykepengesøknad to NAV or to
// the employer. Dispatches the matching ettersend action on confirm and
// closes itself when the store reports the søknad was already resent.
export class EttersendingDialog extends Component {
componentDidUpdate() {
const {
onClose,
sykepengesoknad,
status,
} = this.props;
// Already resent elsewhere: log and close the dialog.
if (status === ALLEREDE_ETTERSENDT) {
logger.info(`Forsøkte å sende søknad ${sykepengesoknad.id}`);
onClose();
}
}
render() {
const {
onClose,
sykepengesoknad,
status,
ledetekstKeySuffix,
manglendeDato,
doEttersendSoknadTilNav,
doEttersendSoknadTilArbeidsgiver,
} = this.props;
const sender = status === ETTERSEND_SOKNAD_SENDER;
// When the søknad already has the sent-date set, texts switch to the
// "-igjen" (send again) variants.
const senderPaNyttSuffix = sykepengesoknad[manglendeDato]
? '-igjen'
: '';
return (<div className="ettersending">
<h3 className="modal__tittel">{getLedetekst(`sykepengesoknad.ettersending.info.tittel.${ledetekstKeySuffix}${senderPaNyttSuffix}`)}</h3>
{
(() => {
// Info banner when already sent; otherwise the regular HTML blurb.
if (sykepengesoknad[manglendeDato]) {
return (<Alertstripe
type="info">{getLedetekst('sykepengesoknad.ettersending.info.tekst.allerede-sendt')}</Alertstripe>);
}
return (<div
dangerouslySetInnerHTML={getHtmlLedetekst(`sykepengesoknad.ettersending.info.tekst.${ledetekstKeySuffix}`)} />);
})()
}
<Feilstripe vis={status === ETTERSEND_SOKNAD_FEILET} />
<div className="knapperad">
<Hovedknapp
disabled={sender}
spinner={sender}
className="blokk--s"
onClick={(e) => {
e.preventDefault();
if (sykepengesoknad[manglendeDato]) {
logger.info(`Forsøker å sende søknad ${sykepengesoknad.id} på nytt (${manglendeDato})`);
}
// Which action to dispatch is decided by which date is missing.
if (manglendeDato === sendtTilNAVDato) {
doEttersendSoknadTilNav(sykepengesoknad.id);
} else {
doEttersendSoknadTilArbeidsgiver(sykepengesoknad.id);
}
}}>
{getLedetekst(`sykepengesoknad.ettersending.knapp.bekreft.${ledetekstKeySuffix}${senderPaNyttSuffix}`)}
</Hovedknapp>
<p>
<a
onClick={(e) => {
e.preventDefault();
onClose();
}}
href="#"
className="lenke">Avbryt</a>
</p>
</div>
</div>);
}
}
EttersendingDialog.propTypes = {
onClose: PropTypes.func,
sykepengesoknad: soknadPt,
status: PropTypes.string,
ledetekstKeySuffix: ledetekstKeySuffixPt,
manglendeDato: manglendeDatoPt,
doEttersendSoknadTilNav: PropTypes.func,
doEttersendSoknadTilArbeidsgiver: PropTypes.func,
};
// Redux wiring: the dialog only reads the ettersend status from the
// store and binds the two ettersend action creators.
const mapStateToProps = state => ({
status: selectEttersendSoknadStatus(state),
});
const actionCreators = {
doEttersendSoknadTilNav: ettersendSoknadTilNav,
doEttersendSoknadTilArbeidsgiver: ettersendSoknadTilArbeidsgiver,
};
export const EttersendDialogConnected = connect(mapStateToProps, actionCreators)(EttersendingDialog);
|
#!/bin/bash -e
#
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Portions of this file are derived from https://github.com/pytorch/builder/blob/d5e62b676b5d3b6c5dba35a4b5ac227bd6d3563b/manywheel/build.sh
#
# Copyright (c) 2016, Hugh Perkins
# Copyright (c) 2016, Soumith Chintala
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Repairs a built wheel in place of auditwheel: unpacks it, bundles the
# shared-library dependencies into <pkg>/.libs with hash-suffixed names,
# patchelfs SONAME/NEEDED/RPATH entries, fixes the WHEEL metadata and
# regenerates RECORD, then rezips into $OUTDIR.
INWHL=$(readlink -e $1)
DEPS_PATH=${2:-/usr/local}
OUTDIR=/wheelhouse
OUTWHLNAME=$(basename $INWHL)
# For some reason the pip wheel builder inserts "-none-" into the tag even if you gave it an ABI name
OUTWHLNAME=${OUTWHLNAME//-none-/-}
PKGNAME=$(echo "$OUTWHLNAME" | sed 's/-.*$//')
PKGNAME_PATH=$(echo "$PKGNAME" | sed 's/_/\//' | sed 's/_.*$//')
if [[ -z "$INWHL" || ! -f "$INWHL" || -z "$PKGNAME" ]]; then
echo "Usage: $0 <inputfile.whl>"
exit 1
fi
#######################################################################
# ADD DEPENDENCIES INTO THE WHEEL
#
# auditwheel repair doesn't work correctly and is buggy
# so manually do the work of copying dependency libs and patchelfing
# and fixing RECORDS entries correctly
######################################################################
# Produces "<base>-<first 8 hex of sha256>.<ext...>" for a library path,
# so bundled copies don't collide with system libraries.
fname_with_sha256() {
HASH=$(sha256sum $1 | cut -c1-8)
BASENAME=$(basename $1)
INITNAME=$(echo $BASENAME | cut -f1 -d".")
ENDNAME=$(echo $BASENAME | cut -f 2- -d".")
echo "$INITNAME-$HASH.$ENDNAME"
}
# Appends one "<path>,sha256=<hash>,<size>" line (or "<path>,," for the
# RECORD file itself) to the RECORD file; flock-guarded because callers
# run this in parallel background jobs.
make_wheel_record() {
FPATH=$1
RECORD_FILE=$2
TMPDIR=$3
if echo $FPATH | grep RECORD >/dev/null 2>&1; then
# if the RECORD file, then
result="$FPATH,,"
else
HASH=$(openssl dgst -sha256 -binary $FPATH | openssl base64 | sed -e 's/+/-/g' | sed -e 's/\//_/g' | sed -e 's/=//g')
FSIZE=$(ls -nl $FPATH | awk '{print $5}')
result="$FPATH,sha256=$HASH,$FSIZE"
fi
flock $TMPDIR/dali_rec.lock echo $result>>$RECORD_FILE
}
# Shared libraries to bundle; entries that don't exist are skipped.
DEPS_LIST=(
"${DEPS_PATH}/lib64/libjpeg.so.62"
"${DEPS_PATH}/lib/libjpeg.so.62"
"${DEPS_PATH}/lib/libavformat.so.58"
"${DEPS_PATH}/lib/libavcodec.so.58"
"${DEPS_PATH}/lib/libavfilter.so.7"
"${DEPS_PATH}/lib/libavutil.so.56"
"${DEPS_PATH}/lib/libtiff.so.5"
"${DEPS_PATH}/lib/libsndfile.so.1"
"${DEPS_PATH}/lib/libFLAC.so.8"
"${DEPS_PATH}/lib/libogg.so.0"
"${DEPS_PATH}/lib/libvorbis.so.0"
"${DEPS_PATH}/lib/libvorbisenc.so.2"
"${DEPS_PATH}/lib/libopenjp2.so.7"
"${DEPS_PATH}/lib/libzstd.so.1"
"${DEPS_PATH}/lib/libz.so.1"
)
TMPDIR=$(mktemp -d)
pushd $TMPDIR
unzip -q $INWHL
mkdir -p $PKGNAME_PATH/.libs
popd
# copy over needed dependent .so files over and tag them with their hash
original=()
patched=()
copy_and_patch() {
local filepath=$1
filename=$(basename $filepath)
if [[ ! -f "$filepath" ]]; then
echo "Didn't find $filename, skipping..."
return
fi
patchedname=$(fname_with_sha256 $filepath)
patchedpath=$PKGNAME_PATH/.libs/$patchedname
original+=("$filename")
patched+=("$patchedname")
echo "Copying $filepath to $patchedpath"
cp $filepath $TMPDIR/$patchedpath
echo "Patching DT_SONAME field in $patchedpath"
# Background job; callers wait for the whole batch below.
patchelf --set-soname $patchedname $TMPDIR/$patchedpath &
}
echo "Patching DT_SONAMEs..."
for filepath in "${DEPS_LIST[@]}"; do
copy_and_patch $filepath
done
wait
echo "Patched DT_SONAMEs"
pushd $TMPDIR
# Rewrites each DT_NEEDED entry of $1 that references an original
# library name to the bundled hash-suffixed name.
patch_hashed_names() {
local sofile=$1
for ((i=0;i<${#original[@]};++i)); do
origname=${original[i]}
patchedname=${patched[i]}
if [[ "$origname" != "$patchedname" ]]; then
set +e
patchelf --print-needed $sofile | grep $origname 2>&1 >/dev/null
ERRCODE=$?
set -e
if [ "$ERRCODE" -eq "0" ]; then
echo "patching $sofile entry $origname to $patchedname"
patchelf --replace-needed $origname $patchedname $sofile
fi
fi
done
}
echo "Patching to fix the so names to the hashed names..."
# get list of files to iterate over
sofile_list=()
while IFS= read -r -d $'\0'; do
sofile_list+=("$REPLY")
done < <(find $PKGNAME_PATH -name '*.so*' -print0)
while IFS= read -r -d $'\0'; do
sofile_list+=("$REPLY")
done < <(find $PKGNAME_PATH -name '*.bin' -print0)
for ((i=0;i<${#sofile_list[@]};++i)); do
sofile=${sofile_list[i]}
patch_hashed_names $sofile &
done
wait
echo "Fixed hashed names"
# Points a package file's RPATH at $ORIGIN, the package root, and the
# bundled .libs directory (UPDIRS climbs back to the package root).
patch_rpath() {
local FILE=$1
UPDIRS=$(dirname $(echo "$FILE" | sed "s|$PKGNAME_PATH||") | sed 's/[^\/][^\/]*/../g')
echo "Setting rpath of $FILE to '\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/.libs'"
patchelf --set-rpath "\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/.libs" $FILE
patchelf --print-rpath $FILE
}
echo "Fixing rpath of main files..."
# set RPATH of backend_impl.so and similar to $ORIGIN, $ORIGIN$UPDIRS, $ORIGIN$UPDIRS/.libs
for ((i=0;i<${#sofile_list[@]};++i)); do
sofile=${sofile_list[i]}
patch_rpath $sofile &
done
wait
echo "Fixed rpath of main files"
patch_other_rpath() {
local sofile=$1
echo "Setting rpath of $sofile to " '$ORIGIN'
patchelf --set-rpath '$ORIGIN' $sofile
patchelf --print-rpath $sofile
}
echo "Fixing rpath of .lib files..."
# get list of files to iterate over
sofile_list=()
while IFS= read -r -d $'\0'; do
sofile_list+=("$REPLY")
done < <(find $PKGNAME_PATH/.libs -maxdepth 1 -type f -name "*.so*" -print0)
# set RPATH of .libs/ files to $ORIGIN
for ((i=0;i<${#sofile_list[@]};++i)); do
sofile=${sofile_list[i]}
patch_other_rpath $sofile &
done
wait
echo "Fixed rpath of .lib files"
# correct the metadata in the dist-info/WHEEL, e.g.:
#Root-Is-Purelib: true
#Tag: cp27-cp27mu-none-manylinux1_x86_64
sed -i 's/\(Tag:.*\)-none-/\1-/;s/\(Root-Is-Purelib:\) true/\1 false/' ${PKGNAME}-*.dist-info/WHEEL
# regenerate the RECORD file with new hashes
RECORD_FILE=$(ls $PKGNAME-*.dist-info/RECORD)
echo "Generating new record file $RECORD_FILE"
rm -f $RECORD_FILE
# generate records for $PKGNAME_S folder
rec_list=()
while IFS= read -r -d $'\0'; do
rec_list+=("$REPLY")
done < <(find * -type f -print0)
for ((i=0;i<${#rec_list[@]};++i)); do
FNAME=${rec_list[i]}
make_wheel_record $FNAME $RECORD_FILE $TMPDIR &
done
wait
# RECORD lists itself with empty hash/size, per the wheel spec.
echo "$RECORD_FILE,," >> $RECORD_FILE
echo "Finished generating new record file $RECORD_FILE"
# zip up the new wheel into the wheelhouse
mkdir -p $OUTDIR
rm -f $OUTDIR/$OUTWHLNAME
zip -rq $OUTDIR/$OUTWHLNAME *
# clean up
popd
rm -rf $TMPDIR
|
#!/bin/bash
# Verbose Logging - Add parameters to /etc/chrome_dev.conf that increase logging verbosity.
# Run this script on a Chromebook:
# 1. Put Chromebook in developer mode - https://www.chromium.org/chromium-os/poking-around-your-chrome-os-device
# 2. Log into device. Press CTRL+ALT+T to open crosh shell.
# 3. Type "shell" to enter Bash shell.
# 4. Type:
#    bash <(curl -s -S -L https://raw.githubusercontent.com/jay0lee/cros-scripts/master/verbose_logging.sh)
# Make SSD read/write if it's not
source <(curl -s -S -L https://raw.githubusercontent.com/jay0lee/cros-scripts/master/enable_rw_ssd.sh)
# Each line of chrome_dev.conf is passed to Chrome as a command-line switch,
# so every entry needs the leading "--".
sudo bash -c 'echo "--log-net-log=/tmp/netlog" >> /etc/chrome_dev.conf'
sudo bash -c 'echo "--net-log-level=0" >> /etc/chrome_dev.conf'
sudo bash -c 'echo "--v=2" >> /etc/chrome_dev.conf'
# FIX: the vmodule entry was missing its "--" prefix, so Chrome ignored it.
sudo bash -c 'echo "--vmodule=*/chromeos/login/*=2" >> /etc/chrome_dev.conf'
echo
echo "Enabled verbose logging in /etc/chrome_dev.conf. Please reboot for logging to take effect."
|
#!/bin/bash
# Build Borsa React App Project
cd ./borsa-app/
npm run build
cd ..
# Build Borsa Service Project
APPS="borsa-service"
# First ensure dependencies loaded since .m2 may be empty
mvn dependency:tree -Ddetail=true
mvn help:evaluate -Dexpression=project.version
# Clean repo from builds
./clean.sh
#
# Config
#
# Version string combines the Maven version, the git branch, a timestamp and
# the building user, e.g. 1.2.3-main_20240101120000_alice
THEUSER=$(/usr/bin/whoami)
NOW=$(date "+%Y%m%d%H%M%S")
# grep -e '^[^\[]' drops Maven's "[INFO]"-prefixed lines, keeping only the raw version
APP_MAVEN_VERSION=$(mvn help:evaluate -Dexpression=project.version | grep -e '^[^\[]')
APP_GIT_VERSION=$(git rev-parse --abbrev-ref HEAD)
RESOURCES_DIR=./borsa-service/src/main/resources
APP_PROP_FILE=${RESOURCES_DIR}/application.properties
APP_TEMPLATE_PROP_FILE=${RESOURCES_DIR}/application.properties-template
APP_VERSION_FILE=./app.version
APP_VERSION=${APP_MAVEN_VERSION}-${APP_GIT_VERSION}_${NOW}_${THEUSER}
echo ${APP_VERSION} > ${APP_VERSION_FILE}
# Regenerate application.properties from the template, substituting the
# version placeholders.
[ -e ${APP_PROP_FILE} ] && rm ${APP_PROP_FILE}
cat ${APP_TEMPLATE_PROP_FILE} | sed -e "s/FULLAPPBUILDNUM/${APP_VERSION}/g" \
| sed -e "s/MVNVERSION/${APP_MAVEN_VERSION}/g" > ${APP_PROP_FILE}
echo "LOCALLY building runtime to local folder: ./build ..."
echo "Version = ${APP_VERSION}"
if [ ! -f "${APP_VERSION_FILE}" ]; then
echo "APP Version file DOES NOT exist. CANNOT proceed with build."
exit 1
fi
#
# Config
#
# Re-read the version from disk so the build uses exactly what was recorded.
APP_VERSION=$(cat ${APP_VERSION_FILE})
#mvn test
mvn install -Dmaven.test.skip=true
mvn package -N -P classpath-deps -Dmaven.test.skip=true
# Package runtimes with compiled and built libraries
#for APP in $APPS
#do
#  echo "Building $APP ..."
#  cd ./$APP
#  ./build_app.sh
#  echo "Built $APP"
#  cd ..
#done
#echo "Copying job scripts for at scheduling to build/app/ ..."
#cp ./cicd/deploy.sh ./build/app/
#cp ./app/opt/start_app.sh ./build/app/
#echo "Done."
echo "Done building APP."
|
<gh_stars>1-10
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
/**
 * Autonomous op-mode that runs a fixed, timed sequence of movements.
 *
 * <p>All motion and marker helpers ({@code moveTime}, {@code moveToRange},
 * {@code setTeamMarker}, {@code dropTeamMarker}, {@code pRightTurn},
 * {@code pMoveBackward}) are inherited from {@code AutoOpMode}; their exact
 * semantics are defined there, not here.
 */
@Autonomous
public class GoldHitMiddle extends AutoOpMode {
    @Override
    public void runOpMode() throws InterruptedException {
        initialize();
        waitForStart();
        // Fixed-duration drive, then a range-based approach.
        // NOTE(review): units of moveTime/moveToRange arguments are assumed to be
        // ms and distance — confirm against AutoOpMode.
        moveTime(1500, .4);
        sleep(1000);
        moveToRange(15.0);
        // Marker sequence: position, drop, reposition. setTeamMarker() is called
        // twice on purpose (before and after dropTeamMarker()).
        setTeamMarker();
        sleep(1000);
        dropTeamMarker();
        sleep(1000);
        setTeamMarker();
        // Turn and back away to finish the routine.
        pRightTurn(55);
        pMoveBackward(5000);
    }
}
|
#ifndef _WORLD_H_
#define _WORLD_H_
#if defined(_MSC_VER)
#pragma once
#endif
/*
* LEGAL NOTICE
* This computer software was prepared by Battelle Memorial Institute,
* hereinafter the Contractor, under Contract No. DE-AC05-76RL0 1830
* with the Department of Energy (DOE). NEITHER THE GOVERNMENT NOR THE
* CONTRACTOR MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY
* LIABILITY FOR THE USE OF THIS SOFTWARE. This notice including this
* sentence must appear on any copies of this computer software.
*
* EXPORT CONTROL
* User agrees that the Software will not be shipped, transferred or
* exported into any country or used in any manner prohibited by the
* United States Export Administration Act or any other applicable
* export laws, restrictions or regulations (collectively the "Export Laws").
* Export of the Software may require some form of license or other
* authority from the U.S. Government, and failure to obtain such
* export control license may result in criminal liability under
* U.S. laws. In addition, if the Software is identified as export controlled
* items under the Export Laws, User represents and warrants that User
* is not a citizen, or otherwise located within, an embargoed nation
* (including without limitation Iran, Syria, Sudan, Cuba, and North Korea)
* and that User is not otherwise prohibited
* under the Export Laws from receiving the Software.
*
* Copyright 2011 Battelle Memorial Institute. All Rights Reserved.
* Distributed as open-source under the terms of the Educational Community
* License version 2.0 (ECL 2.0). http://www.opensource.org/licenses/ecl2.php
*
* For further details, see: http://www.globalchange.umd.edu/models/gcam/
*
*/
/*!
* \file world.h
* \ingroup Objects
* \brief The World class header file.
* \author <NAME>
*/
#include <map>
#include <vector>
#include <list>
#include <memory>
#include <xercesc/dom/DOMNode.hpp>
#include "util/base/include/ivisitable.h"
#include "util/base/include/iround_trippable.h"
#include "land_allocator/include/set_carbon_density.h"
// Forward declarations
class Region;
class ILogger;
class Curve;
class CalcCounter;
class IClimateModel;
class GHGPolicy;
namespace objects {
class Atom;
}
template <class T, class U> class HashMap;
/*!
* \ingroup Objects
* \brief A class which contains all the model's regions. These regions may be MiniCAM (partial
* equilibrium) regions or SGM (general equilibrium) regions as they are derived
* from the Region base class.
*
* The World class object is contained by the Scenario class object. The world object controls
* the calling of the regions which it has been told to solve (passed in an
* argument of the method world.calc()) by calling region.calc() to run the model
* for one iteration for these regions.
*
* \author <NAME>
*/
class World: public IVisitable, public IRoundTrippable
{
public:
    World();
    ~World();
    //! Parse World data from a DOM subtree.
    void XMLParse( const xercesc::DOMNode* node );
    //! Finish initialization after all XML has been parsed.
    void completeInit();
    void toInputXML( std::ostream& out, Tabs* tabs ) const;
    void toDebugXML( const int period, std::ostream& out, Tabs* tabs ) const;
    static const std::string& getXMLNameStatic();
    const std::string& getName() const;
    //! Per-period initialization and wrap-up hooks.
    void initCalc( const int period );
    void postCalc( const int aPeriod );
    //! The type of the vector containing region atoms.
    typedef std::vector<const objects::Atom*> AtomVector;
    //! Run the model for one iteration; an empty aRegionsToCalc means all regions.
    void calc( const int period, const AtomVector& aRegionsToCalc = AtomVector() );
    void updateSummary( const std::list<std::string> aPrimaryFuelList, const int period );
    void runClimateModel();
    //! Output routines (CSV, database, SGM-specific files).
    void csvOutputFile() const;
    void dbOutput( const std::list<std::string>& aPrimaryFuelList ) const;
    const std::map<std::string,int> getOutputRegionMap() const;
    const AtomVector getRegionIDs() const;
    bool isAllCalibrated( const int period, double calAccuracy, const bool printWarnings ) const;
    void setTax( const GHGPolicy* aTax );
    const IClimateModel* getClimateModel() const;
    const std::map<const std::string, const Curve*> getEmissionsQuantityCurves( const std::string& ghgName ) const;
    const std::map<const std::string, const Curve*> getEmissionsPriceCurves( const std::string& ghgName ) const;
    CalcCounter* getCalcCounter() const;
    void accept( IVisitor* aVisitor, const int aPeriod ) const;
    void csvSGMOutputFile( std::ostream& aFile, const int period ) const;
    void csvSGMGenFile( std::ostream& aFile ) const;
    SetCarbonDensity& getAdjustCarbonDensityVisitor();
    std::vector<Region*> getRegions();
private:
    //! The type of an iterator over the Region vector.
    typedef std::vector<Region*>::iterator RegionIterator;
    std::vector<Region*> regions; //!< array of pointers to Region objects
    //! The type of a constant iterator over the Region vector.
    typedef std::vector<Region*>::const_iterator CRegionIterator;
    //! The type of the fast Region lookup hashmap.
    typedef HashMap<const objects::Atom*, unsigned int> FastRegionLookupMap;
    //! A fast hashmap which stores a mapping of region ID atom to region
    //! location. This allows world calc calls for derivatives to be faster.
    // NOTE(review): std::auto_ptr was deprecated in C++11 and removed in
    // C++17; migrate these members to std::unique_ptr when the toolchain allows.
    std::auto_ptr<FastRegionLookupMap> mRegionLookupMap;
    std::map<std::string, int> regionNamesToNumbers; //!< Map of region name to indice used for XML parsing.
    std::auto_ptr<IClimateModel> mClimateModel; //!< The climate model.
    //! An object which maintains a count of the number of times
    //! calc() has been called.
    std::auto_ptr<CalcCounter> mCalcCounter;
    //! A visitor which can be used to adjust carbon densities from the top
    //! level. This is used during experiment 1 of iESM.
    SetCarbonDensity mAdjDensityVisitor;
    void clear();
    const std::vector<unsigned int> getRegionIndexesToCalculate( const AtomVector& aRegionsToCalc );
    void createFastLookupMap();
    void csvGlobalDataFile() const;
    bool checkCalConsistancy( const int period );
};
#endif // _WORLD_H_
|
import assertType from '../utils/assert-type';
import assertNotNull from '../utils/assert-not-null';
import forOf from '../utils/for-of';
import {identityFunction} from './helper-functions';

/**
 * Folds every element of `source` into an accumulator starting at `seed`,
 * using `func(accumulator, element)`, then passes the final accumulator
 * through `resultSelector` (identity when omitted).
 */
export default function aggregateIterator(source, seed, func, resultSelector) {
    assertNotNull(source);
    assertType(func, Function);
    const selector = resultSelector || identityFunction;
    assertType(selector, Function);
    let accumulator = seed;
    forOf(source, (item) => {
        accumulator = func(accumulator, item);
    });
    return selector(accumulator);
}
|
<reponame>FLSoz/terratech-steam-mod-loader
import { app, Menu, shell, BrowserWindow, MenuItemConstructorOptions } from 'electron';
import checkForUpdates from './updater';
import { ValidChannel } from '../model';
/**
 * macOS menu items additionally carry an Objective-C `selector` string that
 * Electron forwards to the first responder (e.g. 'hide:', 'performClose:').
 */
interface DarwinMenuItemConstructorOptions extends MenuItemConstructorOptions {
    selector?: string;
    submenu?: DarwinMenuItemConstructorOptions[] | Menu;
}
/**
 * Builds and installs the application menu for the main window.
 * Produces a macOS-specific template (with AppKit selectors) on darwin and a
 * generic template elsewhere.
 */
export default class MenuBuilder {
    mainWindow: BrowserWindow;

    constructor(mainWindow: BrowserWindow) {
        this.mainWindow = mainWindow;
    }

    /** Build the platform-appropriate menu and set it as the application menu. */
    buildMenu(): Menu {
        if (process.env.NODE_ENV === 'development' || process.env.DEBUG_PROD === 'true') {
            this.setupDevelopmentEnvironment();
        }
        const template = this.buildDefaultTemplate();
        const menu = Menu.buildFromTemplate(template);
        Menu.setApplicationMenu(menu);
        return menu;
    }

    /** Dev-only hook; intentionally a no-op at the moment. */
    setupDevelopmentEnvironment(): void {}

    /** Assemble all submenus and pick the set matching the current platform. */
    buildDefaultTemplate(): MenuItemConstructorOptions[] {
        // File menu: lets the renderer refresh mod info on demand.
        const subMenuFile: MenuItemConstructorOptions = {
            label: '&File',
            submenu: [
                {
                    label: 'Refresh mod information',
                    click: () => {
                        this.mainWindow.webContents.send(ValidChannel.MOD_REFRESH_REQUESTED);
                    }
                }
            ]
        };
        // macOS application menu (About/Hide/Quit) using native selectors.
        const subMenuAboutDarwin: DarwinMenuItemConstructorOptions = {
            label: 'TTSMM',
            submenu: [
                {
                    label: 'About TTSMM',
                    selector: 'orderFrontStandardAboutPanel:'
                },
                { type: 'separator' },
                { label: 'Services', submenu: [] },
                { type: 'separator' },
                {
                    label: 'Hide TTSMM',
                    accelerator: 'Command+H',
                    selector: 'hide:'
                },
                {
                    label: 'Hide Others',
                    accelerator: 'Command+Shift+H',
                    selector: 'hideOtherApplications:'
                },
                { label: 'Show All', selector: 'unhideAllApplications:' },
                { type: 'separator' },
                {
                    label: 'Quit',
                    accelerator: 'Command+Q',
                    click: () => {
                        app.quit();
                    }
                }
            ]
        };
        // macOS Edit menu delegates undo/redo to native selectors.
        const subMenuEditDarwin: DarwinMenuItemConstructorOptions = {
            label: 'Edit',
            submenu: [
                { label: 'Undo', accelerator: 'Command+Z', selector: 'undo:' },
                { label: 'Redo', accelerator: 'Shift+Command+Z', selector: 'redo:' },
                { type: 'separator' }
            ]
        };
        // Non-macOS Edit menu: entries exist for discoverability, handlers are
        // currently no-ops.
        const subMenuEdit: MenuItemConstructorOptions = {
            label: '&Edit',
            submenu: [
                {
                    label: '&Undo',
                    accelerator: 'Ctrl+Z',
                    click: () => {}
                },
                {
                    label: '&Redo',
                    accelerator: 'Ctrl+Y',
                    click: () => {}
                }
            ]
        };
        // View menu: reload, fullscreen toggle, devtools (shared across platforms).
        const subMenuView: MenuItemConstructorOptions = {
            label: 'View',
            submenu: [
                {
                    label: 'Reload',
                    accelerator: 'Command+R',
                    click: () => {
                        this.mainWindow.webContents.reload();
                    }
                },
                {
                    label: 'Toggle Full Screen',
                    accelerator: 'Ctrl+Command+F',
                    click: () => {
                        this.mainWindow.setFullScreen(!this.mainWindow.isFullScreen());
                    }
                },
                {
                    label: 'Toggle Developer Tools',
                    accelerator: 'F12',
                    click: () => {
                        this.mainWindow.webContents.toggleDevTools();
                    }
                }
            ]
        };
        // macOS Window menu with native minimize/close/arrange selectors.
        const subMenuWindowDarwin: DarwinMenuItemConstructorOptions = {
            label: 'Window',
            submenu: [
                {
                    label: 'Minimize',
                    accelerator: 'Command+M',
                    selector: 'performMiniaturize:'
                },
                { label: 'Close', accelerator: 'Command+W', selector: 'performClose:' },
                { type: 'separator' },
                { label: 'Bring All to Front', selector: 'arrangeInFront:' }
            ]
        };
        // Help menu: external links opened in the system browser.
        const subMenuHelp: MenuItemConstructorOptions = {
            label: 'Help',
            submenu: [
                {
                    label: 'TerraTech Forums',
                    click() {
                        shell.openExternal('https://forum.terratechgame.com/index.php');
                    }
                },
                {
                    label: `TerraTech Discord`,
                    click() {
                        shell.openExternal('https://discord.com/invite/terratechgame');
                    }
                },
                {
                    label: 'Documentation',
                    click() {
                        shell.openExternal('https://github.com/FLSoz/terratech-steam-mod-loader/#readme');
                    }
                },
                {
                    label: 'Search Issues',
                    click() {
                        shell.openExternal('https://github.com/FLSoz/terratech-steam-mod-loader/issues');
                    }
                }
            ]
        };
        // Updates menu: manual update check via the updater module.
        const subMenuUpdates: MenuItemConstructorOptions = {
            label: '&Updates',
            submenu: [
                {
                    label: 'Check for updates',
                    click: checkForUpdates
                }
            ]
        };
        return process.platform === 'darwin'
            ? [subMenuFile, subMenuAboutDarwin, subMenuEditDarwin, subMenuView, subMenuWindowDarwin, subMenuUpdates, subMenuHelp]
            : [subMenuFile, subMenuEdit, subMenuView, subMenuUpdates, subMenuHelp];
    }
}
|
class Visitor:
    """Base visitor; subclasses override ``visit`` to handle elements."""

    def visit(self, element):
        pass


class ConcreteVisitor(Visitor):
    """Visitor that dispatches on the element's concrete type."""

    def visit(self, element):
        # Table-driven dispatch; first matching type wins, mirroring the
        # classic if/elif chain (A is checked before B).
        dispatch = (
            (ConcreteElementA, self.visit_concrete_element_a),
            (ConcreteElementB, self.visit_concrete_element_b),
        )
        for element_type, handler in dispatch:
            if isinstance(element, element_type):
                handler(element)
                break

    def visit_concrete_element_a(self, element):
        print("Visiting ConcreteElementA")

    def visit_concrete_element_b(self, element):
        print("Visiting ConcreteElementB")


class Element:
    """Base element; subclasses accept a visitor."""

    def accept(self, visitor):
        pass


class ConcreteElementA(Element):
    def accept(self, visitor):
        visitor.visit(self)


class ConcreteElementB(Element):
    def accept(self, visitor):
        visitor.visit(self)


# Usage
visitor = ConcreteVisitor()
element_a = ConcreteElementA()
element_b = ConcreteElementB()
element_a.accept(visitor)  # Output: Visiting ConcreteElementA
element_b.accept(visitor)  # Output: Visiting ConcreteElementB
<filename>nexus/lib/memory.py
##################################################################
## (c) Copyright 2015- by <NAME> ##
##################################################################
######################################################################
# The following is adapted from
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/286222
# Python Cookbook, by <NAME>
######################################################################
#====================================================================#
# memory.py #
# Calculate memory used by the Nexus host process. #
# #
# Content summary: #
# memory #
# Return memory usage of the current process. #
# #
# resident #
# Return resident memory usage. #
# #
# stacksize #
# Return stack size. #
# #
#====================================================================#
import os
# Multipliers mapping the unit suffix found in /proc/<pid>/status to bytes.
_scale = {'kB': 1024.0, 'mB': 1024.0*1024.0,
          'KB': 1024.0, 'MB': 1024.0*1024.0}
def _VmB(VmKey, pid=None):
'''Private.
'''
global _scale
# get pseudo file /proc/<pid>/status
if not pid:
pid = os.getpid()
proc_status = '/proc/%d/status' % pid
try:
t = open(proc_status)
v = t.read()
t.close()
except:
return 0.0 # non-Linux?
# get VmKey line e.g. 'VmRSS: 9999 kB\n ...'
try:
i = v.index(VmKey)
v = v[i:].split(None, 3) # whitespace
except ValueError:
return 0.0
if len(v) < 3:
return 0.0 # invalid format?
# convert Vm value to bytes
return float(v[1]) * _scale[v[2]]
def get_children(pid):
    """Return the list of direct child PIDs of ``pid`` (Linux only).

    Reads ``/proc/<pid>/task/<pid>/children``; returns an empty list when
    the file cannot be read (non-Linux, bad pid, or missing permission).
    """
    proc_children = '/proc/%d/task/%d/children' % (pid, pid)
    try:
        # Context manager replaces the manual open/read/close and the bare
        # except: that silently swallowed all exception types.
        with open(proc_children, 'r') as t:
            v = t.read()
    except (IOError, OSError):
        return []
    return [int(c) for c in v.split()]
def all_children(pid=None):
    """Recursively collect every descendant PID of ``pid`` (default: the
    current process). Direct children come first, then each child's subtree,
    depth-first in child order."""
    if not pid:
        pid = os.getpid()
    direct = get_children(pid)
    collected = list(direct)
    for child_pid in direct:
        collected.extend(all_children(child_pid))
    return collected
def memory(since=0.0, children=False):
    '''Return memory usage in bytes.

    ``since`` is subtracted from the result (pass a previous reading to get
    a delta); with ``children=True`` the VmSize of all descendant processes
    is included.
    '''
    total = 0.0
    if children:
        total = sum(_VmB('VmSize:', pid=cpid) for cpid in all_children())
    total += _VmB('VmSize:') - since
    return total
def resident(since=0.0, children=False):
    '''Return resident memory usage in bytes.

    Same contract as ``memory`` but reads VmRSS instead of VmSize.
    '''
    total = 0.0
    if children:
        total = sum(_VmB('VmRSS:', pid=cpid) for cpid in all_children())
    total += _VmB('VmRSS:') - since
    return total
def stacksize(since=0.0, children=False):
    '''Return stack size in bytes.

    Same contract as ``memory`` but reads VmStk instead of VmSize.
    '''
    total = 0.0
    if children:
        total = sum(_VmB('VmStk:', pid=cpid) for cpid in all_children())
    total += _VmB('VmStk:') - since
    return total
|
// Barrel module: re-exports every component (and its props type, where one
// exists) so consumers can import from the package root.
export { EditorNavbar } from "./EditorNavbar";
export { EditorPanel } from "./EditorPanel";
export { PreviewSpace } from "./PreviewSpace";
export { HomeNavbar } from "./HomeNavbar";
export { SlideNavigator } from "./SlideNavigator";
export { Slide } from "./Slide";
export { Logo } from "./Logo";
export { PrimaryButton } from "./PrimayButton";
export { PublishSettingsModal } from "./PublishSettingsModal";
export { AccountActions } from "./AccountActions";
export { ColorPicker } from "./ColorPicker";
export { LoadFonts } from "./LoadFonts";
export { FullScreenPresentation } from "./FullScreenPresentation";
export { NextSlideButton } from "./NextSlideButton";
export { PrevSlideButton } from "./PrevSlideButton";
export { Meta } from "./Meta";
export { StartPresentationModeButton } from "./StartPresentationModeButton";
export type { EditorNavbarProps } from "./EditorNavbar";
export type { EditorPanelProps } from "./EditorPanel";
export type { HomeNavbarProps } from "./HomeNavbar";
export type { SlideNavigatorProps } from "./SlideNavigator";
export type { PreviewSpaceProps } from "./PreviewSpace";
export type { LogoProps } from "./Logo";
export type { NextSlideButtonProps } from "./NextSlideButton";
export type { PrevSlideButtonProps } from "./PrevSlideButton";
export type { SlideProps } from "./Slide";
export type { ColorPickerProps } from "./ColorPicker";
export type { AccountActionsProps } from "./AccountActions";
export type { PublishSettingsModalProps } from "./PublishSettingsModal";
export type { MetaProps } from "./Meta";
export type { FullScreenPresentationProps } from "./FullScreenPresentation";
export type { StartPresentationModeButtonProps } from "./StartPresentationModeButton";
|
<filename>src/map.js
const equals = require('shallow-equals');
/**
 * Immutable map backed by a frozen plain-object store (`__props`).
 *
 * Every "mutating" operation is copy-on-write: it returns `this` when the
 * result would be identical, otherwise a brand-new IMap. The receiver is
 * never modified.
 *
 * Fix: setIn/getIn/updateIn previously walked the path with
 * `path.shift()`, destructively consuming the caller's array; they now
 * destructure instead and leave the argument untouched.
 */
class BaseMap {
  /** Shallow-merge `opts` in; returns `this` when every key is unchanged. */
  merge(opts) {
    let isUnequal = false;
    for (let key of Object.keys(opts)) {
      if (this.__props[key] !== opts[key]) {
        isUnequal = true;
        break;
      }
    }
    if (!isUnequal) return this;
    return IMap({ ...this.__props, ...opts });
  }
  /** Return a map with `key` set to `value` (or `this` if already equal). */
  set(key, value) {
    if (this.__props[key] === value) {
      return this;
    }
    return IMap({
      ...this.__props,
      [key]: value
    });
  }
  /**
   * Transform every entry through `mapFn(key, value) -> [newKey, newValue]`.
   * Returns `this` when the result is shallow-equal to the current contents.
   */
  map(mapFn) {
    let result = {};
    for (let entry of Object.entries(this.__props)) {
      let [ key, value ] = entry;
      let [ newKey, newValue ] = mapFn(key, value);
      result[newKey] = newValue;
    }
    if (equals(result, this.__props)) {
      return this;
    }
    return IMap(result);
  }
  /** Set a value at a nested `path` (array of keys); `path` is not mutated. */
  setIn(path, value) {
    const [p, ...rest] = path;
    if (rest.length === 0) {
      return this.set(p, value);
    } else {
      return this.set(p, this[p].setIn(rest, value));
    }
  }
  /**
   * Read the value at nested `path`, returning `defaultValue` when the final
   * key is absent; `path` is not mutated. Returns undefined when an
   * intermediate node does not support getIn (preserved original behavior).
   */
  getIn(path, defaultValue) {
    const [p, ...rest] = path;
    if (rest.length === 0) {
      const value = this[p];
      if (value === undefined) {
        return defaultValue;
      }
      return value;
    } else if (this[p] && this[p].getIn) {
      return this[p].getIn(rest, defaultValue);
    }
  }
  /** Update the value at nested `path` via `valueFn(current || defaultValue)`. */
  updateIn(path, valueFn, defaultValue) {
    const [p, ...rest] = path;
    if (rest.length === 0) {
      return this.set(p, valueFn(this[p] || defaultValue));
    } else {
      return this.set(p, this[p].updateIn(rest, valueFn, defaultValue));
    }
  }
  /** Enumerable keys (the defined getters). */
  keys() {
    return Object.keys(this);
  }
  /** Enumerable values. */
  values() {
    return Object.values(this);
  }
  /** Map values only, keeping keys. */
  mapValues(mapFn) {
    return this.map((key, value) => [ key, mapFn(value) ]);
  }
  /** Map keys only, keeping values. */
  mapKeys(mapFn) {
    return this.map((key, value) => [ mapFn(key), value ]);
  }
  /** Swap keys and values. (The former `mapFn` parameter was unused.) */
  inverted() {
    return this.map((key, value) => [ value, key ]);
  }
  /** Plain mutable object snapshot of the contents. */
  toObject() {
    return ({ ...this });
  }
  /** Return a map without `key` (or `this` when the key is absent). */
  remove(key) {
    if (this.__props.hasOwnProperty(key)) {
      const result = { ...this.__props };
      delete result[key];
      return IMap(result);
    }
    return this;
  }
  /** Keep only entries for which `filterFn(key, value)` is truthy. */
  filter(filterFn) {
    let result;
    let tainted = false;
    for (let [ key, value ] of Object.entries(this.__props)) {
      if (!filterFn(key, value)) {
        if (!tainted) {
          // Copy lazily: only pay for the clone once something is dropped.
          result = { ...this.__props };
        }
        tainted = true;
        delete result[key];
      }
    }
    if (!tainted) return this;
    return IMap(result);
  }
  /** Subset containing only `keys` (missing keys come back as undefined). */
  pick(keys) {
    const result = {};
    for (let key of keys) {
      result[key] = this.__props[key];
    }
    if (equals(result, this.__props)) {
      return this;
    }
    return IMap(result);
  }
}
/**
 * Construct a frozen BaseMap over `opts`: the backing store is attached as a
 * hidden, frozen `__props`, and each key becomes a read-only enumerable
 * getter that throws on assignment.
 */
const IMap = function(opts) {
  const map = new BaseMap();
  Object.defineProperty(map, "__props", {
    value: opts,
    enumerable: false,
    configurable: false
  });
  Object.freeze(map.__props);
  Object.entries(opts).forEach(([key, value]) => {
    Object.defineProperty(map, key, {
      get: () => value,
      set: (_) => { throw new Error("Cannot set '" + key + "': value is readonly, use .set()") },
      configurable: false,
      enumerable: true
    });
  });
  Object.freeze(map);
  return map;
}
module.exports = IMap;
|
package native
import (
	"errors"
	"os"
	"os/exec"
	"strconv"
	"sync"
	"syscall"
	"time"

	units "github.com/docker/go-units"
	docker "github.com/fsouza/go-dockerclient"
	log "github.com/xuperchain/log15"
	"github.com/xuperchain/xupercore/kernel/contract/bridge"
)
var (
	// dockerOnce guards one-time initialization of the shared docker client.
	dockerOnce   sync.Once
	dockerClient *docker.Client
)

const (
	// pingTimeoutSecond is exported to contract processes via XCHAIN_PING_TIMEOUT.
	pingTimeoutSecond = 2
)
// Process is the container of running contract
type Process interface {
	// Start launches the native-code process
	Start() error
	// Stop stops the process; if it has not exited within the timeout it is killed forcibly
	Stop(timeout time.Duration) error
}
// DockerProcess is the process running as a docker container
type DockerProcess struct {
	basedir  string   // working directory mounted into the container
	startcmd string   // shell command that launches the contract
	envs     []string // extra environment variables for the contract
	mounts   []string // host paths bind-mounted into the container
	// ports []string
	cfg *bridge.NativeDockerConfig // image name and resource limits

	id string // container ID, set by Start
	log.Logger
}
// resourceConfig translates the docker config into a CFS CPU quota
// (cpuPeriod * Cpus) and a memory limit in bytes; memory stays 0 when
// no limit string is configured.
func (d *DockerProcess) resourceConfig() (int64, int64, error) {
	const cpuPeriod = 100000
	cpuLimit := int64(cpuPeriod * d.cfg.Cpus)
	var memLimit int64
	if d.cfg.Memory != "" {
		limit, err := units.RAMInBytes(d.cfg.Memory)
		if err != nil {
			return 0, 0, err
		}
		memLimit = limit
	}
	return cpuLimit, memLimit, nil
}
// Start implements process interface
// It creates and starts a docker container that runs the contract's start
// command with host networking, the configured bind mounts, the caller's
// uid:gid, and the CPU/memory limits from resourceConfig.
func (d *DockerProcess) Start() error {
	client, err := getDockerClient()
	if err != nil {
		return err
	}
	// Declare every mount point as a container volume.
	volumes := map[string]struct{}{}
	for _, mount := range d.mounts {
		volumes[mount] = struct{}{}
	}
	// The start command is run through sh so it may be a full shell line.
	cmd := []string{
		"sh", "-c",
		d.startcmd,
	}
	// Process env = ping timeout + per-contract envs + inherited host env.
	env := []string{
		"XCHAIN_PING_TIMEOUT=" + strconv.Itoa(pingTimeoutSecond),
	}
	env = append(env, d.envs...)
	env = append(env, os.Environ()...)
	// Run as the current uid:gid so bind-mounted files keep sane ownership.
	user := strconv.Itoa(os.Getuid()) + ":" + strconv.Itoa(os.Getgid())
	cpulimit, memlimit, err := d.resourceConfig()
	if err != nil {
		return err
	}
	// Bind each host path to the identical path inside the container.
	binds := make([]string, len(d.mounts))
	for i := range d.mounts {
		binds[i] = d.mounts[i] + ":" + d.mounts[i]
	}
	// portBinds := make(map[docker.Port][]docker.PortBinding)
	// for _, port := range d.ports {
	// 	key := docker.Port(port + "/tcp")
	// 	value := []docker.PortBinding{
	// 		{
	// 			HostIP:   "127.0.0.1",
	// 			HostPort: port,
	// 		},
	// 	}
	// 	portBinds[key] = value
	// }
	opts := docker.CreateContainerOptions{
		Config: &docker.Config{
			Volumes:    volumes,
			Env:        env,
			WorkingDir: d.basedir,
			// NetworkDisabled: true,
			Image: d.cfg.ImageName,
			Cmd:   cmd,
			User:  user,
		},
		HostConfig: &docker.HostConfig{
			NetworkMode: "host",
			AutoRemove:  true, // container is cleaned up automatically on exit
			Binds:       binds,
			CPUPeriod:   cpulimit,
			Memory:      memlimit,
			// PortBindings: portBinds,
		},
	}
	container, err := client.CreateContainer(opts)
	if err != nil {
		return err
	}
	d.Info("create container success", "id", container.ID)
	// Remember the container ID so Stop can address it later.
	d.id = container.ID

	err = client.StartContainer(d.id, nil)
	if err != nil {
		return err
	}
	d.Info("start container success", "id", d.id)
	return nil
}
// Stop implements process interface
// It asks docker to stop the container, giving it `timeout` to exit before
// docker kills it, then blocks until the container has fully terminated.
func (d *DockerProcess) Stop(timeout time.Duration) error {
	client, err := getDockerClient()
	if err != nil {
		return err
	}
	err = client.StopContainer(d.id, uint(timeout.Seconds()))
	if err != nil {
		return err
	}
	d.Info("stop container success", "id", d.id)
	// Wait for full termination; the error is deliberately ignored since the
	// container may already have been auto-removed (AutoRemove is set).
	client.WaitContainer(d.id)
	d.Info("wait container success", "id", d.id)
	return nil
}
// HostProcess is the process running as a native process
type HostProcess struct {
	basedir  string   // working directory for the command
	startcmd string   // shell command that launches the contract
	envs     []string // extra environment variables for the contract

	cmd *exec.Cmd // underlying command, set by Start
	log.Logger
}
// Start implements process interface
// It launches the start command through sh in its own session (Setsid) so
// the whole process group can be managed independently of the parent.
func (h *HostProcess) Start() error {
	cmd := exec.Command("sh", "-c", h.startcmd)
	cmd.Dir = h.basedir
	cmd.SysProcAttr = &syscall.SysProcAttr{
		Setsid: true,
		Pgid:   0,
	}
	// Child env = ping timeout + per-contract envs + inherited host env.
	cmd.Env = []string{"XCHAIN_PING_TIMEOUT=" + strconv.Itoa(pingTimeoutSecond)}
	cmd.Env = append(cmd.Env, h.envs...)
	cmd.Env = append(cmd.Env, os.Environ()...)
	// Contract output is forwarded straight to this process's stdio.
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Start(); err != nil {
		return err
	}
	h.Info("start command success", "pid", cmd.Process.Pid)
	h.cmd = cmd
	return nil
}
func processExists(pid int) bool {
return syscall.Kill(pid, syscall.Signal(0)) == nil
}
// Stop implements process interface
// Sends SIGTERM, polls once per second until the process exits or the
// timeout elapses, then SIGKILLs if it is still alive, and finally reaps it.
func (h *HostProcess) Stop(timeout time.Duration) error {
	// Signal error is intentionally ignored: the process may already be gone.
	h.cmd.Process.Signal(syscall.SIGTERM)
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		if !processExists(h.cmd.Process.Pid) {
			break
		}
		time.Sleep(time.Second)
	}
	// force kill if timeout
	if !time.Now().Before(deadline) {
		h.cmd.Process.Kill()
	}
	h.Info("stop command success", "pid", h.cmd.Process.Pid)
	// Wait reaps the child and returns its exit status.
	return h.cmd.Wait()
}
// getDockerClient lazily initializes and returns the shared docker client.
//
// Fix: initialization runs only once (sync.Once), so on any call after a
// failed first attempt the local err is nil while dockerClient is also nil —
// the old code then returned (nil, nil) and callers would crash on a nil
// client. We now report an explicit error in that case.
func getDockerClient() (*docker.Client, error) {
	var err error
	dockerOnce.Do(func() {
		dockerClient, err = docker.NewClientFromEnv()
	})
	if err != nil {
		return nil, err
	}
	if dockerClient == nil {
		return nil, errors.New("docker client initialization failed on a previous call")
	}
	return dockerClient, nil
}
|
// Authenticator-app step of the account authentication setup flow.
// `replace` (not `push`) is used so these intermediate steps do not pile up
// in the browser history.
export default {
  name: 'AuthenticatorApp',
  components: {
  },
  methods: {
    // Advance to the QR-code pairing screen.
    onNext () {
      this.$router.replace('/account/authentication/qrcode')
    },
    // Abort setup and return to the authentication preferences screen.
    onCancel () {
      this.$router.replace('/account/authentication/preferences')
    }
  }
}
|
#!/bin/bash
# Zero-shot evaluation of ctrl_vilbert on the Arabic XVNLI test split.
# Task 19 maps to XVNLI in the tasks config; the checkpoint was trained on
# English XVNLI and is evaluated cross-lingually here.
TASK=19
MODEL=ctrl_vilbert
MODEL_CONFIG=ctrl_vilbert_base
TASKS_CONFIG=iglue_test_tasks_boxes36.dtu
TRTASK=XVNLI
TETASK=XVNLIar
TEXT_PATH=/home/projects/ku_00062/data/XVNLI/annotations/ar/test.jsonl
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/zero_shot/xvnli/${MODEL}/${TRTASK}_${MODEL_CONFIG}/pytorch_model_best.bin
OUTPUT_DIR=/home/projects/ku_00062/results/iglue/zero_shot/xvnli/${MODEL}/${TRTASK}_${MODEL_CONFIG}/$TETASK/test
# Activate the project virtualenv, run the evaluation from the volta repo,
# then deactivate.
source /home/projects/ku_00062/envs/iglue/bin/activate
cd ../../../../volta
python eval_task.py \
--bert_model /home/projects/ku_00062/huggingface/bert-base-uncased --do_lower_case \
--config_file config/${MODEL_CONFIG}.json \
--from_pretrained ${PRETRAINED} \
--tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --split test \
--output_dir ${OUTPUT_DIR} --val_annotations_jsonpath ${TEXT_PATH}
deactivate
|
#!/bin/bash
source ./constant.sh
# create network
docker network create --subnet=${REDIS_NETWORK_IP}/24 ${REDIS_NETWORK}
# create redis-cluster
# master:slave 1:1 (6 nodes -> 3 masters, 3 replicas)
# NOTE(review): node IPs are hard-coded to 169.69.2.x here — confirm they
# match REDIS_NETWORK_IP from constant.sh.
redis-cli --cluster create 169.69.2.2:7001 169.69.2.3:7002 169.69.2.4:7003 169.69.2.5:7004 169.69.2.6:7005 169.69.2.7:7006 --cluster-replicas 1
|
<reponame>smagill/opensphere-desktop
package io.opensphere.mantle.data;
import java.awt.Component;
import java.util.List;
import javax.swing.Icon;
/**
 * A way for a data type to provide additional functionality, such as additional
 * UIs.
 *
 * <p>Implementations supply optional decorations for the Layers UI; whether
 * {@code null} or empty collections are acceptable return values is up to the
 * implementor's contract with the Layers UI (not specified here).
 */
public interface DataTypeInfoAssistant
{
    /**
     * Gets the layer control user interface component.
     *
     * @param dataType the {@link DataTypeInfo}
     * @return the layer control component
     */
    Component getLayerControlUIComponent(DataTypeInfo dataType);

    /**
     * Gets any layer icons to show in the Layers UI.
     *
     * @return the layer icons
     */
    List<Icon> getLayerIcons();

    /**
     * Gets any additional labels to show to the right of the layer in the Layers UI.
     *
     * @return the layer labels
     */
    List<String> getLayerLabels();
}
|
from django_chuck.template.base import BaseEngine
from django_chuck.utils import write_to_file
from django_chuck.exceptions import TemplateError
import os
import re
class TemplateEngine(BaseEngine):
input_file = ""
base_file = ""
extension_file = ""
line_count = ""
input = ""
output = ""
keyword_patterns = {
"extends": re.compile(r"#!chuck_extends [\'\"]?(.+)[\'\"]?", re.IGNORECASE),
"extends_if_exists": re.compile(r"#!chuck_extends_if_exists [\'\"]?(.+)[\'\"]?", re.IGNORECASE),
"renders": re.compile(r"#!chuck_renders ([\w\d\_\-]+)", re.IGNORECASE),
"appends": re.compile(r"#!chuck_appends ([\w\d\_\-]+)", re.IGNORECASE),
"prepends": re.compile(r"#!chuck_prepends ([\w\d\_\-]+)", re.IGNORECASE),
}
def get_block_content(self, content, block_name="", keyword="renders"):
"""
Get the content of a chuck block (chuck_renders, chuck_append, chuck_prepend)
"""
block_content = ""
match = re.search(r"#!chuck_" + keyword +" " + re.escape(block_name) + "([\r\n\s]+.*?)#!end",
content,
re.MULTILINE|re.DOTALL)
if match:
block_content = match.group(1)
return block_content
def update_block_content(self, block_name, keyword="renders"):
"""
Update output depending on keyword (append, prepend, renders) from template with block named block_name
"""
old_block_content = self.get_block_content(self.output, block_name)
if old_block_content:
new_block_content = self.get_block_content(self.input, block_name, keyword)
if new_block_content:
if keyword == "appends":
update_content = old_block_content + new_block_content
elif keyword == "prepends":
update_content = new_block_content + old_block_content
else:
update_content = new_block_content
self.output = re.sub(r"#!chuck_renders " + block_name + re.escape(old_block_content) + "#!end",
r"#!chuck_renders " + block_name + update_content + "#!end",
self.output,
re.IGNORECASE|re.DOTALL|re.MULTILINE)
else:
raise TemplateError("Content of block " + block_name + " cannot be found in file " + self.input_file + " line " + str(self.line_count))
else:
raise TemplateError("Block " + block_name + " cannot be found in file " + self.base_file)
@staticmethod
def found_keyword_in_line(line):
"""
Parse keyword in line
"""
keyword = None
match = re.search(r"#!chuck_(\w+)", line)
if match:
keyword = match.group(1)
return keyword
def write_file(self, filename):
"""
Replace placeholder and write file
"""
for (var, value) in self.placeholder.items():
self.output = self.output.replace("$" + var, value)
write_to_file(filename, self.output)
def get_real_basefile_path(self):
"""
Return the real basefile path. Resolve project to dir name.
"""
base_file = os.path.join(self.site_dir, self.extension_file).rstrip("\"").rstrip("\'").lstrip()
if self.extension_file.startswith("project"):
# remove first dir (project) from path
(tmp_path, tmp_file) = os.path.split(self.extension_file)
tmp_path = tmp_path.lstrip()
tmp_dirs = tmp_path.split(os.sep)
# project dir is not the only dir
if len(tmp_dirs) > 1:
tmp_path = os.sep.join(tmp_dirs[1:])
else:
tmp_path = ""
# and add the real project dir
base_file = os.path.join(self.project_dir, tmp_path, tmp_file)
return base_file
def extend_file(self):
"""
command extends
"""
# Write old base file
if self.base_file:
self.write_file(self.base_file)
# Remove old extension block
tmp = self.input.splitlines()
self.input = "\n".join(tmp[self.line_count:])
# Load base template
self.base_file = self.get_real_basefile_path()
try:
fh = open(self.base_file, "r")
self.output = fh.read()
fh.close()
except IOError:
raise TemplateError("Cannot find extension file " + self.base_file + " in file " + self.input_file + " line " + str(self.line_count))
def handle(self, input_file, output_file, placeholder):
    """
    Render a template file.

    Scans *input_file* line by line for ``#!chuck_*`` keywords and applies
    the matching command (extends / extends_if_exists / renders / prepends /
    appends).  The final result is written either to the resolved base file
    (when the template extends one) or to *output_file*.

    :param input_file: path of the template to render
    :param output_file: fallback output path when no base file is in play
    :param placeholder: dict of ``$name`` -> value substitutions
    :raises TemplateError: on a keyword that has no registered pattern
    """
    self.base_file = None
    self.input = None
    self.line_count = 0
    self.input_file = input_file
    self.base_file = ""
    self.extension_file = None
    self.output = ""
    self.placeholder = placeholder
    with open(self.input_file, "r") as f:
        self.input = f.read()
    lines = self.input.splitlines()
    # Until an "extends" is seen, the output is simply the input itself.
    self.output = self.input
    for line in lines:
        self.line_count += 1
        # Something to do for chuck?
        keyword = self.found_keyword_in_line(line)
        if keyword:
            if self.keyword_patterns.get(keyword):
                match = self.keyword_patterns[keyword].match(line)
                # EXTENDS
                if match and keyword == "extends":
                    self.extension_file = match.group(1).rstrip("\"").rstrip("\'")
                    self.extend_file()
                # EXTENDS_IF_EXISTS
                elif match and keyword == "extends_if_exists":
                    self.extension_file = match.group(1).rstrip("\"").rstrip("\'")
                    if os.path.exists(self.get_real_basefile_path()):
                        self.extend_file()
                    else:
                        self.base_file = None
                # RENDERS
                elif match and match.group(1) and keyword == "renders" and self.base_file:
                    self.update_block_content(match.group(1), keyword)
                # PREPENDS, APPENDS
                elif match and match.group(1) and (keyword == "prepends" or keyword == "appends") and self.base_file:
                    self.update_block_content(match.group(1), keyword)
            else:
                raise TemplateError("Unknown keyword " + keyword + " found in file " + self.input_file + " line " + str(self.line_count))
    # Write to the base file when extending, otherwise to the given output.
    if self.base_file:
        self.write_file(self.base_file)
    else:
        self.write_file(output_file)
def remove_keywords(self, input_file):
    """
    Strip all chuck template keywords from *input_file*, rewriting it in place.

    Removes ``#!chuck_<keyword> <arg>`` markers (for keywords that have a
    registered pattern) and their matching ``#!end`` markers; all other text
    is preserved unchanged.
    """
    output = []
    with open(input_file, "r") as f:
        # file.xreadlines() is Python 2 only; iterating the file object is
        # the portable equivalent.
        for line in f:
            keyword = self.found_keyword_in_line(line)
            if keyword:
                match = self.keyword_patterns[keyword].search(line)
                if match:
                    # NOTE: re.sub()'s fourth positional argument is *count*,
                    # not *flags* -- the original code silently passed
                    # re.IGNORECASE as a replacement count.  Pass flags by
                    # keyword instead.
                    line = re.sub(r"#!chuck_" + keyword + " " + re.escape(match.group(1)) + r"\s?\r?\n?",
                                  "", line, flags=re.IGNORECASE)
                    line = re.sub(r"#!end\s*\r?\n?", "", line, flags=re.IGNORECASE)
            output.append(line)
    with open(input_file, "w") as f:
        f.write("".join(output))
|
import { getSession } from "../neo4j";
import { StreamObject } from "../types";
/**
 * Persist a tweet from the filtered stream into neo4j, linked to its author
 * via a TWEETED relationship.  Errors are logged and swallowed (best-effort
 * ingestion); the session is always closed.
 */
export const createTweet = async (streamObject: StreamObject) => {
  const session = getSession();
  const { id, created_at, text, author_id } = streamObject.data;
  const tweet = {
    id,
    created_at,
    text,
    author_id,
  };
  try {
    await session.run(
      "MATCH (u:User) WHERE u.id = $author_id CREATE (u)-[:TWEETED]->(t:Tweet) SET t = $tweet",
      { tweet, author_id }
    );
  } catch (err) {
    console.log(err);
  } finally {
    // session.close() is asynchronous; await it so the connection is
    // actually released before this promise resolves.
    await session.close();
  }
};
|
<reponame>Ojonathan/Startlight
#include "gameview.h"
#include "ui_gameview.h"
#include "metier_abs/mirror.h"
#include <QPixmap>
#include <QMessageBox>
#include <iostream>
#include <QWidget>
#include <QObject>
#include <vector>
#include <cmath>
#define PI (3.141592653589793)
// Build the view for a level: size the viewport, register this object as an
// observer of the scene, add every game item to the scene, and wire the
// signals that trigger ray recomputation.
gameView::gameView(Level * level, QWidget * parent):QGraphicsView{parent},scene{level}
{
    this->setScene(scene);
    dlimit = new QGraphicsRectItem();
    setHorizontalScrollBarPolicy( Qt::ScrollBarAlwaysOff );// disable scrolling when the map changes
    setVerticalScrollBarPolicy( Qt::ScrollBarAlwaysOff );
    // fixed size of the gameView (scene size plus a 20px margin)
    setFixedWidth(scene->getWidth() + 20);
    setFixedHeight(scene->getHeight() + 20);
    setMouseTracking(true);
    // attach this view as an observer of the scene
    scene->attacher(this);
    setBackgroundBrush(QBrush(QPixmap(":resource/fond5.png")));
    // light source: recompute rays whenever the source is switched on
    connect(scene->getSrc(),SIGNAL(mesOn()),this,SLOT(compute()));
    scene->addItem(scene->getSrc());
    // destination
    scene->addItem(scene->getDest());
    // crystals
    for(unsigned i = 0; i<scene->getCrystals().size();i++){
        scene->addItem(scene->getCrystals()[i]);
    }
    // walls
    for(unsigned i = 0; i<scene->getWalls().size();i++){
        scene->addItem(scene->getWalls()[i]);
    }
    // bombs
    for(unsigned i = 0; i<scene->getNukes().size();i++){
        scene->addItem(scene->getNukes()[i]);
    }
    // lenses
    for(unsigned i = 0; i<scene->getLenses().size();i++){
        scene->addItem(scene->getLenses()[i]);
    }
    // Mirrors: connect each mirror so that any mouse-driven movement of a
    // mirror triggers Level::computeRays() (via the compute() slot), and so
    // that its translation limits can be drawn while dragging.
    for(unsigned i = 0; i<scene->getMirrors().size();i++){
        //connect(scene->getMirrors()[i],&Mirror::moved(),this,&gameView::compute());
        //connect(scene->getMirrors()[i],SIGNAL(moved()),this,SLOT(compute()));
        connect(scene->getMirrors()[i],SIGNAL(actionMouse()),this,SLOT(compute()));
        connect(scene->getMirrors()[i],SIGNAL(drawlimit(bool,Point,Point)),
                this,SLOT(limit(bool,Point,Point)));
        scene->addItem(scene->getMirrors()[i]);
    }
    // initial ray computation
    scene->computeRays();
}
// Show or hide the white rectangle that visualises a mirror's translation
// limits: draw=true adds the rectangle spanning p1..p2, draw=false removes it.
void gameView::limit(bool draw,Point p1,Point p2){
    if(draw){
        std::cout<<p1<<" "<<p2<<std::endl;
        // only works when yMax is greater than yMin (p2 below-right of p1)
        QPen penlimit(Qt::white,2);
        dlimit->setPen(penlimit);
        dlimit->setRect(p1.getX(),p1.getY(),p2.getX() - p1.getX(),p2.getY() - p1.getY());
        scene->addItem(dlimit);
    }else{
        scene->removeItem(dlimit);
    }
}
// Slot: recompute all light rays (forwarded to the Level/scene).
void gameView::compute()
{
    scene->computeRays();
}
// Detach from the observed scene and free the limit rectangle.
// NOTE(review): dlimit may still be owned by the scene when it was last
// added via addItem() -- confirm ownership to rule out a double delete.
gameView::~gameView()
{
    scene->detacher(this);
    delete dlimit;
}
// Observer callback used to visualise scene changes: adds the freshly
// computed rays, then checks the win condition (destination lit) and the
// lose condition (any bomb lit).  The sdo parameter (the notifying subject)
// is unused here.
void gameView::rafraichir(SujetDObservation* sdo)
{
    // put the newly created rays on the scene
    for(unsigned i = 0; i< scene->getRays().size();i++){
        scene->addItem(scene->getRays()[i]);
    }
    // check whether the destination has been hit
    if(scene->getDest()->isLightedUp()){
        QMessageBox::information(this,"Felicitations","Vous avez gagné");
        setEnabled(false);
    }
    // check whether a bomb has been hit
    for(unsigned i = 0; i < scene->getNukes().size(); i++){
        if(scene->getNukes()[i]->isLightedUp()){
            QMessageBox::critical(this,"Desolé ...","Vous avez perdu");
            setEnabled(false);
        }
    }
}
|
The most significant implications of deep learning are:
• Improved accuracy and speed of analytics
• Ability to extract and analyze data from large and diverse data sets
• Ability to identify patterns and structures from data that are not visible to traditional analytical methods
• Improved automation and decision support through the use of autonomous systems
• Facilitated capabilities for predictive analytics
• Ability to create supervised and unsupervised models for learning
• Improved interface capabilities for applications like natural language processing (NLP).
• Increased efficiency and decreased errors from replacing manual processes with automated ones.
• Increased ability to manage and optimize complex processes.
• Improved understanding of user needs and behavior.
• Development of new model architectures, such as generative adversarial networks.
• Improved ability to personalize and automate customer service.
• Improved ability to identify and prevent fraud.
• Development of models to better analyze stock performance.
• Improved medical diagnoses and treatments.
• Increased ability to detect and predict cyber threats.
• Ability to better target marketing messages and resources.
• Facilitated development of more intelligent and interactive chatbots.
• Increased capabilities of virtual assistants.
• Improved ability to detect cyberbullying and other malicious behavior.
• Increased development of self-driving cars and other automated systems.
• Ability to generate realistic images and other types of digital media.
• Facilitated development of robots with better cognitive capabilities.
• Ability to recognize facial expressions and gestures.
• Improved ability to detect and respond to cyber threats.
• Enhanced tracking capabilities.
• Improved automation of various business processes.
• Increased ability to provide personalized recommendations.
• Enhanced security of the Internet of Things (IoT) and other connected systems.
• Improved support for those with disabilities.
• Enabled the development of new tools for data science.
• Increased efficiency in the healthcare industry.
• Improved education through better platforms for teaching and learning.
• Enhanced effectiveness of online marketplaces.
• Facilitated development of more sophisticated financial products.
• Improved governance and social responsibility of companies. |
# Namespace for Kubernetes "extensions" API group DSLs; each version module
# is lazily loaded on first reference via autoload.
module KubeDSL::DSL::Extensions
  autoload :V1beta1, 'kube-dsl/dsl/extensions/v1beta1'
end
|
# Let the CMS collector unload unused classes so long-running sbt sessions
# do not exhaust PermGen/Metaspace.
# NOTE(review): this flag targets the CMS GC, which was removed in modern
# JVMs -- confirm the JVM version in use before relying on it.
export SBT_OPTS="-XX:+CMSClassUnloadingEnabled"
|
#!/usr/bin/env node
var KrakenClient = require('kraken-api')
var kraken = new KrakenClient()
var mapping
var products = []
// Append one tradable product, built from a Kraken asset-pair record, to
// the module-level `products` array.  (`altname` is accepted for signature
// compatibility but not used.)
function addProduct(base, quote, altname, min_size, increment) {
  var entry = {
    asset: base,
    currency: quote,
    min_size: parseFloat(min_size).toFixed(10),
    increment: Math.pow(10, -increment).toFixed(10),
    label: getPair(base) + '/' + getPair(quote)
  }
  products.push(entry)
}
// Resolve a Kraken internal asset code (e.g. "XXBT") to its display altname
// (e.g. "XBT") via the `mapping` table fetched from the Assets endpoint.
function getPair(name) {
  return mapping[name].altname
}
// Fetch asset metadata, then all asset pairs, and write products.json next
// to this script.  Exits non-zero on any API error.
kraken.api('Assets', null, function(error, data) {
  if (error) {
    console.log(error)
    process.exit(1)
  } else {
    mapping = data.result
    kraken.api('AssetPairs', null, function(error, data) {
      if (error) {
        console.log(error)
        process.exit(1)
      } else {
        Object.keys(data.result).forEach(function(result) {
          // Skip dark-pool pairs, whose names end in ".d".
          // BUG FIX: match('\.d') passed a string whose backslash is eaten
          // by the literal, yielding the regex /.d/ which matches any
          // character followed by "d".  Use an anchored regex literal.
          if (!/\.d$/.test(result)) {
            addProduct(data.result[result].base, data.result[result].quote, data.result[result].altname,
              data.result[result].ordermin, data.result[result].pair_decimals)
          }
        })
        var target = require('path').resolve(__dirname, 'products.json')
        require('fs').writeFileSync(target, JSON.stringify(products, null, 2))
        console.log('wrote', target)
        process.exit()
      }
    })
  }
})
|
<reponame>sercaneraslan/svelte
/* generated by Svelte vX.Y.Z */
/*
 * NOTE(review): this is machine-generated Svelte compiler output --
 * presumably an expected-output test fixture.  Do not hand-edit the code
 * below; any change will diverge from what the compiler emits.  TODO
 * confirm against the repository's test harness.
 */
import {
  SvelteComponentDev,
  add_location,
  append_dev,
  destroy_each,
  detach_dev,
  dispatch_dev,
  element,
  init,
  insert_dev,
  noop,
  safe_not_equal,
  set_data_dev,
  space,
  text
} from "svelte/internal";

const file = undefined;

function get_each_context(ctx, list, i) {
  const child_ctx = Object.create(ctx);
  child_ctx.thing = list[i];
  return child_ctx;
}

// (8:0) {#each things as thing}
function create_each_block(ctx) {
  var span, t0_value = ctx.thing.name + "", t0, t1;

  const block = {
    c: function create() {
      span = element("span");
      t0 = text(t0_value);
      t1 = space();
      {
        const { foo, bar, baz, thing } = ctx;
        console.log({ foo, bar, baz, thing });
        debugger;
      }
      add_location(span, file, 8, 1, 116);
    },
    m: function mount(target, anchor) {
      insert_dev(target, span, anchor);
      append_dev(span, t0);
      insert_dev(target, t1, anchor);
    },
    p: function update(changed, ctx) {
      if ((changed.things) && t0_value !== (t0_value = ctx.thing.name + "")) {
        set_data_dev(t0, t0_value);
      }
      if (changed.foo || changed.bar || changed.baz || changed.things) {
        const { foo, bar, baz, thing } = ctx;
        console.log({ foo, bar, baz, thing });
        debugger;
      }
    },
    d: function destroy(detaching) {
      if (detaching) {
        detach_dev(span);
        detach_dev(t1);
      }
    }
  };
  dispatch_dev("SvelteRegisterBlock", { block, id: create_each_block.name, type: "each", source: "(8:0) {#each things as thing}", ctx });
  return block;
}

function create_fragment(ctx) {
  var t0, p, t1, t2;

  let each_value = ctx.things;

  let each_blocks = [];

  for (let i = 0; i < each_value.length; i += 1) {
    each_blocks[i] = create_each_block(get_each_context(ctx, each_value, i));
  }

  const block = {
    c: function create() {
      for (let i = 0; i < each_blocks.length; i += 1) {
        each_blocks[i].c();
      }

      t0 = space();
      p = element("p");
      t1 = text("foo: ");
      t2 = text(ctx.foo);
      add_location(p, file, 12, 0, 182);
    },
    l: function claim(nodes) {
      throw new Error("options.hydrate only works if the component was compiled with the `hydratable: true` option");
    },
    m: function mount(target, anchor) {
      for (let i = 0; i < each_blocks.length; i += 1) {
        each_blocks[i].m(target, anchor);
      }

      insert_dev(target, t0, anchor);
      insert_dev(target, p, anchor);
      append_dev(p, t1);
      append_dev(p, t2);
    },
    p: function update(changed, ctx) {
      if (changed.things) {
        each_value = ctx.things;

        let i;
        for (i = 0; i < each_value.length; i += 1) {
          const child_ctx = get_each_context(ctx, each_value, i);

          if (each_blocks[i]) {
            each_blocks[i].p(changed, child_ctx);
          } else {
            each_blocks[i] = create_each_block(child_ctx);
            each_blocks[i].c();
            each_blocks[i].m(t0.parentNode, t0);
          }
        }

        for (; i < each_blocks.length; i += 1) {
          each_blocks[i].d(1);
        }

        each_blocks.length = each_value.length;
      }

      if (changed.foo) {
        set_data_dev(t2, ctx.foo);
      }
    },
    i: noop,
    o: noop,
    d: function destroy(detaching) {
      destroy_each(each_blocks, detaching);

      if (detaching) {
        detach_dev(t0);
        detach_dev(p);
      }
    }
  };
  dispatch_dev("SvelteRegisterBlock", { block, id: create_fragment.name, type: "component", source: "", ctx });
  return block;
}

function instance($$self, $$props, $$invalidate) {
  let { things, foo, bar, baz } = $$props;

  const writable_props = ['things', 'foo', 'bar', 'baz'];
  Object.keys($$props).forEach(key => {
    if (!writable_props.includes(key) && !key.startsWith('$$')) console.warn(`<Component> was created with unknown prop '${key}'`);
  });

  $$self.$set = $$props => {
    if ('things' in $$props) $$invalidate('things', things = $$props.things);
    if ('foo' in $$props) $$invalidate('foo', foo = $$props.foo);
    if ('bar' in $$props) $$invalidate('bar', bar = $$props.bar);
    if ('baz' in $$props) $$invalidate('baz', baz = $$props.baz);
  };

  $$self.$capture_state = () => {
    return { things, foo, bar, baz };
  };

  $$self.$inject_state = $$props => {
    if ('things' in $$props) $$invalidate('things', things = $$props.things);
    if ('foo' in $$props) $$invalidate('foo', foo = $$props.foo);
    if ('bar' in $$props) $$invalidate('bar', bar = $$props.bar);
    if ('baz' in $$props) $$invalidate('baz', baz = $$props.baz);
  };

  return { things, foo, bar, baz };
}

class Component extends SvelteComponentDev {
  constructor(options) {
    super(options);
    init(this, options, instance, create_fragment, safe_not_equal, ["things", "foo", "bar", "baz"]);
    dispatch_dev("SvelteRegisterComponent", { component: this, tagName: "Component", options, id: create_fragment.name });

    const { ctx } = this.$$;
    const props = options.props || {};

    if (ctx.things === undefined && !('things' in props)) {
      console.warn("<Component> was created without expected prop 'things'");
    }

    if (ctx.foo === undefined && !('foo' in props)) {
      console.warn("<Component> was created without expected prop 'foo'");
    }

    if (ctx.bar === undefined && !('bar' in props)) {
      console.warn("<Component> was created without expected prop 'bar'");
    }

    if (ctx.baz === undefined && !('baz' in props)) {
      console.warn("<Component> was created without expected prop 'baz'");
    }
  }

  get things() {
    throw new Error("<Component>: Props cannot be read directly from the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  set things(value) {
    throw new Error("<Component>: Props cannot be set directly on the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  get foo() {
    throw new Error("<Component>: Props cannot be read directly from the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  set foo(value) {
    throw new Error("<Component>: Props cannot be set directly on the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  get bar() {
    throw new Error("<Component>: Props cannot be read directly from the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  set bar(value) {
    throw new Error("<Component>: Props cannot be set directly on the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  get baz() {
    throw new Error("<Component>: Props cannot be read directly from the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }

  set baz(value) {
    throw new Error("<Component>: Props cannot be set directly on the component instance unless compiling with 'accessors: true' or '<svelte:options accessors/>'");
  }
}

export default Component;
/*
* Tencent is pleased to support the open source community by making Blueking Container Service available.
* Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package sqlstore
import (
"time"
"github.com/jinzhu/gorm"
"github.com/Tencent/bk-bcs/bcs-services/bcs-user-manager/app/user-manager/models"
)
// TokenStore abstracts persistence for permanent (BcsUser) and temporary
// (BcsTempToken) user tokens.
type TokenStore interface {
	// GetTokenByCondition returns the first token matching cond, or nil.
	GetTokenByCondition(cond *models.BcsUser) *models.BcsUser
	// GetUserTokensByName returns all tokens (expired or not) for a user.
	GetUserTokensByName(name string) []models.BcsUser
	// CreateToken inserts a new user token record.
	CreateToken(token *models.BcsUser) error
	// UpdateToken applies updatedToken's fields to token's record.
	UpdateToken(token, updatedToken *models.BcsUser) error
	// DeleteToken removes the record for the given token string.
	DeleteToken(token string) error
	// CreateTemporaryToken inserts a new temporary token record.
	CreateTemporaryToken(token *models.BcsTempToken) error
}

// NewTokenStore returns a TokenStore backed by the given gorm database handle.
func NewTokenStore(db *gorm.DB) TokenStore {
	return &realTokenStore{db: db}
}

// realTokenStore is the gorm-backed TokenStore implementation.
type realTokenStore struct {
	db *gorm.DB
}
// GetTokenByCondition Query token by condition.
// Returns nil when no matching row exists.
func (u *realTokenStore) GetTokenByCondition(cond *models.BcsUser) *models.BcsUser {
	token := models.BcsUser{}
	u.db.Where(cond).First(&token)
	// A found row has a non-zero primary key; ID == 0 means "not found".
	if token.ID != 0 {
		return &token
	}
	return nil
}
// GetUserTokensByName get user tokens by username; returns all of the
// user's tokens, both expired and not expired.
func (u *realTokenStore) GetUserTokensByName(name string) []models.BcsUser {
	var tokens []models.BcsUser
	u.db.Where(&models.BcsUser{Name: name}).Find(&tokens)
	return tokens
}
// CreateToken inserts a new user token record into the database.
func (u *realTokenStore) CreateToken(token *models.BcsUser) error {
	return u.db.Create(token).Error
}
// UpdateToken updates token's record with the fields of updatedToken.
func (u *realTokenStore) UpdateToken(token, updatedToken *models.BcsUser) error {
	return u.db.Model(token).Updates(*updatedToken).Error
}
// DeleteToken removes the user record matching the given token string.
func (u *realTokenStore) DeleteToken(token string) error {
	return u.db.Where(&models.BcsUser{UserToken: token}).Delete(&models.BcsUser{}).Error
}
// CreateTemporaryToken create new temporary token
// (comment previously named the wrong method).
func (u *realTokenStore) CreateTemporaryToken(token *models.BcsTempToken) error {
	err := u.db.Create(token).Error
	return err
}
// GetTempTokenByCondition queries a temporary token by condition using the
// global GCoreDB handle; returns nil when no matching row exists.
func GetTempTokenByCondition(cond *models.BcsTempToken) *models.BcsTempToken {
	tempUser := models.BcsTempToken{}
	GCoreDB.Where(cond).First(&tempUser)
	// ID == 0 means gorm found no row.
	if tempUser.ID != 0 {
		return &tempUser
	}
	return nil
}
// GetAllNotExpiredTokens returns every user token whose expires_at is still
// in the future.
func GetAllNotExpiredTokens() []models.BcsUser {
	var tokens []models.BcsUser
	GCoreDB.Where("expires_at > ?", time.Now()).Find(&tokens)
	return tokens
}
// GetAllTokens returns every user token, expired or not.
func GetAllTokens() []models.BcsUser {
	var tokens []models.BcsUser
	GCoreDB.Find(&tokens)
	return tokens
}
|
<filename>acmicpc.net/source/14614.cpp
// 14614. Calculate!
// 2021.12.03
// 수학
#include<iostream>
using namespace std;
// Reads integers a, b and a decimal number c (as a string), then prints the
// result of XOR-ing a with b, c times.  XOR-ing by the same value an even
// number of times cancels out, so only c's parity matters -- and the parity
// of a decimal number is the parity of its last digit.
int main()
{
    int a, b;
    std::string times;
    std::cin >> a >> b >> times;
    const int lastDigit = times.back() - '0';
    std::cout << (lastDigit % 2 == 0 ? a : (a ^ b)) << std::endl;
    return 0;
}
|
/**
* 200 (OK) Response
*
* Usage:
* return res.ok();
* return res.ok(data);
*
* @param {Object} data
**/
module.exports = function sendOk(data) {
return this.res.status(200).json(data);
};
|
import React from "react";
const StarsConfig = ({data, updateData, simple}) => (
<div>
<label>Viewport Angle</label>
<input
onChange={evt =>
updateData(
JSON.stringify({...JSON.parse(data), angle: evt.target.value}),
)
}
defaultValue={JSON.parse(data).angle}
/>
<p>
<small>
This is the yaw angle which the viewscreen is looking out in degrees.
</small>
</p>
</div>
);
export default StarsConfig;
|
#!/usr/bin/env bats

# Integration test: verify that the php-7.1 image exposes the redis
# extension when the corresponding ini switch is enabled.

DOCKER_COMPOSE_FILE="${BATS_TEST_DIRNAME}/php-7.1_ini_redis_on.yml"

# Print the name of the running php service container.
container() {
  echo "$(docker-compose -f ${DOCKER_COMPOSE_FILE} ps php | grep php | awk '{ print $1 }')"
}

# Bring the stack up before each test; the fixed sleep gives the container
# time to finish booting (no healthcheck is available to wait on).
setup() {
  docker-compose -f "${DOCKER_COMPOSE_FILE}" up -d
  sleep 20
}

# Tear the stack down and remove containers after each test.
teardown() {
  docker-compose -f "${DOCKER_COMPOSE_FILE}" kill
  docker-compose -f "${DOCKER_COMPOSE_FILE}" rm --force
}

@test "php-7.1: ini: redis: on" {
  run docker exec "$(container)" /bin/su - root -mc "php -m | grep 'redis'"
  [ "${status}" -eq 0 ]
}
|
<filename>opencga-analysis/src/main/java/org/opencb/opencga/analysis/files/FileScanner.java
package org.opencb.opencga.analysis.files;
import org.opencb.datastore.core.ObjectMap;
import org.opencb.datastore.core.QueryOptions;
import org.opencb.datastore.core.QueryResult;
import org.opencb.opencga.catalog.db.api.CatalogFileDBAdaptor;
import org.opencb.opencga.catalog.exceptions.CatalogException;
import org.opencb.opencga.catalog.io.CatalogIOManager;
import org.opencb.opencga.catalog.utils.CatalogFileUtils;
import org.opencb.opencga.catalog.CatalogManager;
import org.opencb.opencga.catalog.models.File;
import org.opencb.opencga.catalog.models.Study;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Paths;
import java.util.*;
/**
* @author <NAME> <<EMAIL>>
*/
/**
 * Scans directories for files, registers them in the catalog, and keeps the
 * catalog's tracking status (READY / MISSING / TRASHED) in sync with the
 * filesystem.
 */
public class FileScanner {

    private static Logger logger = LoggerFactory.getLogger(FileScanner.class);

    protected final CatalogManager catalogManager;

    private CatalogFileUtils catalogFileUtils;

    /**
     * Policy applied when a scanned file collides with an existing catalog entry.
     */
    public enum FileScannerPolicy {
        DELETE, //Delete file and file entry. Then create a new one
        REPLACE, //Delete the file, but not the file entry. Updates the file information.
//        DO_ERROR,
//        RENAME,
    }

    public FileScanner(CatalogManager catalogManager) {
        this.catalogManager = catalogManager;
        catalogFileUtils = new CatalogFileUtils(catalogManager);
    }

    /**
     * Check tracking for all files of a study.
     * Sets file status to File.Status.MISSING if the file (fileUri) is unreachable.
     * Sets file status to File.Status.READY if it was File.Status.MISSING and the file (fileUri) is reachable.
     * @param study The study to check
     * @param calculateChecksum Whether to recompute each file's checksum during the check
     * @param sessionId User sessionId
     * @throws CatalogException on catalog access errors
     * @return found and lost files
     */
    public List<File> checkStudyFiles(Study study, boolean calculateChecksum, String sessionId) throws CatalogException {
        QueryOptions queryOptions = new QueryOptions();
        queryOptions.put(CatalogFileDBAdaptor.FileFilterOption.status.toString(), Arrays.asList(
                File.Status.READY, File.Status.MISSING, File.Status.TRASHED));

        QueryResult<File> files = catalogManager.getAllFiles(study.getId(),
                queryOptions,
                sessionId);

        List<File> modifiedFiles = new LinkedList<>();
        for (File file : files.getResult()) {
            // checkFile returns the same instance when nothing changed.
            File checkedFile = catalogFileUtils.checkFile(file, calculateChecksum, sessionId);
            if (checkedFile != file) {
                modifiedFiles.add(checkedFile);
            }
        }
        return modifiedFiles;
    }

    /**
     * Scan the study folder, add all untracked files and check tracking.
     *
     * @param study             Study to resync
     * @param calculateChecksum Calculate Checksum of files
     * @param sessionId         User sessionId
     * @return New, lost and found files
     * @throws CatalogException on catalog access errors
     * @throws IOException on filesystem access errors
     */
    public List<File> reSync(Study study, boolean calculateChecksum, String sessionId)
            throws CatalogException, IOException {
        int studyId = study.getId();
//        File root = catalogManager.searchFile(studyId, new QueryOptions("path", ""), sessionId).first();
        // Find all linked folders: those that have a URI and are of type FOLDER.
        QueryOptions query = new QueryOptions();
        query.put(CatalogFileDBAdaptor.FileFilterOption.uri.toString(), "~.*"); //Where URI exists
        query.put(CatalogFileDBAdaptor.FileFilterOption.type.toString(), File.Type.FOLDER);
        List<File> files = catalogManager.searchFile(studyId, query, sessionId).getResult();

        List<File> scan = new LinkedList<>();
        for (File file : files) {
            scan.addAll(scan(file, catalogManager.getFileUri(file), FileScannerPolicy.REPLACE, calculateChecksum,
                    false, sessionId));
            // NOTE(review): checkStudyFiles scans the *whole* study and is
            // invoked once per linked folder -- looks like duplicated work;
            // confirm whether it was meant to run once after the loop.
            scan.addAll(checkStudyFiles(study, calculateChecksum, sessionId));
        }

        return scan;
    }

    /**
     * Return all untracked files in a study folder.
     * @param study Study to scan
     * @param sessionId User sessionId
     * @return Untracked files, keyed by catalog-relative path
     * @throws CatalogException on catalog access errors
     */
    public Map<String, URI> untrackedFiles(Study study, String sessionId)
            throws CatalogException {
        int studyId = study.getId();
        URI studyUri = catalogManager.getStudyUri(studyId);

        CatalogIOManager ioManager = catalogManager.getCatalogIOManagerFactory().get(studyUri);
        // Collect every folder in the study that is linked to a URI; the
        // study root itself is registered under the empty path "".
        Map<String, URI> linkedFolders = new HashMap<>();
        linkedFolders.put("", studyUri);
        QueryOptions query = new QueryOptions("include", "projects.studies.files.path,projects.studies.files.uri");
        query.put(CatalogFileDBAdaptor.FileFilterOption.uri.toString(), "~.*"); //Where URI exists
        catalogManager.getAllFiles(studyId, query, sessionId).getResult().forEach(f -> linkedFolders.put(f.getPath(), f.getUri()));

        Map<String, URI> untrackedFiles = new HashMap<>();
        for (Map.Entry<String, URI> entry : linkedFolders.entrySet()) {
            if (!ioManager.exists(entry.getValue())) {
                // The linked folder itself is unreachable; report it as-is.
                untrackedFiles.put(entry.getKey(), entry.getValue());
                continue;
            }
            List<URI> files = ioManager.listFiles(entry.getValue());

            for (URI uri : files) {
                String filePath = entry.getKey() + entry.getValue().relativize(uri).toString();
                QueryResult<File> searchFile = catalogManager.searchFile(studyId,
                        new QueryOptions("path", filePath),
                        new QueryOptions("include", "projects.studies.files.id"), sessionId);
                if (searchFile.getResult().isEmpty()) {
                    untrackedFiles.put(filePath, uri);
                } /*else {
                    iterator.remove(); //Remove the ones that have an entry in Catalog
                }*/
            }
        }
        return untrackedFiles ;
    }

    /**
     * Scans the files inside the specified URI and adds them to the provided directory.
     *
     * @param directory         Directory where found files are added
     * @param directoryToScan   Directory to scan
     * @param policy            Collision policy for files already in the catalog
     * @param calculateChecksum Whether to compute checksums for uploaded files
     * @param deleteSource      Whether to delete scanned files after uploading
     * @param sessionId         User sessionId
     * @throws CatalogException on catalog access errors
     * @throws IOException on filesystem access errors
     * @return found and new files.
     */
    public List<File> scan(File directory, URI directoryToScan, FileScannerPolicy policy,
                           boolean calculateChecksum, boolean deleteSource, String sessionId)
            throws IOException, CatalogException {
        // Delegates with jobId = -1, i.e. "no generating job".
        return scan(directory, directoryToScan, policy, calculateChecksum, deleteSource, -1, sessionId);
    }

    /**
     * Scans the files inside the specified URI and adds them to the provided directory.
     *
     * @param directory         Directory where found files are added
     * @param directoryToScan   Directory to scan
     * @param policy            Collision policy for files already in the catalog
     * @param calculateChecksum Whether to compute checksums for uploaded files
     * @param deleteSource      Whether to delete scanned files after uploading
     * @param jobId             If any, the job that has generated these files
     * @param sessionId         User sessionId
     * @throws CatalogException on catalog access errors
     * @throws IOException on filesystem access errors
     * @return found and new files.
     */
    public List<File> scan(File directory, URI directoryToScan, FileScannerPolicy policy,
                           boolean calculateChecksum, boolean deleteSource, int jobId, String sessionId)
            throws IOException, CatalogException {
        if (directoryToScan == null) {
            directoryToScan = catalogManager.getFileUri(directory);
        }
        // URI.relativize below requires a trailing slash on the base.
        if (!directoryToScan.getPath().endsWith("/")) {
            directoryToScan = URI.create(directoryToScan.toString() + "/");
        }
        if (!directory.getType().equals(File.Type.FOLDER)) {
            throw new CatalogException("Expected folder where place the found files.");
        }

        int studyId = catalogManager.getStudyIdByFileId(directory.getId());
        List<URI> uris = catalogManager.getCatalogIOManagerFactory().get(directoryToScan).listFiles(directoryToScan);
        List<File> files = new LinkedList<>();
        for (URI uri : uris) {
            URI generatedFile = directoryToScan.relativize(uri);
            String filePath = Paths.get(directory.getPath(), generatedFile.toString()).toString();

            QueryResult<File> searchFile = catalogManager.searchFile(studyId, new QueryOptions("path", filePath), sessionId);
            File file = null;
            boolean returnFile = false;
            if (searchFile.getNumResults() != 0) {
                File existingFile = searchFile.first();
                logger.info("File already existing in target \"" + filePath + "\". FileScannerPolicy = " + policy);
                switch (policy) {
                    case DELETE:
                        logger.info("Deleting file { id:" + existingFile.getId() + ", path:\"" + existingFile.getPath() + "\" }");
                        catalogManager.deleteFile(existingFile.getId(), sessionId);
                        break;
                    case REPLACE:
                        file = existingFile;
                        break;
//                    case RENAME:
//                        throw new UnsupportedOperationException("Unimplemented policy 'rename'");
//                    case DO_ERROR:
//                        throw new UnsupportedOperationException("Unimplemented policy 'error'");
                }
            }

            if (file == null) {
                // No catalog entry (or the DELETE policy removed it): create one.
                file = catalogManager.createFile(studyId, FormatDetector.detect(uri), BioformatDetector.detect(uri), filePath, "", true, jobId, sessionId).first();
                logger.info("Added new file " + uri + " { id:" + file.getId() + ", path:\"" + file.getPath() + "\" }");
                /** Moves the file to the read output **/
                catalogFileUtils.upload(uri, file, null, sessionId, false, false, deleteSource, calculateChecksum);
                returnFile = true;      //Return file because is new
            } else {
                if (file.getStatus().equals(File.Status.MISSING)) {
                    logger.info("File { id:" + file.getId() + ", path:\"" + file.getPath() + "\" } recover tracking from file " + uri);
                    logger.info("Set status to " + File.Status.READY);
                    returnFile = true;      //Return file because was missing
                }
                catalogFileUtils.upload(uri, file, null, sessionId, true, true, deleteSource, calculateChecksum);
            }

            try {
                FileMetadataReader.get(catalogManager).setMetadataInformation(file, null, null, sessionId, false);
            } catch (Exception e) {
                // Metadata extraction failure is non-fatal; the file itself
                // has already been registered and uploaded.
                logger.error("Unable to read metadata information from file { id:" + file.getId() + ", name: \"" + file.getName() + "\" }", e);
            }

            if (returnFile) { //Return only new and found files.
                files.add(catalogManager.getFile(file.getId(), sessionId).first());
            }
        }
        return files;
    }

}
|
<filename>lecture6/videoVTT-refactored/core-components/video-viewer.js<gh_stars>0
import { LitElement, html, css } from 'lit-element';
/**
 * Wrapper around the video tag.
 * Takes the video file, type and vtt file as parameters.
 * When a new vtt file has been loaded it fires a "cuesUpdated" event containing
 * an array with the text, id and startTime of all cues loaded.
 * When a video is playing it fires "cuechange" events when cues become active
 * or inactive. The event object contains a list of active cues when the event
 * is fired.
 *
 * The video is scaled to fill the width of the container tag.
 *
 * @extends LitElement
 */
class VideoViewer extends LitElement {
  static get properties() {
    return {
      videofile: { type: String },
      videotype: { type: String },
      vttfile: { type: String },
      cues: { type: Array }
    };
  }

  static get styles() {
    return [
      css`
        :host {
          display: block;
        }
        video, p {
          width: 100%;
        }
      `];
  }

  constructor() {
    super();
    this.videofile = '';
    this.videotype = '';
    this.vttfile = '';
    this.cues = [];
  }

  render() {
    return html`
      <video controls>
        <source src="${this.videofile}" type="${this.videotype}">
        <track kind="subtitles" label="English subtitles" src="${this.vttfile}" srclang="en" default></track>
      </video>
    `;
  }

  /**
   * Set the current time of the video to the given time.
   *
   * @param {[Number]} time the time to set as the current time.
   */
  setTime(time) {
    this.shadowRoot.querySelector('video').currentTime = time;
  }

  /**
   * When the video has been added to the DOM an event listener listening for
   * load events is added to the track element (containing the vtt source).
   * This is used to get the cues as soon as a vtt file is loaded; they are
   * made available to container tags through a "cuesUpdated" event.
   *
   * The subtitle track is hidden from the video and an event listener is
   * added to the subtitle track so that we can dispatch a cuechange event
   * when cues are activated/deactivated.
   */
  firstUpdated() {
    const track = this.shadowRoot.querySelector('video track');
    track.addEventListener('load', e => { // vtt file is loaded
      this.cues = [];
      // BUG FIX: Event.path is a non-standard Chrome-only property that has
      // been removed from browsers; e.target (the <track> element itself) is
      // the standard equivalent here.
      const trackCues = e.target.track.cues;
      for (let i = 0; i < trackCues.length; i++) { // Go through the cue list
        this.cues.push({text: trackCues[i].text, id: trackCues[i].id, startTime: trackCues[i].startTime});
      }
      this.dispatchEvent(new CustomEvent("cuesUpdated", {
        bubbles: true,
        composed: true,
        detail: {
          cues: this.cues
        }
      }));
    });
    // Hide the subtitle rendering; cues are surfaced via events instead.
    this.shadowRoot.querySelector('video').textTracks[0].mode = 'hidden';
    this.shadowRoot.querySelector('video').textTracks[0].addEventListener('cuechange', e => { // When a cue change event occurs
      const startTimes = [];
      for (let i = 0; i < e.target.activeCues.length; i++) {
        startTimes.push(e.target.activeCues[i].startTime);
      }
      this.dispatchEvent(new CustomEvent('cuechange', {
        bubbles: true,
        composed: true,
        detail: {
          activeCues: startTimes
        }
      }));
    });
  }
}
customElements.define('video-viewer', VideoViewer);
|
<filename>node_modules/botbuilder-core/lib/extendedUserTokenProvider.d.ts<gh_stars>1-10
/**
* @module botbuilder
*/
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { CoreAppCredentials } from './coreAppCredentials';
import { IUserTokenProvider } from './userTokenProvider';
import { TurnContext } from './turnContext';
import { SignInUrlResponse, TokenResponse, TokenExchangeRequest } from 'botframework-schema';
/**
 * Interface for User Token OAuth Single Sign On and Token Exchange APIs for BotAdapters.
 */
export interface ExtendedUserTokenProvider extends IUserTokenProvider {
    /**
     * Retrieves the OAuth token for a user that is in a sign-in flow.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param magicCode (Optional) user entered code to validate.
     * @param appCredentials (Optional) AppCredentials for OAuth.
     */
    getUserToken(context: TurnContext, connectionName: string, magicCode?: string, appCredentials?: CoreAppCredentials): Promise<TokenResponse>;
    /**
     * Signs the user out with the token server.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param userId User id of user to sign out.
     * @param appCredentials (Optional) AppCredentials for OAuth.
     */
    signOutUser(context: TurnContext, connectionName: string, userId?: string, appCredentials?: CoreAppCredentials): Promise<void>;
    /**
     * Gets a signin link from the token server that can be sent as part of a SigninCard.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param appCredentials (Optional) AppCredentials for OAuth.
     */
    getSignInLink(context: TurnContext, connectionName: string, appCredentials?: CoreAppCredentials): Promise<string>;
    /**
     * Retrieves Azure Active Directory tokens for particular resources on a configured connection.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param resourceUrls The list of resource URLs to retrieve tokens for.
     * @param appCredentials (Optional) AppCredentials for OAuth.
     */
    getAadTokens(context: TurnContext, connectionName: string, resourceUrls: string[], appCredentials?: CoreAppCredentials): Promise<{
        [propertyName: string]: TokenResponse;
    }>;
    /**
     * Get the raw signin resource to be sent to the user for signin for a connection name.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     */
    getSignInResource(context: TurnContext, connectionName: string): Promise<SignInUrlResponse>;
    /**
     * Get the raw signin resource to be sent to the user for signin for a connection name.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param userId The user id that will be associated with the token.
     * @param finalRedirect The final URL that the OAuth flow will redirect to.
     */
    getSignInResource(context: TurnContext, connectionName: string, userId: string, finalRedirect?: string): Promise<SignInUrlResponse>;
    /**
     * Get the raw signin resource to be sent to the user for signin for a connection name.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param userId The user id that will be associated with the token.
     * @param finalRedirect The final URL that the OAuth flow will redirect to.
     * @param appCredentials (Optional) AppCredentials for OAuth.
     */
    getSignInResource(context: TurnContext, connectionName: string, userId: string, finalRedirect?: string, appCredentials?: CoreAppCredentials): Promise<SignInUrlResponse>;
    /**
     * Performs a token exchange operation such as for single sign-on.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param userId The user id that will be associated with the token.
     * @param tokenExchangeRequest The exchange request details, either a token to exchange or a uri to exchange.
     */
    exchangeToken(context: TurnContext, connectionName: string, userId: string, tokenExchangeRequest: TokenExchangeRequest): Promise<TokenResponse>;
    /**
     * Performs a token exchange operation such as for single sign-on.
     * @param context Context for the current turn of conversation with the user.
     * @param connectionName Name of the auth connection to use.
     * @param userId The user id that will be associated with the token.
     * @param tokenExchangeRequest The exchange request details, either a token to exchange or a uri to exchange.
     * @param appCredentials AppCredentials for OAuth.
     */
    exchangeToken(context: TurnContext, connectionName: string, userId: string, tokenExchangeRequest: TokenExchangeRequest, appCredentials: CoreAppCredentials): Promise<TokenResponse>;
}
//# sourceMappingURL=extendedUserTokenProvider.d.ts.map |
import UIKit
/// Routing surface for the app-list screen: navigation away from the list.
protocol AppListRouterProtocol: AnyObject {
    /// Pushes the detail screen for the app with the given identifier.
    func navigateToAppDetail(with appId: String)
    /// Presents the settings screen modally.
    func navigateToSettings()
}
/// Concrete router for the app-list screen.
final class AppListRouter: AppListRouterProtocol {
    /// Weak to avoid a retain cycle with the owning view controller.
    /// Guarded at each use: unwrapping an implicitly-unwrapped weak
    /// reference after the controller deallocates would crash.
    weak var viewController: UIViewController!

    /// Pushes the app-detail screen for `appId` onto the navigation stack.
    func navigateToAppDetail(with appId: String) {
        guard let host = viewController else { return }
        let appDetailViewController = AppDetailViewController(appId: appId)
        host.navigationController?.pushViewController(appDetailViewController, animated: true)
    }

    /// Presents the settings screen modally.
    func navigateToSettings() {
        guard let host = viewController else { return }
        let settingsViewController = SettingsViewController()
        host.present(settingsViewController, animated: true, completion: nil)
    }
}
#! /bin/sh
#SBATCH -t 3:00:00
#SBATCH --nodes=1
#SBATCH --tasks-per-node=1
#SBATCH --cpus-per-task=1
#SBATCH -A p_readex
#SBATCH --mem=62000
#SBATCH --mail-user=diethelm@gns-mbh.com # email address
#SBATCH --mail-type=BEGIN,FAIL,END
#SBATCH --partition=haswell
#
# Installation on Taurus
# 1) Modules and Variables
module purge
. readex_env/set_env_rdd.source

# 2) Compile: build the Score-P-instrumented "rdd" variant of Indeed.
export INDDIR=`pwd`
export EXEMOD="rdd"
export FC="scorep --online-access --mpp=mpi --thread=omp --compiler --instrument-filter=$INDDIR/scorep.filt ifort"
export USE_SCOREP=1
export FCDEFS="-D_READEX_INSTR_"
echo INDDIR $INDDIR
echo EXEMOD $EXEMOD
echo

# Remove any stale executable, creating bin/ when it does not exist yet.
mkdir -p bin
rm -f bin/Indeed_"$EXEMOD"64.exe*
echo contents of bin directory:
ls -l bin
echo

# Remove stale libraries.
rm -f lib/libDmsys_"$EXEMOD".a lib/libIndeed_"$EXEMOD".a lib/libparsol_"$EXEMOD".a
echo contents of lib directory:
ls -l lib
echo

cd src/Filter
# Filter build steps are currently disabled:
# make -f MakeList.ifort clean
# make -f Makefile.ifort clean
# make -f MakeList.ifort
# make -f Makefile.ifort
cd ..
# NOTE: the working directory is now src/, where the main build runs.
# echo finished building the filter executable
make clobber
echo finished make clobber
echo
make
# 3) Test Run
|
<reponame>pengge/ztSDP
// +build !linux android
/* SPDX-License-Identifier: MIT
*
* Copyright (C) 2017-2019 ZtSDP LLC. All Rights Reserved.
*/
package device
import (
"net"
"os"
"syscall"
)
/* This code is meant to be a temporary solution
* on platforms for which the sticky socket / source caching behavior
* has not yet been implemented.
*
* See conn_linux.go for an implementation on the linux platform.
*/
// nativeBind is the portable (non-Linux) UDP bind: one socket per address
// family, plus per-family "blackhole" flags that silently drop outbound
// traffic when set.
type nativeBind struct {
	ipv4       *net.UDPConn
	ipv6       *net.UDPConn
	blackhole4 bool
	blackhole6 bool
}

// NativeEndpoint is a peer address. It is a bare net.UDPAddr because this
// platform does not implement sticky-socket / source-address caching
// (see conn_linux.go for the full implementation).
type NativeEndpoint net.UDPAddr

// Compile-time assertions that the types satisfy the project interfaces.
var _ Bind = (*nativeBind)(nil)
var _ Endpoint = (*NativeEndpoint)(nil)
// CreateEndpoint parses the address string s into an Endpoint.
//
// On parse failure it returns a nil interface value. (The original
// returned (*NativeEndpoint)(nil) wrapped in the interface, which
// compares non-nil against nil — the classic Go nil-interface pitfall.)
func CreateEndpoint(s string) (Endpoint, error) {
	addr, err := parseEndpoint(s)
	if err != nil {
		return nil, err
	}
	return (*NativeEndpoint)(addr), nil
}
// ClearSrc is a no-op: source caching is not implemented on this platform.
func (_ *NativeEndpoint) ClearSrc() {}

// DstIP returns the destination (peer) IP of the endpoint.
func (e *NativeEndpoint) DstIP() net.IP {
	return (*net.UDPAddr)(e).IP
}

// SrcIP always returns nil; source addresses are not tracked here.
func (e *NativeEndpoint) SrcIP() net.IP {
	return nil // not supported
}

// DstToBytes serializes the destination as the IP bytes (IPv4 addresses
// shortened to 4 bytes) followed by the port in little-endian order.
// NOTE(review): append may write into the backing array shared with
// addr.IP when spare capacity exists — presumed harmless here, verify.
func (e *NativeEndpoint) DstToBytes() []byte {
	addr := (*net.UDPAddr)(e)
	out := addr.IP.To4()
	if out == nil {
		out = addr.IP
	}
	out = append(out, byte(addr.Port&0xff))
	out = append(out, byte((addr.Port>>8)&0xff))
	return out
}

// DstToString renders the endpoint as "host:port".
func (e *NativeEndpoint) DstToString() string {
	return (*net.UDPAddr)(e).String()
}

// SrcToString returns "" because no source address is recorded.
func (e *NativeEndpoint) SrcToString() string {
	return ""
}
// listenNet opens a UDP listener for the given network ("udp4"/"udp6")
// on the requested port (0 selects an ephemeral port) and reports the
// port that was actually bound.
func listenNet(network string, port int) (*net.UDPConn, int, error) {
	sock, err := net.ListenUDP(network, &net.UDPAddr{Port: port})
	if err != nil {
		return nil, 0, err
	}
	// Resolve the local address back into a UDPAddr to learn the port
	// the kernel assigned.
	local := sock.LocalAddr()
	resolved, err := net.ResolveUDPAddr(local.Network(), local.String())
	if err != nil {
		return nil, 0, err
	}
	return sock, resolved.Port, nil
}
func extractErrno(err error) error {
opErr, ok := err.(*net.OpError)
if !ok {
return nil
}
syscallErr, ok := opErr.Err.(*os.SyscallError)
if !ok {
return nil
}
return syscallErr.Err
}
// CreateBind opens IPv4 and IPv6 UDP listeners on the requested port
// (0 selects an ephemeral port) and returns the bind together with the
// port actually chosen. An unsupported address family (EAFNOSUPPORT)
// is tolerated; any other listen error aborts the whole bind.
func CreateBind(uport uint16, device *Device) (Bind, uint16, error) {
	var err error
	var bind nativeBind
	port := int(uport)
	// IPv4 first; the port it picks is reused for the IPv6 socket so
	// both families listen on the same port number.
	bind.ipv4, port, err = listenNet("udp4", port)
	if err != nil && extractErrno(err) != syscall.EAFNOSUPPORT {
		return nil, 0, err
	}
	bind.ipv6, port, err = listenNet("udp6", port)
	if err != nil && extractErrno(err) != syscall.EAFNOSUPPORT {
		// Roll back the IPv4 socket before failing.
		bind.ipv4.Close()
		bind.ipv4 = nil
		return nil, 0, err
	}
	return &bind, uint16(port), nil
}
// Close shuts down whichever sockets were opened and reports the first
// error encountered (IPv4 first, then IPv6).
func (bind *nativeBind) Close() error {
	var firstErr, secondErr error
	if bind.ipv4 != nil {
		firstErr = bind.ipv4.Close()
	}
	if bind.ipv6 != nil {
		secondErr = bind.ipv6.Close()
	}
	if firstErr != nil {
		return firstErr
	}
	return secondErr
}
// ReceiveIPv4 reads one datagram from the IPv4 socket into buff and
// returns the byte count plus the sender as a NativeEndpoint. Returns
// EAFNOSUPPORT when no IPv4 socket was bound.
func (bind *nativeBind) ReceiveIPv4(buff []byte) (int, Endpoint, error) {
	if bind.ipv4 == nil {
		return 0, nil, syscall.EAFNOSUPPORT
	}
	n, endpoint, err := bind.ipv4.ReadFromUDP(buff)
	if endpoint != nil {
		// Normalize to the 4-byte form so later To4() checks succeed.
		endpoint.IP = endpoint.IP.To4()
	}
	return n, (*NativeEndpoint)(endpoint), err
}

// ReceiveIPv6 reads one datagram from the IPv6 socket into buff.
// Returns EAFNOSUPPORT when no IPv6 socket was bound.
func (bind *nativeBind) ReceiveIPv6(buff []byte) (int, Endpoint, error) {
	if bind.ipv6 == nil {
		return 0, nil, syscall.EAFNOSUPPORT
	}
	n, endpoint, err := bind.ipv6.ReadFromUDP(buff)
	return n, (*NativeEndpoint)(endpoint), err
}
// Send transmits buff to endpoint over the socket that matches the
// destination's address family. It returns EAFNOSUPPORT when that
// family was not bound, and silently drops the packet when the
// family's blackhole flag is set.
func (bind *nativeBind) Send(buff []byte, endpoint Endpoint) error {
	var err error
	nend := endpoint.(*NativeEndpoint)
	if nend.IP.To4() != nil {
		// IPv4 destination.
		if bind.ipv4 == nil {
			return syscall.EAFNOSUPPORT
		}
		if bind.blackhole4 {
			return nil
		}
		_, err = bind.ipv4.WriteToUDP(buff, (*net.UDPAddr)(nend))
	} else {
		// IPv6 destination.
		if bind.ipv6 == nil {
			return syscall.EAFNOSUPPORT
		}
		if bind.blackhole6 {
			return nil
		}
		_, err = bind.ipv6.WriteToUDP(buff, (*net.UDPAddr)(nend))
	}
	return err
}
|
#!/bin/bash
# CI entry point: unpack tests, install dependencies, run the Android
# test suite against a local Appium server, and collect artifacts.
echo "Extracting tests.zip..."
unzip -o tests.zip
echo "Installing requirements"
chmod 0755 resources/requirements.txt
pip3 install -r resources/requirements.txt
## Start the Appium server in the BACKGROUND: run in the foreground it
## would block this script and the test run below would never start.
echo "Starting Appium ..."
appium --log-no-colors --log-timestamp --command-timeout 120 &
APPIUM_PID=$!
# Give the server a moment to start accepting connections.
sleep 10
## Start test execution
echo "Running test"
python3 run_android.py -x TEST-all
echo "Gathering results"
mkdir -p output-files
cp -r screenshots output-files
mv report.html log.html output-files
# Shut the background Appium server down so the job can exit cleanly.
kill $APPIUM_PID 2>/dev/null || true
|
<filename>src/skidesign.py
"""
ID: isaiahl1
LANG: PYTHON2
TASK: skidesign
"""
TASK = 'skidesign'
def readints(fin):
    """Read one line from ``fin`` and return its whitespace-separated
    fields as a tuple of ints."""
    return tuple(map(int, fin.readline().split()))
def readint(fin):
    """Read a single line from ``fin`` and return it as an int."""
    line = fin.readline()
    return int(line)
def main(fin, fout):
    """Solve USACO 'skidesign'.

    Choose a 17-unit height window [base, base + 17] and pay (delta)**2
    per hill unit raised or lowered to bring every hill inside it.

    Reads N and then N hill heights (one per line) from ``fin`` and
    writes the minimal total cost followed by a newline to ``fout``.
    """
    # Read directly so this function is self-contained; works on both
    # Python 2 and 3 (range/write instead of xrange/print >>).
    N = int(fin.readline())
    hills = sorted(int(fin.readline()) for _ in range(N))
    lo, hi = hills[0], hills[-1]

    # Bug fix: when the spread already fits in 17 units, the original
    # looped over xrange(lo, hi - 17 + 1), which is EMPTY, so it printed
    # "inf" instead of the correct cost of 0. Debug prints removed.
    best = 0
    if hi - lo > 17:
        best = float('inf')
        for base in range(lo, hi - 17 + 1):
            top = base + 17
            cost = 0
            for h in hills:
                if h < base:
                    cost += (base - h) ** 2  # raise the short hills
                elif h > top:
                    cost += (h - top) ** 2  # lower the tall hills
            best = min(best, cost)
    fout.write('%d\n' % best)
# Standard USACO I/O convention: read from TASK.in, write to TASK.out.
fin = open (TASK + '.in', 'r')
fout = open (TASK + '.out', 'w')
# The nested 'with' blocks close both files even if main() raises.
with fin:
    with fout:
        main(fin, fout)
|
# Evaluate the trained 1024+0+512-N-VB checkpoint on the WikiText-103 raw
# validation set, augmenting each example by shuffling/keeping only nouns
# and verbs in the first third, and scoring only the last element.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-N-VB/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-N-VB/13-512+512+512-shuffled-N-VB-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_and_verbs_first_third_full --eval_function last_element_eval
#!/bin/bash
# -*-mode: Shell-script; indent-tabs-mode: nil; sh-basic-offset: 2 -*-
# Locate this script's own directory while avoiding subtle variations in $0:
dollar0=`which $0`; PACKAGE_DIR=$(cd $(dirname $dollar0); pwd) # NEVER export PACKAGE_DIR
# Pull in defaults for the BUILD_DIR and INSTALL_DIR environment variables
# and helper functions such as BuildDependentPackage:
. $PACKAGE_DIR/../../../support-files/build_platform_util.bash
CLEAN=0
# Parse command-line options:
while [ $# -gt 0 ]
do
  case "$1" in
    -builddir)
      BUILD_DIR="$2"
      shift
      ;;
    -installdir)
      INSTALL_DIR="$2"
      shift
      ;;
    -clean)
      CLEAN=1
      ;;
    -h)
      EmitStandardUsage
      exit 0
      ;;
    *)
      echo "Undefined parameter $1"
      exit 1
      ;;
  esac
  shift
done
# --------------------------------------------------------------------------------
# Dependent packages install into $INSTALL_DIR/bin, so put that on the PATH:
# --------------------------------------------------------------------------------
SetupBasicEnvironment
# --------------------------------------------------------------------------------
# numdiff has no known dependent packages to build first.
# --------------------------------------------------------------------------------
# Create the build directory structure:
# --------------------------------------------------------------------------------
CreateAndChdirIntoBuildDir numdiff
# --------------------------------------------------------------------------------
# Download, build, and install:
# --------------------------------------------------------------------------------
DownloadExtractBuildAutoconfBasedPackage numdiff http://savannah.spinellicreations.com/numdiff/
exit 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.