text stringlengths 1 1.05M |
|---|
# routes.py
from flask import jsonify, request
from . import order_api_blueprint
# Sample data for demonstration purposes.
# In-memory "database": a module-level list shared by every request handler
# below. State is per-process and resets on restart — demo use only.
orders = [
    {"id": 1, "product": "Product A", "quantity": 2},
    {"id": 2, "product": "Product B", "quantity": 1}
]
@order_api_blueprint.route('/orders', methods=['GET'])
def get_all_orders():
    """Return every order in the in-memory store as a JSON array."""
    return jsonify(orders)
@order_api_blueprint.route('/orders/<int:order_id>', methods=['GET'])
def get_order(order_id):
    """Return the order with the given id as JSON, or a 404 error payload."""
    for candidate in orders:
        if candidate["id"] == order_id:
            return jsonify(candidate)
    return jsonify({"error": "Order not found"}), 404
@order_api_blueprint.route('/orders', methods=['POST'])
def create_order():
    """Create a new order from the JSON request body.

    Expects a JSON object with "product" and "quantity" keys.
    Returns the created order with HTTP 201, or a 400 error payload when
    the body is missing or incomplete.
    """
    data = request.get_json(silent=True)
    # Validate explicitly instead of letting a KeyError surface as a 500.
    if not data or "product" not in data or "quantity" not in data:
        return jsonify({"error": "'product' and 'quantity' are required"}), 400
    # len(orders) + 1 can collide with an existing id after a DELETE;
    # max(existing ids) + 1 keeps ids unique.
    next_id = max((o["id"] for o in orders), default=0) + 1
    new_order = {"id": next_id, "product": data["product"], "quantity": data["quantity"]}
    orders.append(new_order)
    return jsonify(new_order), 201
@order_api_blueprint.route('/orders/<int:order_id>', methods=['PUT'])
def update_order(order_id):
    """Replace the product and quantity of an existing order.

    Returns the updated order as JSON, a 400 error payload when the body
    is missing or incomplete, or a 404 error payload for an unknown id.
    """
    data = request.get_json(silent=True)
    # Validate explicitly instead of letting a KeyError surface as a 500.
    if not data or "product" not in data or "quantity" not in data:
        return jsonify({"error": "'product' and 'quantity' are required"}), 400
    order = next((o for o in orders if o["id"] == order_id), None)
    if order is None:
        return jsonify({"error": "Order not found"}), 404
    order["product"] = data["product"]
    order["quantity"] = data["quantity"]
    return jsonify(order)
@order_api_blueprint.route('/orders/<int:order_id>', methods=['DELETE'])
def delete_order(order_id):
    """Remove the order with the given id (idempotent: unknown ids are a no-op)."""
    global orders
    orders = [entry for entry in orders if entry["id"] != order_id]
    return '', 204
#!/bin/bash
# Verify that the clang/LLVM toolchain (major version 12) is installed and
# that every tool reports the expected version. Exits non-zero on the first
# missing or wrong-version tool.
set -eu
version=12
for cmd in clang clang-${version} clang++ clang++-${version} clang-format clang-format-${version} clang-tidy clang-tidy-${version} llvm-ar llvm-ar-${version}
do
    result=0
    output=$(${cmd} --version 2>&1) || result=$?
    if [ "${result}" != "0" ]; then
        echo >&2 "ERROR: command '${cmd}' not found."
        exit ${result}
    fi
    # Quote the captured output so word splitting / pathname expansion cannot
    # corrupt the text piped to grep (the original left ${output1} unquoted).
    echo "${output}" | grep "version ${version}" > /dev/null || result=$?
    if [ "${result}" != "0" ]; then
        echo >&2 "ERROR: wrong version of command '${cmd}'."
        exit ${result}
    fi
done
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.phone = void 0;
var phone = {
"viewBox": "0 0 20 20",
"children": [{
"name": "path",
"attribs": {
"d": "M17.256,12.253c-0.096-0.667-0.611-1.187-1.274-1.342c-2.577-0.604-3.223-2.088-3.332-3.734C12.193,7.092,11.38,7,10,7\r\n\tS7.807,7.092,7.35,7.177c-0.109,1.646-0.755,3.13-3.332,3.734c-0.663,0.156-1.178,0.675-1.274,1.342l-0.497,3.442\r\n\tC2.072,16.907,2.962,18,4.2,18h11.6c1.237,0,2.128-1.093,1.953-2.305L17.256,12.253z M10,15.492c-1.395,0-2.526-1.12-2.526-2.5\r\n\ts1.131-2.5,2.526-2.5s2.526,1.12,2.526,2.5S11.394,15.492,10,15.492z M19.95,6C19.926,4.5,16.108,2.001,10,2\r\n\tC3.891,2.001,0.073,4.5,0.05,6s0.021,3.452,2.535,3.127c2.941-0.381,2.76-1.408,2.76-2.876C5.345,5.227,7.737,4.98,10,4.98\r\n\ts4.654,0.247,4.655,1.271c0,1.468-0.181,2.495,2.76,2.876C19.928,9.452,19.973,7.5,19.95,6z"
}
}]
};
exports.phone = phone; |
package com.abubusoft.kripton.examples.rssreader.service.model;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import com.abubusoft.kripton.TypeAdapter;
/**
 * Converts between RFC-822 style RSS timestamps (e.g.
 * "Mon, 06 Sep 2021 10:00:00 GMT") and {@link Date}.
 */
public class DateAdapter implements TypeAdapter<Date, String> {

	// SimpleDateFormat is NOT thread-safe: it keeps internal mutable parse
	// state. All access below is synchronized on this shared instance.
	DateFormat formatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.ENGLISH);

	/**
	 * Parses a feed timestamp.
	 *
	 * @param dataValue raw string from the feed; may be null
	 * @return the parsed {@link Date}, or null when the input is null or
	 *         cannot be parsed (lenient contract preserved for callers)
	 */
	@Override
	public Date toJava(String dataValue) {
		if (dataValue == null) return null;
		try {
			// Guard the shared formatter against concurrent use, which would
			// otherwise silently corrupt parse results.
			synchronized (formatter) {
				return formatter.parse(dataValue);
			}
		} catch (ParseException e) {
			// Keep the original best-effort behavior (return null) but still
			// surface the failure for debugging.
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Serialization direction is not needed yet; deliberately unimplemented.
	 */
	@Override
	public String toData(Date javaValue) {
		return null;
	}
}
|
package br.com.alinesolutions.anotaai.model.usuario;
import java.time.ZonedDateTime;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import br.com.alinesolutions.anotaai.infra.ReferencedNamedQuery;
import br.com.alinesolutions.anotaai.model.BaseEntity;
import br.com.alinesolutions.anotaai.model.domain.Operadora;
import br.com.alinesolutions.anotaai.model.domain.SituacaoPessoa;
import br.com.alinesolutions.anotaai.model.domain.SituacaoUsuario;
import br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor.ClienteConsumidorConstant;
/**
 * Association entity linking a {@code Cliente} (merchant) to a
 * {@code Consumidor} (customer), with a per-association display name and
 * status. Soft-deleted: {@code @SQLDelete} flips {@code ativo} instead of
 * removing the row, and {@code @Where} filters inactive rows from queries.
 *
 * NOTE: several constructors exist only as JPQL constructor-expression
 * targets (see the {@code select new ...} queries in
 * {@link ClienteConsumidorConstant}); their parameter lists must match the
 * queries exactly.
 */
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = ClienteConsumidor.class)
@NamedQueries({
    @NamedQuery(name = ClienteConsumidorConstant.FIND_BY_TELEFONE_KEY, query = ClienteConsumidorConstant.FIND_BY_TELEFONE_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.FIND_BY_ID_KEY, query = ClienteConsumidorConstant.FIND_BY_ID_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.FIND_BY_NOME_KEY, query = ClienteConsumidorConstant.FIND_BY_NOME_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.FIND_BY_NOME_CONSUMIDOR_KEY, query = ClienteConsumidorConstant.FIND_BY_NOME_CONSUMIDOR_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.LIST_CLIENTE_CONSUMIDOR_KEY, query = ClienteConsumidorConstant.LIST_CLIENTE_CONSUMIDOR_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.LOAD_BY_CONSUMIDOR_KEY, query = ClienteConsumidorConstant.LOAD_BY_CONSUMIDOR_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.COUNT_USUARIO_KEY, query = ClienteConsumidorConstant.COUNT_USUARIO_QUERY),
    @NamedQuery(name = ClienteConsumidorConstant.FIND_BY_CLIENTE_KEY, query = ClienteConsumidorConstant.FIND_BY_CLIENTE_QUERY) })
@Entity
@Where(clause = "ativo = true")
@SQLDelete(sql = "update ClienteConsumidor set ativo = false where id = ?")
@XmlRootElement
public class ClienteConsumidor extends BaseEntity<Long, ClienteConsumidor> {

	private static final long serialVersionUID = 1L;

	// Owning merchant; mandatory.
	@ManyToOne(optional = false)
	private Cliente cliente;

	// Associated customer; persisted together with this association.
	@ManyToOne(optional = false, cascade = CascadeType.PERSIST)
	private Consumidor consumidor;

	// Display name the merchant uses for this customer (may differ from the
	// customer's own user name).
	private String nomeConsumidor;

	// When the customer was associated with the merchant.
	private ZonedDateTime dataAssociacao;

	// WARNING: ORDINAL mapping — reordering SituacaoPessoa constants would
	// corrupt stored values.
	@Enumerated(EnumType.ORDINAL)
	private SituacaoPessoa situacao;

	/** Default constructor required by JPA. */
	public ClienteConsumidor() {
		super();
	}

	/** Constructor-expression target for {@code FIND_BY_CLIENTE_QUERY}. */
	@ReferencedNamedQuery(namedQuerys= {
		ClienteConsumidorConstant.FIND_BY_CLIENTE_KEY
	})
	public ClienteConsumidor(Long id) {
		this();
		this.setId(id);
	}

	/** Constructor-expression target for {@code FIND_BY_ID_QUERY}: rebuilds the nested consumidor/usuario graph. */
	public ClienteConsumidor(Long id,Long idUsuarioConsumidor, String nomeUsuarioConsumidor,String emailUsuarioConsumidor,
			Long idTelefoneConsumidor,Integer ddd, Integer ddi, Integer numero, SituacaoUsuario situacaoUsuario) {
		this();
		this.setId(id);
		this.setConsumidor(new Consumidor());
		this.consumidor.setUsuario(new Usuario(idUsuarioConsumidor, nomeUsuarioConsumidor, emailUsuarioConsumidor,idTelefoneConsumidor,ddd,ddi,numero, situacaoUsuario));
	}

	/**
	 * Constructor-expression target for {@code LIST_CLIENTE_CONSUMIDOR_QUERY}
	 * and {@code FIND_BY_NOME_QUERY}.
	 * NOTE(review): the {@code email} and {@code operadora} parameters are
	 * accepted but never stored — looks intentional (projection narrowing),
	 * but confirm against callers.
	 */
	public ClienteConsumidor(Long id, Long idUsuario, String nome, String email, Long idTelefone, Integer ddi, Integer ddd, Integer numero, Operadora operadora) {
		this(id);
		this.setConsumidor(new Consumidor());
		this.getConsumidor().setUsuario(new Usuario());
		Usuario usuario = this.getConsumidor().getUsuario();
		usuario.setId(idUsuario);
		this.setNomeConsumidor(nome);
		usuario.setTelefone(new Telefone());
		usuario.getTelefone().setId(idTelefone);
		usuario.getTelefone().setDdi(ddi);
		usuario.getTelefone().setDdd(ddd);
		usuario.getTelefone().setNumero(numero);
	}

	/** Constructor-expression target for {@code FIND_BY_TELEFONE_QUERY}: rebuilds both sides (cliente and consumidor) with their usuarios. */
	public ClienteConsumidor(Long id, Long idCliente, Long idUsuarioCliente, String nomeUsuarioCliente,
			String emailUsuarioCliente, Long idConsumidor, Long idUsuarioConsumidor, String nomeUsuarioConsumidor,
			String emailUsuarioConsumidor) {
		this();
		this.setId(id);
		this.cliente = new Cliente();
		this.cliente.setId(idCliente);
		this.cliente.setUsuario(new Usuario(idUsuarioCliente, nomeUsuarioCliente, emailUsuarioCliente));
		this.setConsumidor(new Consumidor());
		this.consumidor.setId(idConsumidor);
		this.consumidor.setUsuario(new Usuario(idUsuarioConsumidor, nomeUsuarioConsumidor, emailUsuarioConsumidor));
	}

	/** Constructor-expression target for {@code FIND_BY_NOME_CONSUMIDOR_QUERY}. */
	@ReferencedNamedQuery(namedQuerys = {
		ClienteConsumidorConstant.FIND_BY_NOME_CONSUMIDOR_KEY
	})
	public ClienteConsumidor(Long id, String nomeConsumidor) {
		setId(id);
		this.nomeConsumidor = nomeConsumidor;
	}

	public Cliente getCliente() {
		return cliente;
	}

	public void setCliente(Cliente cliente) {
		this.cliente = cliente;
	}

	public Consumidor getConsumidor() {
		return consumidor;
	}

	public void setConsumidor(Consumidor consumidor) {
		this.consumidor = consumidor;
	}

	public ZonedDateTime getDataAssociacao() {
		return dataAssociacao;
	}

	public void setDataAssociacao(ZonedDateTime dataAssociacao) {
		this.dataAssociacao = dataAssociacao;
	}

	public SituacaoPessoa getSituacao() {
		return situacao;
	}

	public void setSituacao(SituacaoPessoa situacao) {
		this.situacao = situacao;
	}

	public String getNomeConsumidor() {
		return nomeConsumidor;
	}

	public void setNomeConsumidor(String nomeConsumidor) {
		this.nomeConsumidor = nomeConsumidor;
	}

	/** Named-query keys and JPQL bodies for this entity. */
	public interface ClienteConsumidorConstant {
		String FIND_BY_ID_KEY = "ClienteConsumidor.findById";
		String FIELD_CONSUMIDOR = "consumidor";
		String FIELD_SITUACAO = "situacao";
		String FIELD_NOME_CONSUMIDOR = "nomeConsumidor";
		String FIND_BY_TELEFONE_KEY = "ClienteConsumidor.findByClienteAndConsumidorFotTel";
		String FIND_BY_TELEFONE_QUERY = "select new br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor(cc.id, cl.id, ucl.id, ucl.nome, ucl.email, co.id, uco.id, uco.nome, uco.email) from ClienteConsumidor cc join cc.cliente cl join cl.usuario ucl join cc.consumidor co join co.usuario uco join uco.telefone t where t.ddi = :ddi and t.ddd = :ddd and t.numero = :numero and cl = :cliente";
		String COUNT_USUARIO_KEY = "ClienteConsumidor.countUsuario";
		String COUNT_USUARIO_QUERY = "select count(cc) from ClienteConsumidor cc join cc.consumidor c join c.usuario u where cc.consumidor.usuario = :usuario";
		String FIND_BY_ID_QUERY = "select new br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor(cc.id,cc.consumidor.usuario.id,cc.consumidor.usuario.nome,cc.consumidor.usuario.email,cc.consumidor.usuario.telefone.id,cc.consumidor.usuario.telefone.ddd,cc.consumidor.usuario.telefone.ddi,cc.consumidor.usuario.telefone.numero,cc.consumidor.usuario.situacao ) from ClienteConsumidor cc where cc.cliente = :cliente and cc.id = :id";
		String LIST_CLIENTE_CONSUMIDOR_KEY = "ClienteConsumidor.findConsumidorByCliente";
		String LIST_CLIENTE_CONSUMIDOR_QUERY = "select new br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor(cc.id, u.id, cc.nomeConsumidor, u.email, t.id, t.ddi, t.ddd, t.numero, t.operadora) from ClienteConsumidor cc left join cc.consumidor cs join cs.usuario u join u.telefone t where cc.cliente = :cliente and cc.situacao = :situacao order by u.nome";
		String FIND_BY_NOME_KEY = "ClienteConsumidor.findByName";
		String FIND_BY_NOME_QUERY = "select new br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor(cc.id, u.id, cc.nomeConsumidor, u.email, t.id, t.ddi, t.ddd, t.numero, t.operadora) from ClienteConsumidor cc left join cc.consumidor cs join cs.usuario u join u.telefone t where cc.cliente = :cliente and cc.situacao = :situacao and upper(u.nome) like upper(concat('%', :nome, '%')) order by u.nome";
		String FIND_BY_NOME_CONSUMIDOR_KEY = "ClienteConsumidor.findByNameConsumidor";
		String FIND_BY_NOME_CONSUMIDOR_QUERY = "select new br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor(cc.id, cc.nomeConsumidor) from ClienteConsumidor cc where cc.cliente = :cliente and cc.situacao = :situacao and upper(cc.nomeConsumidor) like upper(concat('%', :nomeConsumidor, '%')) order by cc.nomeConsumidor";
		String LOAD_BY_CONSUMIDOR_KEY = "ClienteConsumidor.loadByConsumidor";
		String LOAD_BY_CONSUMIDOR_QUERY = "select cc from ClienteConsumidor cc left join cc.cliente c where c = :cliente and cc.consumidor = :consumidor";
		String FIND_BY_CLIENTE_KEY = "Consumidor.findClienteByConsumidor";
		String FIND_BY_CLIENTE_QUERY = "select new br.com.alinesolutions.anotaai.model.usuario.ClienteConsumidor(cc.id) from ClienteConsumidor cc left join cc.cliente cliente where cliente = :cliente and cc = :clienteConsumidor";
	}
}
|
#!/bin/sh
# Copyright 2019 Google LLC
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# Generates the f32-vmulcaddc micro-kernel variants. Every xngen invocation
# is launched in the background and the script waits for all of them at the
# end, exactly as the original flat command list did.

#################################### Scalar ###################################
### Generic C micro-kernels
for ct in 1 2 4; do
  tools/xngen src/f32-vmulcaddc/scalar.c.in -D CHANNEL_TILE=${ct} -D ROW_TILE=2 -D WASM=0 -o src/f32-vmulcaddc/gen/c${ct}-minmax-scalar-2x.c &
done
### WAsm-specific micro-kernels
for ct in 1 2 4; do
  tools/xngen src/f32-vmulcaddc/scalar.c.in -D CHANNEL_TILE=${ct} -D ROW_TILE=2 -D WASM=1 -o src/f32-vmulcaddc/gen/c${ct}-minmax-wasm-2x.c &
done
################################## WAsm SIMD ##################################
for variant in "arm 0" "x86 1"; do
  set -- ${variant}
  arch=$1
  x86=$2
  for ct in 4 8; do
    tools/xngen src/f32-vmulcaddc/wasmsimd.c.in -D CHANNEL_TILE=${ct} -D ROW_TILE=2 -D X86=${x86} -o src/f32-vmulcaddc/gen/c${ct}-minmax-wasmsimd-${arch}-2x.c &
  done
done
################################### ARM NEON ##################################
for variant in "neon 0" "neonfma 1"; do
  set -- ${variant}
  isa=$1
  fma=$2
  for ct in 4 8; do
    tools/xngen src/f32-vmulcaddc/neon.c.in -D CHANNEL_TILE=${ct} -D ROW_TILE=2 -D FMA=${fma} -o src/f32-vmulcaddc/gen/c${ct}-minmax-${isa}-2x.c &
  done
done
################################### x86 SSE ###################################
for ct in 4 8; do
  tools/xngen src/f32-vmulcaddc/sse.c.in -D CHANNEL_TILE=${ct} -D ROW_TILE=2 -o src/f32-vmulcaddc/gen/c${ct}-minmax-sse-2x.c &
done
################################## Unit tests #################################
tools/generate-vmulcaddc-test.py --spec test/f32-vmulcaddc-minmax.yaml --output test/f32-vmulcaddc-minmax.cc &
wait
|
<reponame>fossabot/v-rating
const Vue = require('vue');
const VRating = require('v-rating');
// Build and mount a throwaway instance of a Vue component so tests can
// make assertions against its rendered DOM.
function getComponent(Component, propsData) {
  const Constructor = Vue.extend(Component);
  const instance = new Constructor({ propsData });
  return instance.$mount();
}
// Reactive fixture passed as `data` when mounting the component under test.
var exampleData = {
  rateMe: 0
};
// Smoke test: mounting the rating component should yield a DIV root element.
describe('VRatingSemanticUIComponent', function() {
  it('has correct DOM structure', function() {
    const vm = getComponent(VRating, { data: exampleData });
    expect(vm.$el.nodeName).toBe('DIV');
  });
});
<reponame>Priyalc/Gcloud<filename>sdk/src/test/java/com/google/cloud/dataflow/sdk/util/StreamingGroupAlsoByWindowsDoFnTest.java
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import com.google.cloud.dataflow.sdk.coders.BigEndianLongCoder;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.StringUtf8Coder;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.transforms.Combine.KeyedCombineFn;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.IntervalWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.Sessions;
import com.google.cloud.dataflow.sdk.transforms.windowing.SlidingWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.Trigger;
import com.google.cloud.dataflow.sdk.transforms.windowing.Trigger.TriggerId;
import com.google.cloud.dataflow.sdk.util.TriggerExecutor.TriggerIdCoder;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.TupleTag;
import org.hamcrest.Matchers;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/** Unit tests for {@link StreamingGroupAlsoByWindowsDoFn}. */
@RunWith(JUnit4.class)
@SuppressWarnings("rawtypes")
public class StreamingGroupAlsoByWindowsDoFnTest {
  ExecutionContext execContext;
  CounterSet counters;
  // Output tag shared by every runner built in these tests.
  TupleTag<KV<String, Iterable<String>>> outputTag;

  @Before public void setUp() {
    // Timer firings are injected directly as TimerOrElement.timer(...) inputs,
    // so the execution context's timer hooks are stubbed out as no-ops.
    execContext = new DirectModeExecutionContext() {
      @Override
      public void setTimer(String tag, Instant timestamp, Trigger.TimeDomain domain) {}
      @Override
      public void deleteTimer(String tag, Trigger.TimeDomain domain) {}
    };
    counters = new CounterSet();
    outputTag = new TupleTag<>();
  }

  /** A bundle with no elements must produce no output. */
  @Test public void testEmpty() throws Exception {
    DoFnRunner<TimerOrElement<KV<String, String>>, KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(FixedWindows.of(Duration.millis(10))));
    runner.startBundle();
    runner.finishBundle();
    List<?> result = runner.getReceiver(outputTag);
    assertEquals(0, result.size());
  }

  /**
   * Fixed windows: elements in [0,10) and [10,20) are grouped per window,
   * each group emitted when its end-of-window timer fires, timestamped at
   * the earliest element.
   */
  @Test public void testFixedWindows() throws Exception {
    DoFnRunner<TimerOrElement<KV<String, String>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(FixedWindows.of(Duration.millis(10))));
    Coder<IntervalWindow> windowCoder = FixedWindows.of(Duration.millis(10)).windowCoder();
    Coder<TriggerId<IntervalWindow>> triggerIdCoder =
        new TriggerIdCoder<IntervalWindow>(windowCoder);
    runner.startBundle();
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v1")),
        new Instant(1),
        Arrays.asList(window(0, 10))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v2")),
        new Instant(2),
        Arrays.asList(window(0, 10))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v0")),
        new Instant(0),
        Arrays.asList(window(0, 10))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v3")),
        new Instant(13),
        Arrays.asList(window(10, 20))));
    // Fire the end-of-window timers for both windows (tags are the
    // base64-encoded TriggerIds the executor would have registered).
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(0, 10), Collections.<Integer>emptyList())),
            new Instant(9), "k")));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(10, 20), Collections.<Integer>emptyList())),
            new Instant(19), "k")));
    runner.finishBundle();
    @SuppressWarnings("unchecked")
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.containsInAnyOrder("v0", "v1", "v2"));
    assertEquals(new Instant(0), item0.getTimestamp());
    assertThat(item0.getWindows(), Matchers.contains(window(0, 10)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.containsInAnyOrder("v3"));
    assertEquals(new Instant(13), item1.getTimestamp());
    assertThat(item1.getWindows(), Matchers.contains(window(10, 20)));
  }

  /**
   * Sliding windows (size 20, period 10): each element lands in two
   * overlapping windows and is emitted once per window it belongs to.
   */
  @Test public void testSlidingWindows() throws Exception {
    DoFnRunner<TimerOrElement<KV<String, String>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(
            SlidingWindows.of(Duration.millis(20)).every(Duration.millis(10))));
    Coder<IntervalWindow> windowCoder =
        SlidingWindows.of(Duration.millis(10)).every(Duration.millis(10)).windowCoder();
    Coder<TriggerId<IntervalWindow>> triggerIdCoder =
        new TriggerIdCoder<IntervalWindow>(windowCoder);
    runner.startBundle();
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v1")),
        new Instant(5),
        Arrays.asList(window(-10, 10), window(0, 20))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v0")),
        new Instant(2),
        Arrays.asList(window(-10, 10), window(0, 20))));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(-10, 10), Collections.<Integer>emptyList())),
            new Instant(9), "k")));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v2")),
        new Instant(5),
        Arrays.asList(window(0, 20), window(10, 30))));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(0, 20), Collections.<Integer>emptyList())),
            new Instant(19), "k")));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(10, 30), Collections.<Integer>emptyList())),
            new Instant(29), "k")));
    runner.finishBundle();
    @SuppressWarnings("unchecked")
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(3, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.containsInAnyOrder("v0", "v1"));
    assertEquals(new Instant(2), item0.getTimestamp());
    assertThat(item0.getWindows(), Matchers.contains(window(-10, 10)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.containsInAnyOrder("v0", "v1", "v2"));
    assertEquals(new Instant(2), item1.getTimestamp());
    assertThat(item1.getWindows(), Matchers.contains(window(0, 20)));
    WindowedValue<KV<String, Iterable<String>>> item2 = result.get(2);
    assertEquals("k", item2.getValue().getKey());
    assertThat(item2.getValue().getValue(), Matchers.containsInAnyOrder("v2"));
    assertEquals(new Instant(5), item2.getTimestamp());
    assertThat(item2.getWindows(), Matchers.contains(window(10, 30)));
  }

  /**
   * Session windows (gap 10ms): overlapping proto-windows [0,10), [5,15),
   * [3,13) merge into one session [0,15); [15,25) stays separate.
   */
  @Test public void testSessions() throws Exception {
    DoFnRunner<TimerOrElement<KV<String, String>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10))));
    Coder<IntervalWindow> windowCoder =
        Sessions.withGapDuration(Duration.millis(10)).windowCoder();
    Coder<TriggerId<IntervalWindow>> triggerIdCoder =
        new TriggerIdCoder<IntervalWindow>(windowCoder);
    runner.startBundle();
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v1")),
        new Instant(0),
        Arrays.asList(window(0, 10))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v2")),
        new Instant(5),
        Arrays.asList(window(5, 15))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v3")),
        new Instant(15),
        Arrays.asList(window(15, 25))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", "v0")),
        new Instant(3),
        Arrays.asList(window(3, 13))));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(0, 10), Collections.<Integer>emptyList())),
            new Instant(9), "k")));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(0, 15), Collections.<Integer>emptyList())),
            new Instant(14), "k")));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, String>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(15, 25), Collections.<Integer>emptyList())),
            new Instant(24), "k")));
    runner.finishBundle();
    @SuppressWarnings("unchecked")
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.containsInAnyOrder("v0", "v1", "v2"));
    assertEquals(new Instant(0), item0.getTimestamp());
    assertThat(item0.getWindows(), Matchers.contains(window(0, 15)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.containsInAnyOrder("v3"));
    assertEquals(new Instant(15), item1.getTimestamp());
    assertThat(item1.getWindows(), Matchers.contains(window(15, 25)));
  }

  /**
   * A custom combine fn that doesn't take any performace shortcuts
   * to ensure that we are using the CombineFn API properly.
   */
  private static class SumLongs extends CombineFn<Long, Long, Long> {
    private static final long serialVersionUID = 0L;

    @Override
    public Long createAccumulator() {
      return 0L;
    }

    @Override
    public Long addInput(Long accumulator, Long input) {
      return accumulator + input;
    }

    @Override
    public Long mergeAccumulators(Iterable<Long> accumulators) {
      Long sum = 0L;
      for (Long value : accumulators) {
        sum += value;
      }
      return sum;
    }

    @Override
    public Long extractOutput(Long accumulator) {
      // Deliberately returns a fresh boxed Long so identity-based shortcuts
      // in the caller would be caught.
      return new Long(accumulator);
    }
  }

  /** Same session-merging scenario as testSessions, but combining values with SumLongs. */
  @Test public void testSessionsCombine() throws Exception {
    CombineFn<Long, ?, Long> combineFn = new SumLongs();
    DoFnRunner<TimerOrElement<KV<String, Long>>,
        KV<String, Long>, List> runner =
        makeRunner(WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10))),
            combineFn.<String>asKeyedFn());
    Coder<IntervalWindow> windowCoder =
        Sessions.withGapDuration(Duration.millis(10)).windowCoder();
    Coder<TriggerId<IntervalWindow>> triggerIdCoder =
        new TriggerIdCoder<IntervalWindow>(windowCoder);
    runner.startBundle();
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", 1L)),
        new Instant(0),
        Arrays.asList(window(0, 10))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", 2L)),
        new Instant(5),
        Arrays.asList(window(5, 15))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", 3L)),
        new Instant(15),
        Arrays.asList(window(15, 25))));
    runner.processElement(WindowedValue.of(
        TimerOrElement.element(KV.of("k", 4L)),
        new Instant(3),
        Arrays.asList(window(3, 13))));
    // TODO: To simplify tests, create a timer manager that can sweep a watermark past some timers
    // and fire them as appropriate. This would essentially be the batch timer context.
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, Long>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(0, 10), Collections.<Integer>emptyList())),
            new Instant(9), "k")));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, Long>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(0, 15), Collections.<Integer>emptyList())),
            new Instant(14), "k")));
    runner.processElement(WindowedValue.valueInEmptyWindows(
        TimerOrElement.<KV<String, Long>>timer(
            CoderUtils.encodeToBase64(triggerIdCoder, new TriggerId<IntervalWindow>(
                (IntervalWindow) window(15, 25), Collections.<Integer>emptyList())),
            new Instant(24), "k")));
    runner.finishBundle();
    @SuppressWarnings("unchecked")
    List<WindowedValue<KV<String, Long>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Long>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertEquals((Long) 7L, item0.getValue().getValue());
    assertEquals(new Instant(0), item0.getTimestamp());
    assertThat(item0.getWindows(), Matchers.contains(window(0, 15)));
    WindowedValue<KV<String, Long>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertEquals((Long) 3L, item1.getValue().getValue());
    assertEquals(new Instant(15), item1.getTimestamp());
    assertThat(item1.getWindows(), Matchers.contains(window(15, 25)));
  }

  /** Builds a runner that groups values into iterables (no combining). */
  private DoFnRunner<TimerOrElement<KV<String, String>>, KV<String, Iterable<String>>, List>
      makeRunner(WindowingStrategy<? super String, IntervalWindow> windowingStrategy) {
    StreamingGroupAlsoByWindowsDoFn<String, String, Iterable<String>, IntervalWindow> fn =
        StreamingGroupAlsoByWindowsDoFn.createForIterable(windowingStrategy, StringUtf8Coder.of());
    return makeRunner(windowingStrategy, fn);
  }

  /** Builds a runner that combines values per key/window with the given fn. */
  private DoFnRunner<TimerOrElement<KV<String, Long>>, KV<String, Long>, List> makeRunner(
      WindowingStrategy<? super String, IntervalWindow> windowingStrategy,
      KeyedCombineFn<String, Long, ?, Long> combineFn) {
    StreamingGroupAlsoByWindowsDoFn<String, Long, Long, IntervalWindow> fn =
        StreamingGroupAlsoByWindowsDoFn.create(
            windowingStrategy, combineFn, StringUtf8Coder.of(), BigEndianLongCoder.of());
    return makeRunner(windowingStrategy, fn);
  }

  /** Common runner wiring: list-backed outputs, stubbed step context, shared counters. */
  private <VI, VO> DoFnRunner<TimerOrElement<KV<String, VI>>, KV<String, VO>, List> makeRunner(
      WindowingStrategy<? super String, IntervalWindow> windowingStrategy,
      StreamingGroupAlsoByWindowsDoFn<String, VI, VO, IntervalWindow> fn) {
    return
        DoFnRunner.createWithListOutputs(
            PipelineOptionsFactory.create(),
            fn,
            PTuple.empty(),
            (TupleTag<KV<String, VO>>) (TupleTag) outputTag,
            new ArrayList<TupleTag<?>>(),
            execContext.createStepContext("merge"),
            counters.getAddCounterMutator(),
            windowingStrategy);
  }

  /** Shorthand for an IntervalWindow over [start, end) in millis. */
  private BoundedWindow window(long start, long end) {
    return new IntervalWindow(new Instant(start), new Instant(end));
  }
}
|
# Install the build toolchain, LaTeX stack, and (if missing) pandoc plus fonts
# for PDF documentation generation.
# The rest of this script uses sudo, so apt-get update needs the same
# privileges (the original omitted sudo here and would fail unprivileged).
sudo apt-get update
sudo apt-get install -y apt-utils dirmngr gnupg gnupg-agent software-properties-common python3 autoconf gperf bison flex gcc g++ make swig python-dev cmake subversion iverilog python3-pip python-pip texlive-base texlive-fonts-extra texlive-fonts-recommended texlive-generic-recommended texlive-pictures texlive-xetex texlive-extra-utils xzdec aspell texlive-latex-recommended texlive-full
if ! [ -x "$(command -v pandoc)" ]; then
    # Fonts
    wget https://raw.githubusercontent.com/hotice/webupd8/master/install-google-fonts
    chmod +x install-google-fonts
    ./install-google-fonts
    # PDF generation
    wget https://github.com/jgm/pandoc/releases/download/2.4/pandoc-2.4-1-amd64.deb
    sudo dpkg -i pandoc-*-amd64.deb
    # Plain files: -f is sufficient; -r (recursive) was unnecessary.
    rm -f pandoc-*.deb
fi
# Spelling stuff
pip3 install junit-xml
const path = require('path');
const axios = require('axios');
const fse = require('fs-extra');
const { findAvailablePort, killPort, portIsOccupied } = require('./port');
const { runBySpawn, runByExec } = require('./run-cmd');
function getAbsolutePath(basePath, curPath) {
// 注意:有可能 rootPath 后面携带了一个反斜杠,需要去掉
if (curPath) {
return path.isAbsolute(curPath) ? curPath : path.resolve(path.join(basePath, curPath));
} else {
return path.isAbsolute(basePath) ? basePath : path.resolve(basePath);
}
}
function getBase64(data, length) {
const buff = Buffer.from(data + '');
const base64data = buff.toString('base64');
return length ? base64data.slice(-1 * length) : base64data;
}
/** Returns target itself, or target(...args) when target is callable. */
function getFromStrOrFunc(target, ...args) {
    if (typeof target === 'function') {
        return target(...args);
    }
    return target;
}
/**
 * Probes a URL until it responds or the retry budget is exhausted.
 *
 * @param {String} url request URL
 * @param {Object} [opts] options (mutated to track retry state)
 * @param {Number} [opts.retryLimit] maximum retries (default 10)
 * @param {Number} [opts.count] current retry count
 * @param {Number} [opts.timeout] wait between retries, in ms (default 1000)
 * @return {Promise<Boolean>}
 */
async function checkAndWaitURLAvailable(url, opts = {}) {
    // Probe first (same call order as before), swallowing request errors.
    const result = await axios.get(url).catch(() => {
    });
    if (!opts.count) {
        opts.count = 0;
    }
    if (!opts.retryLimit) {
        opts.retryLimit = 10;
    }
    if (!opts.timeout) {
        opts.timeout = 1000;
    }
    if (result) {
        console.log(`checkAndWaitURLAvailable return true!`, url, opts);
        return true;
    }
    if (opts.count >= opts.retryLimit) {
        console.log(`retry max! ${opts.count}/${opts.retryLimit}`);
        return Promise.reject(new Error('retry max'));
    }
    opts.count++;
    console.log(`check again: ${opts.count}/${opts.retryLimit}, waiting ${opts.timeout}ms`);
    await new Promise((resolve) => setTimeout(resolve, opts.timeout));
    return checkAndWaitURLAvailable(url, opts);
}
/**
 * Checks whether a local file exists, retrying until it appears or the
 * retry budget is exhausted.
 *
 * @param {String} checkFile local file path
 * @param {Object} [opts] options (mutated to track retry state)
 * @param {Number} [opts.retryLimit] maximum retries (default 10)
 * @param {Number} [opts.count] current retry count
 * @param {Number} [opts.timeout] wait between retries, in ms (default 1000)
 * @return {Promise<Boolean>}
 */
async function checkAndWaitFileAvailable(checkFile, opts = {}) {
    if (!opts.count) {
        opts.count = 0;
    }
    if (!opts.retryLimit) {
        opts.retryLimit = 10;
    }
    if (!opts.timeout) {
        opts.timeout = 1000;
    }
    const exists = await fse.pathExists(checkFile);
    if (exists) {
        console.log(`checkAndWaitFileAvailable return true!`, checkFile, opts);
        return true;
    }
    if (opts.count >= opts.retryLimit) {
        console.log(`retry max! ${opts.count}/${opts.retryLimit}`);
        return Promise.reject(new Error('retry max'));
    }
    opts.count++;
    console.log(`check again: ${opts.count}/${opts.retryLimit}, waiting ${opts.timeout}ms`);
    await new Promise((resolve) => setTimeout(resolve, opts.timeout));
    return checkAndWaitFileAvailable(checkFile, opts);
}
// Public surface of this utils module; the port and command helpers are
// re-exported from ./port and ./run-cmd.
module.exports = {
    getAbsolutePath,
    getBase64,
    getFromStrOrFunc,
    checkAndWaitFileAvailable,
    checkAndWaitURLAvailable,
    findAvailablePort,
    killPort,
    portIsOccupied,
    runBySpawn,
    runByExec
};
|
#!/usr/bin/env bash
{{!
Template adapted from here:
https://github.com/chriskempson/base16-builder/blob/master/templates/gnome-terminal/dark.sh.erb
}}
# Base16 Dracula - Gnome Terminal color scheme install script
# Mike Barkmin (http://github.com/mikebarkmin) based on Dracula Theme (http://github.com/dracula)

# Allow the caller to override the profile name/slug and the dconf/uuidgen
# binaries via the environment; otherwise use these defaults.
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Dracula"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-dracula"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# dset KEY VALUE — write VALUE to the new profile's dconf KEY.
# NOTE(review): "$type" is never assigned on the dconf code path of this
# script, so the string-quoting branch appears dead; callers pre-quote
# their values (e.g. "'$PROFILE_NAME'"). Confirm before relying on it.
dset() {
    local key="$1"; shift
    local val="$1"; shift
    if [[ "$type" == "string" ]]; then
        val="'$val'"
    fi
    "$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# Because dconf still doesn't have "append"
# dlist_append KEY VALUE — rebuild the dconf list at KEY with VALUE
# appended exactly once (any existing occurrence is filtered out first).
dlist_append() {
    local key="$1"; shift
    local val="$1"; shift
    # Strip brackets, split on commas, drop VALUE, re-append it, then
    # rejoin with commas (head -c-1 trims the trailing newline).
    local entries="$(
        {
            "$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "'$val'"
        } | head -c-1 | tr "\n" ,
    )"
    "$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
    # Check that uuidgen is available
    type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed. Aborting!"; exit 1; }

    [[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:

    if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
        # Modern gnome-terminal identifies profiles by UUID, not slug.
        if which "$UUIDGEN" > /dev/null 2>&1; then
            PROFILE_SLUG=`uuidgen`
        fi

        # Use the default profile (or the first one listed) as a template.
        if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
            DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
        else
            DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
        fi

        DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
        PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"

        # Copy existing settings from default profile
        $DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"

        # Add new copy to list of profiles
        dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"

        # Update profile values with theme options
        dset visible-name "'$PROFILE_NAME'"
        dset palette "['#282936', '#ea51b2', '#ebff87', '#00f769', '#62d6e8', '#b45bcf', '#a1efe4', '#e9e9f4', '#626483', '#b45bcf', '#3a3c4e', '#4d4f68', '#62d6e8', '#f1f2f8', '#00f769', '#f7f7fb']"
        dset background-color "'#282936'"
        dset foreground-color "'#e9e9f4'"
        dset bold-color "'#e9e9f4'"
        dset bold-color-same-as-fg "true"
        dset cursor-colors-set "true"
        dset cursor-background-color "'#e9e9f4'"
        dset cursor-foreground-color "'#282936'"
        dset use-theme-colors "false"
        dset use-theme-background "false"

        # dconf path succeeded: clean up and skip the gconf fallback below.
        unset PROFILE_NAME
        unset PROFILE_SLUG
        unset DCONF
        unset UUIDGEN

        exit 0
    fi
fi
# Fallback for Gnome 2 and early Gnome 3
# (gconf-based storage; keys use underscores rather than dashes).
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles

PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"
# gset TYPE KEY VALUE — write VALUE of gconf TYPE under the profile's KEY.
gset() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift

    "$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
# glist_append TYPE KEY VALUE — rebuild the gconf list at KEY with VALUE
# appended exactly once (any existing occurrence is filtered out first).
glist_append() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift

    local entries="$(
        {
            "$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "$val"
        } | head -c-1 | tr "\n" ,
    )"

    "$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"

gset string visible_name "$PROFILE_NAME"
# gconf stores the palette as a single colon-separated string. A duplicate
# dconf-style bracketed list used to follow this line and overwrote the key
# with an invalid value; it has been removed.
gset string palette "#282936:#ea51b2:#ebff87:#00f769:#62d6e8:#b45bcf:#a1efe4:#e9e9f4:#626483:#ea51b2:#ebff87:#00f769:#62d6e8:#b45bcf:#a1efe4:#f7f7fb"
gset string background_color "#282936"
gset string foreground_color "#e9e9f4"
gset string bold_color "#e9e9f4"
gset bool bold_color_same_as_fg "true"
# Cursor keys now use gconf-style underscore names and unquoted values to
# match the other keys (the previous dash-named, pre-quoted variants were
# dconf leftovers). NOTE(review): confirm the gnome-terminal gconf schema
# actually defines these cursor keys.
gset bool cursor_colors_set "true"
gset string cursor_background_color "#e9e9f4"
gset string cursor_foreground_color "#282936"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"

unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
<filename>src/main/java/jp/col/controller/dailyreport/DailyReportListController.java
package jp.col.controller.dailyreport;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import io.micrometer.core.instrument.util.StringUtils;
import jp.col.Model.DailyReportModel;
import jp.col.Model.UserModel;
import jp.col.dao.DailyReportDaoImpl;
import jp.col.dao.IDailyReportDao;
/**
 * View model for one task line inside a daily report: task name, project,
 * work description and time spent (already formatted, e.g. "2h").
 */
class Task {
    private String name;
    private String projectName;
    private String taskContent;
    private String spentTime;

    public Task(String name, String projectName, String taskContent, String spentTime) {
        this.name = name;
        this.projectName = projectName;
        this.taskContent = taskContent;
        this.spentTime = spentTime;
    }

    /** No-arg constructor used for empty placeholder rows. */
    public Task() {
    }

    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getProjectName() {
        return projectName;
    }
    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }
    public String getTaskContent() {
        return taskContent;
    }
    public void setTaskContent(String taskContent) {
        this.taskContent = taskContent;
    }
    public String getSpentTime() {
        return spentTime;
    }
    public void setSpentTime(String spentTime) {
        this.spentTime = spentTime;
    }
}
/**
 * View model for one calendar day in the daily-report list: a day label,
 * its date string ("yyyy/MM/dd"), the task lines and the formatted
 * attendance fields (work kind, weekday, begin/end/break/work/off times,
 * report status).
 */
class Report {
    // NOTE(review): SimpleDateFormat is not thread-safe; this static
    // instance is shared by all Report construction — confirm requests
    // are handled single-threaded or accept the risk.
    private final static SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd");
    private String label;      // day-of-month shown in the cell
    private String dateStr;    // "yyyy/MM/dd" key used to match reports
    private List<Task> taskList;
    private String workKind;
    private String yobi;       // Japanese weekday label
    private String beginTime;
    private String endTime;
    private String breakTime;
    private String workTime;
    private String offTime;
    private String reportStatus;

    /** Empty filler cell for days outside the displayed month. */
    public Report() {
        this.label = "";
        this.dateStr = "";
        this.taskList = new ArrayList<Task>();
    }

    /** Cell for an actual calendar day. */
    public Report(Calendar c) {
        this.label = String.valueOf(c.get(Calendar.DAY_OF_MONTH));
        this.dateStr = sdf.format(c.getTime());
        this.taskList = new ArrayList<Task>();
    }

    public String getLabel() {
        return label == null ? "" : label;
    }
    public void setLabel(String label) {
        this.label = label;
    }
    public String getDateStr() {
        return dateStr == null ? "" : dateStr;
    }
    public void setDateStr(String dateStr) {
        this.dateStr = dateStr;
    }
    public List<Task> getTaskList() {
        return taskList;
    }
    public void setTaskList(List<Task> taskList) {
        this.taskList = taskList;
    }
    public String getWorkKind() {
        return workKind;
    }
    public void setWorkKind(String workKind) {
        this.workKind = workKind;
    }
    public String getYobi() {
        return yobi;
    }
    public void setYobi(String yobi) {
        this.yobi = yobi;
    }
    public String getBeginTime() {
        return beginTime;
    }
    public void setBeginTime(String beginTime) {
        this.beginTime = beginTime;
    }
    public String getEndTime() {
        return endTime;
    }
    public void setEndTime(String endTime) {
        this.endTime = endTime;
    }
    public String getBreakTime() {
        return breakTime;
    }
    public void setBreakTime(String breakTime) {
        this.breakTime = breakTime;
    }
    public String getReportStatus() {
        return reportStatus;
    }
    public void setReportStatus(String reportStatus) {
        this.reportStatus = reportStatus;
    }
    public String getWorkTime() {
        return workTime;
    }
    public void setWorkTime(String workTime) {
        this.workTime = workTime;
    }
    public String getOffTime() {
        return offTime;
    }
    public void setOffTime(String offTime) {
        this.offTime = offTime;
    }
}
/**
 * Spring MVC controller rendering the monthly daily-report list
 * ("dailyReportList" view) as a calendar of {@link Report} week rows.
 *
 * A large commented-out copy of {@code initReportList} has been removed
 * as dead code; the active implementation below supersedes it.
 */
@Controller
public class DailyReportListController {

    /** Japanese year/month label format shown on screen (e.g. "2020年1月"). */
    private static final String DATEFORMAT_JA = "yyyy年M月";

    // Code-to-label tables for report status and work kind.
    // NOTE(review): the "saved" key is lower-case while "Submitted" and
    // "Confirmed" are capitalised — confirm this matches the values
    // actually persisted by the report editor.
    private static Map<String, String> reportStatusMap = new HashMap<String, String>();
    private static Map<String, String> workKindMap = new HashMap<String, String>();
    static {
        reportStatusMap.put("saved","保存済");
        reportStatusMap.put("Submitted","提出済");
        reportStatusMap.put("Confirmed","確認済");
        workKindMap.put("Predetermined", "所定");
        workKindMap.put("HolidayWorkSat", "休出(土・祝)");
        workKindMap.put("HolidayWorkSun", "休出(日)");
        workKindMap.put("Holiday", "有休");
        workKindMap.put("HalfADayOff", "半休");
        workKindMap.put("SpecialHoliday", "特休");
        workKindMap.put("Absence", "欠勤");
    }

    // Re-created per request in initReportList (not injected).
    IDailyReportDao dailyReportDao;

    /**
     * List-screen entry point. {@code mode} "p" moves one month back, any
     * other non-blank currentDate moves one month forward; with no
     * currentDate the current month is shown. Redirects to the login view
     * when the session holds no user.
     */
    // NOTE(review): @CacheEvict without a cache name requires a configured
    // default cache resolver in Spring — confirm this does not fail at
    // startup, or name the cache to evict explicitly.
    @CacheEvict
    @RequestMapping("/DailyList")
    String init(Map<String, Object> model,HttpSession ses, HttpServletRequest req , String currentDate , String mode) {
        Object userObj = ses.getAttribute("user");
        if(userObj == null){
            model.put("message", "セッションタイムアウトが発生しました。\r\n再度ログインから実行してください。");
            return "login";
        }
        UserModel user = (UserModel)userObj;
        Calendar now = Calendar.getInstance();
        if (!StringUtils.isBlank(currentDate)) {
            SimpleDateFormat sdf = new SimpleDateFormat(DATEFORMAT_JA);
            try {
                now.setTime(sdf.parse(currentDate));
            } catch (ParseException e) {
                // Fall back to the current month when the label cannot be parsed.
                e.printStackTrace();
            }
            now.add(Calendar.MONTH, "p".equals(mode) ? -1 : 1);
        }
        return initReportList(model, user, now);
    }

    /** Shows the month before the one in the "currentDate" request parameter. */
    @RequestMapping("/dailyReportListPre")
    String dailyReportListPre(Map<String, Object> model, HttpSession ses, HttpServletRequest req) {
        Object userObj = ses.getAttribute("user");
        if(userObj == null){
            model.put("message", "セッションタイムアウトが発生しました。\r\n再度ログインから実行してください。");
            return "login";
        }
        UserModel user = (UserModel)userObj;
        String date = req.getParameter("currentDate");
        Calendar calendar = Calendar.getInstance();
        SimpleDateFormat sdf = new SimpleDateFormat(DATEFORMAT_JA);
        try {
            calendar.setTime(sdf.parse(date));
            calendar.add(Calendar.MONTH, -1);
            return initReportList(model, user, calendar);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return "dailyReportList";
    }

    /** Shows the month after the one in the "currentDate" request parameter. */
    @RequestMapping("/dailyReportListLater")
    String dailyReportListLater(Map<String, Object> model,HttpSession ses, HttpServletRequest req) {
        Object userObj = ses.getAttribute("user");
        if(userObj == null){
            model.put("message", "セッションタイムアウトが発生しました。\r\n再度ログインから実行してください。");
            return "login";
        }
        UserModel user = (UserModel)userObj;
        String date = req.getParameter("currentDate");
        Calendar calendar = Calendar.getInstance();
        SimpleDateFormat sdf = new SimpleDateFormat(DATEFORMAT_JA);
        try {
            calendar.setTime(sdf.parse(date));
            calendar.add(Calendar.MONTH, 1);
            return initReportList(model, user, calendar);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return "dailyReportList";
    }

    /** Returns the report whose date string matches, or null. */
    private DailyReportModel getDailyReport(List<DailyReportModel> dailyReportList , String dateStr) {
        for (DailyReportModel model : dailyReportList) {
            if(model.getReportDate().equals(dateStr)) {
                return model;
            }
        }
        return null;
    }

    /**
     * Builds the calendar model for the month of {@code now}: week rows
     * ("reportList"), a flat per-day list ("detailReportList"), the month's
     * total working time ("totalTime") and the month label ("currentDate").
     */
    private String initReportList(Map<String, Object> model, UserModel user, Calendar now) {
        SimpleDateFormat sdfE = new SimpleDateFormat("E", new Locale("ja"));
        // Expand the month to full weeks: back to Sunday, forward to Saturday.
        Calendar firstDay = (Calendar) now.clone();
        firstDay.set(Calendar.DAY_OF_MONTH, 1);
        while (firstDay.get(Calendar.DAY_OF_WEEK) != Calendar.SUNDAY) {
            firstDay.add(Calendar.DATE, -1);
        }
        Calendar lastDay = (Calendar) now.clone();
        lastDay.set(Calendar.DAY_OF_MONTH, now.getActualMaximum(Calendar.DAY_OF_MONTH));
        while (lastDay.get(Calendar.DAY_OF_WEEK) != Calendar.SATURDAY) {
            lastDay.add(Calendar.DATE, 1);
        }
        dailyReportDao = new DailyReportDaoImpl();
        DailyReportModel dailyReport = new DailyReportModel();
        dailyReport.setEmployee(user.getSfid());
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM");
        String reportDate = sdf.format(now.getTime());
        dailyReport.setReportDate(reportDate);
        List<DailyReportModel> dailyReportList = dailyReportDao.findDailyReportByMonth(dailyReport);
        List<List<Report>> reportList = new ArrayList<List<Report>>();
        List<Report> rowReports = new ArrayList<Report>();
        List<Report> detailReportList = new ArrayList<Report>();
        Report tempReport;
        String totalTime = "0:00";
        do {
            if (firstDay.get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY) {
                rowReports = new ArrayList<Report>(); // start a new week row
            }
            if (firstDay.get(Calendar.MONTH) == now.get(Calendar.MONTH)) {
                tempReport = new Report(firstDay);
                DailyReportModel drm = getDailyReport(dailyReportList , tempReport.getDateStr());
                tempReport.setYobi(sdfE.format(firstDay.getTime()));
                if (drm != null) {
                    if(StringUtils.isNotEmpty(drm.getWorkKind())) {
                        tempReport.setWorkKind(workKindMap.get(drm.getWorkKind()));
                    }
                    if(StringUtils.isNotEmpty(drm.getBeginTime())) {
                        tempReport.setBeginTime(drm.getBeginTime().substring(0,5));
                    }
                    if(StringUtils.isNotEmpty(drm.getEndTime())) {
                        tempReport.setEndTime(drm.getEndTime().substring(0,5));
                    }
                    if(StringUtils.isNotEmpty(drm.getBreakTime())) {
                        tempReport.setBreakTime(getBreakTimeHHmm(drm.getBreakTime()));
                    }
                    tempReport.setWorkTime(getWorkTimeHHmm(drm.getBeginTime(),drm.getEndTime(),tempReport.getBreakTime()));
                    totalTime = addTime(totalTime, tempReport.getWorkTime());
                    tempReport.setOffTime(getOffTimeHHmm(tempReport.getWorkTime(), tempReport.getWorkKind()));
                    if(StringUtils.isNotEmpty(drm.getReportStatus())) {
                        tempReport.setReportStatus(reportStatusMap.get(drm.getReportStatus()));
                    }
                    tempReport.getTaskList().add(new Task());
                }
                rowReports.add(tempReport);
                detailReportList.add(tempReport);
            } else {
                // Filler cell for days outside the displayed month.
                tempReport = new Report();
                rowReports.add(tempReport);
            }
            if (firstDay.get(Calendar.DAY_OF_WEEK) == Calendar.SATURDAY) {
                reportList.add(rowReports);
            }
            firstDay.add(Calendar.DATE, 1);
        } while (!firstDay.after(lastDay));
        model.put("reportList", reportList);
        model.put("detailReportList", detailReportList);
        model.put("totalTime", "出勤時間: " + totalTime);
        SimpleDateFormat sdfJa = new SimpleDateFormat(DATEFORMAT_JA);
        String currentDate = sdfJa.format(now.getTime());
        model.put("currentDate", currentDate);
        return "dailyReportList";
    }

    /**
     * Converts a decimal break time (e.g. "1.25") to "H:mm".
     * NOTE(review): only .0/.25/.5/.75 fractions are handled; any other
     * fraction yields an hour value with no minutes part — confirm inputs
     * are restricted to quarter hours.
     */
    private String getBreakTimeHHmm(String breakTime){
        String breakTimeHHmm = "";
        if (StringUtils.isNotEmpty(breakTime)) {
            double dblBreakTime = Double.parseDouble(breakTime);
            int intBreakTime = (int)dblBreakTime;
            breakTimeHHmm = "" + intBreakTime;
            double dblBreakMinute = dblBreakTime - intBreakTime;
            if (dblBreakMinute == 0) {
                breakTimeHHmm += ":00";
            }
            if (dblBreakMinute == 0.25) {
                breakTimeHHmm += ":15";
            }
            if (dblBreakMinute == 0.5) {
                breakTimeHHmm += ":30";
            }
            if (dblBreakMinute == 0.75) {
                breakTimeHHmm += ":45";
            }
        }
        return breakTimeHHmm;
    }

    /**
     * Computes working time ("H:mm") as end - begin - break; returns the
     * empty string when any input is missing.
     */
    private String getWorkTimeHHmm(String beginTime, String endTime, String breakTime){
        String workTime = "";
        if (StringUtils.isNotEmpty(beginTime) && StringUtils.isNotEmpty(endTime) && StringUtils.isNotEmpty(breakTime)) {
            int intBeginTimeHour = Integer.parseInt(beginTime.split(":")[0]);
            int intEndTimeHour = Integer.parseInt(endTime.split(":")[0]);
            int intBreakTimeHour = Integer.parseInt(breakTime.split(":")[0]);
            int intBeginTimeMin = Integer.parseInt(beginTime.split(":")[1]);
            int intEndTimeMin = Integer.parseInt(endTime.split(":")[1]);
            int intBreakTimeMin = Integer.parseInt(breakTime.split(":")[1]);
            int workTimeHour = intEndTimeHour - intBeginTimeHour - intBreakTimeHour;
            int workTimeMin = intEndTimeMin - intBeginTimeMin - intBreakTimeMin;
            if(workTimeMin < 0) {
                workTimeHour = workTimeHour - 1;
                workTimeMin = 60 + workTimeMin;
            }
            // Zero-pad the minutes: the previous concatenation produced
            // e.g. "7:5" for five minutes past the hour.
            workTime = String.format("%d:%02d", workTimeHour, workTimeMin);
        }
        return workTime;
    }

    /**
     * Overtime ("H:mm") beyond an 8-hour day; "0:00" for paid/special
     * holidays or when under 8 hours.
     */
    private String getOffTimeHHmm(String workTime, String workKind){
        String offTime = "";
        if (StringUtils.isNotEmpty(workTime) && StringUtils.isNotEmpty(workKind) ) {
            switch (workKind) {
            case "有休" :
            case "特休" :
                offTime = "0:00";
                break;
            default :
                int workTimeHour = Integer.parseInt(workTime.split(":")[0] );
                if (workTimeHour >= 8) {
                    offTime = "" + (workTimeHour - 8) + ":";
                    offTime += workTime.split(":")[1];
                } else {
                    offTime = "0:00" ;
                }
            }
        }
        return offTime;
    }

    /** Adds two "H:mm" durations; returns totalTime unchanged when either is empty. */
    private static String addTime(String totalTime, String workTime) {
        String returnTotalTime = totalTime;
        if (StringUtils.isNotEmpty(totalTime) && StringUtils.isNotEmpty(workTime) ) {
            int totalTimeHour = Integer.parseInt(totalTime.split(":")[0]);
            int totalTimeMin = Integer.parseInt(totalTime.split(":")[1]);
            int workTimeHour = Integer.parseInt(workTime.split(":")[0]);
            int workTimeMin = Integer.parseInt(workTime.split(":")[1]);
            int returnTotalTimeHour = totalTimeHour + workTimeHour;
            int returnTotalTimeMin = totalTimeMin + workTimeMin;
            if (returnTotalTimeMin >= 60) {
                returnTotalTimeHour += 1;
                returnTotalTimeMin -= 60;
            }
            // Zero-pad the minutes (same fix as getWorkTimeHHmm).
            returnTotalTime = String.format("%d:%02d", returnTotalTimeHour, returnTotalTimeMin);
        }
        return returnTotalTime;
    }
}
|
import React from 'react'
import PropTypes from 'prop-types'
export const ReducerContext = React.createContext({})
export default function ReducerProvider({ children, value }) {
return (
<ReducerContext.Provider value={value}>{children}</ReducerContext.Provider>
)
}
// `value` is required — the provider is meaningless without one; children
// may be anything renderable.
ReducerProvider.propTypes = {
  children: PropTypes.any,
  value: PropTypes.object.isRequired,
}
|
#!/bin/bash
# SLURM batch script: runs one hyper-parameter configuration of the
# maxout-3 activation experiment via meta.py on a single core.
#SBATCH -J Act_maxout-3_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins

#module load intel python/3.5

python3 /home/se55gyhe/Act_func/progs/meta.py maxout-3 1 Adagrad 1 0.2366365402765473 36 0.013037964913135998 lecun_uniform PE-infersent
-- Enable classic (non-C##) user creation on Oracle 12c+ for this session.
alter session set "_ORACLE_SCRIPT"= true;
-- Create an unlocked test account (username = password = "testing").
create user testing identified by testing account unlock;
grant create session, connect, resource to testing;
-- NOTE(review): ALL PRIVILEGES plus IMP_FULL_DATABASE makes this a highly
-- privileged account — suitable only for disposable test databases.
grant all privileges to testing;
ALTER USER testing PROFILE DEFAULT;
grant IMP_FULL_DATABASE to testing;
import AbstractStorage from './AbstractStorage';
import NullMarshaller from './Marshaller/NullMarshaller';
export default
class ProvidedTokenStorage extends AbstractStorage {
    private _accessToken?: string;
    private _refreshToken?: string;

    /**
     * Constructor.
     */
    constructor(accessToken?: string | undefined, refreshToken?: string | undefined) {
        super();

        this._accessToken = accessToken;
        this._refreshToken = refreshToken;
        this.marshaller = new NullMarshaller();
    }

    /**
     * @inheritdoc
     */
    async hasItem(key: string): Promise<boolean> {
        if ('access_token' === key) {
            return undefined !== this._accessToken;
        }
        if ('refresh_token' === key) {
            return undefined !== this._refreshToken;
        }

        return false;
    }

    /**
     * @inheritdoc
     */
    async clear(): Promise<boolean> {
        this._accessToken = undefined;
        this._refreshToken = undefined;

        return true;
    }

    /**
     * @inheritdoc
     */
    async deleteItem(key: string): Promise<boolean> {
        if ('access_token' === key) {
            this._accessToken = undefined;
        } else if ('refresh_token' === key) {
            this._refreshToken = undefined;
        }

        return true;
    }

    /**
     * @inheritdoc
     */
    async _getItem(key: string): Promise<any> {
        if ('access_token' === key) {
            return this._accessToken;
        }
        if ('refresh_token' === key) {
            return this._refreshToken;
        }

        return undefined;
    }

    /**
     * @inheritdoc
     */
    async _save(key: string, value: any, expiry: Date): Promise<boolean> { // eslint-disable-line no-unused-vars
        if ('access_token' === key) {
            this._accessToken = value;

            return true;
        }
        if ('refresh_token' === key) {
            this._refreshToken = value;

            return true;
        }

        return false;
    }
}
|
// #!/usr/bin/env node
var nps = require('path')
var child = require('child_process')
var chalk = require('chalk')
var over = require('../lib/lib/utils/overwrite-require')
var _console = require('../lib/lib/utils/console').default
var info = over.getInfo()
/** Runs `npm install <pkg> --save-optional` in the package root, echoing output. */
function install(pkg) {
  const cmd = 'npm install ' + pkg + ' --save-optional'
  console.log(' running: ', chalk.yellow.bold(cmd))
  const output = child.execSync(cmd, { cwd: info.path.rootPath })
  console.log(output.toString())
}
module.exports = function use(pkgs) {
if (info.type === 'local') {
console.log(
chalk.read.bold(' ERROR: Picidae is in local position, you should run `npm install ' + pkgs.join(' ') + ' --save`!\n')
)
}
else {
pkgs.forEach(install)
}
process.exit()
}
|
package info.novatec.micronaut.camunda.feature;
import io.micronaut.context.annotation.ConfigurationProperties;
import io.micronaut.core.bind.annotation.Bindable;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
/**
 * Type-safe view of the default datasource settings
 * ({@code datasources.default.*}) with H2 in-memory fallbacks, so the
 * application can start without any external configuration.
 */
@ConfigurationProperties("datasources.default")
public interface DatasourceConfiguration {

    // JDBC URL; DB_CLOSE_DELAY keeps the in-memory DB alive briefly after
    // the last connection closes.
    @Bindable(defaultValue = "jdbc:h2:mem:micronaut-db;DB_CLOSE_DELAY=1000")
    @NotBlank
    String getUrl();

    @Bindable(defaultValue = "sa")
    @NotBlank
    String getUsername();

    // An empty password is valid for the default H2 setup, hence @NotNull
    // rather than @NotBlank.
    @Bindable(defaultValue = "")
    @NotNull
    String getPassword();

    @Bindable(defaultValue = "org.h2.Driver")
    @NotBlank
    String getDriverClassName();
}
<filename>server.js
'use strict';

// Load .env into process.env before anything below reads configuration.
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const mongoose = require('mongoose');

mongoose.connect(process.env.DB_URL, {useNewUrlParser: true, useUnifiedTopology: true});
const db = mongoose.connection;

const Book = require('./models/bookModel.js');
const verifyUser = require('./auth.js');

// Log connection failures; announce success once.
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', () => console.log('Mongoose is connected'));

const app = express();
app.use(cors());
app.use(express.json());

const PORT = process.env.PORT || 3001;
/** GET /books — returns the authenticated user's books, 404 when none. */
const handleBookRequest = (req, res) => {
  verifyUser(req, async (err, user) => {
    if (err) {
      res.status(498).send('Token not valid.');
      console.log('error here');
      return;
    }
    try {
      const booksFromDB = await Book.find({email: user.email});
      if (booksFromDB.length > 0) {
        res.status(200).send(booksFromDB);
      } else {
        res.status(404).send('no books');
      }
    } catch (e) {
      console.log(e);
      res.status(500).send('Server error');
    }
  });
};
/** POST /books — creates a book from the request body for an authenticated user. */
const handleBookPost = (req, res) => {
  verifyUser(req, async (err, user) => {
    if (err) {
      res.status(498).send('Token not valid');
      return;
    }
    try {
      const newBook = await Book.create(req.body);
      res.status(201).send(newBook);
    } catch (e) {
      res.status(500).send('Sorry, your book was not added.');
    }
  });
};
/**
 * DELETE /books/:id — deletes the book only when it belongs to the
 * authenticated user.
 *
 * Fixes over the previous revision: an ownership mismatch now answers 403
 * instead of leaving the request hanging with no response; a missing book
 * answers 404 instead of throwing on `bookCheck.email`; and the 204 reply
 * no longer tries to carry a body (204 must be empty).
 */
const handleBookDelete = (req, res) => {
  verifyUser(req, async (err, user) => {
    if (err) {
      res.status(498).send('Token not valid');
      return;
    }
    try {
      const id = req.params.id;
      const bookCheck = await Book.findById(id);
      if (!bookCheck) {
        res.status(404).send('Can\'t find book to delete');
        return;
      }
      if (bookCheck.email !== user.email) {
        res.status(403).send('Not authorized to delete this book.');
        return;
      }
      const deletedBook = await Book.findByIdAndDelete(id);
      if (deletedBook) {
        res.status(204).send();
      } else {
        res.status(404).send('Can\'t find book to delete');
      }
    } catch (error) {
      res.status(500).send('Server Error');
    }
  });
};
/**
 * PUT /books/:id — replaces the book document and returns the updated copy.
 *
 * Fixes over the previous revision: `res.status(404)` and `res.status(500)`
 * never sent a response, leaving the client hanging — they now terminate
 * the request with a body.
 * NOTE(review): unlike DELETE, this handler does not verify the book
 * belongs to the authenticated user — confirm that is intended.
 */
const handleBookPut = (req, res) => {
  verifyUser(req, async (err, user) => {
    if (err) {
      res.status(498).send('Token not valid');
      return;
    }
    try {
      const id = req.params.id;
      const requestedUpdate = req.body;
      const updatedBook = await Book.findByIdAndUpdate(id, requestedUpdate, {new: true, overwrite: true});
      if (updatedBook) {
        res.status(202).send(updatedBook);
      } else {
        res.status(404).send('Book not found.');
      }
    } catch (error) {
      console.log(error);
      res.status(500).send('Server Error');
    }
  });
};
/** GET /user — echoes the verified user payload from the auth token. */
const getUser = (req, res) => {
  verifyUser(req, (err, user) => {
    if (err) {
      res.status(498).send('Token not valid');
      return;
    }
    res.status(200).send(user);
  });
};
// Route table: CRUD for /books plus an authenticated /user lookup.
app.get('/books', handleBookRequest);
app.post('/books', handleBookPost);
app.delete('/books/:id', handleBookDelete);
app.put('/books/:id', handleBookPut);
app.get('/user', getUser);

app.listen(PORT, () => console.log(`listening on ${PORT}`));
|
<reponame>AnwarHaque/chefjenkins
#
# Cookbook:: jenkins
# HWRP:: credentials_password
#
# Author:: <NAME> <<EMAIL>>
#
# Copyright:: 2013-2016, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative 'credentials'
require_relative 'credentials_user'
#
# Tell whether +key+ holds an ECDSA private key. The original code assumed
# all keys were RSA; detecting EC keys separately preserves that assumption
# for RSA material while still supporting EC keys.
#
# @param [String] key
# @return [TrueClass, FalseClass]
def ecdsa_key?(key)
  !key.index('BEGIN EC PRIVATE KEY').nil?
end
class Chef
  # Custom resource describing SSH private-key credentials to be stored in
  # Jenkins; extends the generic user-credentials resource with key material.
  class Resource::JenkinsPrivateKeyCredentials < Resource::JenkinsUserCredentials
    include Jenkins::Helper

    resource_name :jenkins_private_key_credentials

    # Attributes
    attribute :username,
              kind_of: String,
              name_attribute: true
    attribute :private_key,
              kind_of: [String, OpenSSL::PKey::RSA, OpenSSL::PKey::EC],
              required: true
    attribute :passphrase,
              kind_of: String

    #
    # Private key of the credentials . This should be the actual key
    # contents (as opposed to the path to a private key file) in OpenSSH
    # format.
    #
    # @param [String] arg
    # @return [String]
    #
    def pem_private_key
      # Normalize whatever was supplied (key object or key string) to PEM
      # text, keeping EC keys distinct from RSA.
      if private_key.is_a?(OpenSSL::PKey::RSA) || private_key.is_a?(OpenSSL::PKey::EC)
        private_key.to_pem
      elsif ecdsa_key?(private_key)
        OpenSSL::PKey::EC.new(private_key).to_pem
      else
        OpenSSL::PKey::RSA.new(private_key).to_pem
      end
    end
  end
end
class Chef
  # Provider that converges JenkinsPrivateKeyCredentials by submitting a
  # Groovy snippet to the Jenkins credentials API.
  class Provider::JenkinsPrivateKeyCredentials < Provider::JenkinsUserCredentials
    use_inline_resources
    provides :jenkins_private_key_credentials

    def load_current_resource
      @current_resource ||= Resource::JenkinsPrivateKeyCredentials.new(new_resource.name)
      super
      if current_credentials
        @current_resource.private_key(current_credentials[:private_key])
      end
      @current_resource
    end

    private

    #
    # @see Chef::Resource::JenkinsCredentials#credentials_groovy
    # @see https://github.com/jenkinsci/ssh-credentials-plugin/blob/master/src/main/java/com/cloudbees/jenkins/plugins/sshcredentials/impl/BasicSSHUserPrivateKey.java
    #
    def credentials_groovy
      <<-EOH.gsub(/ ^{8}/, '')
import com.cloudbees.plugins.credentials.*
import com.cloudbees.jenkins.plugins.sshcredentials.impl.*
private_key = """#{new_resource.pem_private_key}
"""
credentials = new BasicSSHUserPrivateKey(
CredentialsScope.GLOBAL,
#{convert_to_groovy(new_resource.id)},
#{convert_to_groovy(new_resource.username)},
new BasicSSHUserPrivateKey.DirectEntryPrivateKeySource(private_key),
#{convert_to_groovy(new_resource.passphrase)},
#{convert_to_groovy(new_resource.description)}
)
      EOH
    end

    #
    # @see Chef::Resource::JenkinsCredentials#attribute_to_property_map
    #
    def attribute_to_property_map
      {
        private_key: 'credentials.privateKey',
        passphrase: '<PASSWORD>',
      }
    end

    #
    # @see Chef::Resource::JenkinsCredentials#current_credentials
    #
    def current_credentials
      super
      # Normalize the private key
      if @current_credentials && @current_credentials[:private_key]
        cc = @current_credentials[:private_key]
        cc = @current_credentials[:private_key].to_pem unless cc.is_a?(String)
        @current_credentials[:private_key] = ecdsa_key?(cc) ? OpenSSL::PKey::EC.new(cc) : OpenSSL::PKey::RSA.new(cc)
      end
      @current_credentials
    end
  end
end
|
#!/bin/bash
# Container-style entrypoint: fail fast on errors, make tools installed under
# /usr/local/bin take precedence, then hand control to the requested command.
set -e
export PATH=/usr/local/bin:$PATH
# exec replaces the shell so signals are delivered directly to the command.
exec "$@"
|
from typing import List
def process_sequence(sequence: str) -> List[str]:
    """Return the characters of ``sequence`` that occur an odd number of times.

    Each character toggles its own presence: an occurrence adds the character
    if absent, and removes it if present. The returned order matches the
    original list-based implementation: surviving characters appear in the
    order they were last (re-)added.

    Uses a dict as an insertion-ordered set, making the pass O(n) instead of
    the O(n**2) cost of ``in``/``remove`` on a list.

    :param sequence: input string (may be empty)
    :return: characters left "unpaired" after processing, in toggle order
    """
    pending: dict = {}
    for ch in sequence:
        if ch in pending:
            # Second (fourth, ...) occurrence cancels the pending one.
            del pending[ch]
        else:
            # Re-adding moves the character to the end, same as list.append.
            pending[ch] = None
    return list(pending)
const http = require('http')
const fs = require('fs')
const SERVER_PORT = 3000;

// Minimal demo HTTP server: serves HTML, JSON (GET/POST), and a chunked
// streaming response, dispatching on request.url.
const server = http.createServer((request, response) => {
  // Basic information about the incoming request
  console.log('request.url', request.url)
  console.log('request.method', request.method)
  // console.log('request.headers', request.headers)

  // Serve content as a text/html document
  if (request.url === '/html') {
    response.setHeader('Content-Type', 'text/html; charset=utf-8')
    response.write(`
    <html>
      <head>
        <title>Node.js Server!</title>
      </head>
      <body>
        <h2>Hello from my Node.js Server!</h2>
        <h3>Olá do servidor em Node.js!</h3>
      </body>
    </html>
    `, 'utf-8')
    response.end();
  }

  if (request.url === '/json') {
    // Serve content as JSON
    if (request.method === 'GET') {
      response.setHeader('Content-Type', 'application/json; charset=utf-8')
      const data = { message: 'Hello, world!', portugues: 'Olá mundo!' };
      response.write(JSON.stringify(data))
      response.end();
    }

    /**
     * curl --header "Content-Type: application/json"
     *  --request POST
     *  --data '{"message": "Hello, world!"}' http://localhost:3000/json
     */
    if (request.method === 'POST') {
      const chunks = [];
      request.on('data', (chunk) => {
        console.log('chunk', chunk)
        chunks.push(chunk);
      })
      request.on('end', () => {
        const parsedChunks = Buffer.concat(chunks).toString();
        console.log('parsedChunks', parsedChunks)
        fs.writeFile('message.json', parsedChunks, err => {
          // BUGFIX: the original ignored `err` and redirected even when the
          // write failed; report a 500 instead of silently losing the data.
          if (err) {
            console.error('failed to write message.json', err)
            response.statusCode = 500;
            response.end();
            return;
          }
          response.statusCode = 302;
          response.setHeader('Location', '/')
          response.end();
        })
      })
    }
  }

  // Serve content as a stream of chunks over time
  if (request.url === '/') {
    response.setHeader('Content-Type', 'text/html; charset=utf-8')
    response.write('<p>First chunk of data</p>')
    // BUGFIX: the original labelled both the 2s and 3s chunks "Fourthy";
    // the sequence is now First..Fifth in delivery order.
    setTimeout(() => response.write('<p>Third chunk of data</p>'), 1000)
    setTimeout(() => response.write('<p>Fourth chunk of data</p>'), 2000)
    setTimeout(() => {
      response.write('<p>Fifth chunk of data</p>')
      response.end() // The connection is only closed here
    }, 3000)
    response.write('<p>Second chunk of data</p>')
  }
})

server.listen(SERVER_PORT, () => console.log(`Server is listening on http://localhost:${SERVER_PORT}`))
<filename>node_modules/react-icons-kit/feather/packageIcon.js
"use strict";

// Auto-generated icon definition for react-icons-kit: the Feather "package"
// icon as an SVG element tree (viewBox + child path/polyline/line nodes with
// their attributes). Consumers render `children` inside an <svg> carrying
// the top-level `attribs`.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.packageIcon = void 0;
var packageIcon = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M12.89 1.45l8 4A2 2 0 0 1 22 7.24v9.53a2 2 0 0 1-1.11 1.79l-8 4a2 2 0 0 1-1.79 0l-8-4a2 2 0 0 1-1.1-1.8V7.24a2 2 0 0 1 1.11-1.79l8-4a2 2 0 0 1 1.78 0z"
    },
    "children": []
  }, {
    "name": "polyline",
    "attribs": {
      "points": "2.32 6.16 12 11 21.68 6.16"
    },
    "children": []
  }, {
    "name": "line",
    "attribs": {
      "x1": "12",
      "y1": "22.76",
      "x2": "12",
      "y2": "11"
    },
    "children": []
  }, {
    "name": "line",
    "attribs": {
      "x1": "7",
      "y1": "3.5",
      "x2": "17",
      "y2": "8.5"
    },
    "children": []
  }],
  "attribs": {
    "fill": "none",
    "stroke": "currentColor",
    "stroke-width": "2",
    "stroke-linecap": "round",
    "stroke-linejoin": "round"
  }
};
exports.packageIcon = packageIcon;
#!/bin/bash
# Generates SSH key and certificate test fixtures: one CA per algorithm,
# leaf keys cross-signed by each CA, certificates exercising critical
# options, PEM-encoded (plain and encrypted) keys, and one certificate with
# a deliberately corrupted signature.
#
# NOTE(review): there is no `set -e` here, so a failing ssh-keygen/openssl
# step will not abort the script and fixtures may be silently missing —
# confirm whether that is intentional.

# Security-key-backed (FIDO) keys need a physical token and user touch, so
# they are only generated on explicit confirmation.
generate_security_keys=0
read -p "Generated security key-backed keys (Requires key and user interaction)? [yN] " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
generate_security_keys=1
fi

# A passphrase-protected RSA key, then one plain CA key per algorithm.
ssh-keygen -trsa -N "passw0rd" -f ./encrypted_rsa
ssh-keygen -trsa -N "" -f ./rsa_ca
ssh-keygen -tdsa -N "" -f ./dsa_ca
ssh-keygen -tecdsa -N "" -f ./ecdsa_ca
ssh-keygen -ted25519 -N "" -f ./ed25519_ca

# Leaf keys signed by each CA. Flags: -z serial, -n principals, -O clear
# drops default extensions, then explicit critical/extension options are set.
ssh-keygen -trsa -N "" -f ./rsa_leaf_for_rsa_ca
ssh-keygen -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding rsa_leaf_for_rsa_ca.pub

ssh-keygen -trsa -N "" -f ./rsa_leaf_for_rsa_ca_sha2_256
ssh-keygen -trsa-sha2-256 -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding rsa_leaf_for_rsa_ca_sha2_256.pub

ssh-keygen -trsa -N "" -f ./rsa_leaf_for_rsa_ca_sha2_512
ssh-keygen -trsa-sha2-512 -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding rsa_leaf_for_rsa_ca_sha2_512.pub

ssh-keygen -trsa -N "" -f ./rsa_leaf_for_dsa_ca
ssh-keygen -s dsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding rsa_leaf_for_dsa_ca.pub

ssh-keygen -trsa -N "" -f ./rsa_leaf_for_ecdsa_ca
ssh-keygen -s ecdsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding rsa_leaf_for_ecdsa_ca.pub

ssh-keygen -trsa -N "" -f ./rsa_leaf_for_ed25519_ca
ssh-keygen -s ed25519_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding rsa_leaf_for_ed25519_ca.pub

ssh-keygen -tdsa -N "" -f ./dsa_leaf_for_rsa_ca
ssh-keygen -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding dsa_leaf_for_rsa_ca.pub

ssh-keygen -tecdsa -N "" -f ./ecdsa_leaf_for_rsa_ca
ssh-keygen -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding ecdsa_leaf_for_rsa_ca.pub

ssh-keygen -ted25519 -N "" -f ./ed25519_leaf_for_rsa_ca
ssh-keygen -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding ed25519_leaf_for_rsa_ca.pub

if [[ $generate_security_keys -eq 1 ]]
then
ssh-keygen -t ed25519-sk -N "" -f ./sked25519_leaf_for_rsa_ca
ssh-keygen -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding sked25519_leaf_for_rsa_ca.pub
ssh-keygen -t ecdsa-sk -N "" -f ./skecdsa_leaf_for_rsa_ca
ssh-keygen -s rsa_ca -z 123 -n p1,p2 -O clear -I my-ident -O critical:foo=bar -O extension:baz=qwer -O permit-X11-forwarding skecdsa_leaf_for_rsa_ca.pub
fi

# critical opts: certificates exercising force-command and source-address,
# including deliberately malformed variants for negative tests.
ssh-keygen -trsa -N "" -f ./valid_force_command
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O force-command=asdf valid_force_command.pub
ssh-keygen -trsa -N "" -f ./invalid_force_command
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O critical:force-command invalid_force_command.pub
ssh-keygen -trsa -N "" -f ./single_source_address
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O source-address=1.1.1.1 single_source_address.pub
ssh-keygen -trsa -N "" -f ./single_cidr_source_address
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O source-address=1.1.1.0/24 single_cidr_source_address.pub
ssh-keygen -trsa -N "" -f ./multiple_cidr_source_address
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O source-address=1.1.1.0/24,2.2.2.0/24 multiple_cidr_source_address.pub
ssh-keygen -trsa -N "" -f ./spaces_source_address
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O critical:source-address="1.1.1.1, 2.2.2.2" spaces_source_address.pub
ssh-keygen -trsa -N "" -f ./invalid_source_address_flag
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O critical:source-address invalid_source_address_flag.pub
ssh-keygen -trsa -N "" -f ./invalid_source_address_bad_ip
ssh-keygen -s rsa_ca -z 123 -O clear -I my-ident -O critical:source-address=foo invalid_source_address_bad_ip.pub

# pem encoded keys: plaintext and AES-encrypted variants for RSA/DSA/ECDSA.
openssl genrsa -out rsa.plaintext.pem 2048
openssl rsa -aes-128-cbc -passout pass:mypass -in rsa.plaintext.pem -out rsa.encrypted.pem
openssl dsaparam -noout -out dsa.plaintext.pem -genkey 1024
openssl dsa -aes-128-cbc -passout pass:mypass -in dsa.plaintext.pem -out dsa.encrypted.pem
openssl ecparam -noout -out ecdsa.plaintext.pem -name prime256v1 -genkey
openssl ec -aes-128-cbc -passout pass:mypass -in ecdsa.plaintext.pem -out ecdsa.encrypted.pem
chmod 400 *.pem

# Create a certificate with a bad signature. We use ed25519 because the
# signature doesn't have any fancy encoding (Eg. RSA has PKCS1v1.5 and DSA/ECDSA
# have ASN.1).
ruby <<RUBY
require "base64"

encoded = File.read("rsa_leaf_for_ed25519_ca-cert.pub")
algo, b64, comment = encoded.split(" ", 3)
raw = Base64.decode64(b64)
# we flip bits in the last byte, since that's where the signature is.
raw[-1] = (raw[-1].ord ^ 0xff).chr
b64 = Base64.strict_encode64(raw)
encoded = [algo, b64, comment].join(" ")
File.open("bad_signature-cert.pub", "w") { |f| f.write(encoded) }
RUBY
|
#!/bin/bash
# Copyright (c) 2020-2021 Hiroshi Tanaka, hirtanak@gmail.com @hirtanak
# Provisions JupyterHub/JupyterLab on an Azure CycleCloud Ubuntu 18.04 node:
# creates the hub admin user, installs Anaconda + a conda env, rewrites
# jupyterhub_config.py, enables lab extensions, and registers systemd units.
#
# NOTE(review): many commands below use '<cmd> | exit 0'. That pipes the
# command's stdout into 'exit 0' run in a subshell; under 'set -e' the
# pipeline's status is that of the last command (0), so failures are
# effectively ignored — but '|| true' is almost certainly what was meant.
# Confirm before changing.
set -exuv
SW=hub
echo "starting 21.ubuntu18.04-${SW}.sh"
# adapt multi user environment
SCRIPTUSER=$(jetpack config SCRIPTUSER)
if [[ -z ${SCRIPTUSER} ]] || [[ ${SCRIPTUSER} = "None" ]]; then
# Recover the initial cluster user name from the CycleCloud jetpack logs.
CUSER=$(grep "Added user" /opt/cycle/jetpack/logs/jetpackd.log | awk '{print $6}')
CUSER=${CUSER//\'/}
CUSER=${CUSER//\`/}
# After CycleCloud 7.9 and later
if [[ -z $CUSER ]]; then
CUSER=$(grep "Added user" /opt/cycle/jetpack/logs/initialize.log | awk '{print $6}' | head -1)
CUSER=${CUSER//\`/}
echo ${CUSER} > /mnt/exports/CUSER
fi
else
echo ${SCRIPTUSER} > /shared/SCRIPTUSER
CUSER=${SCRIPTUSER}
echo ${SCRIPTUSER} > /shared/CUSER
fi
# Resolve the jupyterhub admin user and key paths
JUPYTER_ADMIN=$(jetpack config JUPYTER_ADMIN)
HOMEDIR=/shared/home/${JUPYTER_ADMIN}
CYCLECLOUD_SPEC_PATH=/mnt/cluster-init/ai01/scheduler
# Get platform information
PLATFORM=$(jetpack config platform)
PLATFORM_VERSION=$(jetpack config platform_version)
# JupyterLab settings
JUPYTERLAB_VERSION=$(jetpack config JUPYTERLAB_VERSION)
JUPYTERHUB_INSTALL=$(jetpack config JUPYTERHUB_INSTALL)
JUPYTERHUB_USER_PASS=$(jetpack config JUPYTERHUB_USER_PASS)
# Anaconda parameters
ANACONDAENVNAME=$(jetpack config ANACONDAENVNAME)
ANACONDAPYTHON_VERSION=$(jetpack config ANACONDAPYTHON_VERSION)
ANACONDAPACKAGE=$(jetpack config ANACONDAPACKAGE)
# (Re)create the jupyterhub admin user and group with fixed uid/gid 19000
userdel ${JUPYTER_ADMIN} | exit 0
groupadd ${JUPYTER_ADMIN} -g 19000 | exit 0
eval $(useradd -m ${JUPYTER_ADMIN} -g ${JUPYTER_ADMIN} --home-dir ${HOMEDIR} -u 19000 --password ${JUPYTERHUB_USER_PASS} -s /bin/bash)
mkdir -p /shared/home/${JUPYTER_ADMIN}/notebook
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/notebook | exit 0
# Permissions / group membership
usermod -aG ${SCRIPTUSER} ${JUPYTER_ADMIN} | exit 0
usermod -aG sudo ${JUPYTER_ADMIN} | exit 0
#usermod -aG root ${JUPYTER_ADMIN} | exit 0
grep -e ${SCRIPTUSER} -e ${JUPYTER_ADMIN} /etc/passwd
grep -e ${SCRIPTUSER} -e ${JUPYTER_ADMIN} /etc/group
# Copy SSH configuration from the cluster user
cp -rf /shared/home/${CUSER}/.ssh ${HOMEDIR}
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/.ssh
# Create config directories
mkdir -p ${HOMEDIR}/.jupyter
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/.jupyter
# Create tempdir
tmpdir=$(mktemp -d)
pushd $tmpdir
if [[ ${JUPYTERHUB_INSTALL} == "True" ]] || [[ ${JUPYTERHUB_INSTALL} == "true" ]]; then
echo "pass to the following steps"
else
echo "end of this script" && exit 0
fi
# Install Anaconda into the admin home unless it is already present
if [[ ! -d ${HOMEDIR}/anaconda ]]; then
cp /shared/home/${CUSER}/anaconda.sh ${HOMEDIR}/
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/anaconda.sh
sudo -u ${JUPYTER_ADMIN} bash ${HOMEDIR}/anaconda.sh -b -p ${HOMEDIR}/anaconda
fi
# Create the conda environment
# Packages needed to use c.JupyterHub.spawner_class = 'sudospawner.SudoSpawner'
${HOMEDIR}/anaconda/bin/conda create -n ${JUPYTER_ADMIN} python=${ANACONDAPYTHON_VERSION}
if [[ ! -f ${HOMEDIR}/anaconda/envs/jupyterhub/bin/node ]]; then
${HOMEDIR}/anaconda/bin/conda install -n ${JUPYTER_ADMIN} -c conda-forge nodejs==14.15.1
fi
# install node js npm and n package
set +eu
CMD=$(node -v)
echo ${CMD%%.*}
if [[ -z ${CMD} ]]; then
apt-get install -y nodejs npm
npm install n -g
n lts
npm install -g configurable-http-proxy
apt purge -y nodejs npm
node -v
else
echo "skip to install"
fi
set -eu
# Install jupyter packages into the admin env
${HOMEDIR}/anaconda/bin/conda install -n ${JUPYTER_ADMIN} -c conda-forge jupyterlab==${JUPYTERLAB_VERSION} jupyterhub==1.3.0 sudospawner configurable-http-proxy jupyterhub-systemdspawner ipython ipykernel git
# generate jupyterhub configfile
if [[ ! -f ${HOMEDIR}/.jupyter/jupyterhub_config.py ]]; then
${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/jupyterhub --generate-config
fi
# The config file is generated in the /tmp working directory; move it into place
mv jupyterhub_config.py ${HOMEDIR}/.jupyter/jupyterhub_config.py | exit 0
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/.jupyter/jupyterhub_config.py | exit 0
cp ${HOMEDIR}/.jupyter/jupyterhub_config.py ${HOMEDIR}/.jupyter/jupyterhub_config.py.original | exit 0
# jupyterhub_config.py edits: JupyterHub port settings
sed -i -e "s!# c.JupyterHub.bind_url = 'http://:8000'!c.JupyterHub.bind_url = 'http://:8443'!" ${HOMEDIR}/.jupyter/jupyterhub_config.py
sed -i -e "s!# c.JupyterHub.hub_port = 8081!c.JupyterHub.hub_port = 8444!" ${HOMEDIR}/.jupyter/jupyterhub_config.py
# Use jupyter lab by default
sed -i -e "s:# c.Spawner.default_url = '':c.Spawner.default_url = '/lab':" ${HOMEDIR}/.jupyter/jupyterhub_config.py
# Add admin users (admins may disconnect regular users' sessions)
sed -i -e "s/^# c.Authenticator.admin_users = set()/c.Authenticator.admin_users = {'${SCRIPTUSER}', '${JUPYTER_ADMIN}'}/" ${HOMEDIR}/.jupyter/jupyterhub_config.py
# Spawner setup: a wrong value here causes "Internal Server Error 500 API Error"
sed -i -e "s/# c.JupyterHub.spawner_class = 'jupyterhub.spawner.LocalProcessSpawner'/c.JupyterHub.spawner_class = 'sudospawner.SudoSpawner'/" ${HOMEDIR}/.jupyter/jupyterhub_config.py
# NOTE(review): the line-number-addressed sed edits (594c here, 728c below)
# are brittle against config files generated by other jupyterhub versions —
# they presumably match the jupyterhub==1.3.0 pin above; confirm.
sed -i -e "594c c.SudoSpawner.sudospawner_path = '${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/sudospawner'" ${HOMEDIR}/.jupyter/jupyterhub_config.py
# Point logins at each user's own notebook directory.
# When 'c.Spawner.notebook_dir' is set, each user's notebook dir (~/notebook) must be created beforehand.
mkdir -p ${HOMEDIR}/notebook
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/notebook
#sed -i -e "s:^# c.Spawner.notebook_dir = '':c.Spawner.notebook_dir = '~/notebook':" ${HOMEDIR}/.jupyter/jupyterhub_config.py
## Configure JupyterHub's Spawner to start with a JupyterLab that is aware of the JupyterHub
sed -i -e "728c c.Spawner.cmd = ['jupyter-labhub']" ${HOMEDIR}/.jupyter/jupyterhub_config.py
# ssl config: the SSL material (mycert.pem, mycert.key) was created in script 20.
sed -i -e "s!# c.JupyterHub.ssl_cert = ''!c.JupyterHub.ssl_cert = u'/shared/home/${CUSER}/.jupyter/mycert.pem'!" ${HOMEDIR}/.jupyter/jupyterhub_config.py
sed -i -e "s!# c.JupyterHub.ssl_key = ''!c.JupyterHub.ssl_key = u'/shared/home/${CUSER}/.jupyter/mycert.key'!" ${HOMEDIR}/.jupyter/jupyterhub_config.py
### Additional user setup (jupyterhub) --------------------------------------
set +u
cat /etc/passwd | grep "/bin/bash" | cut -d: -f1 > ${HOMEDIR}/chekceduser-tmp.txt
# Exclude system/service users
cat ${HOMEDIR}/chekceduser-tmp.txt | sed '/root/d' | sed '/cyclecloud/d' | sed '/nxautomation/d' | sed '/omsagent/d' | sed "/${SCRIPTUSER}/d" > ${HOMEDIR}/chekceduser.txt
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/chekceduser.txt
# Count the remaining users
LINES=$(cat ${HOMEDIR}/chekceduser.txt | wc -l )
# Per-user setup
cat ${HOMEDIR}/chekceduser.txt | while read line
do
# Change the user's password
echo "$line:${JUPYTERHUB_USER_PASS}" | chpasswd
# conda setup
set +eu
CMD=$(grep conda.sh /shared/home/$line/.bashrc)
if [[ ! -z $line ]]; then
(echo "source ${HOMEDIR}/anaconda/etc/profile.d/conda.sh") >> /shared/home/$line/.bashrc
fi
set -eu
done
# Wrap each user name in apostrophes for the Python set literal below
sed "s/^/'/g" ${HOMEDIR}/chekceduser.txt > ${HOMEDIR}/chekceduser-tmp1.txt
sed -e "s/$/\'/" ${HOMEDIR}/chekceduser-tmp1.txt > ${HOMEDIR}/chekceduser.txt
cat ${HOMEDIR}/chekceduser.txt
rm ${HOMEDIR}/chekceduser-tmp*.txt
LINES=$(cat ${HOMEDIR}/chekceduser.txt | wc -l )
# jupyterhub_config.py whitelist changes
case ${LINES} in
0 | 1 )
echo "skip replace none or one"
;;
* )
# Convert newlines to ", "
CHECKUSER=$(cat ${HOMEDIR}/chekceduser.txt | tr '\n' ', ' | sed -e 's/,$/\n/g')
# Existing setting
CONFIGUSERLINE=$(grep "c.Authenticator.whitelist" ${HOMEDIR}/.jupyter/jupyterhub_config.py)
if [[ ${CONFIGUSERLINE} == "# c.Authenticator.whitelist = set()" ]]; then
# Workaround for "Error adding user <username> already in db"
rm /shared/home/azureuser/.jupyter/jupyterhub.sqlite | exit 0
# Conversion when the config is still the pristine default
sed -i -e "s/# c.Authenticator.whitelist = set()/c.Authenticator.whitelist = {${CHECKUSER}}/" ${HOMEDIR}/.jupyter/jupyterhub_config.py
else
# CONFIGUSER=$(echo ${CONFIGUSERLINE} | sed 's/^.*"\(.*\)".*$/\1/')
echo "later update"
fi
;;
esac
set -u
###----------------------------------------------------
### Lab extensions ------------------------------------
## 2020/12/25 workaround: pam_loginuid(login:session):
#chmod 755 /proc/self/loginuid | exit 0
#export PATH=${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/:$PATH
source ${HOMEDIR}/anaconda/etc/profile.d/conda.sh
conda activate ${JUPYTER_ADMIN}
jupyter labextension enable
jupyter labextension install -y @jupyterlab/hub-extension
jupyter labextension install -y @lckr/jupyterlab_variableinspector
jupyter labextension install -y @jupyterlab/toc
#${HOMEDIR}/anaconda/bin/conda install -n ${ANACONDAENVNAME} -y -c conda-forge jupyterlab_code_formatter
#jupyter labextension install -y @ryantam626/jupyterlab_code_formatter
#jupyter serverextension enable jupyterlab_code_formatter
#jupyter labextension install -y @jupyterlab/google-drive
#${HOMEDIR}/anaconda/bin/conda install -n ${JUPYTER_ADMIN} ipywidgets
#jupyter labextension install -y @jupyter-widgets/jupyterlab-manager
#jupyter nbextension enable --sys-prefix widgetsnbextension
#conda install -c conda-forge jupyterlab-git -y
#### Jupyterlab-Slurm extension install (work in progress)
#wget https://github.com/hirtanak/jupyterlab-slurm/archive/master.zip -O ${HOMEDIR}/notebook/master.zip
#source ${HOMEDIR}/anaconda/etc/profile.d/conda.sh
#conda activate ${JUPYTER_ADMIN}
#conda install unzip -y
#unzip -qq ${HOMEDIR}/notebook/master.zip -d ${HOMEDIR}/notebook/
#${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/jlpm install # Install npm package dependencies
#${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/jlpm run build # Compile the TypeScript sources to Javascript
#${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/jupyter labextension install # Install the current directory as an extension
# Install the jupyterlab_slurm extension package
pip install --quiet jupyterlab_slurm
jupyter labextension install jupyterlab-slurm
${HOMEDIR}/anaconda/envs/${JUPYTER_ADMIN}/bin/jupyter-lab build
## Verify installed extensions
jupyter labextension list
### Auto-start ----------------------------------------
# Register and start jupyterlab as a systemd service
cp -rf ${CYCLECLOUD_SPEC_PATH}/files/jupyterlab.service ${HOMEDIR}/jupyterlab.service
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/jupyterlab.service
# Copy jupyter_lab_config.py, preserving any existing copy as .original
if [[ ! -f ${HOMEDIR}/.jupyter/jupyter_lab_config.py ]]; then
cp /shared/home/${CUSER}/.jupyter/jupyter_lab_config.py ${HOMEDIR}/.jupyter/
else
mv ${HOMEDIR}/.jupyter/jupyter_lab_config.py ${HOMEDIR}/.jupyter/jupyter_lab_config.py.original | exit 0
cp /shared/home/${CUSER}/.jupyter/jupyter_lab_config.py ${HOMEDIR}/.jupyter/
fi
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/.jupyter/jupyter_lab_config.py
# Substitute placeholders in the service unit file
sed -i -e "s/\${ANACONDAENVNAME}/${JUPYTER_ADMIN}/g" ${HOMEDIR}/jupyterlab.service
sed -i -e "s/\${CUSER}/${JUPYTER_ADMIN}/g" ${HOMEDIR}/jupyterlab.service
sed -i -e "s:\${HOMEDIR}:${HOMEDIR}:g" ${HOMEDIR}/jupyterlab.service
cp -rf ${HOMEDIR}/jupyterlab.service /etc/systemd/system/jupyterlab.service
mv ${HOMEDIR}/jupyterlab.service ${HOMEDIR}/.jupyter/jupyterlab.service
systemctl stop jupyterlab
systemctl daemon-reload
source ${HOMEDIR}/anaconda/etc/profile.d/conda.sh
conda activate ${JUPYTER_ADMIN}
systemctl start jupyterlab
systemctl status jupyterlab
# Register and start jupyterhub as a systemd service
cp -rf ${CYCLECLOUD_SPEC_PATH}/files/jupyterhub.service ${HOMEDIR}/jupyterhub.service
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/jupyterhub.service
# Substitute placeholders in the service unit file
sed -i -e "s/\${ANACONDAENVNAME}/${JUPYTER_ADMIN}/g" ${HOMEDIR}/jupyterhub.service
sed -i -e "s/\${CUSER}/${JUPYTER_ADMIN}/g" ${HOMEDIR}/jupyterhub.service
sed -i -e "s:\${HOMEDIR}:${HOMEDIR}:g" ${HOMEDIR}/jupyterhub.service
cp -rf ${HOMEDIR}/jupyterhub.service /etc/systemd/system/jupyterhub.service
mv ${HOMEDIR}/jupyterhub.service ${HOMEDIR}/.jupyter/jupyterhub.service
systemctl stop jupyterhub
systemctl daemon-reload
source ${HOMEDIR}/anaconda/etc/profile.d/conda.sh
conda activate ${JUPYTER_ADMIN}
systemctl start jupyterhub
systemctl status jupyterhub
# Append conda init lines to .bashrc if missing
set +eu
(grep conda.sh ${HOMEDIR}/.bashrc | head -1) > /shared/CONDA1
(grep "conda activate" ${HOMEDIR}/.bashrc | head -1) > /shared/CONDA2
CMD1=$(cat /shared/CONDA1)
CMD2=$(cat /shared/CONDA2)
if [[ -z ${CMD1} ]]; then
(echo "source ${HOMEDIR}/anaconda/etc/profile.d/conda.sh") >> ${HOMEDIR}/.bashrc
fi
if [[ ! -z ${CMD1} ]] && [[ -z ${CMD2} ]]; then
(echo "conda activate ${JUPYTER_ADMIN}") >> ${HOMEDIR}/.bashrc
fi
set -eu
# Install the helper script for adding Jupyterhub/lab conda environments
cp -rf ${CYCLECLOUD_SPEC_PATH}/files/addcondaenv.sh ${HOMEDIR}/addcondaenv.sh
chmod +x ${HOMEDIR}/addcondaenv.sh
chown ${JUPYTER_ADMIN}:${JUPYTER_ADMIN} ${HOMEDIR}/addcondaenv.sh
sed -i -e "s:\${HOMEDIR}:${HOMEDIR}:g" ${HOMEDIR}/addcondaenv.sh
# Print the externally reachable hub URL
CMD=$(curl -s ifconfig.io)
echo "https://${CMD}:8443"
popd
rm -rf $tmpdir
echo "end of 21.ubuntu18.04-${SW}.sh"
|
bool CQPaletteArea::isFirstWindow(const CQPaletteWindow *window) const {
  // A window is "first" exactly when the splitter places it at index 0.
  return splitter_->indexOf(const_cast<CQPaletteWindow *>(window)) == 0;
}
<filename>public/gulp/src/scripts/chart/main/dicts.js
/*
 * Dictionary file
 * createDate:2016-11-02 10:03:51
 * author: XXXXXX
 */
Ehr.module('Charts', function(Charts, Ehr, Backbone, Marionette, $, _) {
  // Expose an (initially empty) dictionary registry on the Charts module.
  Charts.Dicts = {};
});
|
const { SlashCommandBuilder } = require('@discordjs/builders');
const logSchema = require('../schema/messageLogSchema');
module.exports = {
data: new SlashCommandBuilder()
.setName('clearlog')
.setDescription('Clears amount of logs based on input')
.addIntegerOption(option => option.setName('amount').setDescription('Number of logs to clear').setRequired(true)),
async execute(interaction) {
console.log('logClear id is' + interaction.commandId); // [Interaction commandID]
// Store input in "amount" constant
const amount = interaction.options.getInteger('amount');
// Clamping Value to 1-10
if (amount <= 1 || amount > 10) {
return interaction.reply({ content: 'You need to input a number between 1 and 10.', ephemeral: true });
} else {
// Define logs in array, and Count array
const logs = await logSchema.find({});
const logNum = logs.length;
// Display number of logs [Test] [Deprecated]
// console.log(logNum);
// For Loop that deletes based on input
if (logNum < 15) {
interaction.reply(`There are currently ${logNum} logs in the database. Deleting more is ill-advised.`)
}
else if (logNum >= 15) {
// Loop delete one function until i = amount constant
for (let i = 0; i < amount; i++) {
const logsToDelete = await logSchema.find({}).sort({ createdTimestamp: 1 });
const oneLog = logsToDelete[0].createdTimestamp;
const query = { createdTimestamp: `${oneLog}` };
const deleteLog = await logSchema.deleteOne({ query });
// deleteOne method tests [Deprecated]
// if (deleteLog.deletedCount === 1) {console.log("Successfully deleted 1");} else {console.log("Shi, probably failed lmfao");}
}
// Provide output to end-user
interaction.reply(`${amount} logs have been deleted from the database.`)
}
}
}
}
|
import os
import random
import matplotlib.pyplot as plt
import numpy as np
import extra
import lpd
import synthetic
def main():
    """Demo driver for the synthetic load-profile modules.

    The large commented-out section below documents two earlier modelling
    approaches (discrete and continuous starting-time pdfs, discretization,
    repeated-day profiles) and is kept for reference. The live code reads a
    standard load profile CSV and plots a synthetic repeated-day profile.
    """
    # """
    # Main function of test python module
    # """
    # random.seed(os.urandom(345634)) # initialize random generator
    # t = np.linspace(0.0, 24.0, 96.0) # define the time axis of a day, here we use 96 values every quarter of an hour
    # # standard load profile -- input
    # q = extra.read_slp(t,
    # 'Profielen-Elektriciteit-2015-versie-1.00 Folder/profielen Elektriciteit 2015 versie 1.00.csv') # read the sample standard load profile, can be any length, can be resized given a low/high resolution time axis
    # q = q / np.sum(q) # normalization of standard load profile
    # # process duration
    # duration_axis = np.linspace(0.0, 24.0, 96.0)
    # (p_d, E_p) = extra.app_time(duration_axis, 10, 2, 0.0,
    # 24.0) # function that define the pdf of duration of a process
    # # process consumption
    # consumption_axis = np.linspace(0.0, 3.5, 96.0)
    # (p_k, E_k) = extra.app_consumption(consumption_axis, 10, 2, 0.0,
    # 3.5) # function that define the pdf of duration of a process
    # # pdf of starting time
    # p_t_0 = lpd.infer_t_0(q, p_d, E_k) # computes the pdf of starting time of processes
    # p_t_0 = p_t_0 / np.sum(p_t_0) # normalization of the pdf to sum up to zero
    #
    # """
    # 1st Approach, starting time of processes is a discrete propapibility density function
    # """
    # # synthetic profile of D processes
    # D = 2000
    # synthetic_profile = lpd.synthetic_profile(D, t, p_d, consumption_axis, p_k, p_t_0)
    # synthetic_profile_1 = lpd.synthetic_profile(D, t, p_d, consumption_axis, p_k, p_t_0)
    # # expected value of D processes
    # q_e_e = lpd.infer_q_e(t, p_t_0, p_d, E_k, D)
    # # plot
    # plt.step(t, synthetic_profile, "g-")
    # plt.step(t, q_e_e, "b--")
    #
    # """
    # 2nd Approach, starting time of processes is a continuous propapibility density function
    # """
    # # synthetic profile of D processes
    # ts, cs = lpd.continous_synthetic_profile(D, t, p_d, consumption_axis, p_k, p_t_0)
    # plt.step(ts / len(t) * t[-1], cs, where='post', c='r')
    # plt.xlim(0, 24.0)
    # plt.legend(["synthetic", "expected", "continuous"], loc=0)
    # plt.show()
    #
    # """
    # Time discretization
    # """
    # n_intervals = 24 * 1 # discretized in minutes
    # discrete_timeaxis = np.linspace(0.0, 24.0, n_intervals + 1)
    # discrete_consumption = lpd.signal_discretization(discrete_timeaxis, t, ts, cs)
    # plt.step(ts / len(t) * t[-1], cs, where='post', c='r')
    # plt.step(discrete_timeaxis, discrete_consumption, where='post', c='k', ls='--', lw=2)
    # plt.legend(["continuous", "discretized"], loc=0)
    # plt.show()
    #
    #
    # """
    # Repeated day synthetic profile creation
    # """
    # # synthetic profile of D processes
    # D = 2000
    # n = 10
    # slp = lpd.synthetic_profile_repeated(D, t, p_d, consumption_axis, p_k, p_t_0, n)
    # plt.step(range(len(slp)), slp, "g-")
    # plt.show()
    """
    New function for synthetic in repeated time
    """
    # Quarter-hour time axis over one day (96 samples).
    # NOTE(review): np.linspace's third argument should be an int sample
    # count; 96.0 is rejected by modern NumPy — confirm the intended 96.
    t = np.linspace(0.0, 24.0, 96.0)
    load_profile = extra.read_slp(t, 'Profielen-Elektriciteit-2015-versie-1.00 Folder/profielen Elektriciteit 2015 versie 1.00.csv')
    slp = synthetic.create_synthetic_load(load_profile, 5.0, 5)
    plt.step(range(len(slp)), slp)
    plt.show()


if __name__ == "__main__":
    main()
|
<filename>src/main/java/org/olat/modules/qpool/ui/metadata/ExtendedSearchController.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.qpool.ui.metadata;
import java.util.ArrayList;
import java.util.List;
import org.olat.core.commons.services.license.LicenseModule;
import org.olat.core.commons.services.license.ui.LicenseSelectionConfig;
import org.olat.core.commons.services.license.ui.LicenseUIFactory;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.FormLink;
import org.olat.core.gui.components.form.flexible.elements.SingleSelection;
import org.olat.core.gui.components.form.flexible.elements.TextElement;
import org.olat.core.gui.components.form.flexible.impl.Form;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.form.flexible.impl.FormLayoutContainer;
import org.olat.core.gui.components.form.flexible.impl.elements.table.ExtendedFlexiTableSearchController;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.util.CodeHelper;
import org.olat.core.util.StringHelper;
import org.olat.core.util.Util;
import org.olat.modules.qpool.QPoolSecurityCallback;
import org.olat.modules.qpool.QPoolService;
import org.olat.modules.qpool.QuestionStatus;
import org.olat.modules.qpool.manager.QuestionPoolLicenseHandler;
import org.olat.modules.qpool.model.QItemDocument;
import org.olat.modules.qpool.model.QItemType;
import org.olat.modules.qpool.model.SearchQuestionItemParams;
import org.olat.modules.qpool.ui.QuestionsController;
import org.olat.modules.qpool.ui.metadata.MetaUIFactory.KeyValues;
import org.olat.modules.qpool.ui.tree.QPoolTaxonomyTreeBuilder;
import org.olat.search.model.AbstractOlatDocument;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
* Initial date: 03.05.2013<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class ExtendedSearchController extends FormBasicController implements ExtendedFlexiTableSearchController {
private FormLink searchButton;
private final SearchAttributes searchAttributes;
private final List<ConditionalQuery> uiQueries = new ArrayList<>();
private final String prefsKey;
private ExtendedSearchPrefs prefs;
private final boolean allTaxonomyLevels;
private final List<QItemType> excludedItemTypes;
private boolean enabled = true;
private final QPoolSecurityCallback qPoolSecurityCallback;
@Autowired
private QPoolService qpoolService;
@Autowired
private QPoolTaxonomyTreeBuilder qpoolTaxonomyTreeBuilder;
@Autowired
private LicenseModule licenseModule;
@Autowired
private QuestionPoolLicenseHandler licenseHandler;
public ExtendedSearchController(UserRequest ureq, WindowControl wControl,
QPoolSecurityCallback qPoolSecurityCallback, String prefsKey, Form mainForm,
List<QItemType> excludedItemTypes, boolean allTaxonomyLevels) {
super(ureq, wControl, LAYOUT_CUSTOM, "extended_search", mainForm);
setTranslator(Util.createPackageTranslator(QuestionsController.class, getLocale(), getTranslator()));
this.qPoolSecurityCallback = qPoolSecurityCallback;
this.allTaxonomyLevels = allTaxonomyLevels;
this.excludedItemTypes = excludedItemTypes;
searchAttributes = new SearchAttributes();
this.prefsKey = prefsKey;
prefs = (ExtendedSearchPrefs) ureq.getUserSession().getGuiPreferences()
.get(ExtendedFlexiTableSearchController.class, prefsKey);
if(prefs != null && !prefs.getCondQueries().isEmpty()) {
for(ExtendedSearchPref pref:prefs.getCondQueries()) {
uiQueries.add(new ConditionalQuery(pref));
}
} else {
uiQueries.add(new ConditionalQuery());
}
initForm(ureq);
}
@Override
protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
if(formLayout instanceof FormLayoutContainer) {
FormLayoutContainer layoutCont = (FormLayoutContainer)formLayout;
layoutCont.contextPut("uiQueries", uiQueries);
}
FormLayoutContainer buttonsCont = FormLayoutContainer.createButtonLayout("buttons", getTranslator());
buttonsCont.setRootForm(mainForm);
formLayout.add(buttonsCont);
uifactory.addFormCancelButton("cancel", buttonsCont, ureq, getWindowControl());
searchButton = uifactory.addFormLink("search", buttonsCont, Link.BUTTON);
}
@Override
public void setEnabled(boolean enable) {
this.enabled = enable;
}
@Override
protected void formOK(UserRequest ureq) {
if(enabled) {
fireSearchEvent(ureq);
}
}
@Override
protected void formCancelled(UserRequest ureq) {
fireEvent(ureq, Event.CANCELLED_EVENT);
}
@Override
protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
if(source == searchButton) {
fireSearchEvent(ureq);
} else if (source instanceof SingleSelection) {
SingleSelection attrEl = (SingleSelection)source;
if(attrEl.isOneSelected()) {
Object uObject = attrEl.getUserObject();
if(uObject instanceof ConditionalQuery) {
ConditionalQuery query = (ConditionalQuery)uObject;
query.selectAttributeType(attrEl.getSelectedKey(), null);
}
}
} else if(source instanceof FormLink) {
FormLink button = (FormLink)source;
if(button.getCmd().startsWith("add")) {
ConditionalQuery query = (ConditionalQuery)button.getUserObject();
addParameter(query);
} else if(button.getCmd().startsWith("remove")) {
ConditionalQuery query = (ConditionalQuery)button.getUserObject();
removeParameter(query);
}
}
super.formInnerEvent(ureq, source, event);
}
private void addParameter(ConditionalQuery query) {
int index = uiQueries.indexOf(query);
ConditionalQuery newQuery = new ConditionalQuery();
if(index < 0 || (index + 1) > uiQueries.size()) {
uiQueries.add(newQuery);
} else {
uiQueries.add(index+1, newQuery);
}
}
private void removeParameter(ConditionalQuery query) {
if(uiQueries.size() > 1 && uiQueries.remove(query)) {
flc.setDirty(true);
}
}
private void fireSearchEvent(UserRequest ureq) {
SearchQuestionItemParams searchParams = new SearchQuestionItemParams(null, null, null);
List<ExtendedSearchPref> params = new ArrayList<>();
for(ConditionalQuery uiQuery:uiQueries) {
boolean empty = uiQuery.fillSearchParams(searchParams);
if(!empty) {
params.add(new ExtendedSearchPref(uiQuery.getAttribute(), uiQuery.getValue()));
}
}
if (prefs == null){
prefs = new ExtendedSearchPrefs();
}
prefs.setCondQueries(params);
ureq.getUserSession().getGuiPreferences().putAndSave(ExtendedFlexiTableSearchController.class, prefsKey, prefs);
fireEvent(ureq, new QPoolSearchEvent(searchParams));
}
public class ConditionalQuery {
private SingleSelection attributeChoice;
private FormItem parameter;
private QueryParameterFactory parameterFactory;
private FormLink addButton;
private FormLink removeButton;
public ConditionalQuery() {
this(null);
}
public ConditionalQuery(ExtendedSearchPref pref) {
long id = CodeHelper.getRAMUniqueID();
String[] attrKeys = searchAttributes.getKeys();
String[] attrValues = new String[attrKeys.length];
for(int i=attrValues.length; i-->0; ) {
attrValues[i] = translate(attrKeys[i]);
}
attributeChoice = uifactory.addDropdownSingleselect("attr-" + id, null, flc, attrKeys, attrValues, null);
if(pref == null) {
selectAttributeType(attrKeys[0], null);
} else {
selectAttributeType(pref.getAttribute(), pref.getValue());
}
boolean found = false;
if(pref != null && StringHelper.containsNonWhitespace(pref.getAttribute())) {
String attr = pref.getAttribute();
for(String attrKey:attrKeys) {
if(attr.equals(attrKey)) {
attributeChoice.select(attrKey, true);
found = true;
}
}
}
if(!found) {
attributeChoice.select(attrKeys[0], true);
}
if(pref == null) {
selectAttributeType(attrKeys[0], null);
} else {
selectAttributeType(pref.getAttribute(), pref.getValue());
}
attributeChoice.addActionListener(FormEvent.ONCHANGE);
attributeChoice.setUserObject(this);
flc.add(attributeChoice.getName(), attributeChoice);
addButton = uifactory.addFormLink("add-" + id, "add", null, flc, Link.BUTTON);
addButton.setUserObject(this);
flc.add(addButton.getComponent().getComponentName(), addButton);
removeButton = uifactory.addFormLink("remove-"+ id, "remove", null, flc, Link.BUTTON);
removeButton.setUserObject(this);
flc.add(removeButton.getComponent().getComponentName(), removeButton);
}
public String getAttribute() {
return attributeChoice.isOneSelected() ? attributeChoice.getSelectedKey() : null;
}
public String getValue() {
return "test";
}
public SingleSelection getAttributChoice() {
return attributeChoice;
}
public FormItem getParameterItem() {
return parameter;
}
public FormLink getAddButton() {
return addButton;
}
public FormLink getRemoveButton() {
return removeButton;
}
public void selectAttributeType(String type, String value) {
parameterFactory = searchAttributes.getQueryParameterFactory(type);
if(parameterFactory != null) {
parameter = parameterFactory.createItem(value);
}
}
public boolean fillSearchParams(SearchQuestionItemParams searchParams) {
boolean empty = true;
if(parameterFactory != null && parameter != null) {
empty = parameterFactory.fillSearchParams(searchParams, parameter);
}
return empty;
}
}
public static interface QueryParameterFactory {
public String getValue(FormItem item);
public FormItem createItem(String startValue);
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item);
}
private class SearchAttributes {
private List<SearchAttribute> attributes = new ArrayList<>();
public SearchAttributes() {
//general
attributes.add(new SearchAttribute("general.title", new StringQueryParameter(AbstractOlatDocument.TITLE_FIELD_NAME)));
attributes.add(new SearchAttribute("general.topic", new StringQueryParameter(QItemDocument.TOPIC_FIELD)));
attributes.add(new SearchAttribute("general.keywords", new StringQueryParameter(QItemDocument.KEYWORDS_FIELD)));
attributes.add(new SearchAttribute("general.coverage", new StringQueryParameter(QItemDocument.COVERAGE_FIELD)));
attributes.add(new SearchAttribute("general.additional.informations", new StringQueryParameter(QItemDocument.ADD_INFOS_FIELD)));
attributes.add(new SearchAttribute("general.language", new StringQueryParameter(QItemDocument.LANGUAGE_FIELD)));
if (qPoolSecurityCallback.canUseTaxonomy()) {
attributes.add(new SearchAttribute("classification.taxonomy.level", new TaxonomicFieldQueryParameter()));
attributes.add(new SearchAttribute("classification.taxonomic.path.incl", new TaxonomicPathQueryParameter()));
}
attributes.add(new SearchAttribute("owner", new StringQueryParameter(AbstractOlatDocument.AUTHOR_FIELD_NAME)));
//educational
if (qPoolSecurityCallback.canUseEducationalContext()) {
attributes.add(new SearchAttribute("educational.context", new ContextQueryParameter()));
}
//question
attributes.add(new SearchAttribute("question.type", new TypeQueryParameter()));
attributes.add(new SearchAttribute("question.assessmentType", new AssessmentQueryParameter()));
//lifecycle
attributes.add(new SearchAttribute("lifecycle.status", new StatusQueryParameter()));
//technical
attributes.add(new SearchAttribute("technical.editor", new StringQueryParameter(QItemDocument.EDITOR_FIELD)));
attributes.add(new SearchAttribute("technical.format", new FormatQueryParameter()));
//rights
if (licenseModule.isEnabled(licenseHandler)) {
attributes.add(new SearchAttribute("rights.license", new LicenseQueryParameter()));
}
}
public QueryParameterFactory getQueryParameterFactory(String type) {
for(SearchAttribute attribute:attributes) {
if(type.equals(attribute.getI18nKey())) {
return attribute.getFactory();
}
}
return null;
}
public String[] getKeys() {
String[] keys = new String[attributes.size()];
for(int i=keys.length; i-->0; ) {
keys[i] = attributes.get(i).getI18nKey();
}
return keys;
}
}
public class StringQueryParameter implements QueryParameterFactory {
private final String docAttribute;
public StringQueryParameter(String docAttribute) {
this.docAttribute = docAttribute;
}
@Override
public String getValue(FormItem item) {
if(item instanceof TextElement) {
return ((TextElement)item).getValue();
}
return null;
}
@Override
public FormItem createItem(String startValue) {
return uifactory.addTextElement("type-" + CodeHelper.getRAMUniqueID(), null, 50, startValue, flc);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
if(AbstractOlatDocument.TITLE_FIELD_NAME.equals(docAttribute)) {
searchParams.setTitle(val);
} else if(QItemDocument.TOPIC_FIELD.equals(docAttribute)) {
searchParams.setTopic(val);
} else if(QItemDocument.KEYWORDS_FIELD.equals(docAttribute)) {
searchParams.setKeywords(val);
} else if(AbstractOlatDocument.AUTHOR_FIELD_NAME.equals(docAttribute)) {
searchParams.setOwner(val);
} else if(QItemDocument.COVERAGE_FIELD.equals(docAttribute)) {
searchParams.setCoverage(val);
} else if(QItemDocument.ADD_INFOS_FIELD.equals(docAttribute)) {
searchParams.setInformations(val);
} else if(QItemDocument.LANGUAGE_FIELD.equals(docAttribute)) {
searchParams.setLanguage(val);
}
return true;
}
return false;
}
}
public class TaxonomicFieldQueryParameter extends SingleChoiceQueryParameter {
public TaxonomicFieldQueryParameter() {
super(QItemDocument.TAXONOMIC_FIELD);
}
@Override
public FormItem createItem(String startValue) {
qpoolTaxonomyTreeBuilder.loadTaxonomyLevelsSelection(getIdentity(), false, allTaxonomyLevels);
return createItem(qpoolTaxonomyTreeBuilder.getSelectableKeys(),
qpoolTaxonomyTreeBuilder.getSelectableValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
searchParams.setTaxonomyLevel(qpoolTaxonomyTreeBuilder.getTaxonomyLevel(getValue(item)));
return searchParams.getTaxonomyLevel() != null;
}
}
public class TaxonomicPathQueryParameter extends SingleChoiceQueryParameter {
public TaxonomicPathQueryParameter() {
super(QItemDocument.TAXONOMIC_PATH_FIELD);
}
@Override
public FormItem createItem(String startValue) {
qpoolTaxonomyTreeBuilder.loadTaxonomyLevelsSelection(getIdentity(), false, allTaxonomyLevels);
return createItem(qpoolTaxonomyTreeBuilder.getTaxonomicKeyPaths(),
qpoolTaxonomyTreeBuilder.getSelectableValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
searchParams.setLikeTaxonomyLevel(qpoolTaxonomyTreeBuilder.getTaxonomyLevel(val));
}
return searchParams.getLikeTaxonomyLevel() != null;
}
}
public class LicenseQueryParameter extends SingleChoiceQueryParameter {
private final LicenseSelectionConfig config;
public LicenseQueryParameter() {
super(AbstractOlatDocument.LICENSE_TYPE_FIELD_NAME);
config = LicenseUIFactory.createLicenseSelectionConfig(licenseHandler);
}
@Override
public FormItem createItem(String startValue) {
return createItem(config.getLicenseTypeKeys(), config.getLicenseTypeValues(getLocale()), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
searchParams.setLicenseType(config.getLicenseType(val));
}
return searchParams.getLicenseType() != null;
}
}
public class TypeQueryParameter extends SingleChoiceQueryParameter {
public TypeQueryParameter() {
super(QItemDocument.ITEM_TYPE_FIELD);
}
@Override
public FormItem createItem(String startValue) {
KeyValues types = MetaUIFactory.getQItemTypeKeyValues(getTranslator(), excludedItemTypes, qpoolService);
return createItem(types.getKeys(), types.getValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
searchParams.setItemType(MetaUIFactory.getQItemTypeByKey(val, qpoolService));
}
return searchParams.getItemType() != null;
}
}
public class FormatQueryParameter extends SingleChoiceQueryParameter {
public FormatQueryParameter() {
super(QItemDocument.FORMAT_FIELD);
}
@Override
public FormItem createItem(String startValue) {
KeyValues formats = MetaUIFactory.getFormats();
return createItem(formats.getKeys(), formats.getValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
searchParams.setFormat(getValue(item));
return StringHelper.containsNonWhitespace(searchParams.getFormat());
}
}
public class ContextQueryParameter extends SingleChoiceQueryParameter {
public ContextQueryParameter() {
super(QItemDocument.EDU_CONTEXT_FIELD);
}
@Override
public FormItem createItem(String startValue) {
KeyValues contexts = MetaUIFactory.getContextKeyValues(getTranslator(), qpoolService);
return createItem(contexts.getKeys(), contexts.getValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
searchParams.setLevel(MetaUIFactory.getContextByKey(val, qpoolService));
}
return searchParams.getLevel() != null;
}
}
public class AssessmentQueryParameter extends SingleChoiceQueryParameter {
public AssessmentQueryParameter() {
super(QItemDocument.ASSESSMENT_TYPE_FIELD);
}
@Override
public FormItem createItem(String startValue) {
KeyValues types = MetaUIFactory.getAssessmentTypes(getTranslator());
return createItem(types.getKeys(), types.getValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
searchParams.setAssessmentType(val);
}
return StringHelper.containsNonWhitespace(searchParams.getAssessmentType());
}
}
public class StatusQueryParameter extends SingleChoiceQueryParameter {
public StatusQueryParameter() {
super(QItemDocument.ITEM_STATUS_FIELD);
}
@Override
public FormItem createItem(String startValue) {
KeyValues types = MetaUIFactory.getStatus(getTranslator());
return createItem(types.getKeys(), types.getValues(), startValue);
}
@Override
public boolean fillSearchParams(SearchQuestionItemParams searchParams, FormItem item) {
String val = getValue(item);
if(StringHelper.containsNonWhitespace(val)) {
searchParams.setQuestionStatus(QuestionStatus.valueOf(val));
}
return searchParams.getQuestionStatus() != null;
}
}
public abstract class SingleChoiceQueryParameter implements QueryParameterFactory {
private final String docAttribute;
public SingleChoiceQueryParameter(String docAttribute) {
this.docAttribute = docAttribute;
}
@Override
public String getValue(FormItem item) {
if(item instanceof SingleSelection && ((SingleSelection)item).isOneSelected()) {
return ((SingleSelection)item).getSelectedKey();
}
return null;
}
protected FormItem createItem(String[] keys, String[] values, String startValue) {
SingleSelection choice = uifactory.addDropdownSingleselect(docAttribute + "-" + CodeHelper.getRAMUniqueID(), flc,
keys, values, null);
if(startValue != null) {
for(String key:keys) {
if(key.equals(startValue)) {
choice.select(key, true);
}
}
}
return choice;
}
public String getDocAttribute() {
return docAttribute;
}
}
private static class SearchAttribute {
private final String i18nKey;
private final QueryParameterFactory factory;
public SearchAttribute(String i18nKey, QueryParameterFactory factory) {
this.i18nKey = i18nKey;
this.factory = factory;
}
public String getI18nKey() {
return i18nKey;
}
public QueryParameterFactory getFactory() {
return factory;
}
}
} |
// Update the code so it only uses the let keyword.
// var should not exist in the code.
// catName should be Oliver.
// quote should be "Oliver says Meow!"
// Initial:
// var catName;
// var quote;
// function catTalk() {
// "use strict";
// catName = "Oliver";
// quote = catName + " says Meow!";
// }
// catTalk();
let catName;
let quote;
function catTalk() {
"use strict";
catName = "Oliver";
quote = catName + " says Meow!";
}
catTalk(); |
#!/usr/bin/env bash
# Build the site and serve it locally with live reload.
# Abort (instead of serving a stale build) if the build step fails.
set -euo pipefail

# Run relative to this script so it works from any working directory.
cd "$(dirname "$0")"

./task_build.sh
hugo serve --disableFastRender
|
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.arrowUpC = void 0;

// SVG descriptor for the "arrow up (circle style)" Ionicon: a single
// <path> child rendered inside a 512x512 view box.
const arrowUpC = {
  viewBox: "0 0 512 512",
  children: [
    {
      name: "path",
      attribs: {
        d: "M128.4,189.3L233.4,89c5.8-6,13.7-9,22.4-9c8.7,0,16.5,3,22.4,9l105.4,100.3c12.5,11.9,12.5,31.3,0,43.2\r\n\tc-12.5,11.9-32.7,11.9-45.2,0L288,184.4v217c0,16.9-14.3,30.6-32,30.6c-17.7,0-32-13.7-32-30.6v-217l-50.4,48.2\r\n\tc-12.5,11.9-32.7,11.9-45.2,0C115.9,220.6,115.9,201.3,128.4,189.3z"
      },
      children: []
    }
  ]
};

exports.arrowUpC = arrowUpC;
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#=======================================================================
#
# ccm.py
# ------
# Simple, pure Python, CCM implementation using the
# word based model of the AES cipher with support for 128 and
# 256 bit keys. Test vectors used in the tests are from the SP 800-38C.
#
#
#
# Author: <NAME>
# Copyright (c) 2017, Secworks Sweden AB
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#=======================================================================
#-------------------------------------------------------------------
# Python module imports.
#-------------------------------------------------------------------
import sys
# Note this assumes that the aes implementation is either in the
# same dir or symlinked.
from aes import AES
#-------------------------------------------------------------------
# test_ccm()
#
#-------------------------------------------------------------------
def test_ccm():
    """Run the CCM self tests.

    Currently a stub: it only prints the section banner. The SP 800-38C
    test vectors mentioned in the file header are not implemented yet.
    """
    banner = "Testing complete ccm:"
    print(banner)
    print("---------------------")
#-------------------------------------------------------------------
# main()
#
# If executed tests the cmac function and its subfunctions.
#-------------------------------------------------------------------
def main():
    """Entry point: print a banner and run the CCM self tests.

    Returns None, so ``sys.exit(main())`` exits with status 0.
    """
    print("Testing the CCM-AES mode")
    print("========================")
    # Bug fix: the original bare `print` (no parentheses) was a Python 2
    # leftover; under Python 3 it is a no-op expression and the intended
    # blank separator line was never emitted.
    print()
    test_ccm()
#-------------------------------------------------------------------
# __name__
# Python thingy which allows the file to be run standalone as
# well as parsed from within a Python interpreter.
#-------------------------------------------------------------------
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status when the
    # file is executed standalone (importing it runs nothing).
    sys.exit(main())
#=======================================================================
# EOF ccm.py
#=======================================================================
|
package io.opensphere.xyztile.transformer;
import java.io.Closeable;
import java.util.Observable;
import java.util.Observer;
import io.opensphere.core.util.lang.ThreadUtilities;
import io.opensphere.xyztile.model.XYZDataTypeInfo;
import io.opensphere.xyztile.model.XYZTileLayerInfo;
/**
 * Watches a layer's maximum zoom level and, whenever it changes, refreshes the
 * tiles on the globe by deactivating and immediately reactivating the layer.
 */
public class XYZMaxZoomObserver implements Observer, Closeable
{
    /** Handler that knows how to remove tiles from and add tiles to the globe. */
    private final LayerActivationListener myTileHandler;

    /** The layer whose zoom settings are being observed. */
    private final XYZDataTypeInfo myWatchedLayer;

    /**
     * Constructs a new observer and registers it on the layer's info object.
     *
     * @param layer the layer to watch.
     * @param activationHandler Knows how to remove and add tiles to the map.
     */
    public XYZMaxZoomObserver(XYZDataTypeInfo layer, LayerActivationListener activationHandler)
    {
        myTileHandler = activationHandler;
        myWatchedLayer = layer;
        myWatchedLayer.getLayerInfo().addObserver(this);
    }

    @Override
    public void close()
    {
        // Unregister so this observer can be garbage collected with its layer.
        myWatchedLayer.getLayerInfo().deleteObserver(this);
    }

    @Override
    public void update(Observable o, Object arg)
    {
        // Only react to max-zoom-level changes; ignore every other property.
        if (!XYZTileLayerInfo.MAX_LEVELS_PROP.equals(arg))
        {
            return;
        }
        // Bounce the layer off the EDT: deactivate then reactivate so the
        // tiles are rebuilt with the new zoom limit.
        ThreadUtilities.runCpu(() ->
        {
            myTileHandler.layerDeactivated(myWatchedLayer);
            myTileHandler.layerActivated(myWatchedLayer);
        });
    }
}
|
#!/bin/bash
# ========== Experiment Seq. Idx. 386 / 19.1.2 / N. 24/3/0 - _S=19.1.2 D1_N=24 a=-1 b=-1 c=-1 d=1 e=1 f=-1 D3_N=3 g=-1 h=1 i=1 D4_N=0 j=0 ==========
# Generated experiment driver: extracts SVM test features from a deep-model
# checkpoint, then evaluates a previously trained SVM layer on them.
# Treat unset variables as errors; deliberate, since most names below are fixed here.
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 386 / 19.1.2 / N. 24/3/0 - _S=19.1.2 D1_N=24 a=-1 b=-1 c=-1 d=1 e=1 f=-1 D3_N=3 g=-1 h=1 i=1 D4_N=0 j=0 ==========\n\n'
# NOTE(review): generated sanity guard — the condition is a compile-time
# constant for this treatment, so the branch is dead unless the generator
# emitted an inconsistent script.
if [[ "Yes" == "No" ]]; then
    echo 'FATAL: This treatment did not include an SVM layer.'>&2
    echo ' Something very wrong happened!'>&2
    exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/fulltrain.299.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.24"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.24.layer.3.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TEST_FEATURES_PREFIX="$FEATURES_DIR/deep.24.layer.3.test.0.index.1112.test"
TEST_FEATURES_PATH="$TEST_FEATURES_PREFIX.feats.pkl"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.24.layer.3.test.0.index.1112.svm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt:$SVM_PREFIX.finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# ...creates mid-way checkpoint after the expensive test features extraction
SEMIFINISH_PATH="$TEST_FEATURES_PREFIX.finish.txt"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$FEATURES_DIR"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
# Default status: generic failure until the run completes successfully.
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler.
# Experiments only run on committed code so every run is reproducible.
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo '       for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo '       Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    # Bug fix: `IFS=':' tokens_of_input=( $LIST_OF_INPUTS )` permanently set
    # the global IFS to ':' for the remainder of the script (an IFS prefix on
    # an assignment-only command persists). Using `read` scopes the IFS change
    # to this single command and still splits on ':'.
    IFS=':' read -r -a tokens_of_input <<< "$LIST_OF_INPUTS"
    input_missing=No
    for input_to_check in "${tokens_of_input[@]}"; do
        # Unquoted word splitting used to drop empty fields; keep that behavior.
        [[ -z "$input_to_check" ]] && continue
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
# Exit statuses: 160 = success, 161 = generic failure, 165 = lock already held.
FINISH_STATUS=161
function finish_trap {
    # Release the lock only if this process is the one that acquired it.
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    else
        if [[ "$FINISH_STATUS" == "160" ]]; then
            echo 'INFO: experiment finished successfully' >&2
        else
            # Failed run: remove the finish marker so the scheduler will retry.
            [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
            echo 'ERROR: an error occurred while executing the experiment' >&2
        fi
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a cross-process mutex).
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
#...gets closest checkpoint file
# The awk program picks, among all checkpoint step numbers, the one whose
# distance to the target step t=15000 is smallest (c=1 is the column index).
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
    sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
    sort -n | \
    awk -v c=1 -v t=15000 \
    'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
echo "$MODEL_PATH" >> "$START_PATH"
# Skip feature extraction entirely if the mid-way checkpoint marker exists.
if [[ ! -f "$SEMIFINISH_PATH" ]]; then
    #...performs preliminary feature extraction
    echo Extracting SVM test features with "$MODEL_PATH"
    python \
        "$SOURCES_GIT_DIR/predict_image_classifier.py" \
        --model_name="resnet_v2_101" \
        --checkpoint_path="$MODEL_PATH" \
        --dataset_name=skin_lesions \
        --task_name=label \
        --dataset_split_name=test \
        --preprocessing_name=dermatologic \
        --aggressive_augmentation="True" \
        --add_rotations="True" \
        --minimum_area_to_crop="0.20" \
        --normalize_per_image="1" \
        --batch_size=1 \
        --id_field_name=id \
        --pool_features=avg \
        --extract_features \
        --output_format=pickle \
        --add_scores_to_features=none \
        --eval_replicas="50" \
        --output_file="$TEST_FEATURES_PATH" \
        --dataset_dir="$DATASET_DIR"
    # Tip: leave last the arguments that make the command fail if they're absent,
    # so if there's a typo or forgotten \ the entire thing fails
    EXPERIMENT_STATUS="$?"
    # Bail out (the EXIT trap reports failure) if extraction failed or
    # produced no output file.
    if [[ "$EXPERIMENT_STATUS" != "0" || ! -e "$TEST_FEATURES_PATH" ]]; then
        exit
    fi
    date -u >> "$SEMIFINISH_PATH"
    echo GIT "$GIT_COMMIT" >> "$SEMIFINISH_PATH"
else
    echo Reloading features from "$TEST_FEATURES_PATH"
fi
#...performs prediction with SVM model
python \
    "$SOURCES_GIT_DIR/predict_svm_layer.py" \
    --output_file "$RESULTS_PATH" \
    --input_test "$TEST_FEATURES_PATH" \
    --input_model "$SVM_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
# Success only when the SVM step returned 0 AND its expected output exists;
# the finish marker tells the scheduler never to re-run this experiment.
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.actions
import com.google.inject.{ImplementedBy, Inject}
import controllers.routes
import models.SignedInUser
import models.requests.AuthenticatedRequest
import play.api.Configuration
import play.api.mvc._
import uk.gov.hmrc.auth.core._
import uk.gov.hmrc.auth.core.retrieve.v2.Retrievals._
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.http.HeaderCarrierConverter
import scala.concurrent.{ExecutionContext, Future}
/**
 * Authenticates requests against the "HMRC-CUS-ORG" enrolment, extracts the
 * EORI number and only lets through users present on the EORI allow list.
 */
class AuthActionImpl @Inject()(override val authConnector: AuthConnector, eoriAllowList: EoriAllowList, bodyParsers: PlayBodyParsers)(
  implicit override val executionContext: ExecutionContext
) extends AuthAction with AuthorisedFunctions {

  override val parser: BodyParser[AnyContent] = bodyParsers.anyContent

  override def invokeBlock[A](request: Request[A], block: AuthenticatedRequest[A] => Future[Result]): Future[Result] = {
    implicit val hc: HeaderCarrier =
      HeaderCarrierConverter.fromRequestAndSession(request, request.session)

    authorised(Enrolment("HMRC-CUS-ORG"))
      .retrieve(allEnrolments) { allEnrolments: Enrolments =>
        // Idiomatic replacement for the previous isEmpty check + .get:
        // missing EORI raises InsufficientEnrolments exactly as before.
        val eori = allEnrolments
          .getEnrolment("HMRC-CUS-ORG")
          .flatMap(_.getIdentifier("EORINumber"))
          .getOrElse(throw InsufficientEnrolments())

        val cdsLoggedInUser = SignedInUser(eori.value, allEnrolments)

        if (eoriAllowList.contains(cdsLoggedInUser)) {
          block(AuthenticatedRequest(request, cdsLoggedInUser))
        } else {
          // Not on the allow list: send the user to the unauthorised page.
          Future.successful(Results.Redirect(routes.UnauthorisedController.onPageLoad))
        }
      }
  }
}
/** Action builder that turns a plain [[Request]] into an [[AuthenticatedRequest]]; bound to [[AuthActionImpl]] by default. */
@ImplementedBy(classOf[AuthActionImpl])
trait AuthAction extends ActionBuilder[AuthenticatedRequest, AnyContent] with ActionFunction[Request, AuthenticatedRequest]
/**
 * Allow list of EORI numbers read from configuration ("allowList.eori").
 * NOTE: an EMPTY configured list means "allow every user", not "allow none".
 */
class EoriAllowList @Inject()(configuration: Configuration) {
  private val values = configuration.get[Seq[String]]("allowList.eori")
  def contains(user: SignedInUser): Boolean = values.isEmpty || values.contains(user.eori)
}
|
/*
* Copyright (c) 2008, 2009, 2010 <NAME>. All rights reserved. Your use of this computer software
* is permitted only in accordance with the GooTool license agreement distributed with this file.
*/
package com.goofans.gootool.util;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.*;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.dom.DOMSource;
import java.io.*;
import java.util.logging.Logger;
import java.util.logging.Level;
import org.w3c.dom.*;
import org.xml.sax.SAXException;
import org.xml.sax.InputSource;
import com.goofans.gootool.io.UnicodeReader;
import com.goofans.gootool.io.GameFormat;
/**
* XML manipulation utilities.
*
* @author <NAME> (<EMAIL>)
* @version $Id: XMLUtil.java 410 2010-07-12 11:46:50Z david $
*/
public class XMLUtil
{
private static final Logger log = Logger.getLogger(XMLUtil.class.getName());
  // Private constructor: utility class with only static methods, never instantiated.
  private XMLUtil()
  {
  }
/**
* Create a document from the contents of a file.
*
* @param file The file to read.
* @return the Document read.
* @throws IOException if the file cannot be read, or contains malformed XML.
*/
public static Document loadDocumentFromFile(File file) throws IOException
{
try {
return loadDocumentInternal(new FileInputStream(file));
}
catch (SAXException e) {
log.log(Level.SEVERE, "Unable to parse " + file.getName(), e);
throw new IOException("Unable to parse " + file.getName());
}
}
/**
* Create a document from the contents of a stream.
*
* @param is The stream to read.
* @return the Document read.
* @throws IOException if the stream cannot be read, or contains malformed XML.
*/
public static Document loadDocumentFromInputStream(InputStream is) throws IOException
{
try {
return loadDocumentInternal(is);
}
catch (SAXException e) {
log.log(Level.SEVERE, "Unable to parse document", e);
throw new IOException("Unable to parse document");
}
}
private static Document loadDocumentInternal(InputStream is) throws IOException, SAXException
{
DocumentBuilder builder;
try {
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
}
catch (ParserConfigurationException e) {
log.log(Level.SEVERE, "Unable to create an XML document builder", e);
throw new IOException("Unable to create an XML document builder: " + e.getLocalizedMessage());
}
/* Swallow any BOM at the start of file */
UnicodeReader r = new UnicodeReader(is, GameFormat.DEFAULT_CHARSET);
return builder.parse(new InputSource(r));
}
/**
* Writes a document to a file.
*
* @param d The document to write.
* @param file The file to write to.
* @throws TransformerException if an identity transformer instance doesn't exist or cannot be created, or if it fails to transform the XML.
*/
public static void writeDocumentToFile(Document d, File file) throws TransformerException
{
// Prepare the DOM document for writing
Source source = new DOMSource(d);
// Prepare the output file
Result result = new StreamResult(file);
// Write the DOM document to the file
Transformer xformer = TransformerFactory.newInstance().newTransformer();
xformer.transform(source, result);
}
/**
* Writes a document to a string.
*
* @param d The document to write.
* @return A String containing the document as XML.
* @throws TransformerException if an identity transformer instance doesn't exist or cannot be created, or if it fails to transform the XML.
*/
public static String writeDocumentToString(Document d) throws TransformerException
{
// Prepare the DOM document for writing
Source source = new DOMSource(d);
// Prepare the output file
StringWriter writer = new StringWriter();
Result result = new StreamResult(writer);
// Write the DOM document to the string
Transformer xformer = TransformerFactory.newInstance().newTransformer();
xformer.transform(source, result);
return writer.toString();
}
/**
* Creates a new, blank document for use.
*
* @return A blank document.
* @throws ParserConfigurationException if a suitable document builder cannot be found.
*/
public static Document newDocument() throws ParserConfigurationException
{
return DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
}
public static String getAttributeString(Node node, String attributeName, String defaultValue)
{
NamedNodeMap attributes = node.getAttributes();
if (attributes == null) return defaultValue;
Node attribute = attributes.getNamedItem(attributeName);
if (attribute == null) return defaultValue;
return attribute.getNodeValue().trim();
}
public static String getAttributeStringRequired(Node node, String attributeName) throws IOException
{
String s = getAttributeString(node, attributeName, null);
if (s == null) throw new IOException("Mandatory attribute " + attributeName + " not specified on " + node.getNodeName());
return s;
}
public static Double getAttributeDouble(Node node, String attributeName, Double defaultValue)
{
NamedNodeMap attributes = node.getAttributes();
if (attributes == null) return defaultValue;
Node attribute = attributes.getNamedItem(attributeName);
if (attribute == null) return defaultValue;
try {
Double d = Double.valueOf(attribute.getNodeValue().trim());
if (d.isNaN() || d.isInfinite()) return defaultValue;
return d;
}
catch (NumberFormatException e) {
return defaultValue;
}
}
public static double getAttributeDoubleRequired(Node node, String attributeName) throws IOException
{
Double d = getAttributeDouble(node, attributeName, null);
if (d == null) throw new IOException("Mandatory attribute " + attributeName + " not specified on " + node.getNodeName());
return d;
}
public static Float getAttributeFloat(Node node, String attributeName, Float defaultValue)
{
NamedNodeMap attributes = node.getAttributes();
if (attributes == null) return defaultValue;
Node attribute = attributes.getNamedItem(attributeName);
if (attribute == null) return defaultValue;
try {
Float f = Float.valueOf(attribute.getNodeValue().trim());
if (f.isNaN() || f.isInfinite()) return defaultValue;
return f;
}
catch (NumberFormatException e) {
return defaultValue;
}
}
public static float getAttributeFloatRequired(Node node, String attributeName) throws IOException
{
Float f = getAttributeFloat(node, attributeName, null);
if (f == null) throw new IOException("Mandatory attribute " + attributeName + " not specified on " + node.getNodeName());
return f;
}
public static Integer getAttributeInteger(Node node, String attributeName, Integer defaultValue)
{
NamedNodeMap attributes = node.getAttributes();
if (attributes == null) return defaultValue;
Node attribute = attributes.getNamedItem(attributeName);
if (attribute == null) return defaultValue;
try {
return Integer.valueOf(attribute.getNodeValue().trim());
}
catch (NumberFormatException e) {
return defaultValue;
}
}
public static int getAttributeIntegerRequired(Node node, String attributeName) throws IOException
{
Integer integer = getAttributeInteger(node, attributeName, null);
if (integer == null) throw new IOException("Mandatory attribute " + attributeName + " not specified on " + node.getNodeName());
return integer;
}
public static Boolean getAttributeBoolean(Node node, String attributeName, Boolean defaultValue)
{
NamedNodeMap attributes = node.getAttributes();
if (attributes == null) return defaultValue;
Node attribute = attributes.getNamedItem(attributeName);
if (attribute == null) return defaultValue;
return Boolean.valueOf(attribute.getNodeValue().trim());
}
// TODO something better than IOException
public static boolean getAttributeBooleanRequired(Node node, String attributeName) throws IOException
{
Boolean b = getAttributeBoolean(node, attributeName, null);
if (b == null) throw new IOException("Mandatory attribute " + attributeName + " not specified on " + node.getNodeName());
return b;
}
public static Element getElement(Element el, String tagName)
{
NodeList nodes = el.getElementsByTagName(tagName);
if (nodes.getLength() > 0) return (Element) nodes.item(0);
return null;
}
public static Element getElementRequired(Element el, String tagName) throws IOException
{
Element foundEl = getElement(el, tagName);
if (foundEl == null) throw new IOException("element " + tagName + " not found");
return foundEl;
}
public static String getElementString(Element el, String tagName)
{
Element foundEl = getElement(el, tagName);
if (foundEl == null) return "";
return foundEl.getTextContent().trim();
}
public static String getElementStringRequired(Element el, String tagName) throws IOException
{
return getElementRequired(el, tagName).getTextContent().trim();
}
public static double getElementDouble(Element el, String tagName, double defaultValue) throws IOException
{
NodeList list = el.getElementsByTagName(tagName);
if (list.getLength() == 0) return defaultValue;
try {
Double d = Double.valueOf(list.item(0).getTextContent().trim());
if (d.isNaN() || d.isInfinite()) return defaultValue;
return d;
}
catch (NumberFormatException e) {
throw new IOException("Invalid " + tagName + " double value: " + list.item(0));
}
}
public static int getElementInteger(Element el, String tagName, int defaultValue) throws IOException
{
NodeList list = el.getElementsByTagName(tagName);
if (list.getLength() == 0) return defaultValue;
try {
return Integer.valueOf(list.item(0).getTextContent().trim());
}
catch (NumberFormatException e) {
throw new IOException("Invalid " + tagName + " integer value: " + list.item(0));
}
}
public static int getElementIntegerRequired(Element el, String tagName) throws IOException
{
NodeList list = el.getElementsByTagName(tagName);
if (list.getLength() == 0) throw new IOException("element " + tagName + " not found");
try {
return Integer.valueOf(list.item(0).getTextContent().trim());
}
catch (NumberFormatException e) {
throw new IOException("Invalid " + tagName + " integer value: " + list.item(0));
}
}
public static Element findElementByAttributeValue(Element root, String elementName, String attributeName, String attributeValue, boolean caseSensitive)
{
NodeList elements = root.getElementsByTagName(elementName);
for (int i = 0; i < elements.getLength(); ++i) {
Element element = (Element) elements.item(i);
Attr attribute = element.getAttributeNode(attributeName);
if (attribute != null &&
((caseSensitive && attribute.getValue().equals(attributeValue)) ||
(!caseSensitive && attribute.getValue().equalsIgnoreCase(attributeValue)))) {
return element;
}
}
return null;
}
public static String escapeEntities(String input)
{
return input.replaceAll("&", "&")
.replaceAll("<", "<")
.replaceAll(">", ">")
.replaceAll("\"", """)
.replaceAll("'", "'");
}
}
|
#!/bin/bash
# Print (dry-run) an `mv` command for every Python file in the current directory.
# nullglob: with no matching files the loop runs zero times instead of once
# with the literal string '*.py' (which would previously have echoed a bogus
# `mv *.py *.py`).
shopt -s nullglob
for file in *.py; do
  name=$(basename "$file" .py)
  # NOTE(review): source and destination are currently identical -- presumably
  # a template whose target name is meant to be edited before use; confirm.
  echo mv "$file" "$name.py"
done
|
<reponame>andypyrope/JDA
/*
* Copyright 2015-2018 <NAME> & <NAME> & <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.dv8tion.jda.api.utils.cache;
import net.dv8tion.jda.api.entities.ISnowflake;
import net.dv8tion.jda.api.utils.ClosableIterator;
import net.dv8tion.jda.internal.utils.Checks;
import net.dv8tion.jda.internal.utils.cache.AbstractCacheView;
import net.dv8tion.jda.internal.utils.cache.ShardCacheViewImpl;
import net.dv8tion.jda.internal.utils.cache.SortedSnowflakeCacheViewImpl;
import net.dv8tion.jda.internal.utils.cache.UnifiedCacheViewImpl;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
* Read-only view on internal JDA cache of items.
* <br>This can be useful to check information such as size without creating
* an immutable snapshot first.
*
* <h2>Memory Efficient Usage</h2>
* The {@link #forEach(Consumer)} method can be used to avoid creating a snapshot
* of the backing data store, it is implemented by first acquiring a read-lock and then iterating the code.
* The enhanced-for-loop uses the {@link #iterator()} which has to first create a snapshot to avoid
* concurrent modifications. Alternatively the {@link #lockedIterator()} can be used to acquire an iterator
* which holds a read-lock on the data store and thus prohibits concurrent modifications, for more details
* read the documentation of {@link ClosableIterator}. Streams from {@link #stream()}/{@link #parallelStream()}
* both use {@link #iterator()} with a snapshot of the backing data store to avoid concurrent modifications.
* <br>Using {@link #getElementsByName(String)} is more efficient than {@link #asList()} as it uses {@link #forEach(Consumer)}
* for pattern matching and thus does not need to create a snapshot of the entire data store like {@link #asList()} does.
* <br>Both {@link #size()} and {@link #isEmpty()} are atomic operations.
*
* <p>Note that making a copy is a requirement if a specific order is desired. If using {@link #lockedIterator()}
* the order is not guaranteed as it directly iterates the backing cache.
* Using {@link #forEach(Consumer)} on a {@link SortedSnowflakeCacheView} will copy the cache in order to sort
* it, use {@link SortedSnowflakeCacheView#forEachUnordered(Consumer)} to avoid this overhead.
* The backing cache is stored using an un-ordered hash map.
*
* @param <T>
* The cache type
*/
public interface CacheView<T> extends Iterable<T>
{
    /**
     * Creates an immutable snapshot of the current cache state.
     * <br>This will copy all elements contained in this cache into a list.
     * <br>This will be sorted for a {@link SortedSnowflakeCacheViewImpl SortedSnowflakeCacheView}.
     *
     * @return Immutable list of cached elements
     */
    List<T> asList();

    /**
     * Creates an immutable snapshot of the current cache state.
     * <br>This will copy all elements contained in this cache into a set.
     *
     * @return Immutable set of cached elements
     */
    Set<T> asSet();

    /**
     * Returns an iterator with direct access to the underlying data store.
     * This iterator does not support removing elements.
     * <br>After usage this iterator should be closed to allow modifications by the library internals.
     *
     * <p><b>Note: Order is not preserved in this iterator to be more efficient,
     * if order is desired use {@link #iterator()} instead!</b>
     *
     * @return {@link ClosableIterator} holding a read-lock on the data structure.
     */
    ClosableIterator<T> lockedIterator();

    /**
     * Applies the specified function to an unordered sequenced stream of the elements in this cache.
     * <br>This does not copy the backing cache prior to consumption unlike {@link #stream()}.
     *
     * <p>The stream will be closed once this method returns and cannot be used anymore.
     *
     * <h2>Example</h2>
     * <code>
     * {@literal CacheView<User>} view = jda.getUserCache();<br>
     * long shortNames = view.applyStream(stream {@literal ->} stream.filter(it {@literal ->} it.getName().length() {@literal <} 4).count());<br>
     * System.out.println(shortNames + " users with less than 4 characters in their name");
     * </code>
     *
     * @param  action
     *         The action to perform on the stream
     * @param  <R>
     *         The return type after performing the specified action
     *
     * @throws IllegalArgumentException
     *         If the action is null
     *
     * @return The resulting value after the action was performed
     *
     * @see    #acceptStream(Consumer)
     */
    default <R> R applyStream(Function<Stream<T>, R> action)
    {
        Checks.notNull(action, "Action");
        // try-with-resources releases the iterator's read-lock once the
        // caller's function has consumed the stream.
        try (ClosableIterator<T> it = lockedIterator())
        {
            Spliterator<T> spliterator = Spliterators.spliterator(it, size(), Spliterator.IMMUTABLE | Spliterator.NONNULL);
            Stream<T> stream = StreamSupport.stream(spliterator, false);
            return action.apply(stream);
        }
    }

    /**
     * Applies the specified consumer to an unordered sequenced stream of the elements in this cache.
     * <br>This does not copy the backing cache prior to consumption unlike {@link #stream()}.
     *
     * <p>The stream will be closed once this method returns and cannot be used anymore.
     *
     * <h2>Example</h2>
     * <code>
     * {@literal CacheView<TextChannel>} view = guild.getTextChannelCache();<br>
     * view.acceptStream(stream {@literal ->} stream.filter(it {@literal ->} it.isNSFW()).forEach(it {@literal ->} it.sendMessage("lewd").queue()));
     * </code>
     *
     * @param  action
     *         The action to perform on the stream
     *
     * @throws IllegalArgumentException
     *         If the action is null
     *
     * @see    #applyStream(Function)
     */
    default void acceptStream(Consumer<Stream<T>> action)
    {
        Checks.notNull(action, "Action");
        // try-with-resources releases the iterator's read-lock once the
        // caller's consumer has processed the stream.
        try (ClosableIterator<T> it = lockedIterator())
        {
            Spliterator<T> spliterator = Spliterators.spliterator(it, size(), Spliterator.IMMUTABLE | Spliterator.NONNULL);
            Stream<T> stream = StreamSupport.stream(spliterator, false);
            action.accept(stream);
        }
    }

    /**
     * The current size of this cache
     * <br>This is a {@code long} as it may be a projected view of multiple caches
     * (See {@link net.dv8tion.jda.api.utils.cache.CacheView#all(java.util.function.Supplier)})
     *
     * <p>This is more efficient than creating a list or set snapshot first as it checks the size
     * of the internal cache directly.
     *
     * @return The current size of this cache
     */
    long size();

    /**
     * Whether the cache is empty
     *
     * <p>This is more efficient than creating a list or set snapshot first as it checks the size
     * of the internal cache directly.
     * <br>On a projected cache view this will simply look through all projected views and return false
     * the moment it finds one that is not empty.
     *
     * @return True, if this cache is currently empty
     */
    boolean isEmpty();

    /**
     * Creates an immutable list of all elements matching the given name.
     * <br>For a {@link net.dv8tion.jda.api.utils.cache.MemberCacheView MemberCacheView} this will
     * check the {@link net.dv8tion.jda.api.entities.Member#getEffectiveName() Effective Name} of the cached members.
     *
     * @param  name
     *         The name to check
     * @param  ignoreCase
     *         Whether to ignore case when comparing names
     *
     * @throws java.lang.IllegalArgumentException
     *         If the provided name is {@code null}
     *
     * @return Immutable list of elements with the given name
     */
    List<T> getElementsByName(String name, boolean ignoreCase);

    /**
     * Creates an immutable list of all elements matching the given name (case-sensitive).
     * <br>For a {@link net.dv8tion.jda.api.utils.cache.MemberCacheView MemberCacheView} this will
     * check the {@link net.dv8tion.jda.api.entities.Member#getEffectiveName() Effective Name} of the cached members.
     *
     * @param  name
     *         The name to check
     *
     * @throws java.lang.IllegalArgumentException
     *         If the provided name is {@code null}
     *
     * @return Immutable list of elements with the given name
     */
    default List<T> getElementsByName(String name)
    {
        return getElementsByName(name, false);
    }

    /**
     * Creates a {@link java.util.stream.Stream Stream} of all cached elements.
     * <br>This will be sorted for a {@link SortedSnowflakeCacheViewImpl SortedSnowflakeCacheView}.
     *
     * @return Stream of elements
     */
    Stream<T> stream();

    /**
     * Creates a parallel {@link java.util.stream.Stream Stream} of all cached elements.
     * <br>This will be sorted for a {@link SortedSnowflakeCacheViewImpl SortedSnowflakeCacheView}.
     *
     * @return Parallel Stream of elements
     */
    Stream<T> parallelStream();

    /**
     * Collects all cached entities into a single Collection using the provided
     * {@link java.util.stream.Collector Collector}.
     * Shortcut for {@code stream().collect(collector)}.
     *
     * @param  collector
     *         The collector used to collect the elements
     *
     * @param  <R>
     *         The output type
     * @param  <A>
     *         The accumulator type
     *
     * @throws java.lang.IllegalArgumentException
     *         If the provided collector is {@code null}
     *
     * @return Resulting collections
     */
    default <R, A> R collect(Collector<? super T, A, R> collector)
    {
        return stream().collect(collector);
    }

    /**
     * Creates a combined {@link net.dv8tion.jda.api.utils.cache.CacheView CacheView}
     * for all provided CacheView implementations. This allows to combine cache of multiple
     * JDA sessions or Guilds.
     *
     * @param  cacheViews
     *         Collection of {@link net.dv8tion.jda.api.utils.cache.CacheView CacheView} implementations
     *
     * @param  <E>
     *         The target type of the projection
     *
     * @return Combined CacheView spanning over all provided implementation instances
     */
    static <E> CacheView<E> all(Collection<? extends CacheView<E>> cacheViews)
    {
        Checks.noneNull(cacheViews, "Collection");
        return new UnifiedCacheViewImpl<>(cacheViews::stream);
    }

    /**
     * Creates a combined {@link net.dv8tion.jda.api.utils.cache.CacheView CacheView}
     * for all provided CacheView implementations. This allows to combine cache of multiple
     * JDA sessions or Guilds.
     *
     * @param  generator
     *         Stream generator of {@link net.dv8tion.jda.api.utils.cache.CacheView CacheView} implementations
     *
     * @param  <E>
     *         The target type of the projection
     *
     * @return Combined CacheView spanning over all provided implementation instances
     */
    static <E> CacheView<E> all(Supplier<Stream<CacheView<E>>> generator)
    {
        Checks.notNull(generator, "Generator");
        return new UnifiedCacheViewImpl<>(generator);
    }

    /**
     * Creates a combined {@link ShardCacheView ShardCacheView}
     * for all provided ShardCacheView implementations.
     *
     * @param  cacheViews
     *         Collection of {@link ShardCacheView ShardCacheView} implementations
     *
     * @return Combined ShardCacheView spanning over all provided implementation instances
     */
    static ShardCacheView allShards(Collection<ShardCacheView> cacheViews)
    {
        Checks.noneNull(cacheViews, "Collection");
        return new ShardCacheViewImpl.UnifiedShardCacheViewImpl(cacheViews::stream);
    }

    /**
     * Creates a combined {@link ShardCacheView ShardCacheView}
     * for all provided ShardCacheView implementations.
     *
     * @param  generator
     *         Stream generator of {@link ShardCacheView ShardCacheView} implementations
     *
     * @return Combined ShardCacheView spanning over all provided implementation instances
     */
    static ShardCacheView allShards(Supplier<Stream<ShardCacheView>> generator)
    {
        Checks.notNull(generator, "Generator");
        return new ShardCacheViewImpl.UnifiedShardCacheViewImpl(generator);
    }

    /**
     * Creates a combined {@link net.dv8tion.jda.api.utils.cache.SnowflakeCacheView SnowflakeCacheView}
     * for all provided SnowflakeCacheView implementations.
     * <br>This allows to combine cache of multiple JDA sessions or Guilds.
     *
     * @param  cacheViews
     *         Collection of {@link net.dv8tion.jda.api.utils.cache.SnowflakeCacheView SnowflakeCacheView} implementations
     *
     * @param  <E>
     *         The target type of the chain
     *
     * @return Combined SnowflakeCacheView spanning over all provided implementation instances
     */
    static <E extends ISnowflake> SnowflakeCacheView<E> allSnowflakes(Collection<SnowflakeCacheView<E>> cacheViews)
    {
        Checks.noneNull(cacheViews, "Collection");
        return new UnifiedCacheViewImpl.UnifiedSnowflakeCacheView<>(cacheViews::stream);
    }

    /**
     * Creates a combined {@link net.dv8tion.jda.api.utils.cache.SnowflakeCacheView SnowflakeCacheView}
     * for all provided SnowflakeCacheView implementations.
     * <br>This allows to combine cache of multiple JDA sessions or Guilds.
     *
     * @param  generator
     *         Stream generator of {@link net.dv8tion.jda.api.utils.cache.SnowflakeCacheView SnowflakeCacheView} implementations
     *
     * @param  <E>
     *         The target type of the chain
     *
     * @return Combined SnowflakeCacheView spanning over all provided implementation instances
     */
    static <E extends ISnowflake> SnowflakeCacheView<E> allSnowflakes(Supplier<Stream<SnowflakeCacheView<E>>> generator)
    {
        Checks.notNull(generator, "Generator");
        return new UnifiedCacheViewImpl.UnifiedSnowflakeCacheView<>(generator);
    }

    /**
     * Creates a combined {@link UnifiedMemberCacheView UnifiedMemberCacheView}
     * for all provided MemberCacheView implementations.
     * <br>This allows to combine cache of multiple JDA sessions or Guilds.
     *
     * @param  cacheViews
     *         Collection of {@link net.dv8tion.jda.api.utils.cache.MemberCacheView MemberCacheView} instances
     *
     * @return Combined MemberCacheView spanning over all provided instances
     */
    static UnifiedMemberCacheView allMembers(Collection<MemberCacheView> cacheViews)
    {
        Checks.noneNull(cacheViews, "Collection");
        return new UnifiedCacheViewImpl.UnifiedMemberCacheViewImpl(cacheViews::stream);
    }

    /**
     * Creates a combined {@link UnifiedMemberCacheView UnifiedMemberCacheView}
     * for all provided MemberCacheView implementations.
     * <br>This allows to combine cache of multiple JDA sessions or Guilds.
     *
     * @param  generator
     *         Stream generator of {@link net.dv8tion.jda.api.utils.cache.MemberCacheView MemberCacheView} instances
     *
     * @return Combined MemberCacheView spanning over all provided instances
     */
    static UnifiedMemberCacheView allMembers(Supplier<Stream<MemberCacheView>> generator)
    {
        Checks.notNull(generator, "Generator");
        return new UnifiedCacheViewImpl.UnifiedMemberCacheViewImpl(generator);
    }

    /**
     * Basic implementation of {@link net.dv8tion.jda.api.utils.cache.CacheView CacheView} interface.
     * <br>Using {@link gnu.trove.map.TLongObjectMap TLongObjectMap} to cache entities!
     *
     * @param <T>
     *        The type this should cache
     */
    class SimpleCacheView<T> extends AbstractCacheView<T>
    {
        public SimpleCacheView(Class<T> type, Function<T, String> nameMapper)
        {
            super(type, nameMapper);
        }
    }
}
|
use diesel::prelude::*;
use diesel::result::Error as DieselError;
use crate::schema::users_on_rocketchat_servers::dsl::*;
use crate::models::{UserOnRocketchatServer, UserId};
pub fn find_by_matrix_user_ids(
connection: &SqliteConnection,
matrix_user_ids: Vec<UserId>,
) -> Result<Vec<UserOnRocketchatServer>, DieselError> {
let user_on_rocketchat_server = users_on_rocketchat_servers
.filter(matrix_user_id.eq_any(matrix_user_ids))
.load::<UserOnRocketchatServer>(connection)?;
Ok(user_on_rocketchat_server)
} |
#!/usr/bin/env bash

# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# -x: trace commands; -e: exit on error; -u: error on unset variables;
# -o pipefail: a pipeline fails if any stage fails.
set -xeuo pipefail

# Keep Python output unbuffered so build logs stream to CI in real time.
export PYTHONUNBUFFERED=1

# Write the conda configuration used for this containerised build.
cat >~/.condarc <<CONDARC
channels:
  - conda-forge
conda-build:
  root-dir: /home/conda/feedstock_root/build_artifacts
show_channel_urls: true
CONDARC

# A lock sometimes occurs with incomplete builds. The lock file is stored in build_artifacts.
conda clean --lock

conda install --yes --quiet conda-forge-ci-setup=1 conda-build
source run_conda_forge_build_setup

# Install the yum requirements defined canonically in the
# "recipe/yum_requirements.txt" file. After updating that file,
# run "conda smithy rerender" and this line will be updated
# automatically.
/usr/bin/sudo -n yum install -y libX11-devel

# Build the recipe against the variant config selected by $CONFIG, then
# upload the package (or verify it doesn't need uploading) and mark the
# build as complete for the CI harness.
conda build /home/conda/recipe_root -m /home/conda/feedstock_root/.ci_support/${CONFIG}.yaml --quiet
upload_or_check_non_existence /home/conda/recipe_root conda-forge --channel=main -m /home/conda/feedstock_root/.ci_support/${CONFIG}.yaml
touch "/home/conda/feedstock_root/build_artifacts/conda-forge-build-done-${CONFIG}"
|
#!/bin/bash
# Trains reference CNNs (random init) and pre-trained CNNs for statistics,
# invoking train_and_test_cnn.py once per run.
echo 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
echo 'XXXXXXXXXXXXXXXXX TRAIN REFERENCE CNN XXXXXXXXXXXXXXXX'
echo 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'

# Directory containing this script; also used as the training root.
PARENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
TRAINING_ROOT_DIR=$PARENT_DIR

# The script trains REF_RUN_NUM reference nets and PRE_RUN_NUM pre-trained
# networks for statistics.
# NOTE(review): REF_RUN_NUM=0 disables the reference loop entirely even though
# the banner below still prints -- presumably an intentional toggle; confirm.
REF_RUN_NUM=0
PRE_RUN_NUM=10

echo "Working in $PARENT_DIR"

DATASET="MNIST_10k"
CNN_LOG_FOLDER="666_MNIST_10k"
CNN_CONFIG_FILE_PATH="$PARENT_DIR/paper_reference_cnn.ini"
# If set to True, a test run using the test set is performed after the training.
TEST_SET_BOOL=True

echo "Begin training the reference network"
# NOTE(review): the run prefix says "random_init" but INIT_MODE is "resume";
# confirm which initialisation train_and_test_cnn.py actually applies here.
INIT_MODE="resume"
WEIGHTS_PATH="None"
RUN_PREFIX="reference-random_init"
for (( i=1; i<=REF_RUN_NUM; i++ ))
do
    echo "Begin training reference network $i"
    RUN_NAME="${RUN_PREFIX}_${i}"
    echo "Run name is: $RUN_NAME"
    # Arguments are quoted so paths containing spaces survive word splitting
    # (the original passed all eight arguments unquoted).
    python train_and_test_cnn.py "$DATASET" "$CNN_CONFIG_FILE_PATH" "$INIT_MODE" "$WEIGHTS_PATH" "$CNN_LOG_FOLDER" "$RUN_NAME" "$TEST_SET_BOOL" "$PARENT_DIR"
done

echo "Begin training the pre-trained network"
INIT_MODE="pre_trained_encoding"
WEIGHTS_PATH="experiments/paper_reference_experiments/paper_reference_cae/weights/mnist_paper_net_cae_2/paper_cae_test_0.05"
RUN_PREFIX="pre-trained"
for (( i=1; i<=PRE_RUN_NUM; i++ ))
do
    echo "Begin training pre-trained network $i"
    RUN_NAME="${RUN_PREFIX}_${i}"
    python train_and_test_cnn.py "$DATASET" "$CNN_CONFIG_FILE_PATH" "$INIT_MODE" "$WEIGHTS_PATH" "$CNN_LOG_FOLDER" "$RUN_NAME" "$TEST_SET_BOOL" "$PARENT_DIR"
done
|
<gh_stars>10-100
// Auto-generated Doxygen navigation data for the IObjective interface.
// Each entry is [ memberName, memberAnchorUrl, subEntries ]; do not edit by hand.
var interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_objective =
[
    [ "EnvironmentReset", "interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_objective.html#a42c5d2e602366932cbd0be02b10756d2", null ],
    [ "Evaluate", "interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_objective.html#a85f39034cef720fef989bc6e403e2fa2", null ],
    [ "EpisodeLength", "interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_objective.html#a21096d7cb72550799c123989300ec98a", null ],
    [ "SignalSpace", "interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_objective.html#a9c13951b7c630778e755575b9b115390", null ],
    [ "SolvedThreshold", "interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_objective.html#afc7e0323614052eca0f11f02637405f6", null ]
];
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2152-1
#
# Security announcement date: 2014-03-24 00:00:00 UTC
# Script generation date:     2017-01-01 21:03:45 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fix on version:
#   - apache2.2-bin:2.2.22-1ubuntu1.5
#
# Last versions recommended by security team:
#   - apache2.2-bin:2.2.22-1ubuntu1.11
#
# CVE List:
#   - CVE-2013-6438
#   - CVE-2014-0098
#
# More details:
#   - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Upgrade (only if already installed) to the patched apache2.2-bin build.
sudo apt-get install --only-upgrade apache2.2-bin=2.2.22-1ubuntu1.11 -y
|
// Function to check whether the given tree satisfies the binary search tree
// property: for EVERY node, all keys in its left subtree are strictly smaller
// and all keys in its right subtree are strictly greater (duplicates are
// rejected, matching the original >= / <= checks).
boolean isBST(Node node)
{
    // FIX: the previous version only compared each node with its immediate
    // children, which wrongly accepts trees such as 10 -> (5 -> (_, 15)):
    // 15 sits in the left subtree of 10 but is larger than 10. Passing the
    // allowed (min, max) range down the recursion enforces the property for
    // the entire subtree, not just parent/child pairs.
    return isBSTInRange(node, Long.MIN_VALUE, Long.MAX_VALUE);
}

// Returns true if every key in the subtree rooted at 'node' lies strictly
// between min and max. 'long' bounds are used so the initial sentinel range
// can enclose any key (assumes node.data is an int -- TODO confirm).
boolean isBSTInRange(Node node, long min, long max)
{
    // Base Case: an empty subtree is a valid BST.
    if (node == null)
        return true;

    // The key must lie strictly inside the permitted range.
    if (node.data <= min || node.data >= max)
        return false;

    // Left keys must stay below node.data, right keys above it.
    return isBSTInRange(node.left, min, node.data)
        && isBSTInRange(node.right, node.data, max);
}
// This code has been contributed by Mayank Jaiswal. |
<reponame>PolicyStreet/grapher<gh_stars>0
import {handleAddField, handleAddElement, handleAddReducer} from './createReducers';
/**
 * Embeds the reducer body with a collection body, merging each reducer key
 * into the collection node as a nested link, another reducer, or a plain field.
 *
 * @param reducerBody     Object describing the reducer's fields/links
 * @param collectionNode  The query-tree node the reducer is attached to
 */
export default function embedReducerWithLink(reducerBody, collectionNode) {
    _.each(reducerBody, (value, key) => {
        const collection = collectionNode.collection;

        if (_.isObject(value)) {
            // nested field or link
            if (collectionNode.body[key]) {
                // if it exists
                const linker = collection.getLinker(key);

                // if it's a link, recurse into the linked collection's node
                if (linker) {
                    embedReducerWithLink(value, collectionNode.getCollectionNode(key));
                    return;
                }

                handleAddField(key, value, collectionNode);
            } else {
                // Key is not yet in the collection body, so register it as a
                // new link/reducer/field element on this node.
                // FIX: the original passed an undefined identifier `root`
                // here, which threw a ReferenceError whenever this branch was
                // reached; the current node is the intended target -- confirm
                // against handleAddElement's signature in ./createReducers.
                handleAddElement(collectionNode, key, value);
            }
        } else {
            // if this field or other reducer exists within the collection
            if (!collectionNode.body[key]) {
                // can only be a field or another reducer at this point
                const reducer = collection.getReducer(key);

                if (reducer) {
                    // if it's another reducer, merge it in as well
                    return handleAddReducer(key, reducer, collectionNode);
                }

                return handleAddField(key, value, collectionNode);
            }
        }
    })
}
#! /bin/bash
# Upload the packaged desktop builds (exe/zip/dmg plus updater metadata) for
# a given version to the download servers, backing up the current updater
# metadata (latest*.yml / latest-mac.json) first.

# Require the version argument. `-z "${1:-}"` handles an unset or empty
# argument reliably; the original unquoted `[ ! $1 ]` relied on word splitting
# and would misbehave for arguments containing spaces.
if [ -z "${1:-}" ]; then
    echo "usage: bash deploy.sh 1.0.4"
    exit 1
fi

echo "upload to server."
SERVER=root@101.200.53.74
VERSION=$1
DIR=/var/www/downloadfile/software/native
# Timestamp suffix for the metadata backups created below.
timestamp=$(date +%s)
ssh -p 65522 ${SERVER} "cp ${DIR}/latest.yml ${DIR}/latest.${timestamp}.yml; cp ${DIR}/latest-mac.yml ${DIR}/latest-mac.${timestamp}.yml; cp ${DIR}/latest-mac.json ${DIR}/latest-mac.${timestamp}.json;"
scp -P 65522 build/native/*${VERSION}.exe ${SERVER}:${DIR}
scp -P 65522 build/native/*${VERSION}*.zip ${SERVER}:${DIR}
scp -P 65522 build/native/*${VERSION}*.dmg ${SERVER}:${DIR}
scp -P 65522 build/native/*.yml ${SERVER}:${DIR}
scp -P 65522 build/native/*.json ${SERVER}:${DIR}

# NOTE(review): the banner below is commented out but the upload to the old
# server still runs -- confirm whether this mirror is still wanted.
# echo "upload to old server."
SERVER=root@59.110.14.106
DIR=/usr/share/nginx/html/muwen/native
ssh ${SERVER} "cp ${DIR}/latest.yml ${DIR}/latest.${timestamp}.yml; cp ${DIR}/latest-mac.yml ${DIR}/latest-mac.${timestamp}.yml; cp ${DIR}/latest-mac.json ${DIR}/latest-mac.${timestamp}.json;"
scp build/native/*${VERSION}.exe ${SERVER}:${DIR}
scp build/native/*${VERSION}*.zip ${SERVER}:${DIR}
scp build/native/*${VERSION}*.dmg ${SERVER}:${DIR}
scp build/native/*.yml ${SERVER}:${DIR}
scp build/native/*.json ${SERVER}:${DIR}
echo "deploy done. Backup timestamp ${timestamp}"
# Deployment environment for the floppybox app.
export DOMAINS="floppybox.baylaunch.com"  # domain(s) to serve
export AWS_HOST=3  # presumably selects an AWS host entry in _include.sh -- TODO confirm
export HEROKU_APP="floppybox"  # target Heroku application name
# Shared deployment helpers; continue with a warning if they are missing.
source ../recbro/lib/_include.sh || echo "no include"
<reponame>f96q/kptboard
# Renames the +typ+ column of the +labels+ table to +kind+.
class RenameTypToLabels < ActiveRecord::Migration[5.1]
  def change
    # rename_column is reversible, so this migration can be rolled back.
    rename_column :labels, :typ, :kind
  end
end
|
<filename>src/ext/array/any.ts
export {};
declare global {
  interface Array<T> {
    /**
     * [Extension method]
     * Returns true when at least one element satisfies the condition.
     * @description Behaves like C# LINQ's Any method: with no predicate it
     * reports whether the array is non-empty.
     * @param callbackfn Optional predicate applied to each element.
     * @return true when a matching element exists.
     */
    any(callbackfn?: (value: T, index: number, array: T[]) => boolean): boolean;
  }
}
// Fixed: a direct `Array.prototype.any = ...` assignment creates an
// ENUMERABLE property that leaks into every `for..in` loop over arrays.
// Object.defineProperty installs it non-enumerable, matching how built-in
// array methods behave, while keeping the call-site behavior identical.
Object.defineProperty(Array.prototype, 'any', {
  value: function <T>(this: T[], callbackfn?: (value: T, index: number, array: T[]) => boolean): boolean {
    const items = this as T[];
    if (!Array.isArray(items)) return false;
    if (callbackfn) return items.some(callbackfn);
    else return items.length > 0;
  },
  writable: true,
  configurable: true,
  enumerable: false,
});
|
package org.dhallj.core.typechecking;
import org.dhallj.core.DhallException;
import org.dhallj.core.Expr;
import org.dhallj.core.Operator;
/**
 * Error type raised when Dhall type checking fails.
 *
 * <p>Instances are produced by the static factory methods below, which keep
 * all error messages in one place. As a pure failure-signal type it suppresses
 * stack-trace collection for speed.
 */
public final class TypeCheckFailure extends DhallException {
  @Override
  public Throwable fillInStackTrace() {
    // This is a failure type; stack traces aren't useful.
    return this;
  }

  TypeCheckFailure(String message) {
    super(message);
  }

  static TypeCheckFailure makeSortError() {
    return new TypeCheckFailure("Sort has no type, kind, or sort");
  }

  static TypeCheckFailure makeUnboundVariableError(String name) {
    return new TypeCheckFailure(String.format("Unbound variable: %s", name));
  }

  /** Maps an operator to the message describing its operand-type requirement. */
  static TypeCheckFailure makeOperatorError(Operator operator) {
    switch (operator) {
      case OR:
      case AND:
      case EQUALS:
      case NOT_EQUALS:
        return new TypeCheckFailure(String.format("%s only works on Bools", operator));
      case PLUS:
      case TIMES:
        return new TypeCheckFailure(String.format("%s only works on Naturals", operator));
      case TEXT_APPEND:
        return new TypeCheckFailure(String.format("%s only works on Text", operator));
      case LIST_APPEND:
        return new TypeCheckFailure(String.format("%s only works on Lists", operator));
      case COMBINE:
      case PREFER:
        return new TypeCheckFailure("You can only combine records");
      case COMBINE_TYPES:
        return new TypeCheckFailure(
            String.format("%s requires arguments that are record types", operator));
      case EQUIVALENT:
        return new TypeCheckFailure("Incomparable expression");
      default:
        return new TypeCheckFailure(String.format("Operator error on %s", operator));
    }
  }

  static TypeCheckFailure makeListAppendError(Expr lhs, Expr rhs) {
    return new TypeCheckFailure("You can only append Lists with matching element types");
  }

  static TypeCheckFailure makeEquivalenceError(Expr lhs, Expr rhs) {
    // Fixed: this previously reused makeListAppendError's message verbatim
    // (copy-paste), so equivalence failures reported a List-append error.
    return new TypeCheckFailure("You can only compare expressions with matching types");
  }

  static TypeCheckFailure makeInterpolationError(Expr interpolated, Expr interpolatedType) {
    return new TypeCheckFailure("You can only interpolate Text");
  }

  static TypeCheckFailure makeSomeApplicationError(Expr arg, Expr argType) {
    return new TypeCheckFailure("Some argument has the wrong type");
  }

  static TypeCheckFailure makeBuiltInApplicationError(String name, Expr arg, Expr argType) {
    return new TypeCheckFailure(String.format("Can't apply %s", name));
  }

  static TypeCheckFailure makeApplicationTypeError(Expr expected, Expr received) {
    return new TypeCheckFailure("Wrong type of function argument");
  }

  static TypeCheckFailure makeApplicationError(Expr base, Expr arg) {
    return new TypeCheckFailure("Not a function");
  }

  static TypeCheckFailure makeUnresolvedImportError() {
    return new TypeCheckFailure("Can't type-check unresolved import");
  }

  static TypeCheckFailure makeIfPredicateError(Expr type) {
    return new TypeCheckFailure("Invalid predicate for if");
  }

  static TypeCheckFailure makeIfBranchTypeMismatchError(Expr thenType, Expr elseType) {
    return new TypeCheckFailure("if branches must have matching types");
  }

  static TypeCheckFailure makeIfBranchError(Expr type) {
    return new TypeCheckFailure("if branch is not a term");
  }

  static TypeCheckFailure makeLambdaInputError(Expr type) {
    return new TypeCheckFailure("Invalid function input");
  }

  static TypeCheckFailure makeAssertError(Expr type) {
    return new TypeCheckFailure("Not an equivalence");
  }

  static TypeCheckFailure makeFieldAccessError() {
    return new TypeCheckFailure("Not a record or union");
  }

  static TypeCheckFailure makeFieldAccessRecordMissingError(String fieldName) {
    return new TypeCheckFailure(String.format("Missing record field: %s", fieldName));
  }

  static TypeCheckFailure makeFieldAccessUnionMissingError(String fieldName) {
    return new TypeCheckFailure(String.format("Missing constructor: %s", fieldName));
  }

  static TypeCheckFailure makeProjectionError() {
    return new TypeCheckFailure("Not a record");
  }

  static TypeCheckFailure makeFieldTypeError(String fieldName) {
    return new TypeCheckFailure("Invalid field type");
  }

  static TypeCheckFailure makeFieldDuplicateError(String fieldName) {
    return new TypeCheckFailure(String.format("duplicate field: %s", fieldName));
  }

  static TypeCheckFailure makeListTypeMismatchError(Expr type1, Expr type2) {
    return new TypeCheckFailure("List elements should all have the same type");
  }

  static TypeCheckFailure makeListTypeError(Expr type) {
    return new TypeCheckFailure("Invalid type for List");
  }

  static TypeCheckFailure makeAnnotationError(Expr expected, Expr received) {
    return new TypeCheckFailure("Expression doesn't match annotation");
  }

  static TypeCheckFailure makeAlternativeTypeMismatchError(Expr type) {
    return new TypeCheckFailure("Alternative annotation mismatch");
  }

  static TypeCheckFailure makeAlternativeTypeError(Expr type) {
    return new TypeCheckFailure("Invalid alternative type");
  }

  /** Not sure under what conditions this wouldn't be caught by the parser. */
  static TypeCheckFailure makeAlternativeDuplicateError(String fieldName) {
    return new TypeCheckFailure(String.format("duplicate field: %s", fieldName));
  }

  static TypeCheckFailure makeMergeHandlersTypeError(Expr type) {
    return new TypeCheckFailure("merge expects a record of handlers");
  }

  static TypeCheckFailure makeMergeUnionTypeError(Expr type) {
    // Fixed: the message said "toMap" although this error is raised by merge.
    return new TypeCheckFailure("merge expects a union or an Optional");
  }

  static TypeCheckFailure makeMergeHandlerMissingError(String fieldName) {
    return new TypeCheckFailure(String.format("Missing handler: %s", fieldName));
  }

  static TypeCheckFailure makeMergeHandlerUnusedError(String fieldName) {
    return new TypeCheckFailure(String.format("Unused handler: %s", fieldName));
  }

  static TypeCheckFailure makeMergeHandlerTypeInvalidError(Expr expected, Expr type) {
    return new TypeCheckFailure("Wrong handler input type");
  }

  static TypeCheckFailure makeMergeHandlerTypeNotFunctionError(
      String fieldName, Expr expected, Expr type) {
    return new TypeCheckFailure(String.format("Handler for %s is not a function", fieldName));
  }

  static TypeCheckFailure makeMergeHandlerTypeMismatchError(Expr type1, Expr type2) {
    return new TypeCheckFailure("Handlers should have the same output type");
  }

  static TypeCheckFailure makeMergeHandlerTypeDisallowedError(Expr type) {
    return new TypeCheckFailure("Disallowed handler type");
  }

  static TypeCheckFailure makeMergeInvalidAnnotationError(Expr expected, Expr inferred) {
    return new TypeCheckFailure("Expression doesn't match annotation");
  }

  static TypeCheckFailure makeToMapTypeError(Expr type) {
    return new TypeCheckFailure("toMap expects a record value");
  }

  static TypeCheckFailure makeToMapRecordKindError(Expr type) {
    return new TypeCheckFailure("toMap expects a record of kind Type");
  }

  static TypeCheckFailure makeToMapRecordTypeMismatchError(Expr type1, Expr type2) {
    return new TypeCheckFailure("toMap expects a homogenous record");
  }

  static TypeCheckFailure makeToMapResultTypeMismatchError(Expr expected, Expr inferred) {
    return new TypeCheckFailure("toMap result type doesn't match annotation");
  }

  static TypeCheckFailure makeToMapMissingAnnotationError() {
    return new TypeCheckFailure("An empty toMap requires a type annotation");
  }

  static TypeCheckFailure makeToMapInvalidAnnotationError(Expr type) {
    return new TypeCheckFailure("An empty toMap was annotated with an invalid type");
  }
}
|
TERMUX_PKG_HOMEPAGE=https://www.gnu.org/software/binutils/
TERMUX_PKG_DESCRIPTION="Collection of binary tools, the main ones being ld, the GNU linker, and as, the GNU assembler"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=2.36
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/gnu/binutils/binutils-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=5788292cc5bbcca0848545af05986f6b17058b105be59e99ba7d0f9eb5336fb8
TERMUX_PKG_DEPENDS="libc++, zlib"
TERMUX_PKG_BREAKS="binutils-dev"
TERMUX_PKG_REPLACES="binutils-dev"
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="--enable-gold --enable-plugins --disable-werror --with-system-zlib --enable-new-dtags"
TERMUX_PKG_EXTRA_MAKE_ARGS="tooldir=$TERMUX_PREFIX"
TERMUX_PKG_RM_AFTER_INSTALL="share/man/man1/windmc.1 share/man/man1/windres.1 bin/ld.bfd"
TERMUX_PKG_NO_STATICSPLIT=true

# Avoid linking against libfl.so from flex if available:
export LEXLIB=

# Build hook: runs before ./configure.
termux_step_pre_configure() {
	export CPPFLAGS="$CPPFLAGS -Wno-c++11-narrowing"
	# Default dynamic-linker search path for the target device,
	# chosen by target word size.
	if [ $TERMUX_ARCH_BITS = 32 ]; then
		export LIB_PATH="${TERMUX_PREFIX}/lib:/system/lib"
	else
		export LIB_PATH="${TERMUX_PREFIX}/lib:/system/lib64"
	fi
}

# Build hook: runs after `make install`.
termux_step_post_make_install() {
	# Install the bundled ldd replacement script.
	cp $TERMUX_PKG_BUILDER_DIR/ldd $TERMUX_PREFIX/bin/ldd
	cd $TERMUX_PREFIX/bin
	# Setup symlinks as these are used when building, so used by
	# system setup in e.g. python, perl and libtool:
	for b in ar ld nm objdump ranlib readelf strip; do
		ln -s -f $b $TERMUX_HOST_PLATFORM-$b
	done
	ln -sf ld.gold gold
}
|
#!/bin/sh

# Terminal color helpers for the status output below.
txtrst=$(tput sgr0)    # Text reset
txtred=$(tput setaf 1) # Red
txtgrn=$(tput setaf 2) # Green (currently unused)

# check_for <display-name> <binary> <version-command> [min-version] [max-version]
#
# Looks the binary up on PATH, extracts the first dotted-number token the
# version command prints, and compares it against the optional bounds.
#
# NOTE(review): the control flow is tangled and order-sensitive:
#   - tool found, no min version   -> early return (success);
#   - tool missing, no min version -> falls to the bottom `else` and
#     exits 1, aborting all remaining checks;
#   - tool missing, min version given -> the version comparisons still run
#     against an empty $version and the script does NOT exit.
# That asymmetry looks unintentional — confirm intent before restructuring.
check_for() {
    found=`which $2`
    # First run the version command; keep only the first "-0-9." token,
    # e.g. "v0.4.6" -> "0.4.6".
    version=`$3 2>&1 | grep -o -E [-0-9.]\{1,\} | head -n 1`
    if [ -z "$found" ]; then
        echo "${txtred}$1 not found!${txtrst}" >&2
    else
        echo "$1 version $version found." >&2
        if [ -z "$4" ]; then
            return
        fi
    fi
    if [ -n "$4" ]; then
        # NOTE(review): \< and \> are LEXICOGRAPHIC string comparisons —
        # correct for single-digit components but wrong for e.g. "10" vs "9".
        if [ "$version" \< "$4" ]; then
            echo "${txtred}$1 version $4 or greater required!${txtrst}" >&2
        fi
        if [ -n "$5" ]; then
            if [ "$version" \> "$5" ]; then
                echo "${txtred}$1 version $5 or less required!${txtrst}" >&2
            fi
        fi
    else
        exit 1
    fi
}

# Toolchain requirements for this project.
check_for Python python 'python -V' 2.6
check_for Node.js node 'node -v' 0.4.6 0.4.999999
check_for npm npm "npm -v" 1
check_for mongoDB mongod "mongod --version" 1.4.0

# Vows is only required when explicitly requested.
if [ "$1" = "--vows" ] ; then
    check_for Vows vows "vows --version" 0.5.8
fi
|
#!/usr/bin/env bash
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh

# Build a WINDOW VIEW over `mt` that counts rows per market in 5-second
# tumbling windows and writes the aggregates into `dst`.
$CLICKHOUSE_CLIENT --multiquery <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;
DROP TABLE IF EXISTS wv;
CREATE TABLE dst(count UInt64, market Int32, w_end DateTime) Engine=MergeTree ORDER BY tuple();
CREATE TABLE mt(a Int32, market Int32, timestamp DateTime) ENGINE=MergeTree ORDER BY tuple();
CREATE WINDOW VIEW wv TO dst INNER ENGINE Memory WATERMARK=ASCENDING AS SELECT count(a) AS count, market, tumbleEnd(wid) AS w_end FROM mt GROUP BY tumble(timestamp, INTERVAL '5' SECOND, 'US/Samoa') AS wid, market;
INSERT INTO mt VALUES (1, 1, '1990/01/01 12:00:00');
INSERT INTO mt VALUES (1, 2, '1990/01/01 12:00:01');
INSERT INTO mt VALUES (1, 3, '1990/01/01 12:00:02');
INSERT INTO mt VALUES (1, 4, '1990/01/01 12:00:05');
INSERT INTO mt VALUES (1, 5, '1990/01/01 12:00:06');
INSERT INTO mt VALUES (1, 6, '1990/01/01 12:00:10');
INSERT INTO mt VALUES (1, 7, '1990/01/01 12:00:11');
INSERT INTO mt VALUES (1, 8, '1990/01/01 12:00:30');
EOF

# Poll until 7 rows have been flushed to dst (the last insert's window is
# held back by the ascending watermark).
# NOTE(review): `grep -q "7"` is a substring match (would also match 17, 70,
# ...). With this fixed data set the count only ever reaches 7, so it works,
# but an exact-line match would be safer.
while true; do
	$CLICKHOUSE_CLIENT --query="SELECT count(*) FROM dst" | grep -q "7" && break || sleep .5 ||:
done

$CLICKHOUSE_CLIENT --query="SELECT * FROM dst ORDER BY market, w_end;"
$CLICKHOUSE_CLIENT --query="DROP TABLE wv"
$CLICKHOUSE_CLIENT --query="DROP TABLE mt"
$CLICKHOUSE_CLIENT --query="DROP TABLE dst"
|
#!/usr/bin/env bash
# CI driver: build the Spartacus libraries and storefront app, then run the
# requested Cypress end-to-end suite (optionally in SSR mode).
set -e
set -o pipefail

POSITIONAL=()

readonly help_display="Usage: $0 [ command_options ] [ param ]
command options:
--suite, -s e2e suite to run (b2c, b2b, cds, flaky). Default: b2c
--environment, --env [ 2005 | 2011 | ccv2]. Default: 2005
--help, -h show help
--ssr Run ssr smoke test
"

# Parse leading --options; anything unrecognized aborts with an error.
while [ "${1:0:1}" == "-" ]
do
    case "$1" in
        '--suite' | '-s' )
            # Stored with a leading ':' so it can be appended directly to
            # the yarn script name below (e.g. e2e:run:ci:b2b).
            SUITE=":$2"
            shift
            shift
            ;;
        '--environment' | '--env' )
            CI_ENV=":$2"
            shift
            shift
            ;;
        '--ssr' )
            SSR=true
            shift
            ;;
        '--help' | '-h' )
            echo "$help_display"
            exit 0
            ;;
        * )
            # NOTE(review): the value is appended to POSITIONAL but the very
            # next statements exit, so the accumulation is effectively dead.
            POSITIONAL+=("$1")
            shift
            echo "Error: unknown option: ${POSITIONAL}"
            exit 1
            ;;
    esac
done
set -- "${POSITIONAL[@]}"

# Map the special ccv2 suites onto SPA_ENV for the storefront build.
if [ "$SUITE" == ":ccv2" ]; then
    export SPA_ENV='ccv2,b2c'
fi

if [ "$SUITE" == ":ccv2-b2b" ]; then
    export SPA_ENV='ccv2,b2b'
fi

echo '-----'
echo "Building Spartacus libraries"
yarn install
(cd projects/storefrontapp-e2e-cypress && yarn install)
# Keep a build log so it can be scanned for DI metadata warnings below.
yarn build:libs 2>&1 | tee build.log

# Angular emits "Can't resolve all parameters for ..." when dependency
# injection metadata is broken; treat any occurrence as a build failure.
results=$(grep "Warning: Can't resolve all parameters for" build.log || true)
if [[ -z "${results}" ]]; then
    echo "Success: Spartacus production build was successful."
    rm build.log
else
    echo "ERROR: Spartacus production build failed. Check the import statements. 'Warning: Can't resolve all parameters for ...' found in the build log."
    rm build.log
    exit 1
fi

echo '-----'
echo "Building Spartacus storefrontapp"
yarn build

if [[ "${SSR}" = true ]]; then
    echo "Building Spartacus storefrontapp (SSR PROD mode)"
    yarn build:ssr:ci
    echo "Starting Spartacus storefrontapp in SSR mode"
    # Start the SSR server in the background (subshell detaches it).
    (yarn serve:ssr:ci &)
    echo '-----'
    echo "Running SSR Cypress smoke test"
    yarn e2e:run:ci:ssr
else
    yarn start:pwa &
    echo '-----'
    echo "Running Cypress end to end tests"
    # Pull-request builds run the lighter "core" variant of the suite.
    if [ "${TRAVIS_PULL_REQUEST}" == "false" ]; then
        yarn e2e:run:ci"${SUITE}"
    else
        yarn e2e:run:ci:core"${SUITE}"
    fi
fi
|
#
# Be sure to run `pod lib lint StepperView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods specification for the StepperView SwiftUI component.
Pod::Spec.new do |s|
  s.name             = 'StepperView'
  s.version          = '1.6.7'
  s.swift_version    = '5.0'
  s.summary          = 'SwiftUI iOS component for Step Indications.'
  s.description      = 'Stepper View Indication componet for SwiftUI'
  s.homepage         = 'https://github.com/badrinathvm/StepperView'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'badrinathvm' => '<EMAIL>' }
  # The published tag must match s.version.
  s.source           = { :git => 'https://github.com/badrinathvm/StepperView.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/badrivm'
  s.ios.deployment_target = '11.0'
  s.watchos.deployment_target = '6.0'
  s.source_files = 'Sources/**/*.swift'
end
|
<reponame>lsieun/learn-AI
#!/usr/bin/python
# -*- encoding: utf-8
import numpy as np
import pandas as pd
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
def enum_row(row):
    """Print the ``state`` value of a single DataFrame row (apply helper)."""
    state_value = row['state']
    print(state_value)
def find_state_code(row):
    """Print the best fuzzy match of the row's state against the known names.

    Rows whose ``state`` column is 0 (missing value) are skipped.
    """
    raw_state = row['state']
    if raw_state == 0:
        return
    print(process.extractOne(raw_state, states, score_cutoff=80))
def capital(word):
    """Return *word* with the first character upper-cased and the rest lower-cased.

    Thin wrapper around ``str.capitalize`` used to re-case matched state names
    word by word. Fixed: the parameter was named ``str``, shadowing the builtin.
    """
    return word.capitalize()
def correct_state(row):
    """Return the row's state re-cased to Title Case when it fuzzy-matches a
    known state name; otherwise return the cell content unchanged.
    """
    raw_state = row['state']
    if raw_state != 0:
        match = process.extractOne(raw_state, states, score_cutoff=80)
        if match:
            matched_name = match[0]
            return ' '.join(capital(word) for word in matched_name.split(' '))
    # Missing value or no match above the cutoff: keep the original content.
    return row['state']
def fill_state_code(row):
    """Return the two-letter code for the row's fuzzy-matched state name,
    or ``''`` when the state is missing or nothing clears the score cutoff.
    """
    raw_state = row['state']
    if raw_state != 0:
        match = process.extractOne(raw_state, states, score_cutoff=80)
        if match:
            return state_to_code[match[0]]
    return ''
if __name__ == "__main__":
    pd.set_option('display.width', 200)
    # NOTE(review): `sheetname=` is the pre-0.21 pandas spelling (modern
    # pandas uses `sheet_name=`), and the Windows-style relative path
    # suggests this script targets an old pandas on Windows — confirm
    # before upgrading the environment.
    data = pd.read_excel('..\\sales.xlsx', sheetname='sheet1', header=0)
    print('data.head() = \n', data.head())
    print('data.tail() = \n', data.tail())
    print('data.dtypes = \n', data.dtypes)
    print('data.columns = \n', data.columns)
    for c in data.columns:
        print(c, end=' ')
    print()
    # Per-row total across the three monthly columns.
    data['total'] = data['Jan'] + data['Feb'] + data['Mar']
    print(data.head())
    print(data['Jan'].sum())
    print(data['Jan'].min())
    print(data['Jan'].max())
    print(data['Jan'].mean())
    print('=============')
    # Add a summary row of column sums at the bottom (step by step first).
    s1 = data[['Jan', 'Feb', 'Mar', 'total']].sum()
    print(s1)
    s2 = pd.DataFrame(data=s1)
    print(s2)
    print(s2.T)
    print(s2.T.reindex(columns=data.columns))
    # Equivalently, in one expression:
    s = pd.DataFrame(data=data[['Jan', 'Feb', 'Mar', 'total']].sum()).T
    s = s.reindex(columns=data.columns, fill_value=0)
    print(s)
    # NOTE(review): DataFrame.append was removed in pandas 2.0 (use
    # pd.concat), and index 15 hard-codes the row count of sales.xlsx.
    data = data.append(s, ignore_index=True)
    data = data.rename(index={15:'Total'})
    print(data.tail())
    # Demonstrate DataFrame.apply with the row helpers defined above.
    print('==============apply的使用==========')
    data.apply(enum_row, axis=1)
    # Canonical US state/territory names mapped to their two-letter postal
    # codes; the keys double as the fuzzy-match vocabulary below.
    state_to_code = {"VERMONT": "VT", "GEORGIA": "GA", "IOWA": "IA", "Armed Forces Pacific": "AP", "GUAM": "GU",
                     "KANSAS": "KS", "FLORIDA": "FL", "AMERICAN SAMOA": "AS", "NORTH CAROLINA": "NC", "HAWAII": "HI",
                     "NEW YORK": "NY", "CALIFORNIA": "CA", "ALABAMA": "AL", "IDAHO": "ID",
                     "FEDERATED STATES OF MICRONESIA": "FM",
                     "Armed Forces Americas": "AA", "DELAWARE": "DE", "ALASKA": "AK", "ILLINOIS": "IL",
                     "Armed Forces Africa": "AE", "SOUTH DAKOTA": "SD", "CONNECTICUT": "CT", "MONTANA": "MT",
                     "MASSACHUSETTS": "MA",
                     "PUERTO RICO": "PR", "Armed Forces Canada": "AE", "NEW HAMPSHIRE": "NH", "MARYLAND": "MD",
                     "NEW MEXICO": "NM",
                     "MISSISSIPPI": "MS", "TENNESSEE": "TN", "PALAU": "PW", "COLORADO": "CO",
                     "Armed Forces Middle East": "AE",
                     "NEW JERSEY": "NJ", "UTAH": "UT", "MICHIGAN": "MI", "WEST VIRGINIA": "WV", "WASHINGTON": "WA",
                     "MINNESOTA": "MN", "OREGON": "OR", "VIRGINIA": "VA", "VIRGIN ISLANDS": "VI",
                     "MARSHALL ISLANDS": "MH",
                     "WYOMING": "WY", "OHIO": "OH", "SOUTH CAROLINA": "SC", "INDIANA": "IN", "NEVADA": "NV",
                     "LOUISIANA": "LA",
                     "NORTHERN MARIANA ISLANDS": "MP", "NEBRASKA": "NE", "ARIZONA": "AZ", "WISCONSIN": "WI",
                     "NORTH DAKOTA": "ND",
                     "Armed Forces Europe": "AE", "PENNSYLVANIA": "PA", "OKLAHOMA": "OK", "KENTUCKY": "KY",
                     "RHODE ISLAND": "RI",
                     "DISTRICT OF COLUMBIA": "DC", "ARKANSAS": "AR", "MISSOURI": "MO", "TEXAS": "TX", "MAINE": "ME"}
    states = list(state_to_code.keys())
    # Quick fuzzywuzzy sanity checks on the vocabulary.
    print(fuzz.ratio('Python Package', 'PythonPackage'))
    print(process.extract('Mississippi', states))
    print(process.extract('Mississipi', states, limit=1))
    print(process.extractOne('Mississipi', states))
    data.apply(find_state_code, axis=1)
    print('Before Correct State:\n', data['state'])
    data['state'] = data.apply(correct_state, axis=1)
    print('After Correct State:\n', data['state'])
    # New column at position 5, then filled from the fuzzy matcher.
    data.insert(5, 'State Code', np.nan)
    data['State Code'] = data.apply(fill_state_code, axis=1)
    print(data)
    # Group by the derived state code.
    print('==============group by================')
    print(data.groupby('State Code'))
    print('All Columns:\n')
    print(data.groupby('State Code').sum())
    print('Short Columns:\n')
    print(data[['State Code', 'Jan', 'Feb', 'Mar', 'total']].groupby('State Code').sum())
    # Write the enriched table out to a new workbook.
    data.to_excel('sales_result.xls', sheet_name='Sheet1', index=False)
|
<filename>sms-api.js
var SMSAPI = require("smsapi"),
    smsapi = new SMSAPI({
        normalize: true,
        format: 'json'
    });

// Log in at module load time, then send a message once authenticated.
// NOTE(review): `this.sendMessage` relies on `this` being `module.exports`
// at module scope AND on the `exports.sendMessage` assignment below having
// run before the login promise resolves. That holds in practice (the .then
// callback fires on a later tick), but referencing `exports.sendMessage`
// directly would be more robust — confirm before changing.
smsapi.authentication
    .login("<EMAIL>", "<PASSWORD>")
    .then(this.sendMessage)
    .then(displayResult)
    .catch(displayError);
exports.sendTest = function sendTest(msg) {
console.log(msg);
return msg;
}
// Send a single SMS through the smsapi client.
// msg: { from: string, to: string, text: string }
// Returns a Promise resolving to the API result.
exports.sendMessage = function sendMessage(msg) {
    return smsapi.message
        .sms()
        .from(msg.from) // "Info"
        .to(msg.to) // "883543667"
        .message(msg.text)
        .normalize() // replace polish letters
        .execute(); // return Promise
}
// Pretty-print a successful smsapi response.
function displayResult(result) {
    var prefix = 'smsApi info:\n';
    console.log(prefix, result, '\n');
}
// Report a failed smsapi call on stderr.
function displayError(err) {
    console.error(err);
}
// A route is authorized exactly when its admin requirement matches the
// user's admin status: admin-only routes need admins, non-admin routes
// need non-admins. Truthiness of both flags is what counts, as before.
const isAuthorized = (user, route) => !!route.requiresAdmin === !!user.isAdmin
mkdir -p ~/.streamlit/

# Write the Streamlit credentials and server config.
# Fixed: the original used `echo "...\n..."`, whose \n escapes are only
# expanded by shells whose builtin echo interprets them (e.g. dash); under
# bash the files would contain literal "\n" sequences instead of newlines.
# Heredocs write real newlines under any POSIX shell.
cat > ~/.streamlit/credentials.toml <<EOF
[general]
email = "f.caro@lmu.campus.de"
EOF

# $PORT is injected by the hosting platform (e.g. Heroku) and expands
# inside the unquoted heredoc.
cat > ~/.streamlit/config.toml <<EOF
[server]
headless = true
enableCORS=false
port = $PORT
EOF
#!/bin/bash -e

# Tear down all CloudFormation stacks of the fargate game servers sample,
# in reverse dependency order. Aborts on the first AWS CLI failure (-e).

# Get the configuration variables (provides $region)
source configuration.sh

# Require explicit confirmation before destroying anything.
read -p "Are you sure you want to delete all resources? " -n 1 -r
echo # (optional) move to a new line
if [[ ! $REPLY =~ ^[Yy]$ ]]
then
    exit 1
fi

echo "Delete Cognito Stack.."
aws cloudformation --region $region delete-stack --stack-name fargate-game-servers-cognito
aws cloudformation --region $region wait stack-delete-complete --stack-name fargate-game-servers-cognito
echo "Done deleting stack!"

echo "Delete Backend Services Stack.."
aws cloudformation --region $region delete-stack --stack-name fargate-game-servers-backend
aws cloudformation --region $region wait stack-delete-complete --stack-name fargate-game-servers-backend
echo "Done deleting stack!"

echo "Delete Task Definition Stack.."
aws cloudformation --region $region delete-stack --stack-name fargate-game-servers-task-definition
aws cloudformation --region $region wait stack-delete-complete --stack-name fargate-game-servers-task-definition
echo "Done deleting stack!"

echo "Delete Redis Stack.."
aws cloudformation --region $region delete-stack --stack-name fargate-game-servers-elasticache-redis
aws cloudformation --region $region wait stack-delete-complete --stack-name fargate-game-servers-elasticache-redis
echo "Done deleting stack!"

# Running ECS tasks would block deletion of the cluster stack, so the
# operator must stop them manually first.
read -p "ACTION REQUIRED: Go to the ECS Cluster Tasks in AWS Management Console and STOP ALL TASKS. Once ready, press any key. " -n 1 -r
echo # (optional) move to a new line

echo "Delete VPC and ECS Stack.."
aws cloudformation --region $region delete-stack --stack-name fargate-game-servers-ecs-resources
aws cloudformation --region $region wait stack-delete-complete --stack-name fargate-game-servers-ecs-resources
echo "Done deleting stack!"

echo "All Resources cleaned up!"
#
# DataTables build environment variables and common functions
#

# External tool locations used by the helpers below.
CLOSURE="/usr/local/closure_compiler/compiler.jar"
JSHINT="/usr/bin/jshint"

# CSS styling frameworks that DataTables supports
FRAMEWORKS=(
	'bootstrap5'
	'bootstrap4'
	'bootstrap'
	'bulma'
	'foundation'
	'jqueryui'
	'semanticui'
	'material'
	'uikit'
	'dataTables'
)
# Extract the version number from a source file's "@version" doc tag.
#
# $1 - string - file to get the version from
function version_from_file {
	local version_number
	version_number=$(grep " * @version " $1 | awk -F" " '{ print $3 }')
	echo $version_number
}
# Colored console-output helpers (cyan section headers, green progress
# messages, red errors).

# $1 - string - section name to echo
function echo_section {
	# Cyan
	printf "\033[0;36m  %s\033[0m \n" "$1"
}

# $1 - string - message to echo
function echo_msg {
	# Green
	printf "\033[0;32m    %s\033[0m \n" "$1"
}

# $1 - string - error to echo
function echo_error {
	# Red
	printf "\033[0;31m  %s\033[0m \n" "$1"
}
# Will compress a CSS file using SASS, saving the new file into the same
# directory as the uncompressed file, but with `.min.css` as the extension.
#
# Skipped entirely when $DEBUG is set.
# NOTE(review): FILE and DIR are not declared `local`, so calling this
# clobbers those names in the caller's scope — confirm nothing relies on it.
#
# $1 - string - Full path to the file to compress
function css_compress {
	# Only compresses CSS at the moment
	if [ -z "$DEBUG" ]; then
		FILE=$(basename $1 .css)
		DIR=$(dirname $1)

		echo_msg "CSS compressing $FILE.css"
		sass --no-charset --stop-on-error --style compressed $DIR/$FILE.css > $DIR/$FILE.min.css

		echo_msg "  File size: $(ls -l $DIR/$FILE.min.css | awk -F" " '{ print $5 }')"
	fi
}
# Compile a SCSS file to expanded CSS next to the source, then produce the
# minified `.min.css` variant via css_compress.
#
# $1 - string - Full path to the file to compile
function scss_compile {
	FILE=$(basename $1 .scss)
	DIR=$(dirname $1)

	echo_msg "SCSS compiling $FILE.scss"
	sass --no-charset --stop-on-error --style expanded $DIR/$FILE.scss > $DIR/$FILE.css

	# css_compress re-derives FILE/DIR itself from the .css path.
	css_compress $DIR/$FILE.css
}
# Compile SCSS files for a specific extension and the supported frameworks.
# Each framework-specific .scss is compiled in place and the source .scss
# removed afterwards.
#
# $1 - string - Extension name (camelCase)
# $2 - string  Build directory where the CSS files should be created
function css_frameworks {
	EXTN=$1
	DIR=$2

	for FRAMEWORK in ${FRAMEWORKS[*]}; do
		# Consistency fix: the existence test previously mixed $1 with $EXTN;
		# both hold the same value, but EXTN is used uniformly now.
		if [ -e $DIR/$EXTN.$FRAMEWORK.scss ]; then
			scss_compile $DIR/$EXTN.$FRAMEWORK.scss
			rm $DIR/$EXTN.$FRAMEWORK.scss
		fi
	done
}
# Compress JS files for a specific extension and the supported frameworks.
#
# $1 string - Extension name (camelCase)
# $2 string - Build directory where the JS min files should be created
function js_frameworks {
	EXTN=$1
	DIR=$2

	for FRAMEWORK in ${FRAMEWORKS[*]}; do
		# Consistency fix: the existence test previously mixed $1 with $EXTN;
		# both hold the same value, but EXTN is used uniformly now.
		if [ -e $DIR/$EXTN.$FRAMEWORK.js ]; then
			js_compress $DIR/$EXTN.$FRAMEWORK.js
		fi
	done
}
# Will compress a JS file using Closure compiler, saving the new file into the
# same directory as the uncompressed file, but with `.min.js` as the extension.
#
# Skipped entirely when $DEBUG is set.
#
# $1 - string - Full path to the file to compress
# $2 - string - Enable ('on' - default) errors or disable ('off')
function js_compress {
	LOG=$2

	if [ -z "$DEBUG" ]; then
		FILE=$(basename $1 .js)
		DIR=$(dirname $1)

		echo_msg "JS compressing $FILE.js"

		# Closure Compiler doesn't support "important" comments so we add a
		# @license jsdoc comment to the license block to preserve it
		cp $DIR/$FILE.js /tmp/$FILE.js
		perl -i -0pe "s/^\/\*! (.*)$/\/** \@license \$1/s" /tmp/$FILE.js

		# Fixed: remove any stale error log with -f so the first run doesn't
		# print a spurious "No such file or directory" error.
		rm -f /tmp/closure_error.log
		java -jar $CLOSURE --charset 'utf-8' --js /tmp/$FILE.js > /tmp/$FILE.min.js 2> /tmp/closure_error.log

		# Fixed: the stderr redirect above always creates the log file, so
		# test that it is non-empty (-s) rather than merely present (-e).
		if [ -s /tmp/closure_error.log ]; then
			if [ -z "$LOG" -o "$LOG" = "on" ]; then
				cat /tmp/closure_error.log
			fi
		fi

		# And add the important comment back in
		perl -i -0pe "s/^\/\*/\/*!/s" /tmp/$FILE.min.js

		mv /tmp/$FILE.min.js $DIR/$FILE.min.js
		rm /tmp/$FILE.js

		echo_msg "  File size: $(ls -l $DIR/$FILE.min.js | awk -F" " '{ print $5 }')"
	fi
}
# Recursively expand `_buildInclude('<file>');` directives in a JS source
# file, producing a single flattened output file. The text before each
# directive (SPACER) is prefixed onto every included line, which keeps the
# original indentation of the include site.
#
# $1 - string - Full path to input file
# $2 - string - Full path to use for the output file
function js_require {
	IN_FILE=$(basename $1)
	DIR=$(dirname $1)
	OUT=$2
	CURR_DIR=$(pwd)

	cd $DIR

	# Disable word splitting on spaces so whole lines survive the loop.
	OLD_IFS=$IFS
	IFS='%'

	cp $IN_FILE $IN_FILE.build

	# Repeat until no _buildInclude directives remain (includes may
	# themselves contain further includes).
	grep "_buildInclude('" $IN_FILE.build > /dev/null

	while [ $? -eq 0 ]; do
		REQUIRE=$(grep "_buildInclude('" $IN_FILE.build | head -n 1)

		# Leading text of the directive line, re-applied to each included line.
		SPACER=$(echo ${REQUIRE} | cut -d _ -f 1)
		FILE=$(echo ${REQUIRE} | sed -e "s#^.*_buildInclude('##g" -e "s#');##")
		DIR=$(echo ${FILE} | cut -d \. -f 1)

		# Prefix the include's lines, then splice them in place of the
		# directive line (r inserts the file, d deletes the directive).
		sed "s#^#${SPACER}#" < ${DIR}/${FILE} > ${DIR}/${FILE}.build
		sed -e "/${REQUIRE}/r ${DIR}/${FILE}.build" -e "/${REQUIRE}/d" < $IN_FILE.build > $IN_FILE.out
		mv $IN_FILE.out $IN_FILE.build

		rm ${DIR}/${FILE}.build

		grep "_buildInclude('" $IN_FILE.build > /dev/null
	done

	mv $IN_FILE.build $OUT

	IFS=$OLD_IFS
	cd $CURR_DIR
}
# Run JSHint over a JS file
#
# Reports pass/fail via the colored echo helpers; if JSHint isn't installed
# the check is skipped (with an error-colored notice) rather than failing.
#
# $1 - string - Full path to input file
function js_hint {
	FILE=$1
	# Directory containing this script (used to locate jshint.config).
	DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

	# JSHint
	if [ -e $JSHINT ]; then
		$JSHINT --config $DIR/jshint.config $FILE

		if [ $? -eq 0 ]; then
			echo_msg "JSHint passed"
		else
			echo_error "JSHint failed"
		fi
	else
		echo_error "JSHint not installed at $JSHINT - skipping"
	fi
}
# Process XML example files into HTML files - in place! The XML files will be
# removed.
#
# Delegates to the PHP build helper with the shared example templates and
# the demo/syntax-highlighter assets from the built tree ($DT_SRC and
# $DT_BUILT are expected to be set by the calling build script).
#
# $1 - string - Path to the examples to processing - note that /examples is
#               added automatically
function examples_process {
	php ${DT_SRC}/build/examples.php \
		-d \
		-o $1 \
		-u ${DT_SRC}/build/templates/example_index.html \
		-t ${DT_SRC}/build/templates/example.html \
		-c "demo:${DT_BUILT}/examples/resources/demo.css" \
		-j "demo:${DT_BUILT}/examples/resources/demo.js" \
		-c "syntax:${DT_BUILT}/examples/resources/syntax/shCore.css" \
		-j "syntax:${DT_BUILT}/examples/resources/syntax/shCore.js" \
		-m "${DT_BUILT}/media" \
		-l "css:syntax css:demo js:syntax js:demo"
}
|
#!/bin/sh
# Register QEMU user-mode emulators with the kernel's binfmt_misc handler so
# foreign-architecture ELF binaries execute transparently via /qemu-*.
set -e

# Mount if neccessary
if [ ! -d /proc/sys/fs/binfmt_misc ]; then
    echo "No binfmt support in the kernel."
    echo "  Try: '/sbin/modprobe binfmt_misc' from the host"
    exit 1
fi
if [ ! -f /proc/sys/fs/binfmt_misc/register ]; then
    mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
fi

# Reset all pre-registered interpreters, if requested
# (writing -1 to an entry unregisters it, per the binfmt_misc interface).
if [ "$1" = "--reset" ]; then
    (
        cd /proc/sys/fs/binfmt_misc
        for file in *; do
            case "${file}" in
                status|register)
                    ;;
                *)
                    echo -1 > "${file}"
                    ;;
            esac
        done
    )
fi

# Register new interpreters
# - important: using flags 'C' and 'F'
#   (per the kernel binfmt_misc docs: C preserves setuid-style credentials,
#   F makes the kernel open the interpreter at registration time so it works
#   inside containers/chroots that lack the binary).
# Each line: name, magic match on the ELF header (machine type), mask, the
# interpreter path, and the flags.
echo ':qemu-arm:M::\x7fELF\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x28\x00:\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff:/qemu-arm:CF' > /proc/sys/fs/binfmt_misc/register
echo ':qemu-aarch64:M::\x7fELF\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\xb7\x00:\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff:/qemu-aarch64:CF' > /proc/sys/fs/binfmt_misc/register
echo ':qemu-ppc64le:M::\x7fELF\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x15\x00:\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\x00:/qemu-ppc64le:CF' > /proc/sys/fs/binfmt_misc/register
echo ':qemu-riscv64:M::\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\xf3\x00:\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff:/qemu-riscv64:CF' > /proc/sys/fs/binfmt_misc/register

# Show results
echo "---"
echo "Installed interpreter binaries:"
ls -al /qemu-*
echo "---"
cd /proc/sys/fs/binfmt_misc
for file in *; do
    case "${file}" in
        status|register)
            ;;
        *)
            echo "Registered interpreter=${file}"
            cat ${file}
            echo "---"
            ;;
    esac
done
<reponame>fortify-ps/FortifyImportExportUtility
/*******************************************************************************
* (c) Copyright 2020 Micro Focus or one of its affiliates
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to
* whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
* KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
******************************************************************************/
package com.fortify.impexp.from.ssc.release.loader.config;
import java.util.LinkedHashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import com.fortify.client.ssc.api.SSCAttributeDefinitionAPI.SSCAttributeDefinitionHelper;
import com.fortify.client.ssc.api.query.builder.SSCApplicationVersionsQueryBuilder;
import com.fortify.client.ssc.api.query.builder.SSCOrderBy;
import com.fortify.impexp.common.from.annotation.FromPluginComponent;
import com.fortify.impexp.from.ssc.annotation.FromSSC;
import com.fortify.util.spring.expression.TemplateExpression;
import lombok.Data;
@Data
@FromPluginComponent @FromSSC
@ConfigurationProperties("from.ssc.load.releases")
public final class FromSSCReleaseLoaderConfig {
	private static final Logger LOG = LoggerFactory.getLogger(FromSSCReleaseLoaderConfig.class);
	// toString() of a default-constructed instance; isConfigured() compares
	// against this to detect whether any property was actually set.
	// (Relies on Lombok @Data generating a value-based toString.)
	private static final String EMPTY_TO_STRING = new FromSSCReleaseLoaderConfig().toString();

	// Raw value of the property, defaulting to "undefined" when absent.
	@Value("${from.ssc.load.releases:undefined}") private String property = "undefined";

	// Sub-configurations bound from from.ssc.load.releases.* by Spring Boot.
	private final FromSSCReleaseLoaderIncludeConfig include = new FromSSCReleaseLoaderIncludeConfig();
	private final FromSSCReleaseLoaderEntityTransformerConfig transform = new FromSSCReleaseLoaderEntityTransformerConfig();
	private final FromSSCReleaseLoaderEntityFilterConfig filter = new FromSSCReleaseLoaderEntityFilterConfig();
	private final SSCOrderBy orderBy = new SSCOrderBy();
	private final Map<String, TemplateExpression> overrideProperties = new LinkedHashMap<String, TemplateExpression>();

	/**
	 * This method indicates whether the current instance has been configured,
	 * returning false if the {@link #toString()} value equals the {@link #toString()}
	 * value of an empty instance.
	 *
	 * @return true when at least one configuration property was set
	 */
	public boolean isConfigured() {
		return !EMPTY_TO_STRING.equals(this.toString());
	}

	/**
	 * Applies the include, transform, filter and order-by sub-configurations
	 * to the given SSC application-version query builder, logging each step.
	 */
	public void updateQueryBuilder(SSCApplicationVersionsQueryBuilder qb, SSCAttributeDefinitionHelper attributeDefinitionHelper) {
		logQBUpdate(qb, include);
		FromSSCReleaseLoaderIncludeConfigQueryBuilderUpdater.updateQueryBuilder(qb, include, attributeDefinitionHelper);
		logQBUpdate(qb, transform);
		FromSSCReleaseLoaderEntityTransformerQueryBuilderUpdater.updateQueryBuilder(qb, transform);
		logQBUpdate(qb, filter);
		FromSSCReleaseLoaderEntityFilterConfigQueryBuilderUpdater.updateQueryBuilder(qb, filter);
		logQBUpdate(qb, orderBy);
		qb.paramOrderBy(true, orderBy);
	}

	// Debug-log which sub-configuration is about to be applied to the builder.
	private <C> void logQBUpdate(SSCApplicationVersionsQueryBuilder qb, C config) {
		LOG.debug("Updating {} with configuration {}", qb, config);
	}
}
|
<reponame>martinezlucas98/LP3TP
package py.edu.uca.lp3.domain;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import java.util.ArrayList;
/**
 * JPA entity describing a tournament: its name, its type (defaulting to
 * "Nacional"), and the list of participant names.
 */
@Entity
public class Torneo implements Serializable {

    private static final long serialVersionUID = -1796674952194499268L;

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long id;

    private String nombreDelTorneo;

    // "Nacional" or "Internacional"; new tournaments default to national.
    private String tipo = "Nacional";

    private ArrayList<String> participantes;

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getNombreDelTorneo() {
        return nombreDelTorneo;
    }

    public void setNombreDelTorneo(String nombreDelTorneo) {
        this.nombreDelTorneo = nombreDelTorneo;
    }

    public String getTipo() {
        return tipo;
    }

    public void setTipo(String tipo) {
        this.tipo = tipo;
    }

    public ArrayList<String> getParticipantes() {
        return participantes;
    }

    public void setParticipantes(ArrayList<String> participantes) {
        this.participantes = participantes;
    }
}
|
<gh_stars>10-100
from cradmin_legacy.viewhelpers.update import UpdateView
from cradmin_legacy.viewhelpers.crudbase import OnlySaveButtonMixin
from devilry.apps.core import models as coremodels
class AssignmentLongAndShortNameUpdateView(OnlySaveButtonMixin, UpdateView):
    """Edit form for an assignment's long and short names, saved with a single button."""

    model = coremodels.Assignment
    fields = ['long_name', 'short_name']

    def get_queryset_for_role(self, role):
        """Restrict the editable queryset to the assignment named by the URL's ``pk``."""
        assignment_id = self.kwargs['pk']
        return self.model.objects.filter(id=assignment_id)
|
#!/bin/bash
# Required files:
# ${SEED_DIR}/${USERNAME}
# ${SEED_DIR}/aspera_id_rsa
# ${SEED_DIR}/aspera_id_rsa.pub
# ${SEED_DIR}/sshd_config
# Runtime identity and filesystem layout for the Aspera container.
USERNAME=aspera
# Container flavour (HSTE or HSTS), baked into the image at /aspera-type.
ASPERA_TYPE=$(cat /aspera-type)
SEED_DIR=/mnt/aspera/seed
SECRETS_DIR=/mnt/aspera/.secrets
# PIDs of the daemons launched below; populated by startSshd/startRsyslog.
SSHD_PID=
RSYSLOG_PID=
# Pick the file-transfer base directory for this container flavour.
if [[ "${ASPERA_TYPE}" == "HSTE" ]]; then
    FT_BASEDIR=/mjdi/local/GFT
elif [[ "${ASPERA_TYPE}" == "HSTS" ]]; then
    FT_BASEDIR=/desbs/stg
else
    echo "Unsupported Aspera container type: '${ASPERA_TYPE}'"
    exit 1
fi
function ensureUser()
{
    # Arbitrary-UID support (e.g. OpenShift): if the current UID has no passwd
    # entry, synthesize one so tools that rely on whoami/getpwuid work.
    if ! whoami &> /dev/null; then
        if [ -w /etc/passwd ]; then
            # NOTE(review): this reads USER_NAME/HOME while the rest of the script
            # uses USERNAME=aspera — confirm whether USER_NAME is really exported,
            # or whether the generic "default" entry is intentional.
            echo "${USER_NAME:-default}:x:$(id -u):0:${USER_NAME:-default} user:${HOME}:/sbin/nologin" >> /etc/passwd
        fi
    fi
}
function createDirectories()
{
    # Create the transfer directories and the Aspera config directory, then
    # open them up to user+group (0770). The && chain stops creating on the
    # first failure; the chmods still run unconditionally afterwards.
    mkdir -p "${FT_BASEDIR}/in" && \
        mkdir -p "${FT_BASEDIR}/out" && \
        mkdir -p "${FT_BASEDIR}/archive" && \
        mkdir -p /opt/aspera/etc
    chmod 770 "${FT_BASEDIR}"/*
    chmod 770 /opt/aspera/etc
}
function startSshd()
{
    # Generate any missing host keys on first start (idempotent on restarts).
    [[ -f /home/${USERNAME}/.ssh/ssh_host_rsa_key ]] || ssh-keygen -f /home/${USERNAME}/.ssh/ssh_host_rsa_key -N '' -t rsa
    [[ -f /home/${USERNAME}/.ssh/ssh_host_ecdsa_key ]] || ssh-keygen -f /home/${USERNAME}/.ssh/ssh_host_ecdsa_key -N '' -t ecdsa
    [[ -f /home/${USERNAME}/.ssh/ssh_host_ed25519_key ]] || ssh-keygen -f /home/${USERNAME}/.ssh/ssh_host_ed25519_key -N '' -t ed25519
    # Run sshd in the foreground (-D) as a background job so that $! really is
    # sshd's PID. Previously sshd daemonized itself and $! (the last background
    # job, of which there were none) never referred to it, so SSHD_PID was bogus.
    /usr/sbin/sshd -D -f /home/${USERNAME}/sshd_config -E /home/${USERNAME}/sshdLog &
    SSHD_PID=$!
    # Give sshd a moment to parse its config, then confirm it is still alive.
    sleep 1
    kill -0 "${SSHD_PID}" 2>/dev/null || return 1
}
function startRsyslog()
{
    # Run rsyslogd in the foreground (-n) as a background job so that $! is its
    # real PID; previously rsyslogd daemonized and $! never referred to it.
    /usr/sbin/rsyslogd -n -f /opt/aspera/etc/rsyslog.conf &
    RSYSLOG_PID=$!
    sleep 1
    kill -0 "${RSYSLOG_PID}" 2>/dev/null || return 1
}
function copyFiles()
{
    # Seed the runtime configuration. Chained with && so that any failed copy
    # propagates a non-zero status to the caller's `copyFiles || exit 1`;
    # previously only the status of the last cp was visible.
    cp ${SEED_DIR}/sshd_config /home/${USERNAME}/sshd_config && \
    cp ${SEED_DIR}/aspera.conf /opt/aspera/etc/aspera.conf && \
    cp ${SEED_DIR}/rsyslog.conf /opt/aspera/etc/rsyslog.conf
}
function initAspera()
{
    # Initialise Aspera user data when the binary is present; images built
    # without Aspera only get a warning.
    if [[ -f /opt/aspera/bin/asuserdata ]]
    then
        /opt/aspera/bin/asuserdata -v
    else
        echo "WARN: Aspera not present - failed to initialise Aspera"
    fi
}
function copyKeys()
{
    # Install the transfer keypair and license. One && chain end-to-end: the
    # license step previously ran unconditionally and its status masked earlier
    # cp/chown/chmod failures from the caller's `copyKeys || exit 1`.
    # (Also drops the useless `cat | base64` in favour of a redirect.)
    cp "${SECRETS_DIR}/aspera_id_rsa.pub" /home/${USERNAME}/.ssh/id_rsa.pub && \
    cp "${SECRETS_DIR}/aspera_id_rsa" /home/${USERNAME}/.ssh/id_rsa && \
    cp /home/${USERNAME}/.ssh/id_rsa.pub /home/${USERNAME}/.ssh/authorized_keys && \
    chown -R ${USERNAME}:root /home/${USERNAME}/.ssh/authorized_keys && \
    chmod 600 /home/${USERNAME}/.ssh/authorized_keys /home/${USERNAME}/.ssh/id_rsa && \
    base64 -w 60 < "${SECRETS_DIR}/aspera-license" > /opt/aspera/etc/aspera-license
}
function runAspera()
{
    # Start the Aspera daemons.
    # NOTE(review): both binaries are invoked without '&', so this assumes
    # asperacentral/asperanoded daemonize themselves — confirm; otherwise the
    # first call would block this script here.
    if [[ ! -d /opt/aspera/sbin ]]
    then
        echo "WARN: Aspera not present - failed to run Aspera"
    else
        /opt/aspera/sbin/asperacentral || { echo "Failed to execute /opt/aspera/sbin/asperacentral"; return 1; }
        /opt/aspera/sbin/asperanoded || { echo "Failed to execute /opt/aspera/sbin/asperanoded"; return 1; }
        # Give the daemons time to come up before seedAspera talks to them.
        sleep 10
    fi
}
function seedAspera()
{
    # Register the node-API user and create the flavour-specific access key.
    # NOTE(security): credentials (asperaNodeUser / Password123) are hard-coded
    # and curl runs with -k (TLS verification disabled) — these should come
    # from mounted secrets instead of being baked into the script.
    if [[ ! -d /opt/aspera/sbin ]]
    then
        echo "WARN: Aspera not present - failed to seed Aspera"
    else
        /opt/aspera/bin/asnodeadmin -a -u asperaNodeUser -p Password123 -x ${USERNAME}
        # Wait for asperanoded to accept API requests before POSTing.
        sleep 10
        if [[ "${ASPERA_TYPE}" == "HSTE" ]]; then
            curl -ki -u asperaNodeUser:Password123 -X POST https://localhost:9092/access_keys -d '{"id":"MjdiDeployed","secret":"/mjdi/local/GFT","storage":{"type":"local","path":"/mjdi/local/GFT"}}'
        elif [[ "${ASPERA_TYPE}" == "HSTS" ]]; then
            curl -ki -u asperaNodeUser:Password123 -X POST https://localhost:9092/access_keys -d '{"id":"MjdiCentral","secret":"/desbs/stg","storage":{"type":"local","path":"/desbs/stg"}}'
        else
            echo "Unsupported Aspera container type (cannot seed): '${ASPERA_TYPE}'"
        fi
    fi
}
function errorExit()
{
    # Print the supplied message and abort the whole script.
    local message="$*"
    echo "${message}"
    exit 1
}
# Abort loudly if the container is signalled.
trap "errorExit 'Received signal SIGHUP'" SIGHUP
trap "errorExit 'Received signal SIGINT'" SIGINT
trap "errorExit 'Received signal SIGTERM'" SIGTERM

# Main start-up sequence; every step is fatal on failure.
ensureUser || exit 1
createDirectories || exit 1
copyFiles || exit 1
copyKeys || exit 1
startRsyslog || exit 1
initAspera || exit 1
startSshd || exit 1
runAspera || exit 1
seedAspera || exit 1

# Keep the container's foreground process alive, streaming syslog output.
tail -f /var/log/messages
import java.util.Arrays;
/** Small demo: prints an int array before and after sorting it. */
public class SortArray {

    /** Print the elements of {@code arr} on one line, each followed by a space. */
    public void printArray(int arr[])
    {
        for (int value : arr) {
            System.out.print(value + " ");
        }
        System.out.println();
    }

    public static void main(String args[])
    {
        SortArray printer = new SortArray();
        int arr[] = {3, 8, 4, 5, 9, 2, 6, 1};

        System.out.println("Unsorted array:");
        printer.printArray(arr);

        // Sort ascending, in place.
        Arrays.sort(arr);

        System.out.println("Sorted array: ");
        printer.printArray(arr);
    }
}
<gh_stars>0
package com.vemec.api.services;
import com.vemec.api.models.centro.Centro;
import com.vemec.api.models.usuario.Usuario;
import com.vemec.api.models.usuario.UsuarioRepository;
import com.vemec.api.utils.Mappers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.ArrayList;
import java.util.Map;
import java.util.Optional;
@Service
public class VemecUserDetailsService implements UserDetailsService {
@Autowired
UsuarioRepository usuarioRepository;
@Autowired
private PasswordEncoder passwordEncoder;
@Override
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
Optional<Usuario> u = usuarioRepository.findByUsername(username);
if(u.isPresent()){
return new User(u.get().getUsername(), u.get().getPassword(),
new ArrayList<>());
}else{
throw new UsernameNotFoundException("No se encontro el usuario: "+ username);
}
}
public Usuario registerNewUser(Map<String, String> payload) throws Exception {
try {
Usuario usuario = new Usuario();
Mappers.mapToUsuario(payload, usuario);
if(usuario.getPassword() != null && usuario.getUsername() != null){
String pass = usuario.getPassword();
usuario.setPassword(passwordEncoder.encode(pass));
usuarioRepository.save(usuario);
return usuario;
}else{
throw new Exception("El campo username y password no deben ser vacios");
}
}
catch (Exception e) {
throw e;
}
}
} |
package com.telenav.osv.recorder.camera.util;
/**
 * The available types for the aspect ratio:
 * <ul>
 * <li>{@link #ASPECT_RATIO_16_9}</li>
 * <li>{@link #ASPECT_RATIO_4_3}</li>
 * </ul>
 * <p>
 * NOTE(review): fields on an annotation type are implicitly public static
 * final, so this {@code @interface} acts as a constants holder; presumably it
 * pairs with an Android-style typedef annotation convention — confirm.
 */
public @interface AspectRatioTypes {

    /**
     * The 16:9 aspect ratio.
     */
    AspectRatio ASPECT_RATIO_16_9 = AspectRatio.createAspectRatio(16, 9);

    /**
     * The 4:3 aspect ratio.
     */
    AspectRatio ASPECT_RATIO_4_3 = AspectRatio.createAspectRatio(4, 3);
}
|
// Size of libuv's worker thread pool; must be set before any threadpool work
// is scheduled (Node's documented default is 4).
process.env.UV_THREADPOOL_SIZE = 4;

const crypto = require('crypto');
const http = require('http');
const cluster = require('cluster');
const numCPUs = require('os').cpus().length;
const taskSimulator = require('./task-simulator')

if (cluster.isMaster) {
    // Master process: fork one worker per CPU and log worker exits.
    console.log(`Detectado ${numCPUs} CPUs...`)
    console.log(`Master ${process.pid} is running`);

    for (let i = 0; i < numCPUs; i++) {
        cluster.fork();
    }

    cluster.on('exit', (worker, code, signal) => {
        console.log(`worker ${worker.process.pid} died`);
    });
} else {
    // Worker process: any non-root path is answered immediately; the root path
    // runs a 4-second simulated CPU-bound task before responding.
    const server = http.createServer((req, res) => {
        res.writeHead(200);

        const path = req.url.substr(1);
        if (path) {
            res.end('Hi there');
            return;
        }

        taskSimulator.simulate(4000)
        console.log(`Worker ${process.pid} responded`);
        res.end('Hi there');
    })

    server.listen(3000);
    console.log(`Worker ${process.pid} started`);
}
|
#!/bin/sh
# If you update this from Windows, using Notepad ++, do the following:
# sudo apt-get -y install dos2unix
# dos2unix <FILE>
# chmod u+x <FILE>
#
# Christopher Gray
# version 0.0.6
# 10/13/2018
#
# Prints a diagnostic snapshot of the local Docker installation: version, info,
# images, containers, volumes and the bridge network, plus debugging hints.
# NOTE(review): the "\r\n" sequences rely on `echo` interpreting backslash
# escapes; dash's builtin echo does, but not every /bin/sh — confirm the
# target shell, or switch to printf for portability.
clear
echo "\r\n \r\n Docker Tests \r\n \r\n"
echo " Link: https://www.digitalocean.com/community/tutorials/how-to-remove-docker-images-containers-and-volumes \r\n \r\n"
echo "Docker version: (docker version) \r\n "
docker version
echo "\r\n Docker Info: (docker info) \r\n "
docker info
echo "\r\n Docker Images downloaded.. (docker images -a) \r\n "
docker images -a
echo "\r\n Docker Containers running: (docker ps -a) \r\n "
docker ps -a
echo "\r\n \r\n"
docker ps
echo "\r\n Docker Volumes: (docker volume ls) \r\n "
docker volume ls
echo "\r\n Docker Networking: (docker network inspect bridge) \r\n "
docker network inspect bridge
echo "\r\n Cheat sheet for deleting a container: https://www.digitalocean.com/community/tutorials/how-to-remove-docker-images-containers-and-volumes \r\n \r\n "
echo "\r\n---------------- DEBUG Commands ---------------------------- \r\n \r\n"
echo "SSH into nginx container: docker exec -it nginx /bin/bash \r\n "
echo "\r\n \r\n"
|
import Foundation
/// One extracted localised string: its lookup key and the literal value found
/// in source.
struct Localisation {
    let key: String
    let value: String
}
/// Scans the file at `path` for `.localised(with: "...")` call sites and
/// returns one `Localisation` per captured string.
/// Returns an empty array when the file cannot be read or decoded as UTF-8.
func parseAndExtractLocalisations(fromFileAtPath path: String) -> [Localisation] {
    // `let` instead of `var`: the contents are never mutated. The original also
    // computed matchCount/localisationsCount and never used them — removed.
    guard let data = try? Data(contentsOf: URL(fileURLWithPath: path)), let string = String(data: data, encoding: String.Encoding.utf8) else { return [] }
    print("Parsing \(path.components(separatedBy: "/").last ?? path) for Localised Strings")
    var newLocalisations: [Localisation] = []
    // Captures the quoted argument of `.localised(with: "<value>"`.
    let basePattern = ".localised\\(with:\\s*\\\"([^\n\t\"]*)\\\"\\s*"
    // The pattern is a compile-time constant, so force-try cannot fail in practice.
    let regex = try! NSRegularExpression(pattern: basePattern, options: [])
    let matches = regex.matches(in: string, options: [], range: NSRange(location: 0, length: string.utf16.count))
    for match in matches {
        if let range = Range(match.range(at: 1), in: string) {
            let localizedString = String(string[range])
            // NOTE(review): the key is the literal placeholder "key"; presumably
            // it should be derived from the match too — confirm against callers.
            let localisation = Localisation(key: "key", value: localizedString)
            newLocalisations.append(localisation)
        }
    }
    return newLocalisations
}
// Helper extension to count instances of a substring in a string.
extension String {
    /// Number of non-overlapping occurrences of `substring` in the receiver.
    func countInstances(of substring: String) -> Int {
        let pieces = components(separatedBy: substring)
        return pieces.count - 1
    }
}
// Example usage — expects a readable UTF-8 file at `path`; an unreadable file
// yields an empty array.
let path = "/path/to/your/file.txt"
let extractedLocalisations = parseAndExtractLocalisations(fromFileAtPath: path)
print("Extracted Localisations: \(extractedLocalisations)")
package io.swagger.model.germ;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModelProperty;
import io.swagger.model.germ.SeedLotNewTransactionRequest;
import org.springframework.validation.annotation.Validated;
/**
 * SeedLotTransaction — a seed-lot transaction record extending the
 * "new transaction" request with its server-assigned identifier.
 * Generated by swagger-codegen (see @Generated); prefer regenerating over
 * hand-editing.
 */
@Validated
@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2020-03-20T16:33:36.513Z[GMT]")
public class SeedLotTransaction extends SeedLotNewTransactionRequest {
    @JsonProperty("transactionDbId")
    private String transactionDbId = null;

    /** Fluent setter: assigns the DbId and returns this instance for chaining. */
    public SeedLotTransaction transactionDbId(String transactionDbId) {
        this.transactionDbId = transactionDbId;
        return this;
    }

    /**
     * Unique DbId for the Seed Lot Transaction
     * @return transactionDbId
     **/
    @ApiModelProperty(example = "28e46db9", required = true, value = "Unique DbId for the Seed Lot Transaction")
    public String getTransactionDbId() {
        return transactionDbId;
    }

    public void setTransactionDbId(String transactionDbId) {
        this.transactionDbId = transactionDbId;
    }

    // Equality considers the DbId plus all inherited fields (via super.equals).
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SeedLotTransaction seedLotTransaction = (SeedLotTransaction) o;
        return Objects.equals(this.transactionDbId, seedLotTransaction.transactionDbId) &&
                super.equals(o);
    }

    @Override
    public int hashCode() {
        return Objects.hash(transactionDbId, super.hashCode());
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class SeedLotTransaction {\n");
        sb.append(" ").append(toIndentedString(super.toString())).append("\n");
        sb.append(" transactionDbId: ").append(toIndentedString(transactionDbId)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n ");
    }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
export LC_ALL=C
set -ueo pipefail

# Require: target height, output path ('-' for hash-only), and the CLI invocation.
if (( $# < 3 )); then
  echo 'Usage: utxo_snapshot.sh <generate-at-height> <snapshot-out-path> <patentcoin-cli-call ...>'
  echo
  echo "  if <snapshot-out-path> is '-', don't produce a snapshot file but instead print the "
  echo "  expected assumeutxo hash"
  echo
  echo 'Examples:'
  echo
  echo "  ./contrib/devtools/utxo_snapshot.sh 570000 utxo.dat ./src/patentcoin-cli -datadir=\$(pwd)/testdata"
  echo '  ./contrib/devtools/utxo_snapshot.sh 570000 - ./src/patentcoin-cli'
  exit 1
fi

GENERATE_AT_HEIGHT="${1}"; shift;
OUTPUT_PATH="${1}"; shift;
# Most of the calls we make take a while to run, so pad with a lengthy timeout.
PATENTCOIN_CLI_CALL="${*} -rpcclienttimeout=9999999"

# Block we'll invalidate/reconsider to rewind/fast-forward the chain.
PIVOT_BLOCKHASH=$($PATENTCOIN_CLI_CALL getblockhash $(( GENERATE_AT_HEIGHT + 1 )) )

# Status messages go to stderr so stdout stays clean for the hash output.
(>&2 echo "Rewinding chain back to height ${GENERATE_AT_HEIGHT} (by invalidating ${PIVOT_BLOCKHASH}); this may take a while")
${PATENTCOIN_CLI_CALL} invalidateblock "${PIVOT_BLOCKHASH}"

if [[ "${OUTPUT_PATH}" = "-" ]]; then
  (>&2 echo "Generating txoutset info...")
  # Extract just the serialized UTXO-set hash from the gettxoutsetinfo output.
  ${PATENTCOIN_CLI_CALL} gettxoutsetinfo | grep hash_serialized_2 | sed 's/^.*: "\(.\+\)\+",/\1/g'
else
  (>&2 echo "Generating UTXO snapshot...")
  ${PATENTCOIN_CLI_CALL} dumptxoutset "${OUTPUT_PATH}"
fi

# Undo the invalidateblock so the node returns to its original tip.
(>&2 echo "Restoring chain to original height; this may take a while")
${PATENTCOIN_CLI_CALL} reconsiderblock "${PIVOT_BLOCKHASH}"
|
#!/bin/bash
# Build (if needed) the CI docker image for $TARGET and run the test suite in it.
set -ex

mkdir -p target

DOCKER="$1"       # docker image name / directory under ci/docker
TARGET="$2"       # rust target triple
SKIP_TESTS="$3"   # forwarded to ci/run.sh

bash ci/fetch-rust-docker.sh "$TARGET"

if [ -f "ci/docker/$DOCKER/Dockerfile" ]; then
  docker build -t "$DOCKER" "ci/docker/$DOCKER/"
fi

# NOTE: the original passed --entrypoint twice (bash, then sh); docker honours
# only the last occurrence, so the dead `--entrypoint bash` flag was removed.
docker run \
  --user "$(id -u)":"$(id -g)" \
  --volume "$(rustc --print sysroot)":/travis-rust:ro \
  --volume "$(pwd)":/src:ro \
  --volume "$(pwd)"/target:/src/target \
  --workdir /src \
  --env TARGET="$TARGET" \
  --env SKIP_TESTS="$SKIP_TESTS" \
  --env CARGO_HOME=/src/target/cargo-home \
  --env CARGO_TARGET_DIR=/src/target \
  --env LIBZ_SYS_STATIC=1 \
  --entrypoint sh \
  --tty \
  --init \
  "$DOCKER" \
  -c 'PATH="$PATH":/travis-rust/bin exec bash ci/run.sh'

# check that rustup-init was built with ssl support
# see https://github.com/rust-lang/rustup.rs/issues/1051
if ! (nm target/"$TARGET"/release/rustup-init | grep -q Curl_ssl_version); then
  echo "Missing ssl support!!!!" >&2
  exit 1
fi
|
#!/bin/bash
# Restart mongod, clearing a stale lock file left behind by an unclean shutdown.
service mongod stop
# -f: succeed quietly when the lock file does not exist (the normal case after
# a clean shutdown); the bare `rm` previously errored in that situation.
# NOTE(review): deleting mongod.lock assumes mongod really stopped — confirm.
rm -f /var/lib/mongodb/mongod.lock
service mongod start
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e

# Resolve this script's real directory even when invoked through a symlink.
SOURCE="${BASH_SOURCE[0]}"
while [[ -h "$SOURCE" ]]; do # resolve $SOURCE until the file is no longer a symlink
  SCRIPTDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  # NOTE(review): $DIR is never assigned in this script; presumably this should
  # be $SCRIPTDIR — confirm (it only matters for relative symlinks).
  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SCRIPTDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

. ${SCRIPTDIR}/shared_utilities.sh
# Skip stress-testing entirely when the PR only touches excluded directories.
is_source_from_pr_testable "geode" "$(get_geode_pr_exclusion_dirs)" || exit 0
# Lists files changed since the PR's merge base that match the given pathspec.
function changes_for_path() {
  pushd geode >> /dev/null
  local path="$1" # only expand once in the line below
  # .git/resource/metadata.json is provided by the github-pr-resource used in Concourse.
  # Declare and assign separately: `local var=$(cmd) || exit $?` never exits,
  # because `local` itself returns 0 and masks the command's failure.
  local mergeBase
  mergeBase=$(jq -r -c '.[]| select(.name == "base_sha") | .value' .git/resource/metadata.json) || exit $?

  if [[ "${mergeBase}" == "" ]]; then
    echo "Could not determine merge base. Exiting..."
    exit 1
  fi
  git diff --name-only ${mergeBase} -- $path
  popd >> /dev/null
}
# Diff each test source set against the PR's merge base.
UNIT_TEST_CHANGES=$(changes_for_path '*/src/test/java') || exit $?
INTEGRATION_TEST_CHANGES=$(changes_for_path '*/src/integrationTest/java') || exit $?
DISTRIBUTED_TEST_CHANGES=$(changes_for_path '*/src/distributedTest/java') || exit $?
ACCEPTANCE_TEST_CHANGES=$(changes_for_path '*/src/acceptanceTest/java') || exit $?
UPGRADE_TEST_CHANGES=$(changes_for_path '*/src/upgradeTest/java') || exit $?

CHANGED_FILES_ARRAY=( $UNIT_TEST_CHANGES $INTEGRATION_TEST_CHANGES $DISTRIBUTED_TEST_CHANGES $ACCEPTANCE_TEST_CHANGES $UPGRADE_TEST_CHANGES )
NUM_CHANGED_FILES=${#CHANGED_FILES_ARRAY[@]}

echo "${NUM_CHANGED_FILES} changed tests"

# Nothing changed: pass trivially. Too many changes: repeating each test 50x
# would be prohibitively slow, so the job also passes without stress testing.
if [[ "${NUM_CHANGED_FILES}" -eq 0 ]]
then
  echo "No changed test files, nothing to test."
  exit 0
fi

if [[ "${NUM_CHANGED_FILES}" -gt 25 ]]
then
  echo "${NUM_CHANGED_FILES} is too many changed tests to stress test. Allowing this job to pass without stress testing."
  exit 0
fi

TEST_TARGETS=""
# Appends a gradle task plus one `--tests <ClassName>` filter per changed file
# to the global TEST_TARGETS string. No-op when the file list is empty.
function append_to_test_targets() {
  local task="$1"
  local changed_files="$2"
  if [[ -n "$changed_files" ]]
  then
    TEST_TARGETS="$TEST_TARGETS $task"
    local file_name short_name
    for file_name in $changed_files
    do
      # Strip the directory and the .java suffix to get the class name.
      short_name=$(basename $file_name)
      short_name="${short_name%.java}"
      TEST_TARGETS="$TEST_TARGETS --tests $short_name"
    done
  fi
}
append_to_test_targets "repeatUnitTest" "$UNIT_TEST_CHANGES"
append_to_test_targets "repeatIntegrationTest" "$INTEGRATION_TEST_CHANGES"
append_to_test_targets "repeatDistributedTest" "$DISTRIBUTED_TEST_CHANGES"
append_to_test_targets "repeatUpgradeTest" "$UPGRADE_TEST_CHANGES"
# Acceptance tests cannot currently run in parallel, so do not stress these tests
#append_to_test_targets "repeatAcceptanceTest" "$ACCEPTANCE_TEST_CHANGES"

# Repeat each selected test 50 times; -PfailOnNoMatchingTests=false tolerates
# filters that no longer match anything (e.g. a renamed or deleted test class).
export GRADLE_TASK="compileTestJava compileIntegrationTestJava compileDistributedTestJava $TEST_TARGETS"
export GRADLE_TASK_OPTIONS="-Prepeat=50 -PfailOnNoMatchingTests=false"
echo "GRADLE_TASK_OPTIONS=${GRADLE_TASK_OPTIONS}"
echo "GRADLE_TASK=${GRADLE_TASK}"
${SCRIPTDIR}/execute_tests.sh
|
def create_tuple_list(lst):
    """Return consecutive overlapping pairs from *lst* as a list of tuples.

    E.g. ``[1, 2, 3]`` -> ``[(1, 2), (2, 3)]``. Empty or single-element input
    yields ``[]``.
    """
    # zip stops at the shorter argument, so this handles len(lst) < 2 without
    # the index arithmetic of the original range(len(lst)-1) loop.
    return list(zip(lst, lst[1:]))
# Quick self-check of the pairing behaviour.
test_list = [1, 2, 3, 4, 5]
print(create_tuple_list(test_list)) # Output: [(1, 2), (2, 3), (3, 4), (4, 5)]
# Start the Spark thriftserver and keep the container's foreground process
# alive after the daemon forks. ("hhtp" typo in the log message fixed; the
# master URL is quoted so an unset SPARK_MASTER fails visibly in spark-submit
# instead of silently dropping the argument via word splitting.)
echo "starting thriftserver with http endpoint, connecting to $SPARK_MASTER and $HIVE_SITE_CONF_hive_metastore_uris"
/spark/sbin/start-thriftserver.sh --master "$SPARK_MASTER"
tail -f /dev/null
|
import psycopg2
import json
from flask import Flask, request
# Connect to the database
# NOTE(review): one module-level connection shared across all requests is not
# safe under concurrent access, and no password is supplied — confirm the
# intended deployment (trust auth / single-threaded dev server?).
conn = psycopg2.connect(dbname='user_db',
                        user='postgres', host='localhost', port='5432')

# Create the API
app = Flask(__name__)
@app.route('/user/<string:username>', methods=['GET'])
def get_user(username):
    """Return the first matching ``users`` row for *username* as JSON.

    Responds with ``null`` when no such user exists (same as the original).
    """
    cur = conn.cursor()
    try:
        # Parameterized query: the original interpolated the URL path segment
        # directly into the SQL string, which is a SQL-injection vulnerability.
        cur.execute("SELECT * from users where username=%s", (username,))
        user = cur.fetchone()
    finally:
        # Always release the cursor, even if the query raises.
        cur.close()
    return json.dumps(user)
if __name__ == "__main__":
    # Development entry point; use a proper WSGI server in production.
    app.run()
package com.simple.app.sync.raytracer;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot configuration/entry class for the ray-tracer application.
 * NOTE(review): there is no {@code main} method here; presumably the app is
 * launched by another module, a test, or an external runner — confirm.
 */
@SpringBootApplication
public class RayTracerApp {
}
|
#!/bin/bash
# SLURM job: 3D heat-equation scaling run on 1024 haswell nodes x 16 tasks.
#SBATCH -J "HeatEqnOne_16384"
#SBATCH --nodes=1024
#SBATCH --ntasks-per-node=16
#SBATCH --cpus-per-task=1
#SBATCH --partition=haswell
#SBATCH --mem-per-cpu=2000M
#SBATCH --time=00:10:00
#SBATCH --exclusive
# Load the test definition (20 timesteps, no ADOL-C, asynchronous MPI).
source ./tests/scaling/ZIH_TAURUS2_GCC_4_9_1/run-HeatEqnOne3D-timesteps_20-ADOLC_NO-MPI_ASYNC_YES.sh
#------------------------------------------------------------------------------#
################################################################################
# 16384 MPI ranks x 1 OpenMP thread on a 512^3 grid in 32x32x16 partitions.
export SCAFESRUN_N_PROCESSES_MPI_START=16384
export SCAFESRUN_N_THREADS_OPENMP_START=1
export SCAFESRUN_N_NODES="512x512x512"
export SCAFESRUN_N_PARTITIONS="32x32x16"
# Timestamps bracket the run for quick wall-clock accounting in the job log.
date
scafesrun.sh
date
|
import java.util.Hashtable;
public class CharacterCount {
    public static void main(String[] args) {
        // Maps each word of the sentence to its length.
        // NOTE(review): despite the class name, this counts characters *per
        // word*, not character frequencies; duplicate words would collapse
        // into a single map entry.
        String sentence = "This is a test sentence";
        String[] words = sentence.split(" ");
        Hashtable<String, Integer> hashtable = new Hashtable<>();
        for (String word : words) {
            hashtable.put(word, word.length());
        }
        // Hashtable iteration order is unspecified; the actual printed order
        // may differ from the sample in the trailing comment.
        System.out.println(hashtable);
    }
}
// Output: {This=4, is=2, a=1, test=4, sentence=8} |
from typing import Optional
from hoss.error import *
from hoss.namespace import Namespace
from hoss.core import CoreService
from hoss.auth import AuthService
import urllib.parse
def connect(server_url: str, auth_instance: Optional[AuthService] = None) -> CoreService:
    """Open a connection to a Hoss server.

    Args:
        server_url: URL to the server, including the protocol
            (e.g. https://hoss.myserver.com)
        auth_instance: Pre-configured auth service to reuse; handy when talking
            to multiple Hoss servers that each run their own auth service.

    Returns:
        The CoreService instance serving as the primary interface to the server.
    """
    return CoreService(server_url, auth_instance)
def resolve(uri, auth_instance: Optional[AuthService] = None):
    """Resolve a Hoss URI into a DatasetRef. This lets you directly load any ref.

    The Hoss URI format is `hoss+<server>:<namespace>:<dataset>/<object key>` and can be retrieved
    from any DatasetRef instance by the `uri` property.

    An example URI would be `hoss+https://hoss.myserver.com:default:example-ds/my-file.bin`

    Args:
        uri: Hoss URI formatted string
        auth_instance: Optionally provide an already configured auth instance. This can be useful when
            interacting with multiple Hoss servers that each run their own auth service.

    Returns:
        A populated DatasetRef if the URI is valid

    Raises:
        ValueError: if `uri` is not a valid Hoss URI
    """
    parsed = urllib.parse.urlparse(uri)
    if not parsed.scheme.lower().startswith("hoss"):
        raise ValueError("URI is not a valid Hoss URI")

    try:
        _, protocol = parsed.scheme.lower().split("+")
        host, namespace_name, dataset_name = parsed.netloc.split(":")
    except ValueError:
        # Narrowed from a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt); both split/unpack failures above raise ValueError.
        raise ValueError("URI is not a valid Hoss URI") from None

    s = CoreService(f"{protocol}://{host}", auth_instance=auth_instance)
    ns = s.get_namespace(namespace_name)
    return ns.get_dataset(dataset_name) / parsed.path
|
<reponame>community-boating/cbidb-public-web
import { none, some } from "fp-ts/lib/Option";
import { History } from "history";
import * as React from "react";
import * as t from 'io-ts';
import { RadioGroup } from "@components/InputGroup";
import FactaArticleRegion from "@facta/FactaArticleRegion";
import FactaNotitleRegion from "@facta/FactaNotitleRegion";
import NavBarLogoutOnly from "@components/NavBarLogoutOnly";
import { setAPImage } from "@util/set-bg-image";
import {postWrapper as submit} from "@async/member/select-damage-waiver"
import { makePostJSON } from "@core/APIWrapperUtil";
import {validator as pricesValidator} from "@async/prices"
import Currency from "@util/Currency";
import FactaMainPage from "@facta/FactaMainPage";
import FactaButton from "@facta/FactaButton";
/** Props for the damage-waiver wizard step. */
interface Props {
    /** Price table from the server; `damageWaiverPrice` is the fee displayed. */
    prices: t.TypeOf<typeof pricesValidator>,
    /** True when the member had already elected the waiver (pre-selects "Yes"). */
    selected: boolean,
    history: History<any>
    breadcrumb: JSX.Element,
    /** Navigation callbacks supplied by the parent wizard flow. */
    goNext: () => Promise<void>,
    goPrev: () => Promise<void>
}
/**
 * Wizard step offering the optional accidental damage waiver: renders the
 * waiver terms, a Yes/No radio group, and Back/Next navigation. "Next" only
 * appears once a choice has been made, and submits the selection before
 * advancing.
 */
export default class DamageWaiver extends React.Component<Props, {radio: string}> {
    constructor(props: Props) {
        super(props);
        // Pre-select "Yes" when the member previously chose the waiver;
        // otherwise no option is selected and "Next" stays hidden.
        this.state = {
            radio: props.selected ? "Yes" : null
        }
    }
    render() {
        const self = this;
        return <FactaMainPage setBGImage={setAPImage} navBar={NavBarLogoutOnly({history: this.props.history, sysdate: none, showProgramLink: false})}>
            <FactaNotitleRegion>
                {this.props.breadcrumb}
            </FactaNotitleRegion>
            <FactaArticleRegion title="Consider purchasing an Accidental Damage Waiver.">
                Any member who signs out a boat, including windsurfers and kayaks, shall be held financially responsible for damage to that boat and its equipment,
                and damage to any other boat, windsurfer, or equipment in the event of a collision.
                A member shall forfeit all membership privileges until satisfactory arrangements for payment of the cost of repairs is made with the Executive Director.
                Members may elect to purchase an annual damage liability waiver for a fee of {Currency.dollars(this.props.prices.damageWaiverPrice).format(true)}.
                This waiver covers any accidental damages to boats, but does not cover gross negligence, recklessness, or intentional acts.
                Declining the waiver signifies that a member agrees to pay for the cost of repairs, as determined by Community Boating Inc., up to a maximum of $5000.
            </FactaArticleRegion>
            <FactaNotitleRegion>
                <RadioGroup
                    id="accept"
                    label=""
                    columns={1}
                    values={[{
                        key: "Yes",
                        display: `I elect to purchase the damage waiver for ${Currency.dollars(this.props.prices.damageWaiverPrice).format(true)}.`
                    }, {
                        key: "No",
                        display: "I decline to purchase the damage waiver.  I acknowledge that I may be held financially responsible for damages incurred to boats or equipment."
                    }]}
                    updateAction={(id: string, radio: string) => {
                        // Record the member's Yes/No choice; enables "Next".
                        self.setState({
                            radio
                        })
                    }}
                    value={self.state ? some(self.state.radio) : none}
                    justElement={true}
                />
            </FactaNotitleRegion>
            <FactaButton text="< Back" onClick={self.props.goPrev}/>
            {(self.state || {} as any).radio != undefined ? <FactaButton text="Next >" spinnerOnClick onClick={() => {
                // Persist the selection server-side, then advance on success.
                return submit.send(makePostJSON({
                    wantIt: self.state.radio == "Yes"
                })).then(res => {
                    if (res.type == "Success") {
                        self.props.goNext()
                    } else {
                        window.scrollTo(0, 0);
                        // self.setState({
                        // 	...self.state,
                        // 	validationErrors: res.message.split("\\n") // TODO
                        // });
                    }
                })
            }}/> : ""}
        </FactaMainPage>
    }
}
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/util/gdal/GDALImageReader.java
package io.opensphere.core.util.gdal;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BandedSampleModel;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.DataBufferInt;
import java.awt.image.DataBufferShort;
import java.awt.image.DataBufferUShort;
import java.awt.image.Raster;
import java.awt.image.SampleModel;
import java.awt.image.WritableRaster;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageReader;
import javax.imageio.ImageTypeSpecifier;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.spi.ImageReaderSpi;
import javax.imageio.stream.ImageInputStream;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.gdal.gdal.Band;
import org.gdal.gdal.Dataset;
import org.gdal.gdal.gdal;
import org.gdal.gdalconst.gdalconstConstants;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import io.opensphere.core.util.Constants;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.Nulls;
/** A GDAL image reader. */
@SuppressWarnings("PMD.GodClass")
public class GDALImageReader extends ImageReader
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(GDALImageReader.class);

    /**
     * The GDAL data set. This is populated once the image is loaded from the
     * temp file, and set back to null by dispose()/setInput().
     */
    private Dataset myDataset;

    /** Temporary file used so that GDAL can read the image; deleted by dispose(). */
    private File myFile;
    /**
     * Constructor.
     *
     * @param originatingProvider The service provider (SPI) creating this reader.
     */
    GDALImageReader(ImageReaderSpi originatingProvider)
    {
        super(originatingProvider);
    }
    /**
     * {@inheritDoc}
     *
     * Releases the GDAL dataset and deletes the backing temp file. Works on
     * local copies and nulls the fields so a repeated dispose() is a no-op.
     */
    @Override
    public void dispose()
    {
        Dataset dataset = myDataset;
        if (dataset != null)
        {
            dataset.delete();
            myDataset = null;
        }
        File file = myFile;
        if (file != null)
        {
            if (!file.delete())
            {
                // Deletion failure is logged but not fatal.
                LOGGER.error("Could not delete file " + file);
            }
            myFile = null;
        }
    }
@Override
public int getHeight(int imageIndex) throws IOException
{
Dataset dataset = getDataset();
return dataset == null ? 0 : dataset.getRasterYSize();
}
    /** {@inheritDoc} Image metadata is not supported; always returns null. */
    @Override
    public IIOMetadata getImageMetadata(int imageIndex) throws IOException
    {
        return null;
    }
    /**
     * {@inheritDoc}
     *
     * Derives a single image type from the dataset's usable color bands: the
     * first usable band fixes the data type, and matching color/sample models
     * are built from it. Returns an empty iterator when the bands or the data
     * type are unsupported.
     */
    @Override
    public Iterator<ImageTypeSpecifier> getImageTypes(int imageIndex) throws IOException
    {
        Dataset dataset = getDataset();
        TObjectIntMap<? extends RasterType> rasterTypes = getRasterTypeMap(dataset);
        TIntList bankIndices = getBankIndices(rasterTypes);
        if (bankIndices.isEmpty())
        {
            // No usable color bands: log what was found and give up.
            logRasterTypes(dataset);
            return Collections.<ImageTypeSpecifier>emptyList().iterator();
        }
        // All banks share the first usable band's data type.
        Band firstBand = dataset.GetRasterBand(bankIndices.get(0));
        int bandDataType = firstBand.getDataType();
        int dataType = getDataBufferType(bandDataType);
        if (dataType == -1)
        {
            // GDAL band type has no DataBuffer equivalent.
            return Collections.<ImageTypeSpecifier>emptyList().iterator();
        }
        ColorModel colorModel = createColorModel(rasterTypes, firstBand, bandDataType, dataType);
        if (colorModel == null)
        {
            LOGGER.warn("Unsupported color bands in image.");
            logRasterTypes(dataset);
            return Collections.<ImageTypeSpecifier>emptyList().iterator();
        }
        SampleModel sampleModel = createSampleModel(dataset.getRasterXSize(), dataset.getRasterYSize(), bankIndices.size(),
                dataType);
        return Collections.singleton(new ImageTypeSpecifier(colorModel, sampleModel)).iterator();
    }
    /** {@inheritDoc} This reader always exposes exactly one image. */
    @Override
    public int getNumImages(boolean allowSearch) throws IOException
    {
        return 1;
    }
    /** {@inheritDoc} Stream metadata is not supported; always returns null. */
    @Override
    public IIOMetadata getStreamMetadata() throws IOException
    {
        return null;
    }
@Override
public int getWidth(int imageIndex) throws IOException
{
Dataset dataset = getDataset();
return dataset == null ? 0 : dataset.getRasterXSize();
}
    /**
     * {@inheritDoc}
     *
     * Reads the dataset's color bands into a {@link BufferedImage}. If the
     * caller supplied a destination image via {@code param}, its data buffer is
     * filled in place; otherwise a new image with matching sample and color
     * models is created. Returns null when the dataset, its bands, or the band
     * data type are unusable.
     */
    @Override
    public BufferedImage read(int imageIndex, ImageReadParam param) throws IOException
    {
        Dataset dataset = getDataset();
        if (dataset == null)
        {
            return null;
        }
        TObjectIntMap<? extends RasterType> rasterTypes = getRasterTypeMap(dataset);
        TIntList bankIndices = getBankIndices(rasterTypes);
        if (bankIndices.isEmpty())
        {
            LOGGER.warn("No color bands found.");
            logRasterTypes(dataset);
            return null;
        }
        int width = dataset.getRasterXSize();
        int height = dataset.getRasterYSize();
        Band firstBand = dataset.GetRasterBand(1);
        int bandDataType = firstBand.getDataType();
        // One buffer per selected band, read out of GDAL.
        List<? extends ByteBuffer> bands = createBands(dataset, bankIndices, bandDataType, width, height);
        BufferedImage image = param == null ? null : param.getDestination();
        DataBuffer dataBuffer;
        if (image == null)
        {
            // No destination supplied: build models and a fresh image.
            int dataType = getDataBufferType(bandDataType);
            if (dataType == -1)
            {
                return null;
            }
            SampleModel sampleModel = createSampleModel(width, height, bankIndices.size(), dataType);
            dataBuffer = sampleModel.createDataBuffer();
            WritableRaster raster = Raster.createWritableRaster(sampleModel, dataBuffer, null);
            ColorModel colorModel = createColorModel(rasterTypes, firstBand, bandDataType, dataBuffer.getDataType());
            image = new BufferedImage(colorModel, raster, false, null);
        }
        else
        {
            // Reuse the caller's image; its raster must have one bank per band.
            dataBuffer = image.getTile(0, 0).getDataBuffer();
            if (dataBuffer.getNumBanks() != bands.size())
            {
                throw new IllegalArgumentException("Raster has wrong number of banks.");
            }
        }
        populateDataBuffer(dataBuffer, bands);
        return image;
    }
/**
 * Set the input source, invalidating any previously loaded dataset so the
 * next accessor call re-reads from the new input.
 */
@Override
public void setInput(Object input, boolean seekForwardOnly, boolean ignoreMetadata)
{
    super.setInput(input, seekForwardOnly, ignoreMetadata);
    // Force getDataset() to reload from the new input.
    myDataset = null;
}
/**
 * Read the bands from the dataset.
 *
 * @param dataset The dataset.
 * @param bankIndices The raster indices for the color bands.
 * @param expectedBandDataType The GDAL data type of the bands.
 * @param width The width of the image in pixels.
 * @param height The height of the image in pixels.
 * @return A list of byte buffers, one per requested band; an entry is
 *         {@code null} if that band had the wrong type or failed to read.
 */
protected List<? extends ByteBuffer> createBands(Dataset dataset, TIntList bankIndices, int expectedBandDataType, int width,
        int height)
{
    int bufSizeB = width * height * (gdal.GetDataTypeSize(expectedBandDataType) / Constants.BITS_PER_BYTE);
    List<ByteBuffer> bands = New.list(bankIndices.size());
    ByteBuffer buf = null;
    for (int bankIndex : bankIndices.toArray())
    {
        Band band = dataset.GetRasterBand(bankIndex);
        ByteBuffer data;
        int bandDataType = band.getDataType();
        if (bandDataType == expectedBandDataType)
        {
            if (buf == null)
            {
                buf = ByteBuffer.allocateDirect(bufSizeB);
                buf.order(ByteOrder.nativeOrder());
            }
            else
            {
                // Reuse the buffer left over from a previous failed read.
                // Bug fix: reset() throws InvalidMarkException because no
                // mark is ever set; clear() is the correct call to rewind
                // the buffer so it can be refilled.
                buf.clear();
            }
            int returnVal = band.ReadRaster_Direct(0, 0, band.getXSize(), band.getYSize(), width, height,
                    expectedBandDataType, buf);
            if (returnVal == gdalconstConstants.CE_None)
            {
                data = buf;
                // Ownership moves to the result list; allocate a fresh
                // buffer for the next band.
                buf = null;
            }
            else
            {
                LOGGER.error("An error occurred while trying to read band " + bankIndex);
                GDALGenericUtilities.logLastError(LOGGER, Level.ERROR);
                data = null;
            }
        }
        else
        {
            LOGGER.warn("Band " + bankIndex + " has wrong data type " + bandDataType + " (expecting " + expectedBandDataType
                    + ")");
            data = null;
        }
        bands.add(data);
    }
    return bands;
}
/**
 * Create the color model for the image.
 *
 * @param rasterTypes A map of raster types to raster indices.
 * @param firstBand The first color band.
 * @param bandDataType The band data type.
 * @param dataType The data buffer type.
 * @return The color model, or {@code null} if no compatible color model
 *         could be created.
 */
protected ColorModel createColorModel(TObjectIntMap<? extends RasterType> rasterTypes, Band firstBand, int bandDataType,
        int dataType)
{
    ColorModel colorModel;
    if (rasterTypes.containsKey(RasterType.PALETTE))
    {
        // Indexed image: build the index color model from GDAL's color table,
        // sized by the band's bit depth.
        colorModel = firstBand.GetRasterColorTable().getIndexColorModel(gdal.GetDataTypeSize(bandDataType));
    }
    else
    {
        ColorSpace colorSpace = getColorSpace(rasterTypes);
        if (colorSpace == null)
        {
            // Not RGB, gray, or palette: no compatible model.
            colorModel = null;
        }
        else
        {
            boolean hasAlpha = rasterTypes.containsKey(RasterType.ALPHA);
            // Component model: one sample per band, no premultiplied alpha.
            colorModel = new ComponentColorModel(colorSpace, hasAlpha, false,
                    hasAlpha ? Transparency.TRANSLUCENT : Transparency.OPAQUE, dataType);
        }
    }
    return colorModel;
}
/**
 * Create the sample model for the AWT image.
 *
 * @param width The image width.
 * @param height The image height.
 * @param numBands The number of bands in the image.
 * @param dataType The data type of the image (e.g.,
 *            {@link DataBuffer#TYPE_BYTE}).
 * @return The sample model.
 */
protected SampleModel createSampleModel(int width, int height, int numBands, int dataType)
{
    // One bank per band, banks in order, with zero offset into each bank.
    int[] bandOffsets = new int[numBands];
    int[] bankIx = new int[numBands];
    for (int ix = 0; ix < numBands; ix++)
    {
        bankIx[ix] = ix;
    }
    // Scanline stride equals the width: rows are tightly packed.
    return new BandedSampleModel(dataType, width, height, width, bankIx, bandOffsets);
}
/**
 * Get the bank indices for the image. These must match the selected color
 * model: a lone palette band, an R/G/B triple, or a single gray band, with
 * the alpha band (if any) appended last.
 *
 * @param rasterTypes The map of raster types to raster indices.
 * @return The bank indices to match the color model.
 */
protected TIntList getBankIndices(TObjectIntMap<? extends RasterType> rasterTypes)
{
    TIntList indices = new TIntArrayList();
    boolean rgb = rasterTypes.containsKey(RasterType.RED) && rasterTypes.containsKey(RasterType.GREEN)
            && rasterTypes.containsKey(RasterType.BLUE);
    if (rasterTypes.containsKey(RasterType.PALETTE))
    {
        indices.add(rasterTypes.get(RasterType.PALETTE));
    }
    else if (rgb)
    {
        indices.add(rasterTypes.get(RasterType.RED));
        indices.add(rasterTypes.get(RasterType.GREEN));
        indices.add(rasterTypes.get(RasterType.BLUE));
    }
    else if (rasterTypes.containsKey(RasterType.GRAY))
    {
        indices.add(rasterTypes.get(RasterType.GRAY));
    }
    if (rasterTypes.containsKey(RasterType.ALPHA))
    {
        indices.add(rasterTypes.get(RasterType.ALPHA));
    }
    return indices;
}
/**
 * Get the color space for the image.
 *
 * @param rasterTypes The map of raster types to raster indices.
 * @return The selected color space, or {@code null} if the band layout maps
 *         to neither RGB nor grayscale.
 * @throws IllegalArgumentException If the image is palette-indexed (palette
 *             images use an index color model, not a color space).
 */
protected ColorSpace getColorSpace(TObjectIntMap<? extends RasterType> rasterTypes)
{
    if (rasterTypes.containsKey(RasterType.PALETTE))
    {
        throw new IllegalArgumentException("Palette type is not supported.");
    }
    if (rasterTypes.containsKey(RasterType.RED) && rasterTypes.containsKey(RasterType.GREEN)
            && rasterTypes.containsKey(RasterType.BLUE))
    {
        return ColorSpace.getInstance(ColorSpace.CS_sRGB);
    }
    if (rasterTypes.containsKey(RasterType.GRAY))
    {
        return ColorSpace.getInstance(ColorSpace.CS_GRAY);
    }
    return null;
}
/**
 * Determine the data type to use for the data buffer.
 *
 * @param bandDataType The GDAL band data type.
 * @return The data buffer data type ({@link DataBuffer#TYPE_BYTE},
 *         {@link DataBuffer#TYPE_USHORT}, {@link DataBuffer#TYPE_SHORT}, or
 *         {@link DataBuffer#TYPE_INT}), or -1 if the type is unsupported.
 */
protected int getDataBufferType(int bandDataType)
{
    // Sequential guards; the GDAL constants are not compile-time constants,
    // so a switch statement is not an option here.
    if (bandDataType == gdalconstConstants.GDT_Byte)
    {
        return DataBuffer.TYPE_BYTE;
    }
    if (bandDataType == gdalconstConstants.GDT_UInt16)
    {
        return DataBuffer.TYPE_USHORT;
    }
    if (bandDataType == gdalconstConstants.GDT_Int16)
    {
        return DataBuffer.TYPE_SHORT;
    }
    if (bandDataType == gdalconstConstants.GDT_Int32)
    {
        return DataBuffer.TYPE_INT;
    }
    LOGGER.error("Unrecognized band data type : " + bandDataType);
    return -1;
}
/**
 * Load the image into GDAL and return the data set. The image input stream
 * is first copied to a temporary file because GDAL opens paths, not streams.
 *
 * @return The data set, or {@code null} if the read was aborted.
 * @throws IOException If there is an error reading the image.
 * @throws IllegalStateException If no input has been set.
 */
protected Dataset getDataset() throws IOException
{
    if (myDataset == null)
    {
        ImageInputStream imageStream = (ImageInputStream)input;
        if (imageStream == null)
        {
            throw new IllegalStateException("Input has not been set.");
        }
        if (myFile == null)
        {
            myFile = File.createTempFile("gdalImageReader", Nulls.STRING);
        }
        // Idiom fix: try-with-resources replaces the manual try/finally,
        // guaranteeing the stream is closed on every exit path (including
        // the abort return).
        try (FileOutputStream fos = new FileOutputStream(myFile))
        {
            byte[] data = new byte[2048];
            int numBytes;
            while ((numBytes = imageStream.read(data)) >= 0)
            {
                if (abortRequested())
                {
                    return null;
                }
                fos.write(data, 0, numBytes);
            }
        }
        Dataset dataset = gdal.Open(myFile.getAbsolutePath(), gdalconstConstants.GA_ReadOnly);
        if (dataset == null)
        {
            GDALGenericUtilities.logLastError(LOGGER, Level.ERROR);
            throw new IOException("Failed to create GDAL dataset for file: " + myFile + ": " + gdal.GetLastErrorMsg());
        }
        myDataset = dataset;
    }
    return myDataset;
}
/**
 * Get the raster type map for a dataset.
 *
 * Note: if two bands share a color interpretation, the later band's index
 * overwrites the earlier one in the map. Bands with an unrecognized
 * interpretation are treated as grayscale, but only the first such band is
 * recorded (subsequent unknowns are dropped once GRAY is present).
 *
 * @param dataset The dataset.
 * @return A map of raster types to raster indices (1-based, per GDAL).
 */
protected TObjectIntMap<? extends RasterType> getRasterTypeMap(Dataset dataset)
{
    TObjectIntMap<RasterType> result = new TObjectIntHashMap<>();
    for (int raster = 1; raster <= dataset.getRasterCount(); ++raster)
    {
        int colorInterp = dataset.GetRasterBand(raster).GetColorInterpretation();
        RasterType rasterType = RasterType.getRasterType(colorInterp);
        if (rasterType != RasterType.UNKNOWN)
        {
            result.put(rasterType, raster);
        }
        else if (!result.containsKey(RasterType.GRAY))
        {
            // Fall back to grayscale for the first unclassified band.
            result.put(RasterType.GRAY, raster);
        }
    }
    return result;
}
/**
 * Log the raster color interpretations in a dataset. Used as a diagnostic
 * when no supported band layout could be derived.
 *
 * @param dataset The dataset.
 */
protected void logRasterTypes(Dataset dataset)
{
    int count = dataset.getRasterCount();
    if (count == 0)
    {
        LOGGER.warn("No bands found in image.");
        return;
    }
    // GDAL band indices are 1-based.
    for (int band = 1; band <= count; ++band)
    {
        int interp = dataset.GetRasterBand(band).GetColorInterpretation();
        LOGGER.info("Band " + band + " has color interpretation " + gdal.GetColorInterpretationName(interp));
    }
}
/**
 * Populate the data buffer with the extracted image bands. Each band's byte
 * buffer is bulk-copied into the corresponding bank of the data buffer,
 * re-viewed at the bank's element width (byte/short/int). Bands that failed
 * to read ({@code null} entries) leave their bank untouched (all zeros).
 *
 * @param dataBuffer The image data buffer.
 * @param bands The bands from the image.
 * @throws IllegalArgumentException If the buffer is not a byte, short,
 *             ushort, or int buffer.
 */
protected void populateDataBuffer(DataBuffer dataBuffer, List<? extends ByteBuffer> bands)
{
    if (dataBuffer instanceof DataBufferByte)
    {
        DataBufferByte byteData = (DataBufferByte)dataBuffer;
        for (int band = 0; band < bands.size(); band++)
        {
            ByteBuffer bandData = bands.get(band);
            if (bandData != null)
            {
                // Bulk get copies bandData into the bank's backing array.
                bandData.get(byteData.getData(band));
            }
        }
    }
    else if (dataBuffer instanceof DataBufferShort)
    {
        DataBufferShort shortData = (DataBufferShort)dataBuffer;
        for (int band = 0; band < bands.size(); band++)
        {
            ByteBuffer bandData = bands.get(band);
            if (bandData != null)
            {
                // View the bytes as shorts in the buffer's byte order.
                bandData.asShortBuffer().get(shortData.getData(band));
            }
        }
    }
    else if (dataBuffer instanceof DataBufferUShort)
    {
        DataBufferUShort shortData = (DataBufferUShort)dataBuffer;
        for (int band = 0; band < bands.size(); band++)
        {
            ByteBuffer bandData = bands.get(band);
            if (bandData != null)
            {
                // Unsigned shorts share the short[] representation.
                bandData.asShortBuffer().get(shortData.getData(band));
            }
        }
    }
    else if (dataBuffer instanceof DataBufferInt)
    {
        DataBufferInt intData = (DataBufferInt)dataBuffer;
        for (int band = 0; band < bands.size(); band++)
        {
            ByteBuffer bandData = bands.get(band);
            if (bandData != null)
            {
                bandData.asIntBuffer().get(intData.getData(band));
            }
        }
    }
    else
    {
        throw new IllegalArgumentException("Unrecognized data buffer type : " + dataBuffer);
    }
}
/** Identification of a raster type in an image. */
private enum RasterType
{
    /** Alpha raster. */
    ALPHA(gdalconstConstants.GCI_AlphaBand),

    /** Blue raster. */
    BLUE(gdalconstConstants.GCI_BlueBand),

    /** Grayscale raster. */
    GRAY(gdalconstConstants.GCI_GrayIndex),

    /** Green raster. */
    GREEN(gdalconstConstants.GCI_GreenBand),

    /** Color index raster. */
    PALETTE(gdalconstConstants.GCI_PaletteIndex),

    /** Red raster. */
    RED(gdalconstConstants.GCI_RedBand),

    /** Unknown raster. */
    UNKNOWN(-1),

    ;

    /** The GDAL color interpretation code. Immutable after construction. */
    private final int myGdalColorInterpretation;

    /**
     * Get the raster type for a given GDAL color interpretation.
     *
     * @param gdalColorInterpretation The GDAL color interpretation code.
     * @return The raster type, or {@link #UNKNOWN} if no constant matches.
     */
    public static RasterType getRasterType(int gdalColorInterpretation)
    {
        for (RasterType type : RasterType.values())
        {
            if (type.getGdalColorInterpretation() == gdalColorInterpretation)
            {
                return type;
            }
        }
        return UNKNOWN;
    }

    /**
     * Enum constructor.
     *
     * @param gdalColorInterpretation The GDAL color interpretation code.
     */
    RasterType(int gdalColorInterpretation)
    {
        myGdalColorInterpretation = gdalColorInterpretation;
    }

    /**
     * Get the GDAL color interpretation code.
     *
     * @return The code.
     */
    private int getGdalColorInterpretation()
    {
        return myGdalColorInterpretation;
    }
}
}
|
package io.datahubproject.openapi.dto;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
import io.datahubproject.openapi.generated.OneOfGenericAspectValue;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Value;
/**
 * Request body for upserting a single aspect onto an entity. Exactly one of
 * {@code entityUrn} or {@code entityKeyAspect} identifies the target entity.
 * Immutable (Lombok {@code @Value}) and built via the generated builder,
 * which Jackson also uses for deserialization.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@Value
@Builder
@JsonDeserialize(builder = UpsertAspectRequest.UpsertAspectRequestBuilder.class)
public class UpsertAspectRequest {

    // Entity type name as declared in the entity registry.
    @JsonProperty("entityType")
    @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry")
    String entityType;

    // Target entity URN; required when entityKeyAspect is null.
    @JsonProperty("entityUrn")
    @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null")
    String entityUrn;

    // Key aspect identifying the entity; required when entityUrn is null.
    @JsonProperty("entityKeyAspect")
    @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null")
    OneOfGenericAspectValue entityKeyAspect;

    // The aspect payload to write.
    @JsonProperty("aspect")
    @Schema(required = true, description = "Aspect value to be upserted")
    OneOfGenericAspectValue aspect;

    /** Builder with bare (prefix-less) setters for Jackson deserialization. */
    @JsonPOJOBuilder(withPrefix = "")
    public static class UpsertAspectRequestBuilder {
    }
}
|
// Stitching controller (placeholder declaration; behavior defined elsewhere).
class StitcherController {
    // Class representing the stitching controller
};
// Implementation backing GlobalController: owns the (optional) stitching
// controller for the current device.
class GlobalControllerImpl {
public:
    // Returns the current controller, or nullptr if none has been created.
    StitcherController* getController() const {
        return controller;
    }

    void createController(int device) {
        // Create a new stitching controller for the specified device
        // Implementation details not provided
    }

    void deleteController() {
        // Delete the stitching controller
        // Implementation details not provided
    }

private:
    // Bug fix: the pointer was never initialized, so getController() returned
    // an indeterminate value before createController() ran.
    StitcherController* controller = nullptr;
    // Other necessary members and functions
};
// Facade over GlobalControllerImpl (pimpl pattern). Owns the implementation
// object for its whole lifetime.
class GlobalController {
public:
    GlobalController() : impl(new GlobalControllerImpl()) {}

    // Leak fix: the heap-allocated impl was never released; the facade owns
    // it, so destroy it here. Copying is disabled because a shallow copy
    // would double-delete the impl.
    ~GlobalController() { delete impl; }
    GlobalController(const GlobalController&) = delete;
    GlobalController& operator=(const GlobalController&) = delete;

    StitcherController* getController() const {
        Q_ASSERT(impl);
        return impl->getController();
    }

    void createController(int device) {
        Q_ASSERT(impl);
        impl->createController(device);
    }

    void deleteController() {
        Q_ASSERT(impl);
        impl->deleteController();
    }

private:
    GlobalControllerImpl* impl;
};
// Repository: hi-manshu-kumar/Eckovation-task
// Route paths and the remote data URL, registered as injectable constants on
// the Angular module `app` (defined elsewhere).
app.constant("LOGIN", "/login");
app.constant("LOGOUT", "/logout");
app.constant("REGISTER", "/register");
app.constant("ABOUT", "/about");
// Base URL for the mobile JSON data set consumed by the app.
app.constant("URL", "https://raw.githubusercontent.com/hi-manshu-kumar/mobilejson/master/json");
app.constant("PROFILE", "/profile");
app.constant("COMMUNITY", "/community");
|
# Repository: gastbob40/epimodo_bot
import discord
import yaml
from src.eventsHandler.on_message.mics.check_mentions import check_mentions
from src.eventsHandler.on_message.mics.get_server import get_server
from src.eventsHandler.on_message.mics.get_user import get_user
from src.eventsHandler.on_message.mics.init_server import init_server
from src.eventsHandler.on_message.mics.reload_bot import reload_bot
from src.eventsHandler.on_message.moderation.add_ban import add_ban
from src.eventsHandler.on_message.moderation.unban import unban
from src.eventsHandler.on_message.moderation.add_kick import add_kick
from src.eventsHandler.on_message.moderation.add_general_mute import add_general_mute
from src.eventsHandler.on_message.moderation.mute import mute
from src.eventsHandler.on_message.moderation.add_warn import add_warn
from src.eventsHandler.on_message.moderation.clear_messages import clear_messages
from src.eventsHandler.on_message.moderation.remove_general_mute import remove_general_mute
from src.eventsHandler.on_message.moderation.unmute import unmute
from src.eventsHandler.on_message.moderation.get_mutes import get_mutes
class OnMessage:
    """Routes prefixed chat commands from Discord messages to their handlers."""

    @staticmethod
    async def run(client: discord.Client, message: discord.Message):
        """Handle an incoming message.

        Ignores bot authors, processes mentions, then dispatches the command
        (first word with the prefix stripped) to the matching handler. All
        handlers share the signature ``(client, message, args)``.
        """
        if message.author.bot:
            return

        with open('run/config/config.yml', 'r') as file:
            config = yaml.safe_load(file)

        await check_mentions(client, message)

        # Bug fix: the original guard let an empty message fall through; an
        # empty message can never be a prefixed command.
        if not message.content or message.content[0] != config['prefix']:
            return

        parts = message.content.split(' ')
        command = parts[0][1:]
        args = parts[1:]

        # Dispatch table replaces the long if/elif chain (the original also
        # had a stray `if` after 'init' that silently broke the chain's
        # mutual exclusivity, though no command overlapped in practice).
        handlers = {
            'init': init_server,
            'reload': reload_bot,
            # Sanctions manager
            'warn': add_warn,
            'kick': add_kick,
            'ban': add_ban,
            'unban': unban,
            'g_mute': add_general_mute,
            'g_unmute': remove_general_mute,
            'mute': mute,
            'get_mutes': get_mutes,
            'unmute': unmute,
            'clear': clear_messages,
            'server': get_server,
            'user': get_user,
        }
        handler = handlers.get(command)
        if handler is not None:
            await handler(client, message, args)
|
import uuid
class TemplateInstaller:
    """Installs a system VM template.

    ``args`` may be an attribute-style namespace (e.g. argparse result) with a
    ``systemvmtemplateurl`` attribute, or a plain dict with that key; both are
    accepted.
    """

    def __init__(self, templatesuffix, args):
        # File suffix appended to the generated template name, e.g. "template".
        self.templatesuffix = templatesuffix
        # Parsed arguments: dict or attribute-style namespace.
        self.args = args

    def generateUniqueIdentifier(self):
        """Return a unique identifier string for the template name."""
        # NOTE(review): uuid1 embeds host MAC/time; uuid4 would avoid that.
        # Kept as uuid1 to preserve existing behavior.
        return str(uuid.uuid1())

    def constructTemplateName(self, uniqueIdentifier):
        """Return ``<uniqueIdentifier>.<templatesuffix>``."""
        return uniqueIdentifier + "." + self.templatesuffix

    def _getTemplateUrl(self):
        """Return the template URL from dict-style or attribute-style args."""
        if isinstance(self.args, dict):
            return self.args.get('systemvmtemplateurl')
        return getattr(self.args, 'systemvmtemplateurl', None)

    def downloadTemplate(self):
        # Implement template download logic based on the configured URL.
        pass

    def installTemplate(self):
        # Implement template installation logic
        pass

    def writeProperties(self):
        # Implement property writing logic
        pass

    def installSystemTemplate(self):
        """Generate a name, optionally download, then install the template."""
        try:
            uniqueIdentifier = self.generateUniqueIdentifier()
            self.templateName = self.constructTemplateName(uniqueIdentifier)
            # Bug fix: the original used self.args.systemvmtemplateurl, which
            # raises AttributeError when args is the dict used in the example
            # usage at the bottom of this file.
            if self._getTemplateUrl():
                self.downloadTemplate()
            self.installTemplate()
            self.writeProperties()
            print("CloudStack has successfully installed system template")
        except Exception as e:
            print("Error installing system template:", str(e))
if __name__ == "__main__":
    # Example usage. Bug fix: the installer reads args.systemvmtemplateurl as
    # an attribute, so passing a plain dict made installSystemTemplate fail
    # with AttributeError; use an attribute-style namespace instead.
    from types import SimpleNamespace
    args = SimpleNamespace(systemvmtemplateurl='http://example.com/template')
    installer = TemplateInstaller('template', args)
    installer.installSystemTemplate()
module Awsm
  # Base class for tabular output of collections. `config`, `defaultFields`
  # and `defaultHeadings` are supplied by the concrete subclass / mixin.
  # Supported formats: :pretty, :tsv, :csv, :json.
  class TableBase
    def initialize( objects, format=:pretty )
      @objects = objects
      @format = format
      @use_fields = config.use_fields
      @fields = defaultFields
      @headings = defaultHeadings
    end

    # Render all objects to stdout in the configured format.
    def print
      # Merge configured field overrides on top of the defaults.
      config.fields.each do |name, field|
        @headings[ name ] = field[:heading]
        @fields[ name ] = field[:block]
      end
      @rows = @objects.map do |i|
        row = []
        @use_fields.each do |f|
          row << extract_field( i, f )
        end
        row
      end
      case @format
      when :pretty
        puts Terminal::Table.new(
          headings: @use_fields.map { |f| @headings[ f ] },
          rows: @rows
        )
      when :tsv
        @rows.each do |row|
          puts row.join("\t")
        end
      when :csv
        # NOTE(review): values are not CSV-escaped; embedded commas will
        # corrupt the output — confirm whether inputs can contain commas.
        @rows.each do |row|
          puts row.join(',')
        end
      when :json
        json = []
        @rows.each do |row|
          json << Hash[ @use_fields.zip( row ) ]
        end
        puts JSON.generate( json )
      else
        puts "Unknown output format: #{@format}"
      end
    end

    private

    # Fetch the extractor block for +field+, raising a descriptive error when
    # the field is unknown.
    def field( field )
      if @fields[ field ].nil?
        raise StandardError, "Unknown field: #{field}"
      end
      @fields[ field ]
    end

    # Extract a single field value from an instance. Bug fix: go through
    # #field so an unknown field name raises "Unknown field: ..." instead of
    # a NoMethodError on nil (the validator existed but was never used).
    def extract_field( instance, field )
      field( field ).call( instance )
    end

    # Values of all tags whose key equals +key+.
    def tag( key, tags )
      tags.select { |t| t.key == key }
          .map { |t| t.value }
    end
  end
end
|
#!/usr/bin/env python
"""
This reads in data from montco. This reads it in as
soon as you load it. Why? Because I didn't want to create
the extra step.
d=montco.d
Common commands:
reload(montco)
g = d.groupby(['title'])
g.sum()
# Use loc to locate a specific index
d.loc[(d['timeStamp'] >= '2016-03-20') & ( d['timeStamp'] <= '2016-04-07' ),'train_id'].head()
Example that works:
d.index = pd.DatetimeIndex(d.timeStamp)
g = d.groupby([pd.TimeGrouper('1D'), 'title'])
g['title'].count()
gg=g['title'].count().unstack().fillna(0)
r=gg.reset_index()
r=r.rename(columns = {'index':'date'})
# gg=pd.DataFrame(g.sum().to_records())
gg=g.sum().reset_index()
tlist=gg[(gg.e > myRange)].sort_values(by='e')['title'].tolist()
d.index = pd.DatetimeIndex(d.timeStamp)
df.to_csv('df.csv',index=True,header=True)
df['datetime'].apply(lambda x: x.strftime('%d%m%Y'))
r['hr']=r['timeStamp'].apply(lambda x: x.strftime('%H'))
r['hr']=r['hr'].apply(lambda x: int(x))
Resample
d.index = pd.DatetimeIndex(d.timeStamp)
t=d[d.title.str.match(r'EMS.*')]
k=d[['title','e']].resample('3000T', how=[np.sum,np.mean,np.median, len])
d.fillna(0, inplace=True)
Flatten z
[item for sublist in z for item in sublist]
def fcl(df, dtObj):
return df.iloc[np.argmin(np.abs(df.index.to_pydatetime() - dtObj))]
e=e.rename(columns = {'desc':'desc_orig'})
e['d']=e.index.to_pydatetime()
e['desc']=e['d'].apply(lambda x: fcl(t,x)['desc'])
Interesting:
>>> Series(['a', 'b', 'c']).str.cat(['A', 'B', 'C'], sep=',')
0 a,A
1 b,B
2 c,C
>>> Series(['a', 'b', 'c']).str.cat(sep=',')
'a,b,c'
>>> Series(['a', 'b']).str.cat([['x', 'y'], ['1', '2']], sep=',')
0 a,x,1
1 b,y,2
>>> Series(['a1', 'b2', 'c3']).str.extract('(?P<letter>[ab])(?P<digit>\d)')
letter digit
0 a 1
1 b 2
2 NaN NaN
| get_dummies(self, sep='|')
| Split each string in the Series by sep and return a frame of
| dummy/indicator variables.
|
| Parameters
| ----------
| sep : string, default "|"
| String to split on.
|
| Returns
| -------
| dummies : DataFrame
|
| Examples
| --------
| >>> Series(['a|b', 'a', 'a|c']).str.get_dummies()
| a b c
| 0 1 1 0
| 1 1 0 0
| 2 1 0 1
t=d[d.desc.str.match(r'.*RT309.*') & d.title.str.match(r'.*VEHICLE ACCIDENT.*')]
t=t[t.twp == 'UPPER DUBLIN']
t=d[d.desc.str.match(r'.**') & d.title.str.match(r'.*VEHICLE ACCIDENT.*')]
t=t[t.twp == 'UPPER DUBLIN']
t=d[d.desc.str.match(r'.*ASHBOURNE RD & PARK AVE.*') & d.twp.str.match(r'.*CHELTENHAM.*')]
t=t[t['timeStamp'] > '2016-01-01']
g=t.groupby(['title'])
g.sum()
CHELTENHAM:
p=montco.readCR()
d=montco.d
p.index = pd.DatetimeIndex(p.timeStamp)
c=p[(p.dtype == 'Traffic')]
c['d']=c.index.to_pydatetime()
m=d[(d.title == 'Traffic: VEHICLE ACCIDENT -') & (d.twp == 'CHELTENHAM')]
def fcl(df, dtObj):
return df.iloc[np.argmin(np.abs(df.index.to_pydatetime() - dtObj))]
import numpy as np
c['daddr']=c['d'].apply(lambda x: fcl(m,x)['desc'])
c['dd']=c['d'].apply(lambda x: fcl(m,x)['timeStamp'])
c['dd']=c['dd'].apply(lambda x: datetime.datetime.strptime(x,'%Y-%m-%d %H:%M:%S'))
c['delta']=c['dd']-c['d']
c.to_csv('/Users/mchirico/c.csv',index=True,header=True)
p.to_csv('/Users/mchirico/p.csv',index=True,header=True)
Quick:
gg.sort_values(by='e',inplace=True)
date=now() - datetime.timedelta(days=7)
"""
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
# Working on mce site
from pytz import timezone
import pytz
eastern = timezone('US/Eastern')
import time
# now().strftime('%Y-%m-%d %H:%M:%S')
def nowf():
    """Current time (per now()) formatted as 'YYYY-mm-dd HH:MM:SS'."""
    return now().strftime('%Y-%m-%d %H:%M:%S')
def nowfd():
    """Current date (per now()) formatted as 'YYYY-mm-dd 00:00:00' (midnight)."""
    return now().strftime('%Y-%m-%d 00:00:00')
def now():
    """Return the current time.

    When the process clock is UTC (time.timezone == 0), convert to an aware
    US/Eastern datetime; otherwise return the naive local time unchanged.
    NOTE(review): callers therefore receive aware datetimes on UTC hosts and
    naive ones elsewhere — confirm downstream code tolerates both.
    """
    # Bug fix: this local was named `timezone`, shadowing the `timezone`
    # function imported from pytz at module level.
    tz_name = 'US/Eastern'
    native = datetime.datetime.now()
    if time.timezone == 0:
        return native.replace(tzinfo=pytz.utc).astimezone(pytz.timezone(tz_name))
    else:
        return native
# Reading in data
import requests
requests.packages.urllib3.disable_warnings()
import pandas as pd
import io
import datetime
def readE():
    """Fetch the ESLU event CSV from Google Cloud Storage and return a
    DataFrame of string columns, indexed by the combined date+time stamp.

    NOTE(review): verify=False disables TLS certificate verification for
    this download — presumably deliberate for this public bucket; confirm.
    """
    url="https://storage.googleapis.com/montco-stats/eslu.csv"
    d=requests.get(url,verify=False).content
    d=pd.read_csv(io.StringIO(d.decode('utf-8')),
        header=0,names=['eid', 'id','type','station','loc','mun','date','time','unit','status'],
        dtype={'eid':str,'id':str,'type':str,'station':str,'loc':str,
               'mun':str,'date':str,'time':str,'unit':str,'status':str})
    d=pd.DataFrame(d)
    # Strip the "key=" prefixes the upstream export leaves on each value.
    d['eid']=d['eid'].apply(lambda x: x.replace('eid=',''))
    d['id']=d['id'].apply(lambda x: x.replace('incidentno=',''))
    d['type']=d['type'].apply(lambda x: x.replace('incidenttype=',''))
    d['loc']=d['loc'].apply(lambda x: x.replace('location=',''))
    d['mun']=d['mun'].apply(lambda x: x.replace('mun=',''))
    d['station']=d['station'].apply(lambda x: x.replace('station=',''))
    # NOTE(review): attribute-style assignment only updates a real column if
    # 'timeStamp' already exists; otherwise pandas stores it as a plain
    # attribute (with a warning) — verify this column materializes.
    d.timeStamp=pd.DatetimeIndex(d.date+' '+d.time)
    d.index = d.timeStamp
    return d
# Read in the data
def readCR():
    """Fetch the Cheltenham crash-report CSV and return a DataFrame indexed
    by its parsed timestamp, with a constant event-count column ``e`` = 1.

    NOTE(review): verify=False disables TLS certificate verification — same
    caveat as readE().
    """
    url="https://storage.googleapis.com/montco-stats/cheltenham/cheltenhamCR2016.csv"
    d=requests.get(url,verify=False).content
    d=pd.read_csv(io.StringIO(d.decode('utf-8')),
        header=0,names=['dtype','timeStamp','addr','id','desc','agency','lat','lng'],
        dtype={'dtype':str,'timeStamp':str,'addr':str,'id':str,
               'desc':str,'agency':str,'lat':str,'lng':str})
    d=pd.DataFrame(d)
    # Source file uses 'MM/DD/YY HH:MM'; parse to datetime objects.
    d.timeStamp=d['timeStamp'].apply(lambda x: datetime.datetime.strptime(x,'%m/%d/%y %H:%M'))
    d.index = pd.DatetimeIndex(d.timeStamp)
    # Unit event count so groupby(...).sum() yields incident totals.
    d['e']=1
    return d
# Read in the data
def readTZ():
    """Fetch the 911-call CSV (lat/lng/desc/zip/title/timeStamp/twp/e) and
    return it as a DataFrame.

    NOTE(review): passing datetime.datetime as a read_csv dtype is unusual —
    pandas does not parse dates via dtype; the column likely ends up as
    object/str. Confirm whether parse_dates was intended. verify=False also
    disables TLS verification, as in the other readers.
    """
    url="https://storage.googleapis.com/montco-stats/tz.csv"
    d=requests.get(url,verify=False).content
    d=pd.read_csv(io.StringIO(d.decode('utf-8')),
        header=0,names=['lat', 'lng','desc','zip','title','timeStamp','twp','e'],
        dtype={'lat':str,'lng':str,'desc':str,'zip':str,
               'title':str,'timeStamp':datetime.datetime,'twp':str,'e':int})
    d=pd.DataFrame(d)
    return d
# Load the 911-call dataset at import time (triggers a network fetch — the
# module docstring notes this is deliberate) and index it by timestamp.
d=readTZ()
d.index = pd.DatetimeIndex(d.timeStamp)
def gg(d=d, title='title'):
    """Group events from the last 7 days by *title* and return the per-group
    sums as a DataFrame sorted descending by event count ``e``.

    Note: ``d`` defaults to the module-level dataset loaded at import time.
    """
    cutoff = (datetime.datetime.now() - datetime.timedelta(days=7)).strftime('%Y-%m-%d 00:00:00')
    recent = d[d.timeStamp >= cutoff]
    summed = recent.groupby(title).sum().reset_index()
    summed.sort_values(by='e', inplace=True, ascending=[0])
    return summed
def ggc(d=d, title='title'):
    """Like gg(), but restricted to rows whose township is CHELTENHAM:
    last-7-days events grouped by *title*, summed, sorted descending by ``e``.
    """
    cutoff = (datetime.datetime.now() - datetime.timedelta(days=7)).strftime('%Y-%m-%d 00:00:00')
    recent = d[(d.timeStamp >= cutoff) & (d.twp == 'CHELTENHAM')]
    summed = recent.groupby(title).sum().reset_index()
    summed.sort_values(by='e', inplace=True, ascending=[0])
    return summed
|
package io.opensphere.laf.dark.border;
import javax.swing.border.AbstractBorder;
import javax.swing.plaf.UIResource;
/**
 * Border for scroll panes in the dark look and feel. Deliberately paints
 * nothing; it exists so the UI defaults resolve to a {@link UIResource}
 * border that the look and feel can identify and replace.
 */
@SuppressWarnings("serial")
public class OSDarkLAFScrollPaneBorder extends AbstractBorder implements UIResource
{
    // Intentionally Blank
}
|
package org.paasta.container.platform.common.api.users;
import org.paasta.container.platform.common.api.common.Constants;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
* User Repository 인터페이스
*
* @author hrjin
* @version 1.0
* @since 2020.09.22
*/
@Repository
@Transactional
public interface UsersRepository extends JpaRepository<Users, Long>, JpaSpecificationExecutor<Users> {
@Query(value = "SELECT DISTINCT service_account_name FROM cp_users", nativeQuery = true)
List<String> getUsersNameList();
@Query(value = "SELECT user_id FROM cp_users WHERE namespace = :namespace", nativeQuery = true)
List<String> getUsersNameListByCpNamespaceOrderByCreatedDesc(@Param("namespace") String namespace);
List<Users> findAllByCpNamespace(String namespace, Sort sort);
List<Users> findAllByCpNamespaceAndUserIdContainingIgnoreCase(String namespace, String userId, Sort sort);
List<Users> findAllByUserIdOrderByCreatedDesc(String userId);
@Query(value = "SELECT * FROM cp_users WHERE user_id = :userId AND user_type != :clusterAdmin AND namespace = :namespace limit 1;", nativeQuery = true)
Users getOneUsersDetailByUserId(@Param("userId") String userId, @Param("namespace") String namespace, @Param("clusterAdmin") String clusterAdmin);
@Query(value = "SELECT * FROM cp_users WHERE user_id = :userId AND user_type = :clusterAdmin AND namespace = :namespace limit 1;", nativeQuery = true)
Users getOneUsersDetailByUserIdForAdmin(@Param("userId") String userId, @Param("namespace") String namespace, @Param("clusterAdmin") String clusterAdmin);
@Query(value =
"select * from (" +
"select id, user_id, user_auth_id, service_account_name, namespace, role_set_code, user_type, created" +
" , (select case when count(user_id) > 0 " +
" then 'Y'" +
" else 'N' end " +
"from cp_users " +
"where namespace = :namespace" +
" and user_id = cu.user_id) as display_yn" +
" from cp_users cu" +
" where id in (select id" +
" FROM cp_users cu" +
" where namespace = :namespace" +
" UNION all" +
" SELECT max(id) AS id" +
" FROM cp_users cu" +
" WHERE NOT EXISTS (SELECT '1'" +
" FROM cp_users a" +
" WHERE namespace = :namespace" +
" AND cu.user_id = a.user_id)" +
" GROUP BY user_id)" +
" ) cp where user_id in (select distinct user_id from cp_users where namespace = :defaultNamespace)" +
" order by created desc;", nativeQuery = true)
List<Object[]> findAllUsers(@Param("namespace") String namespace, @Param("defaultNamespace") String defaultNamespace);
Users findByCpNamespaceAndUserId(String namespace, String userId);
void deleteByCpNamespaceAndUserId(String namespace, String userId);
@Query(value = "SELECT * FROM cp_users WHERE cluster_name = :cluster AND namespace = :namespace AND user_type ='" + Constants.AUTH_NAMESPACE_ADMIN + "'limit 1;", nativeQuery = true)
Users findAllByClusterNameAndCpNamespace(@Param("cluster") String cluster, @Param("namespace") String namespace);
@Query(value = "SELECT * FROM cp_users WHERE cluster_name = :cluster AND user_id = :userId AND namespace NOT IN (:defaultNamespace)", nativeQuery = true)
List<Users> findAllByClusterNameAndUserId(@Param("cluster") String cluster, @Param("userId") String userId, @Param("defaultNamespace") String defaultNamespace);
@Query(value = "SELECT * FROM cp_users WHERE cluster_name = :cluster AND user_id = :userId AND user_type ='" + Constants.AUTH_CLUSTER_ADMIN + "'limit 1;", nativeQuery = true)
Users findByClusterNameAndUserIdAndUserType(@Param("cluster") String cluster, @Param("userId") String userId);
@Query(value = "select * from cp_users where namespace = :namespace " +
"and user_id not in (select distinct(user_id) from cp_users where namespace != :namespace) " +
"and user_id not in (select distinct(user_id) from cp_users where user_type = :clusterAdmin) " +
"and user_id like %:searchParam% " +
"order by created desc",nativeQuery = true)
List<Users> findByOnlyTempNamespaceUser(@Param("namespace") String namespace, @Param("searchParam") String searchParam, @Param("clusterAdmin") String clusterAdmin);
List<Users> findAllByUserType(String userType);
void deleteAllByUserType(String userType);
void deleteAllByUserIdAndUserType(String userId, String userType);
List<Users> findAllByCpNamespaceAndUserIdAndUserType(String namespace, String userId, String userType);
List<Users> findAllByCpNamespaceAndUserId(String namespace, String userId);
@Query(value =
"select A.id, A.user_id, A.user_auth_id, A.service_account_name, A.namespace, A.user_type, A.role_set_code, B.created" +
" from "+
" (select * from cp_users where namespace != :namespace and user_id not in (select distinct(user_id) from cp_users where user_type = :clusterAdmin) ) A ," +
" (select * from cp_users where namespace = :namespace and user_id not in (select distinct(user_id) from cp_users where user_type = :clusterAdmin) ) B" +
" where A.user_id = B.user_id" +
" and A.user_auth_id = B.user_auth_id" +
" and A.user_id like %:searchParam%" +
" order by B.created desc" ,nativeQuery = true)
List<Object[]> findAllByUserMappingNamespaceAndRole(@Param("namespace") String namespace, @Param("searchParam") String searchParam, @Param("clusterAdmin") String clusterAdmin);
@Query(value =
"select A.id, A.user_id, A.user_auth_id, A.service_account_name, A.namespace, A.user_type, A.role_set_code, A.service_account_secret, A.cluster_name, A.cluster_api_url, A.cluster_token, B.created" +
" from" +
" (select * from cp_users where namespace != :namespace and user_id = :userId) A ," +
" (select * From cp_users where namespace = :namespace and user_id = :userId and user_type = :userType) B" +
" where A.user_id = B.user_id order by A.namespace" ,nativeQuery = true)
List<Object[]> findAllByUserMappingNamespaceAndRoleDetails(@Param("namespace") String namespace, @Param("userId") String userId, @Param("userType") String userType);
/**
 * All rows with the given user_type whose user_id contains searchParam.
 * NOTE(review): '%:searchParam%' relies on Spring Data's LIKE expansion for
 * native queries — confirm against the Spring Data version in use.
 */
@Query(value = "select * from cp_users where user_type = :userType and user_id like %:searchParam%" ,nativeQuery = true)
List<Users> findAllByUserTypeAndLikeUserId(@Param("userType") String userType, @Param("searchParam") String searchParam);
/**
 * For every user in defaultNamespace, reports whether they also have a row of
 * the given user_type in searchNamespace. Returned columns (as Object[]):
 * user_id, user_auth_id, is_nsadmin ('Y' when matched, otherwise 'N').
 */
@Query(value =
"select a.user_id, a.user_auth_id, if(isnull(b.user_id), 'N', 'Y') as is_nsadmin" +
" from" +
" (select user_id, user_auth_id from cp_users where namespace = :defaultNamespace group by user_id) a" +
" left join" +
" (select distinct user_id from cp_users where namespace = :searchNamespace and user_type = :userType) b" +
" on a.user_id = b.user_id" ,nativeQuery = true)
List<Object[]> findNamespaceAdminCheck(@Param("defaultNamespace") String defaultNamespace, @Param("searchNamespace") String searchNamespace,
@Param("userType") String userType);
/** Delete the row(s) matching user id, auth id and namespace. */
void deleteAllByUserIdAndUserAuthIdAndCpNamespace(String userId, String userAuthId, String namespace);

/** Delete every row belonging to the given namespace. */
void deleteAllByCpNamespace(String namespace);
}
|
<reponame>jamesavery/spiralcode-reference<filename>libgraph/planargraph.cc
#include <queue>
#include "spiral.hh"
#include "planargraph.hh"
#include "triangulation.hh"
#include "cubicgraph.hh"
using namespace std;
// Construct a planar graph from a spiral name, dispatching on the
// construction scheme encoded in the nomenclature.
PlanarGraph::PlanarGraph(const spiral_nomenclature &fsn){
  switch(fsn.construction_scheme){
  case spiral_nomenclature::CS_NONE:
    // A spiral name without a construction scheme cannot be realized;
    // treat as a programming error.
    assert(false);
    break;
  case spiral_nomenclature::CUBIC:
    // The name describes the cubic graph directly.
    *this = CubicGraph(fsn);
    break;
  case spiral_nomenclature::TRIANGULATION:
    // The name describes the triangulation directly.
    *this = Triangulation(fsn);
    break;
  case spiral_nomenclature::LEAPFROG:
    // The name describes a leapfrog transform; build the triangulation and
    // invert the leapfrog to recover this graph.
    Triangulation T(fsn);
    *this = T.inverse_leapfrog_dual();
    break;
  }
}
// Every polyhedral graph G can be represented by a triangulation.
// 1. If G is a triangulation, it is G
// 2. If G is cubic, it is its dual
// 3. If G is non-cubic and non-triangulation, it is G's leapfrog dual
//
// On return, 'scheme' records which of the three cases applied, so the
// original graph can be reconstructed from the returned triangulation.
PlanarGraph PlanarGraph::enveloping_triangulation(construction_scheme_t &scheme) const
{
  if(is_triangulation()){
    scheme = spiral_nomenclature::TRIANGULATION;
    return *this;
  } else if(is_cubic()){
    scheme = spiral_nomenclature::CUBIC;
    return dual_graph();
  } else {
    scheme = spiral_nomenclature::LEAPFROG;
    return leapfrog_dual();
  }
}
// True iff every vertex of the graph has degree exactly 3.
bool PlanarGraph::is_cubic() const {
  bool cubic = true;
  for(node_t u=0; cubic && u<N; ++u)
    cubic = (neighbours[u].size() == 3);
  return cubic;
}
bool PlanarGraph::is_triangulation() const { // NB: A bit expensive
vector<face_t> faces(compute_faces());
for(int i=0;i<faces.size();i++) if(faces[i].size() != 3) return false;
return true;
}
// Check whether the graph is a fullerene: a planar cubic graph whose faces
// are exactly 12 pentagons and F-12 hexagons (Euler: F = 2 + E - N).
// When 'verbose' is set, the reason for failure is printed to stdout.
bool PlanarGraph::is_a_fullerene(bool verbose) const {
  if(!is_cubic()){
    if(verbose) fprintf(stdout,"Graph is not cubic.\n");
    return false;
  }

  // Faces up to hexagons; larger faces would disqualify the graph anyway.
  vector<face_t> faces(compute_faces(6,true));

  int n_faces = faces.size();
  int n_edges = count_edges();

  // Expected counts for a planar cubic graph.
  const int E = 3*N/2;
  const int F = 2+E-N;

  if(E != n_edges){
    if(verbose) fprintf(stdout,"Graph is not planar cubic: wrong number of edges: %d != %d\n",n_edges,E);
    return false;
  }

  if(F != n_faces){
    if(verbose){
      fprintf(stdout,"Graph is not planar cubic: wrong number of faces: %d != %d\n",n_faces,F);
      cout << "faces = " << faces << ";\n";
    }
    return false;
  }

  // Count pentagons and hexagons.
  int Np=0, Nh=0;
  for(const face_t &f: faces){
    if(f.size()==5) Np++;
    if(f.size()==6) Nh++;
  }

  if(Np != 12){
    if(verbose) fprintf(stdout,"Graph is not fullerene: wrong number of pentagons: %d != 12\n",Np);
    return false;
  }

  if(Nh != (F-12)){
    if(verbose) fprintf(stdout,"Graph is not fullerene: wrong number of hexagons: %d != %d\n",Nh,F-12);
    return false;
  }

  return true;
}
// The following is a naive approach that iterates over all pairs of edges
// (O(E^2)). For some purposes it would be sufficient to ensure that each face
// intersects itself an even number of times (while figures of eight are
// problematic).
//
// Returns true when no two vertex-disjoint edges of the 2D layout cross;
// the first crossing pair found is reported on stderr.
bool PlanarGraph::layout_is_crossingfree() const
{
  assert(layout2d.size() == N);
  set<edge_t> es = undirected_edges(); // TODO: In new planargraph, this is unnecessary
  for (set<edge_t>::iterator e1(es.begin()); e1!=es.end(); e1++){
    for (set<edge_t>::iterator e2(e1); e2!=es.end(); e2++){
      if (e1->first == e2->first || e1->second == e2->first || e1->first == e2->second || e1->second == e2->second) continue; // equal edges and edges that share a vertex
      // Endpoint coordinates of both edges.
      const double e1ax = layout2d[e1->first].first,
                   e1ay = layout2d[e1->first].second,
                   e1bx = layout2d[e1->second].first,
                   e1by = layout2d[e1->second].second,
                   e2ax = layout2d[e2->first].first,
                   e2ay = layout2d[e2->first].second,
                   e2bx = layout2d[e2->second].first,
                   e2by = layout2d[e2->second].second;
      // Line through edge 1: y = a1*x + b1.
      // NOTE(review): a perfectly vertical edge makes this division by zero
      // (inf/NaN slope) — confirm layouts never produce exactly vertical edges.
      const double a1 = (e1ay - e1by)/(e1ax - e1bx);
      const double b1 = e1ay - a1 * e1ax;
      if ((e2ay > a1*e2ax+b1 && e2by > a1*e2bx+b1) || (e2ay < a1*e2ax+b1 && e2by < a1*e2bx+b1)) continue; // both points of the second edge lie on the same side of the first edge
      // Line through edge 2: y = a2*x + b2.
      const double a2 = (e2ay - e2by)/(e2ax - e2bx);
      const double b2 = e2ay - a2 * e2ax;
      if ((e1ay > a2*e1ax+b2 && e1by > a2*e1bx+b2) || (e1ay < a2*e1ax+b2 && e1by < a2*e1bx+b2)) continue; // both points of the first edge lie on the same side of the second edge
      cerr << "edges " << *e1 << " and " << *e2 << " intersect." << endl;
      return false;
    }
  }
  return true;
}
// Checks if the planar graph stays connected after removing v. This function
// implies and relies on the condition that the graph has at most one face
// larger than a triangle. If there is more than one face larger than a
// triangle, the function may return 'false' even though the correct answer is
// 'true'.
bool PlanarGraph::is_cut_vertex(const node_t v) const {
  assert(is_oriented); // we need oriented (sorted) neighbours of v (direction doesn't matter)
  const vector<node_t> &nv = neighbours[v];
  const int n_neighbours = nv.size();

  // A vertex of degree 0 or 1 can never be a cut vertex.
  if(n_neighbours < 2) return false;

  // Count edges between cyclically adjacent neighbours of v, i.e. the
  // triangles incident to v.
  int n_edges = 0;
  for(int i=0; i<n_neighbours; i++){
    const int v1=nv[i], v2=nv[(i+1)%n_neighbours];
    // and by counting this way we don't count edges between non-neighbours,
    // thus avoid the separating triangle problem
    if( edge_exists(edge_t(v1,v2)) ){
      n_edges++;
    }
  }
  // in a ring of n vertices where each except one adjacent face are triangles,
  // the induced graph is connected exactly when there are at least n-1
  // triangles
  return n_edges < n_neighbours-1;
}
// Compute the planar dual: one dual vertex per face, with dual vertices
// adjacent when their faces share an edge.
//
// Fmax bounds the face size (a back-stop against corrupted graphs); when
// planar_layout is set and a 2D layout exists, the dual is given a layout
// placing each dual vertex at its face's centroid.
//
// Two code paths: an O(E) path for oriented graphs using directed-edge face
// representatives, and a legacy path for unoriented graphs.
PlanarGraph PlanarGraph::dual_graph(unsigned int Fmax, bool planar_layout) const
{
  if(is_oriented){
    // Each directed edge uniquely identifies a face
    vector<dedge_t> face_reps = compute_face_representations(Fmax);
    unordered_map<dedge_t,int> face_numbers(face_reps.size());
    // Assign each face (keyed by its minimal directed edge) a dual vertex id.
    for(int i=0;i<face_reps.size();i++) face_numbers[face_reps[i]] = i;

    PlanarGraph dual(face_numbers.size());

    for(const auto &ei: face_numbers){
      // e_f is minimal directed edge representation of face f, i_f is its face number.
      dedge_t e_f = ei.first; node_t i_f = ei.second;

      // Now iterate along face f's directed edges in CCW order:
      // This visits each face neighbour in CCW order.
      node_t u=e_f.first, v=e_f.second, w=-1, i=0;
      do {
        // e_g is MDE-representation of opposite face along edge e_f
        dedge_t e_g = get_face_representation({v,u},Fmax);
        dual.neighbours[i_f].push_back(face_numbers[e_g]);

        w = prev(v,u); u = v; v = w; // CCW node neighbour order + CCW face order
        assert(++i <= Fmax); // Face larger than Fmax or corrupted graph
      } while (u != e_f.first);
    }
    assert(dual.is_consistently_oriented());
    dual.is_oriented = true;

    // Dual layout: each dual vertex at the centroid of its face.
    if(planar_layout && layout2d.size() == N){
      dual.layout2d = vector<coord2d>(face_numbers.size());

      for(const auto &ei: face_numbers){
        dedge_t e_f = ei.first; node_t i_f = ei.second;
        face_t f = get_face_oriented(e_f,Fmax);
        dual.layout2d[i_f] = f.centroid(layout2d);
      }
    }

    return dual;
    // Proper implementation of general oriented planar graph dual ends
  } else {
    // TODO: ********** Get rid of all this junk below! ******************
    // Legacy path for unoriented graphs: derive faces from the layout and
    // connect faces that share an edge.
    IDCounter<face_t> face_numbers;
    PlanarGraph dual;
    set<edge_t> edge_set = undirected_edges(); // TODO: In new planargraph, this is unnecessary
    // Euler's formula gives the expected face count.
    int Nfaces = edge_set.size()-N+2;
    dual.N = Nfaces;
    dual.neighbours.resize(Nfaces);

    vector<face_t> allfaces = compute_faces(Fmax,planar_layout);

    if(Nfaces != allfaces.size()){
      fprintf(stderr,"%d != %d faces: Graph is not polyhedral.\n",Nfaces,int(allfaces.size()));
      cout << "errgraph = " << *this << endl;
    }

    // Construct mapping e -> faces containing e (these are mutually adjacent)
    map< edge_t, set<int> > facenodes;
    for(unsigned int i=0;i<allfaces.size(); i++){
      const face_t& face(allfaces[i]);
      for(unsigned int j=0;j<face.size();j++)
        facenodes[edge_t(face[j],face[(j+1)%face.size()])].insert(i);
    }

    // Every edge of a planar graph separates exactly two faces.
    for(map<edge_t,set<int> >::const_iterator fs(facenodes.begin());fs!=facenodes.end();fs++){
      const edge_t& e(fs->first);
      const set<int>& connects(fs->second);
      if(connects.size() != 2)
        fprintf(stderr,"Edge (%d,%d) connects %d faces: Graph is not planar.\n",e.first,e.second,int(connects.size()));
    }

    // Insert edge between each pair of faces that share an edge
    set<edge_t> dual_edges;
    for(set<edge_t>::const_iterator e(edge_set.begin()); e!= edge_set.end(); e++){
      const set<int>& adjacent_faces(facenodes[*e]);
      for(set<int>::const_iterator f(adjacent_faces.begin()); f!= adjacent_faces.end(); f++){
        set<int>::const_iterator g(f);
        for(++g; g!= adjacent_faces.end(); g++)
          dual_edges.insert(edge_t(*f,*g));
      }
    }

    dual = Graph(dual_edges);

    // If original graph was planar with 2D layout, there's a corresponding layout for the dual graph
    // (but it is not planar, because the outer face is placed in (0,0))
    if(planar_layout && layout2d.size() == N){
      dual.layout2d = vector<coord2d>(allfaces.size());
      for(int i=0;i<allfaces.size();i++)
        dual.layout2d[i] = allfaces[i].centroid(layout2d);
    }
    return dual;
  }
}
// The dual of the leapfrog, i.e. a triangulation, is returned: keep all
// original vertices and add one new vertex per face, connected to every
// vertex of that face in oriented order.
PlanarGraph PlanarGraph::leapfrog_dual() const
{
  assert(is_oriented);

  vector<face_t> faces = compute_faces_oriented();
  // N original vertices plus one centre vertex per face.
  PlanarGraph lf(Graph(N+faces.size(),true));

  // Start with all the existing nodes
  for(node_t u=0;u<N;u++) lf.neighbours[u] = neighbours[u];

  // Now connect new face-center nodes in oriented order
  for(int i=0;i<faces.size();i++){
    const face_t &f = faces[i];
    node_t c = N+i; // Face-center node

    for(int j=0;j<f.size();j++){
      // Insert edge v-c with u as the reference neighbour for orientation.
      node_t u = f[j], v = f[(j+1)%f.size()];
      lf.insert_edge(dedge_t{v,c},u,-1);
    }
  }
  return lf;
}
// Compute all faces of size <= Nmax. Dispatches to the oriented computation
// when the graph is oriented, or to the layout-based computation when a 2D
// layout is available; any other case is considered unreliable and aborts.
vector<face_t> PlanarGraph::compute_faces(unsigned int Nmax, bool planar_layout) const
{
  // TODO: This should supercede using the planar embedding for orientation
  if(is_oriented) return compute_faces_oriented();

  // TODO: Clean up.
  if(planar_layout && layout2d.size() == N) return compute_faces_layout_oriented();

  // TODO: This should never be used
  cerr << " Non-oriented face computation (loop search). This is not reliable!\n";
  abort();
  // NOTE(review): everything below is unreachable dead code (kept for
  // reference) — the abort() above always fires on this path.
  cerr << "This shouldn't happen but we'll accept it for now." << endl;

  // Legacy loop-search: for each edge (s,t) and each further neighbour u of
  // t, take the shortest cycle through s-t-u as a candidate face.
  set<edge_t> edge_set = undirected_edges();
  set<face_t> faces;
  for(const edge_t &e: edge_set){
    const node_t s = e.first, t = e.second;

    const vector<node_t>& nt(neighbours[t]);
    for(unsigned int i=0;i<nt.size();i++)
      if(nt[i] != s) {
        const node_t u = nt[i];
        face_t face(shortest_cycle(s,t,u,Nmax));
        if(face.size() > 0 && face.size() <= Nmax)
          faces.insert(face.normalized());
      }
  }

  // // Make sure that outer face is at position 0
  // if(planar_layout){
  //   if(outer_face.size() < 3)
  //     outer_face = find_outer_face();
  //   const set<node_t> of(outer_face.begin(),outer_face.end());
  //   for(int i=0;i<faces.size();i++){
  //     const face_t &f(faces[i]);
  //     const set<node_t> sf(f.begin(),f.end());
  //     if(of==sf){ // swap faces[i] with faces[0]
  //       faces[i] = faces[0];
  //       faces[0] = outer_face;
  //     }
  //   }
  // } else outer_face = face_t(faces[0]);
  vector<face_t> face_vector(faces.begin(),faces.end());

  return face_vector;
}
// Trace the face that starts with the directed edge s->t, using the 2D
// layout for orientation: at each vertex, continue along the unvisited edge
// with the largest signed angle relative to the incoming edge. Aborts if the
// walk cannot be closed (no face exists).
face_t PlanarGraph::get_face_layout_oriented(node_t s, node_t t) const
{
  face_t face;
  face.push_back(s);
  face.push_back(t);

  node_t u = s, v = t;
  while(v != s){
    const vector<node_t>& ns(neighbours[v]);
    // Direction back along the edge we arrived on.
    coord2d vu = layout2d[u]-layout2d[v];
    double angle_max = -M_PI;

    node_t w=-1;
    for(unsigned int i=0;i<ns.size();i++) {
      if(ns[i] != u) { // Find and use first unvisited edge in order of angle to u->v
        coord2d vw = layout2d[ns[i]]-layout2d[v];
        double angle = vu.line_angle(vw);
        if(angle>= angle_max){
          angle_max = angle;
          w = ns[i];
        }
      }
    }
    if(w == -1) abort(); // There is no face!

    u = v; v = w;
    if(w != s) face.push_back(w);
  }
  return face;
}
vector<face_t> PlanarGraph::compute_faces_layout_oriented() const
{
assert(layout2d.size() == N);
// cout << "Computing faces using 2D orientation." << endl;
set<dedge_t> workset;
set<edge_t> edge_set = undirected_edges();
vector<face_t> faces;
set<face_t> face_set;
for(set<edge_t>::const_iterator e(edge_set.begin()); e!= edge_set.end(); e++){
const node_t s = e->first, t = e->second;
workset.insert(dedge_t(s,t));
workset.insert(dedge_t(t,s));
}
// If layout is planar, outer face must exist and be ordered CW,
// rest of faces CCW. If layout is spherical / periodic, all faces
// should be ordered CCW.
if(outer_face.size() < 3)
outer_face = find_outer_face();
if(outer_face.size() < 3){
cerr << "Invalid outer face: " << outer_face << endl;
assert(outer_face.size() < 3);
}
for(node_t u=0;u<N;u++)
if(!outer_face.contains(u) && !outer_face.point_inside(layout2d,u)){
cerr << "Point " << u << "/" << layout2d[u] << " is outside outer face " << outer_face << endl;
for(int i=0;i<outer_face.size();i++) cerr << "\t" << layout2d[outer_face[i]] << endl;
cerr << "Winding number: " << outer_face.winding_number(layout2d,u) << endl;
abort();
}
// cout << "compute_faces_oriented: Outer face "<<outer_face<<" is OK: All vertices are inside face." << endl;
faces.push_back(outer_face);
// Add outer face to output, remove directed edges from work set
for(unsigned int i=0;i<outer_face.size();i++){
const node_t u = outer_face[i], v = outer_face[(i+1)%outer_face.size()];
// printf("Removing directed edge (%d,%d)\n",u,v);
workset.erase(dedge_t(u,v));
}
// Now visit every other edge once in each direction.
while(!workset.empty()){
dedge_t e = *workset.begin();
face_t face(get_face_layout_oriented(e.first,e.second));
face_set.insert(face);
for(int i=0;i<face.size();i++)
workset.erase(dedge_t(face[i],face[(i+1)%face.size()]));
}
copy(face_set.begin(), face_set.end(), std::back_inserter(faces));
return faces;
}
// Sort every vertex's neighbour list into clockwise order around that vertex,
// using the 2D layout: sort CCW by angle, then reverse to get CW.
// No-op if the graph is already oriented.
void PlanarGraph::orient_neighbours()
{
  if(is_oriented) return;
  assert(layout2d.size() == N);

  for(node_t u=0;u<N;u++){
    sort_ccw_point CCW(layout2d,layout2d[u]);
    sort(neighbours[u].begin(),neighbours[u].end(),CCW);
    reverse(neighbours[u].begin(),neighbours[u].end());
  }

  is_oriented = true;
}
// Triangulate the graph: compute the faces (up to size face_max) and
// triangulate the resulting face list.
vector<tri_t> PlanarGraph::triangulation(int face_max) const
{
  const vector<face_t> face_list = compute_faces(face_max);
  return triangulation(face_list);
}
// Triangulate a face list by inserting one new vertex per non-triangular
// face at its centroid and fanning out to the face's vertices. New vertices
// are numbered N+i for face i. If the faces already form a triangulation,
// they are returned (oriented) unchanged.
vector<tri_t> PlanarGraph::centroid_triangulation(const vector<face_t>& faces) const
{
  // Test whether faces already form a triangulation
  bool is_tri = true; for(int i=0;i<faces.size();i++) if(faces[i].size() != 3) is_tri = false;
  if(is_tri){
    vector<tri_t> tris(faces.begin(),faces.end());
    return orient_triangulation(tris);
  }

  // Triangulate by inserting extra vertex at face centroid and connecting
  // each face vertex to this midpoint.
  vector<tri_t> tris;
  for(int i=0;i<faces.size();i++){
    const node_t v_new = N+i;           // centroid vertex for face i
    const face_t& f(faces[i]);

    if(f.size() > 3)
      for(int j=0;j<f.size();j++)
        tris.push_back({f[j],v_new,f[(j+1)%f.size()]});
    else
      tris.push_back({f[0],f[1],f[2]}); // triangles are kept as-is
  }

  return tris; // TODO: Make sure triangulation is oriented.
  //return orient_triangulation(tris);
}
// Triangulate a face list by fanning each face from its first vertex
// (f[0], f[j], f[j+1]), then orienting the result consistently.
// If the faces already form a triangulation they are only oriented; mixed
// non-triangular faces are reported on stderr before being fanned.
vector<tri_t> PlanarGraph::triangulation(const vector<face_t>& faces) const
{
  // Test whether faces already form a triangulation
  bool is_tri = true; for(int i=0;i<faces.size();i++) if(faces[i].size() != 3) is_tri = false;
  if(is_tri){
    vector<tri_t> tris(faces.begin(),faces.end());
    return orient_triangulation(tris);
  } else {
    // Report which faces are non-triangular.
    for(int i=0;i<faces.size();i++)
      if(faces[i].size() != 3){
        fprintf(stderr,"Face %d has %d sides: ",i,int(faces[i].size())); cerr << faces[i] << endl;
      }
  }
  vector<tri_t> tris;

  // First, break up the faces into a non-consistent triangulation
  for(size_t i=0;i<faces.size();i++){
    face_t f(faces[i]);
    assert(f.size() >= 3);
    for(size_t j=1;j<f.size()-1;j++)
      tris.push_back(tri_t(f[0],f[j],f[j+1]));
  }

  return orient_triangulation(tris);
}
// Orient a triangulation consistently in place: fix the orientation of
// triangle 0 as given, then propagate to all other triangles so that every
// undirected edge is traversed exactly once in each direction. Aborts on
// non-orientable input; asserts consistency of the final result.
vector<tri_t>& PlanarGraph::orient_triangulation(vector<tri_t>& tris) const
{
  // Check that triangles are orientable: Every edge must appear in two faces
  map<edge_t,int> edgecount;
  for(int i=0;i<tris.size();i++)
    for(int j=0;j<3;j++){
      edgecount[edge_t(tris[i][j],tris[i][(j+1)%3])]++;
      if(edgecount[edge_t(tris[i][j],tris[i][(j+1)%3])]>2)
        cerr << tris[i] << " bad!\n";
    }

  for(map<edge_t,int>::const_iterator e(edgecount.begin()); e!=edgecount.end();e++)
    if(e->second != 2){
      cerr << "Triangulation not orientable: Edge "<< e->first << " appears in " << e->second <<" tris, not two.\n";
      cerr << "tris = " << tris << "+1;\n";
      cerr << "g = " << *this << ";\n";
      abort();
    }

  // Now, pick an orientation for triangle 0. We choose the one it
  // already has. This determines the orientation of the remaining triangles!
  map<dedge_t,bool> done;
  for(int i=0;i<3;i++){
    done[dedge_t(tris[0][i],tris[0][(i+1)%3])] = true;
  }

  // Process triangles breadth-first; a triangle not yet adjacent to any
  // processed one is pushed back onto the queue for later.
  queue<int> workset;
  for(int i=1;i<tris.size();i++) workset.push(i);

  while(!workset.empty()){
    int i = workset.front(); workset.pop();
    tri_t& t(tris[i]);

    // Is this triangle connected to any already processed triangle?
    bool seen = false, rev_seen = false;
    for(int j=0;j<3;j++){ seen |= done[dedge_t(t[j],t[(j+1)%3])]; rev_seen |= done[dedge_t(t[(j+1)%3],t[j])]; }

    if(!seen && !rev_seen) {
      workset.push(i);
      continue;
    }

    // A shared directed edge in the same direction means this triangle has
    // the opposite orientation: swap two vertices to flip it.
    if(seen){
      node_t u = t[2]; t[2] = t[1]; t[1] = u;
    }

    done[dedge_t(t[0],t[1])] = true;
    done[dedge_t(t[1],t[2])] = true;
    done[dedge_t(t[2],t[0])] = true;
  }

  // Check consistency of orientation. It is consistent if and only if
  // each edge has been used exactly once in each direction.
  bool consistent = true;
  set<edge_t> edge_set = undirected_edges();
  for(set<edge_t>::const_iterator e(edge_set.begin()); e!= edge_set.end(); e++){
    if(!done[dedge_t(e->first,e->second)]){
      fprintf(stderr,"A: Directed edge %d->%d is missing: triangulation is not consistently oriented.\n",e->first,e->second);
      consistent = false;
    }
    if(!done[dedge_t(e->second,e->first)]){
      fprintf(stderr,"B: Directed edge %d->%d is missing: triangulation is not consistently oriented.\n",e->second,e->first);
      consistent = false;
    }
  }
  if(!consistent){
    cerr << "(*** Inconsistent triangulation: ***)\n";
    cerr << "tris = {"; for(int i=0;i<tris.size();i++) cerr << tris[i] << (i+1<tris.size()? ", ":"};\n");
    cerr << "outerface = " << outer_face << ";\n";
  }

  assert(consistent == true);
  return tris;
}
// Finds the vertices belonging to the outer face in a symmetric planar
// layout centered at (0,0). Returns the face in CW order.
//
// Strategy: start from the vertex farthest from the origin, greedily walk to
// the farthest-out neighbour until the walk closes, then sort the collected
// vertices CW around their centroid.
face_t PlanarGraph::find_outer_face() const
{
  assert(layout2d.size() == N);

  // Distance of every vertex from the layout origin.
  vector<double> radii(N);
  node_t u_farthest = 0;
  double rmax = 0;
  for(node_t u=0;u<N;u++){
    radii[u] = layout2d[u].norm();
    if(radii[u] > rmax){ rmax = radii[u]; u_farthest = u; }
  }

  // Walk along outermost vertices; t is the previous vertex (so we never
  // immediately walk back), u the current one, v the next one chosen.
  // i counts steps: the outer face cannot contain more than N vertices.
  face_t outer_face;
  int i = 0;
  for(node_t t = u_farthest, u = u_farthest, v = -1; v != u_farthest && i <= N; i++){
    const vector<node_t>& ns(neighbours[u]);
    double r = 0;
    // FIX: the neighbour index previously shadowed the outer walk counter i,
    // which the closing assert depends on; renamed to j for clarity.
    for(int j=0;j<ns.size();j++)
      if(ns[j] != t && ns[j] != u && radii[ns[j]] > r){ r = radii[ns[j]]; v = ns[j]; }
    outer_face.push_back(u);
    t = u;
    u = v;
  }
  assert(i<N); // The walk must close before visiting every vertex.

  // Order CW: sort CCW around the centroid, then reverse.
  sort_ccw_point CCW(layout2d,outer_face.centroid(layout2d));
  sort(outer_face.begin(),outer_face.end(),CCW); // sort CCW
  reverse(outer_face.begin(),outer_face.end()); // reverse to get CW
  return outer_face;
}
// Euclidean length of every undirected edge in the 2D layout, in the
// iteration order of the edge set.
vector<double> PlanarGraph::edge_lengths() const
{
  assert(layout2d.size() == N);

  const set<edge_t> edges = undirected_edges();
  vector<double> lengths;
  lengths.reserve(edges.size());
  for(const edge_t &e: edges)
    lengths.push_back((layout2d[e.first]-layout2d[e.second]).norm());
  return lengths;
}
// Bounding-box extent (width, height) of the 2D layout.
coord2d PlanarGraph::width_height() const {
  double xmin =  INFINITY, ymin =  INFINITY;
  double xmax = -INFINITY, ymax = -INFINITY;
  for(node_t u=0; u<N; ++u){
    const double x = layout2d[u].first;
    const double y = layout2d[u].second;
    xmin = (x < xmin)? x : xmin;
    xmax = (x > xmax)? x : xmax;
    ymin = (y < ymin)? y : ymin;
    ymax = (y > ymax)? y : ymax;
  }
  return coord2d(xmax-xmin, ymax-ymin);
}
// Scale the layout: multiply every vertex coordinate componentwise by x.
void PlanarGraph::scale(const coord2d& x)
{
  for(node_t u=0; u<N; ++u)
    layout2d[u] *= x;
}
// Translate the layout: add the offset x to every vertex coordinate.
void PlanarGraph::move(const coord2d& x)
{
  for(node_t u=0; u<N; ++u)
    layout2d[u] += x;
}
// Stream the graph as a Mathematica-style Graph[...] expression with
// 1-based vertex indices; vertex coordinates are appended when a 2D layout
// of the right size is present.
ostream& operator<<(ostream& s, const PlanarGraph& g)
{
  set<edge_t> edge_set = g.undirected_edges();

  s << "Graph[Range["<<g.N<<"],\n\tUndirectedEdge@@#&/@{";
  for(set<edge_t>::const_iterator e(edge_set.begin()); e!=edge_set.end(); ){
    // +1: convert internal 0-based node ids to 1-based output.
    s << "{" << (e->first+1) << "," << (e->second+1) << "}";
    if(++e != edge_set.end())
      s << ", ";
    else
      s << "}";
  }

  if(g.layout2d.size() == g.N){
    s << ",\n\tVertexCoordinates->{";
    for(unsigned int i=0;i<g.N;i++){
      coord2d xy(g.layout2d[i]);
      s << xy << (i+1<g.N?", ":"}");
    }
  }
  s << "\n]";

  return s;
}
// **********************************************************************
// COMBINATORIAL PROPERTIES
// **********************************************************************
// DFS over the faces (entered via directed edges) that flips signs in
// 'matrix' so that each visited face ends up with an odd number of
// positively-signed CW edges, updating the per-face parity in 'faceSum'.
// NOTE(review): this appears to construct a Kasteleyn-style orientation for
// the perfect-matching count — confirm against the FKT algorithm.
//
// faceEdge: directed edge -> index of the face it belongs to.
// visited:  per-face flag so each face is processed once.
void perfmatch_dfs(map<dedge_t,int>& faceEdge, const vector<face_t>& faces,
		   map<dedge_t,int>& matrix, vector<bool>& faceSum, vector<bool>& visited, const dedge_t& e)
{
  // Face on the other side of e.
  int frev = faceEdge[reverse(e)];

  if(visited[frev]) return;
  visited[frev] = true;

  // Recurse into all faces adjacent to frev before fixing its parity.
  const face_t &f(faces[frev]);
  for(int i=0;i<f.size();i++)
    perfmatch_dfs(faceEdge,faces,matrix,faceSum,visited,dedge_t(f[i],f[(i+1)%f.size()]));

  // NB: How to handle outer face?
  if(!faceSum[frev]) { //not odd sum of CW edges
    // Flip the sign of e in both directions and toggle the parity of the
    // two faces sharing it.
    int fe = faceEdge[e];
    faceSum[frev] = !faceSum[frev];
    faceSum[fe]   = !faceSum[fe];
    matrix[e] *= -1;
    matrix[reverse(e)] *= -1;
  }
}
#ifdef HAS_LAPACK
#ifdef HAS_MKL
#include <mkl.h>
#else
extern "C" void dgetrf_(int *M, int *N, double *A, int *LDA, int *IPIV, int *INFO);
#endif
// |det(A)| of an N x N matrix via LAPACK LU factorization (dgetrf):
// the determinant magnitude is the absolute product of U's diagonal.
// A is copied so the caller's matrix is left untouched; the pivot signs are
// irrelevant since only the absolute value is returned.
double lu_det(const vector<double> &A, int N)
{
  int info = 0;
  // BUGFIX: the previous version allocated with new[] and released with
  // free(), which is undefined behavior. Use RAII vectors instead — this
  // also removes the manual memcpy and cannot leak on early exit.
  vector<double> result(A.begin(), A.begin()+N*N); // dgetrf_ factorizes in place
  vector<int> ipiv(N);

  dgetrf_(&N,&N, &result[0], &N, &ipiv[0], &info);

  double prod = 1.0;
  for(int i=0;i<N;i++) prod *= result[(N+1)*i]; // diagonal of U
  return fabs(prod);
}
// Count the perfect matchings of the graph.
// Builds a +/-1 signed adjacency matrix, adjusts the signs face-by-face via
// perfmatch_dfs, and returns round(sqrt(|det|)) of the resulting matrix.
// NOTE(review): this matches the structure of the FKT (Kasteleyn) method for
// planar graphs — confirm the sign convention before relying on exact counts.
size_t PlanarGraph::count_perfect_matchings() const
{
  map<dedge_t,int> faceEdge;
  vector<face_t> faces(compute_faces());
  vector<bool> faceSum(faces.size()), visited(faces.size());

  // Antisymmetric +/-1 entries for every edge.
  map<dedge_t,int> A;
  set<edge_t> edge_set = undirected_edges();
  for(set<edge_t>::const_iterator e(edge_set.begin()); e!=edge_set.end(); e++){
    A[*e] = 1;
    A[reverse(*e)] = -1;
  }

  // For each face record which face each directed edge belongs to, and the
  // initial parity of its positively-signed edges.
  for(int i=0;i<faces.size();i++){
    const face_t &f(faces[i]);
    for(int j=0;j<f.size();j++){
      const dedge_t e(f[j],f[(j+1)%f.size()]);
      faceEdge[e] = i;
      if(A[e] == 1) faceSum[i] = !faceSum[i];
    }
  }

  // Fix up the signs so every face has the required parity.
  perfmatch_dfs(faceEdge,faces,A,faceSum,visited,*edge_set.begin());

  // Densify and evaluate: #matchings = sqrt(|det(A)|).
  vector<double> Af(N*N);
  for(map<dedge_t,int>::const_iterator a(A.begin()); a!=A.end(); a++)
    Af[a->first.first*N+a->first.second] = a->second;

  return round(sqrtl(fabs(lu_det(Af,N))));
}
#else
// Fallback when compiled without LAPACK: the matching count needs a
// determinant, so we can only warn and return 0.
size_t PlanarGraph::count_perfect_matchings() const
{
  cerr << "count_perfect_matchings() requires LAPACK.\n";
  return 0;
}
#endif
// Initial 3D geometry guess: project the 2D layout onto the unit sphere,
// centre it on the centroid, and scale so the average edge length is
// 1.5*scalerad.
// NOTE(review): the averaging loop indexes neighbours[u][i] for i<3, i.e. it
// assumes a cubic graph — confirm callers only use this on cubic graphs.
vector<coord3d> PlanarGraph::zero_order_geometry(double scalerad) const
{
  assert(layout2d.size() == N);

  vector<coord2d> angles(spherical_projection());

  // Spherical projection: (theta,phi) -> unit-sphere Cartesian coordinates.
  vector<coord3d> coordinates(N);
  for(int i=0;i<N;i++){
    double theta = angles[i].first, phi = angles[i].second;
    double x = cos(theta)*sin(phi), y = sin(theta)*sin(phi), z = cos(phi);
    coordinates[i] = coord3d(x,y,z);
  }

  // Move to centroid
  coord3d cm;
  for(node_t u=0;u<N;u++) cm += coordinates[u];
  cm /= double(N);
  coordinates -= cm;

  // Scale spherical projection so the mean edge length becomes 1.5*scalerad.
  double Ravg = 0;
  for(node_t u=0;u<N;u++)
    for(int i=0;i<3;i++) Ravg += (coordinates[u]-coordinates[neighbours[u][i]]).norm();
  Ravg /= (3.0*N);

  coordinates *= scalerad*1.5/Ravg;

  return coordinates;
}
// In an oriented planar graph, the directed edge starting in the smallest
// node is a unique representation of the face. Walk around the face that
// edge e belongs to and return that minimal directed edge.
dedge_t PlanarGraph::get_face_representation(dedge_t e, int Fmax) const
{
  assert(is_oriented);
  int i=0;
  dedge_t e_min = e;
  node_t u = e.first, v = e.second;
  while(v!=e.first){
    node_t w = prev(v,u); // Previous neighbour in v following u in CCW (corner u-v-w in face)
    u=v; v=w;
    // Keep the directed edge whose start node is smallest seen so far.
    if(u<e_min.first) e_min = {u,v};
    assert(w != -1);
    assert(++i<=Fmax); // Fmax is a back-stop to avoid infinite loops in a corrupted graph
  }
  return e_min;
}
// In an oriented planar graph, the directed edge starting in the smallest
// node is a unique representation of the face. Collect one such
// representative per face by walking the face of every directed edge;
// duplicates collapse in the hash set.
vector<dedge_t> PlanarGraph::compute_face_representations(int Fmax) const
{
  assert(is_oriented);
  unordered_set<dedge_t> faces(2*count_edges());

  for(node_t u=0;u<N;u++)
    for(node_t v: neighbours[u]){
      // For each directed edge, find the representative edge of the specified face
      // and assign an identifier
      faces.insert(get_face_representation({u,v},Fmax));
    }
  return vector<dedge_t>(faces.begin(),faces.end());
}
// Walk around the face the directed edge e belongs to (using the graph's
// orientation) and return its vertices in traversal order, starting from
// e.first.
face_t PlanarGraph::get_face_oriented(const dedge_t &e, int Fmax) const
{
  assert(is_oriented);
  int i=0;
  node_t u = e.first, v=e.second;
  face_t f = vector<int>{{u}};
  while(v!=e.first){
    node_t w = prev(v,u); // Previous neighbour to u in v defines corner u-v-w in face
    f.push_back(v);
    u=v; v=w; i++;
    assert(w != -1);
    assert(i<=Fmax); // Fmax is a back-stop to avoid infinite loops in a corrupted graph
  }
  return f;
}
// All faces of an oriented graph: take one representative directed edge per
// face and expand each into its full vertex cycle.
vector<face_t> PlanarGraph::compute_faces_oriented(int Fmax) const
{
  const vector<dedge_t> reps = compute_face_representations(Fmax);
  vector<face_t> faces;
  faces.reserve(reps.size());
  for(const dedge_t &r: reps)
    faces.push_back(get_face_oriented(r,Fmax));
  return faces;
}
// Permutation of vertex numbers (ie, replace v by vertex_numbers[v], to get
// numbered vertices), where permutations are as returned by
// PG.leapfrog_dual().get_spiral().
// Locants are vertices that should have small vertex numbers (as far as
// permitted by symmetry-equivalent canonical spirals): among all candidate
// permutations, the one assigning the lexicographically smallest numbers to
// the locants (in order) is chosen.
vector<node_t> PlanarGraph::vertex_numbers(vector<vector<node_t>> &permutations, const vector<node_t> &locants) const{
  assert(!is_cubic());

  // Best inverse numbering found so far (INT_MAX sentinel = "worse than any").
  vector<node_t> vertex_numbers_inv(N,INT_MAX);

  for(int p=0; p<permutations.size(); p++){
    const vector<node_t> &perm=permutations[p];
    vector<node_t> vertex_numbers_tmp;
    // strip face-vertices, keep only vertex-vertices
    for(int i=0; i<perm.size(); i++){
      if(perm[i] < N) vertex_numbers_tmp.push_back(perm[i]);
    }
    assert(vertex_numbers_tmp.size() == N);
    //invert
    vector<node_t> vertex_numbers_inv_tmp(N);
    for(int i=0; i<vertex_numbers_tmp.size(); i++) vertex_numbers_inv_tmp[vertex_numbers_tmp[i]] = i;
    // copy to vertex_numbers_inv?
    if(locants.size()==0){
      // No locant preference: the first permutation wins.
      vertex_numbers_inv = vertex_numbers_inv_tmp;
      break;
    }
    // compare two vectors, but only at chosen positions
    for(int l=0; l<locants.size(); l++){
      if(vertex_numbers_inv_tmp[locants[l]] > vertex_numbers_inv[locants[l]]) break;
      if(vertex_numbers_inv_tmp[locants[l]] < vertex_numbers_inv[locants[l]]){
        vertex_numbers_inv = vertex_numbers_inv_tmp;
        break;
      }
    }
  }

  //invert
  vector<node_t> vertex_numbers(N);
  for(int i=0; i<vertex_numbers.size(); i++) vertex_numbers[vertex_numbers_inv[i]] = i;
  return vertex_numbers;
}
|
#!/usr/bin/env bash
# This script runs as root and prints a PiVPN (WireGuard) debug report with
# keys and the public host redacted.
setupVars="/etc/pivpn/wireguard/setupVars.conf"

if [ ! -f "${setupVars}" ]; then
    echo "::: Missing setup vars file!"
    exit 1
fi

# shellcheck disable=SC1090
source "${setupVars}"

echo -e "::::\t\t\e[4mPiVPN debug\e[0m\t\t ::::"
printf "=============================================\n"
echo -e "::::\t\t\e[4mLatest commit\e[0m\t\t ::::"
echo -n "Branch: "
git --git-dir /usr/local/src/pivpn/.git rev-parse --abbrev-ref HEAD
git --git-dir /usr/local/src/pivpn/.git log -n 1 --format='Commit: %H%nAuthor: %an%nDate: %ad%nSummary: %s'
printf "=============================================\n"
echo -e "::::\t \e[4mInstallation settings\e[0m \t ::::"
# Redact the public host/IP before printing the settings.
sed "s/$pivpnHOST/REDACTED/" < "${setupVars}"
printf "=============================================\n"
echo -e ":::: \e[4mServer configuration shown below\e[0m ::::"
cd /etc/wireguard/keys || exit
cp ../wg0.conf ../wg0.tmp
# Replace every key in the server configuration with just its file name
for k in *; do
    sed "s#$(<"$k")#$k#" -i ../wg0.tmp
done
cat ../wg0.tmp
rm ../wg0.tmp
printf "=============================================\n"
echo -e ":::: \e[4mClient configuration shown below\e[0m ::::"
EXAMPLE="$(head -1 /etc/wireguard/configs/clients.txt | awk '{print $1}')"

if [ -n "$EXAMPLE" ]; then
    cp ../configs/"$EXAMPLE".conf ../configs/"$EXAMPLE".tmp
    # Same key-masking pass for the example client configuration.
    for k in *; do
        sed "s#$(<"$k")#$k#" -i ../configs/"$EXAMPLE".tmp
    done
    sed "s/$pivpnHOST/REDACTED/" < ../configs/"$EXAMPLE".tmp
    rm ../configs/"$EXAMPLE".tmp
else
    echo "::: There are no clients yet"
fi

printf "=============================================\n"
# BUGFIX: the second banner line used the garbled sequence '\e\t[4m', which
# printed a literal '[4m' instead of starting the underline; it is '\t\e[4m'.
echo -e ":::: \t\e[4mRecursive list of files in\e[0m\t ::::\n::::\t\e[4m/etc/wireguard shown below\e[0m\t ::::"
ls -LR /etc/wireguard
printf "=============================================\n"
echo -e "::::\t\t\e[4mSelf check\e[0m\t\t ::::"
/opt/pivpn/self_check.sh "${VPN}"
printf "=============================================\n"
echo -e ":::: Having trouble connecting? Take a look at the FAQ:"
echo -e ":::: \e[1mhttps://docs.pivpn.io/faq\e[0m"
printf "=============================================\n"
echo -e ":::: \e[1mWARNING\e[0m: This script should have automatically masked sensitive ::::"
echo -e ":::: information, however, still make sure that \e[4mPrivateKey\e[0m, \e[4mPublicKey\e[0m ::::"
echo -e ":::: and \e[4mPresharedKey\e[0m are masked before reporting an issue. An example key ::::"
echo ":::: that you should NOT see in this log looks like this: ::::"
echo ":::: YIAoJVsdIeyvXfGGDDadHh6AxsMRymZTnnzZoAb9cxRe ::::"
printf "=============================================\n"
echo -e "::::\t\t\e[4mDebug complete\e[0m\t\t ::::"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.