text stringlengths 1 1.05M |
|---|
<filename>src/models/Tag.ts<gh_stars>0
/* eslint-disable no-underscore-dangle */
import { DataTypes, Model } from 'sequelize';
import { sequelize } from '../db';
/** Attribute shape of a row in the `tags` table. */
interface TagAttr {
  id: number;      // auto-increment surrogate key
  tag: string;     // tag text, prefix included (e.g. "#foo")
  type: string;    // tag prefix type; defaults to '#'
  posts: number;   // number of posts carrying this tag
}
class Tag extends Model implements TagAttr {
  public id!: number;

  public tag!: string;

  public type!: string;

  public posts!: number;

  public readonly createdAt!: Date;

  public readonly updatedAt!: Date;

  /**
   * Create the tag row if it does not exist yet, otherwise increment its
   * post counter.
   * @param {String} tag tag to be updated in table (prefix included)
   * @returns {void}
   */
  static async createUpdateTag(tag: string): Promise<void> {
    const type = '#';
    try {
      const existing = await Tag.findOne({ where: { tag } });
      if (!existing) {
        // Await so a rejection is caught below instead of becoming an
        // unhandled floating promise (the original fired-and-forgot both
        // calls, so the catch block could never see their failures).
        await Tag.create({ tag, type, posts: 1 });
      } else {
        await Tag.increment({ posts: 1 }, { where: { tag } });
      }
    } catch (e) {
      // Best-effort: tag bookkeeping must never fail the caller.
      // NOTE(review): findOne+create is racy under concurrent calls for the
      // same tag — consider upsert or relying on the unique constraint.
    }
  }
}
// Register the model on the shared sequelize instance, mapped to the
// pre-existing `tags` table (frozen name, no timestamp columns).
Tag.init({
  id: {
    type: DataTypes.INTEGER,
    autoIncrement: true,
    allowNull: false,
    primaryKey: true,
  },
  tag: {
    type: DataTypes.STRING,
    // NOTE(review): `primaryKey: true` here, combined with `id` also being a
    // primary key, makes sequelize treat (id, tag) as a composite primary
    // key. The `unique: true` below suggests `tag` was meant to be merely
    // unique — confirm which is intended.
    primaryKey: true,
    allowNull: false,
    unique: true,
  },
  type: {
    type: DataTypes.STRING,
    allowNull: false,
    defaultValue: '#',
  },
  posts: {
    type: DataTypes.INTEGER,
    allowNull: true,
  },
}, {
  sequelize,
  modelName: 'tags',
  freezeTableName: true,
  timestamps: false,
});
export { Tag, TagAttr };
|
#include <iostream>
#include <string>
#include <cctype>
/**
 * Check whether `directive` looks like a simple preprocessor directive:
 * '#', optional spaces, an alphabetic keyword, then optionally a single
 * parenthesised argument list — e.g. "#endif", "# define(x)".
 * Trailing whitespace (or a terminating '\n') is allowed.
 *
 * @param directive candidate directive text
 * @return true when the text matches the shape described above
 */
bool isValidPreprocessorDirective(const std::string& directive) {
    if (directive.empty() || directive[0] != '#') {
        return false;
    }
    // std::isspace / std::isalpha have undefined behaviour when fed a
    // negative plain char (e.g. bytes >= 0x80 on signed-char platforms);
    // widen through unsigned char first.
    auto isSpace = [](char c) { return std::isspace(static_cast<unsigned char>(c)) != 0; };
    auto isAlpha = [](char c) { return std::isalpha(static_cast<unsigned char>(c)) != 0; };

    size_t pos = 1;
    while (pos < directive.size() && isSpace(directive[pos])) {
        pos++;
    }
    if (pos == directive.size()) {
        return false; // No keyword after '#'
    }
    // Consume the alphabetic keyword, then any spaces after it.
    while (pos < directive.size() && isAlpha(directive[pos])) {
        pos++;
    }
    while (pos < directive.size() && isSpace(directive[pos])) {
        pos++;
    }
    if (pos == directive.size() || directive[pos] == '\n') {
        return true; // Valid preprocessor directive without parameters
    }
    if (directive[pos] == '(') {
        pos++;
        while (pos < directive.size() && directive[pos] != ')') {
            pos++;
        }
        if (pos == directive.size()) {
            return false; // Unmatched parenthesis
        }
        pos++; // consume ')'
    }
    while (pos < directive.size() && isSpace(directive[pos])) {
        pos++;
    }
    return pos == directive.size() || directive[pos] == '\n';
}
#!/bin/bash
# Build the mdbook into ../docs, preserving the CNAME file that GitHub Pages
# needs (mdbook build wipes the destination directory).
cd "$(dirname "$0")"
mkdir -p ../docs
if [ -f ../docs/CNAME ]; then
    mv ../docs/CNAME ./CNAME_bak
fi
mdbook build . --dest-dir ../docs/
# Restore the backup under its original name.
# (Fixed: the original tested for ./CNAME — which never exists here — and
# moved CNAME_bak into ../docs without renaming it back to CNAME, so the
# CNAME file was silently lost on every build.)
if [ -f ./CNAME_bak ]; then
    mv ./CNAME_bak ../docs/CNAME
fi
def find_dark_colors(colors):
    """Return the colors considered dark ('Black' or 'Purple'), in input order."""
    return [color for color in colors if color in ('Black', 'Purple')]


result = find_dark_colors(['Red', 'Orange', 'Green', 'Purple', 'Black', 'White'])
print(result)
package gofiql
import "testing"
// TestFindToken walks findToken through progressively nested FIQL
// expressions, checking the returned index and that the output buffer is the
// input with redundant surrounding parentheses trimmed.
func TestFindToken(t *testing.T) {
	// No grouping: expect index -1 and the buffer equal to the input.
	s := "name==Joe"
	i, b := findToken(&s)
	if i != -1 {
		t.Logf("Expected -1, got: %d", i)
		t.Fail()
	}
	bb := string(b)
	if string(b) != s {
		t.Logf("Expected same string in output, got: %s", bb)
		t.Fail()
	}
	// Top-level comma: a token is found (index != -1), buffer unchanged.
	s = "name==Joe,name=Tom"
	i, b = findToken(&s)
	if i == -1 {
		t.Logf("Expected != -1, got: %d", i)
		t.Fail()
	}
	bb = string(b)
	if string(b) != s {
		t.Logf("Expected same string in output, got: %s", bb)
		t.Fail()
	}
	// Fully parenthesised group: the single outer pair must be stripped.
	s = "(name==Joe,name=Tom)"
	i, b = findToken(&s)
	if i == -1 {
		t.Logf("Expected != -1, got: %d", i)
		t.Fail()
	}
	bb = string(b)
	if string(b) == s {
		t.Logf("Expected different string in output, got: %s", bb)
		t.Fail()
	}
	s = s[1 : len(s)-1]
	if string(b) != s {
		t.Logf("Expected string trimmed of (), got: %s", bb)
		t.Fail()
	}
	// Triple-nested redundant parentheses: all three pairs stripped.
	s = "(((name==Joe,name=Tom)))"
	i, b = findToken(&s)
	if i == -1 {
		t.Logf("Expected != -1, got: %d", i)
		t.Fail()
	}
	bb = string(b)
	if string(b) == s {
		t.Logf("Expected different string in output, got: %s", bb)
		t.Fail()
	}
	s = s[3 : len(s)-3]
	if string(b) != s {
		t.Logf("Expected string trimmed of (), got: %s", bb)
		t.Fail()
	}
	// Mixed nesting: only the two outermost redundant pairs can be stripped.
	s = "(((((name==Joe,name=Tom)));qty=gt=10))"
	i, b = findToken(&s)
	if i == -1 {
		t.Logf("Expected != -1, got: %d", i)
		t.Fail()
	}
	bb = string(b)
	if string(b) == s {
		t.Logf("Expected different string in output, got: %s", bb)
		t.Fail()
	}
	s = s[2 : len(s)-2]
	if string(b) != s {
		t.Logf("Expected string trimmed of (), got: %s", bb)
		t.Fail()
	}
}
|
<reponame>wesleyskap/directlog<filename>lib/directlog/base.rb
module Directlog
  # Base class for Directlog SOAP resources: class-level resource/collection
  # naming plus the Savon client plumbing shared by all subclasses.
  class Base
    attr_reader :response

    class << self
      # Generate class-level reader/writer pairs for resource_name and
      # collection_name.
      %w(resource_name collection_name).each do |attribute|
        define_method "#{attribute}=" do |param|
          instance_variable_set "@#{attribute}", param
        end
        define_method "#{attribute}" do
          instance_variable_get "@#{attribute}"
        end
      end

      # Defaults to a naively pluralized resource name ("#{resource_name}s").
      def collection_name
        @collection_name ||= "#{resource_name}s"
      end

      # Perform a SOAP call with credentials merged into the message, wrapped
      # in a Response object.
      def call(method, params)
        Response.new method, api.call(method, message: params.merge(authentication))
      end

      # Build an instance holding the response of the given call.
      def create(method, params)
        new response: call(method, params)
      end
    end

    # Store each params entry in an ivar and define a reader for it.
    def initialize(params)
      params.each do |key, value|
        instance_variable_set "@#{key}", value
        define_singleton_method(key) { instance_variable_get "@#{key}" }
      end
    end

    private

    # NOTE(review): `private` above does NOT affect `def self.` methods — the
    # class methods below remain public. Use `private_class_method` if they
    # were meant to be hidden.
    def self.api
      Savon.client wsdl: "#{endpoint}/wsdirectlog.asmx?wsdl", log: Directlog.config['ws']['log'], read_timeout: 120, open_timeout: 120, log_level: :debug do
        convert_request_keys_to :camelcase
      end
    end

    def self.endpoint
      Directlog.config['ws']['endpoint']
    end

    # Credentials hash merged into every request message.
    def self.authentication
      { 'login' => Directlog.config['ws']['login'], 'password' => Directlog.config['ws']['password'] }
    end
  end
end
|
#!/bin/sh
# Publish a PDP subgraph via the ifmapcli tools.
# Requires IFMAPCLI to point at the directory containing the ifmapcli jars.
# (Fixed: quote "$IFMAPCLI" in the test — unquoted, a value containing
# spaces breaks the [ -z ... ] expression.)
if [ -z "$IFMAPCLI" ]; then
  echo "set IFMAPCLI environment with 'export IFMAPCLI=/path/to/ifmapcli/jars'"
  exit 1
fi
# Intentionally used unquoted below so "java -jar" splits into words.
COMMAND="java -jar $IFMAPCLI"
################################################################################
IP_ADDRESS=10.0.0.1
MAC_ADDRESS=ee:ee:ee:ee:ee:ee
USERNAME=joe
echo "publish pdp subgraph"
$COMMAND/pdp.jar update $USERNAME $IP_ADDRESS $MAC_ADDRESS > /dev/null
<filename>src/Lista.java
/**
 * Circular doubly-linked list of doctors, kept sorted by ascending ID.
 * After every mutation the head/tail links are re-closed into a circle.
 */
public class Lista {
    private NodoLista cabeza; // first node of the list
    private NodoLista ultimo; // last node of the list

    /**
     * Inserts a doctor into the list, keeping ascending ID order.
     * NOTE(review): no branch ever stores {@code p} in the newly created
     * node — there is no visible setDato(p) call — so the payload appears to
     * be lost. Confirm against NodoLista's constructor/behavior.
     */
    public void insertar(InfoDoctor p) {
        if (cabeza == null) { // empty list: new node becomes head and tail
            cabeza = new NodoLista();
            ultimo = cabeza;
        } else if (p.getID() < cabeza.getDato().getID()) { // new node goes before the head
            cabeza.setBack(new NodoLista());
            cabeza.getBack().setNext(cabeza);
            cabeza = cabeza.getBack();
        } else if (p.getID() >= ultimo.getDato().getID()) { // new node goes at the end
            ultimo.setNext(new NodoLista());
            ultimo.getNext().setBack(ultimo);
            ultimo = ultimo.getNext();
        } else { // new node goes at an intermediate position
            NodoLista aux = cabeza; // walk from the head
            while (aux.getNext().getDato().getID() < p.getID()) { // find the insertion point
                aux = aux.getNext();
            }
            NodoLista temp = new NodoLista(); // create the new node at the found spot
            temp.setNext(aux.getNext()); // link temp to the successor
            temp.getNext().setBack(temp);
            aux.setNext(temp);
            temp.setBack(aux);
        }
        // re-close the circle between head and tail
        cabeza.setBack(ultimo);
        ultimo.setNext(cabeza);
    }

    /**
     * Removes the doctor with the given id, if present.
     * NOTE(review): several edge cases look broken — confirm and fix:
     * called on an empty list, the final cabeza.setBack(...) throws NPE;
     * removing the only element leaves cabeza pointing at itself (nothing is
     * removed); removing the tail node does not update {@code ultimo}.
     */
    public void elimina(int id) {
        if (cabeza != null) {
            if (cabeza.getDato().getID() == id) { // removing the head
                cabeza = cabeza.getNext();
                ultimo.setNext(cabeza);
            } else {
                NodoLista aux = cabeza;
                // advance while the next id is still smaller (bounded by the tail's id)
                while (aux.getNext() != null && aux.getNext().getDato().getID() < id && id <= ultimo.getDato().getID()) {
                    aux = aux.getNext();
                }
                if (aux.getNext() != null && aux.getNext().getDato().getID() == id) {
                    aux.setNext(aux.getNext().getNext());
                }
                if (id > ultimo.getDato().getID()) {
                    System.out.println("Este id no existe");
                }
            }
        }
        cabeza.setBack(ultimo);
        ultimo.setNext(cabeza);
    }

    /** Renders the list front-to-back; stops when the walk wraps back to the head. */
    @Override
    public String toString() {
        NodoLista aux = cabeza;
        String s = "Lista: \n";
        if (aux != null) {
            s += aux + "\n ";
            aux = aux.getNext();
            while (aux != cabeza) {
                s += aux + "\n ";
                aux = aux.getNext();
            }
        } else {
            s += "vacia";
        }
        return s;
    }
}
|
#!/usr/bin/env bash
###
# integration-runner.sh
#
# A basic integration test runner using docker-compose
###
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
##
# TEST_MODE
# Options:
# - default runs the tests as usual
# - wait sets up the docker-compose environment, but don't do anything (this allows for repeat tests)
# - rm same as default, but stops and removes docker-compose containers afterwards
###
TEST_MODE="${TEST_MODE:-"default"}"
# Controls where Jest places test outputs inside of `als_account-lookup-service-int` container
JEST_JUNIT_OUTPUT_DIR="${JEST_JUNIT_OUTPUT_DIR:-"/tmp"}"
JEST_JUNIT_OUTPUT_NAME="${JEST_JUNIT_OUTPUT_NAME:-"junit.xml"}"
# Test output on host machine
RESULTS_DIR="${RESULTS_DIR:-"/tmp"}"
# Bring up the full docker-compose environment (base + integration overlay)
# in detached mode.
function startDocker() {
  # Quote the compose file paths so a DIR containing spaces doesn't split.
  docker-compose \
    -f "${DIR}/../docker-compose.yml" \
    -f "${DIR}/../docker-compose.integration.yml" \
    up -d
}
# Poll `docker ps` until the expected number of services report healthy,
# bailing out early if fewer than the expected als_* services are running.
function waitForDocker() {
  echo 'Waiting for docker services to be healthy'
  EXPECTED_HEALTHY_COUNT=5
  EXPECTED_SERVICE_COUNT=6

  # (Removed the unused HEALTHY_COUNT variable; the loop re-queries each pass.)
  while [ "$(docker ps | grep -c "healthy")" -lt ${EXPECTED_HEALTHY_COUNT} ]; do
    # Fixed pattern: the regex "als_*" matched "als" plus zero-or-more
    # underscores; "als_" matches the intended container-name prefix.
    TOTAL_SERVICES=$(docker ps | grep -c "als_")
    # exit early if we don't have the required services
    if [ "${TOTAL_SERVICES}" -lt "${EXPECTED_SERVICE_COUNT}" ]; then
      echo 'Not all docker-compose services are running. Check the logs and try again.'
      exit 1
    fi
    echo "."
    sleep 5
  done
}
# Run DB migrations inside the service container before testing.
function runMigration() {
  docker exec -it als_account-lookup-service sh -c "npm run migrate"
}

# Execute the integration suite inside the test container, pointing
# jest-junit at JEST_JUNIT_OUTPUT_DIR/JEST_JUNIT_OUTPUT_NAME.
function runTests() {
  docker exec -it als_account-lookup-service-int sh -c "JEST_JUNIT_OUTPUT_DIR=${JEST_JUNIT_OUTPUT_DIR} JEST_JUNIT_OUTPUT_NAME=${JEST_JUNIT_OUTPUT_NAME} npm run test:int"
}

# Copy the junit results out of the test container into host RESULTS_DIR.
function copyResults() {
  echo "Copying results from: ${JEST_JUNIT_OUTPUT_DIR}/${JEST_JUNIT_OUTPUT_NAME} to: ${RESULTS_DIR}"
  docker cp als_account-lookup-service-int:${JEST_JUNIT_OUTPUT_DIR}/${JEST_JUNIT_OUTPUT_NAME} ${RESULTS_DIR}
}

# Stop and then force-remove all docker-compose containers.
function tearDown() {
  docker-compose \
    -f ${DIR}/../docker-compose.yml \
    -f ${DIR}/../docker-compose.integration.yml \
    stop
  docker-compose \
    -f ${DIR}/../docker-compose.yml \
    -f ${DIR}/../docker-compose.integration.yml \
    rm -f
}
startDocker
waitForDocker
runMigration
# Dispatch on TEST_MODE (documented in the header): default / wait / rm.
case ${TEST_MODE} in
  default)
    runTests
    EXIT_RESULT=$?   # capture test status before copyResults clobbers $?
    copyResults
    exit ${EXIT_RESULT}
    ;;
  wait)
    # Leaves the environment running without executing tests (repeat runs).
    echo 'Running tests in `wait` mode'
    ;;
  rm)
    runTests
    EXIT_RESULT=$?
    copyResults
    tearDown
    exit ${EXIT_RESULT}
    ;;
  *)
    echo "Unsupported TEST_MODE: ${TEST_MODE}"
    exit 1
esac
|
Dictionaries are used in Python to store data in the form of key-value pairs, where each key is associated with a specific value. Dictionaries can be used for a variety of tasks including representing complex data, creating lookup tables, and organizing information. Dictionaries themselves are mutable: entries can be added, removed, or have their values changed at any point in the program, although each key must be a hashable (typically immutable) object. Finally, dictionaries offer efficient average constant-time lookups, which makes them well suited to storing and retrieving large amounts of data.
-- Promote the rebuilt archive2 table to replace the old archive view,
-- then add the new ar_len integer column.
DROP VIEW archive;
ALTER TABLE archive2 RENAME TO archive;
ALTER TABLE archive ADD ar_len INTEGER;
|
#include <iostream>
#include <string>
// Minimal tracing helper: writes a "TRACE: <name>" line to stdout.
class DebugUtility {
public:
    void trace(const std::string& where) {
        std::cout << "TRACE: " << where << std::endl;
    }
};
// Report the enclosing function as unimplemented. Wrapped in do/while(0) so
// the macro expands to a single statement in every context (e.g. unbraced
// if/else); the original kept a trailing semicolon inside the definition,
// which produced a stray empty statement at each call site and would break
// `if (x) UNIMPLEMENTED(); else ...`.
#define UNIMPLEMENTED() do { std::cout << "ERROR: Function " << __FUNCTION__ << " is unimplemented!" << std::endl; } while (0)
// Demo game object: one traced, implemented method and one stub that
// reports itself via the UNIMPLEMENTED() macro.
class Game {
    DebugUtility debug; // tracing helper
public:
    void PSGPSampleTexture() {
        debug.trace("PSGPSampleTexture");
        // Function implementation goes here
    }
    void renderScene() {
        UNIMPLEMENTED();
    }
};
// Exercise both paths: a traced call and an unimplemented stub.
int main() {
    Game game;
    game.PSGPSampleTexture();
    game.renderScene();
    return 0;
}
<filename>src/layouts/Layout.js
import React from 'react'
import { withRouter } from 'react-router'
import Default from './Default'
import BodyStyle from 'components/common/BodyStyle'
import { isTouchDevice } from 'utils/domutils'
import ToastContainer from 'containers/Toast'
// Pick a layout component by route; currently every path falls through to
// the Default layout (the switch is a hook for future per-route layouts).
const getLayoutPerRoute = (props) => {
  switch (props.location.pathname) {
    default:
      return <Default children={props.children} />
  }
}

// Top-level layout: applies a `no-touch` body class on non-touch devices,
// renders the per-route layout, and mounts the global toast container.
const Layout = (props) => {
  const { children, history } = props
  const { location } = history
  return (
    <BodyStyle className={!isTouchDevice() ? 'no-touch' : ''}>
      <div>
        {
          getLayoutPerRoute({children, location})
        }
        <ToastContainer />
      </div>
    </BodyStyle>
  )
}

// withRouter injects router props so the layout re-renders on navigation.
export default withRouter(Layout)
|
/**
* ISO 7816-4 command-response specific part
*
* Copyright (C) 2017, <NAME> <<EMAIL> >
*/
/**
 * Build Extended ISO 7816-4 (command) APDU.
 * - optionally create class with Uint8Array/ArrayBuffer containing apdu and prototype getter/setter for properties
 * @param {Number} CLA Class Byte.
 * @param {Number} INS Instruction Byte.
 * @param {Number} P1 P1 Byte.
 * @param {Number} P2 P2 Byte.
 * @param {Uint8Array} commandData Command Data Byte Array (may be empty; non-Uint8Array values mean "no data").
 * @param {Number} responseLength Length of expected answer (0 = no Le field; 65536 encodes the maximum, 0x0000).
 * @return {Uint8Array} APDU Byte Array.
 */
function buildExtendedAPDU(CLA, INS, P1, P2, commandData, responseLength) { //always build extended APDU
  // Only a non-empty Uint8Array counts as command data. (The original tested
  // `typeof commandData === Number`, which is always false — typeof yields a
  // string — so the Lc/data fields were silently dropped from every APDU.)
  let hasCommandData = commandData instanceof Uint8Array && commandData.length > 0;
  if(hasCommandData && commandData.length>65535) throw new Error("command data must be <=65535 Bytes");
  if(responseLength>65536) throw new Error("Maximum response size must be <=65536");
  // determine length: 4 header bytes, extended Lc (0x00 + 2 bytes) + data,
  // extended Le (2 bytes after data, 3 bytes when there is no data).
  // (Fixed: the original omitted the 3 Lc bytes from the allocation.)
  let apduLength = 4;
  if(hasCommandData) apduLength += 3 + commandData.length;
  if(responseLength>0) apduLength += hasCommandData ? 2 : 3;
  /**
   * Encode Number as ByteArray of given length with Big/Little Endian.
   * @param {Number} number Positive number to be encoded.
   * @param {Number} byteLength Length in Bytes of Array
   * @param {Boolean} [littleEndian=false] little endian
   * @return {Uint8Array} ArrayBuffer is accessible via buffer property.
   */
  let encodeNumber = (number,byteLength,littleEndian = false) => {
    if(number >= Math.pow(2,32)) throw new Error("Number is too large for conversion using >> operator.");
    if(number <0) throw new Error("Number has to be positive.");
    let numberByteArray = new Uint8Array(byteLength); //.buffer property contains ArrayBuffer read&write support
    //fill array according to endianess
    if(littleEndian) {
      for(let i =0;i<numberByteArray.length;i++) { //shifting gives big endian encoded number
        numberByteArray[i] = (number >>(8*i))&0xFF;
      }
    } else {
      for(let i =0;i<numberByteArray.length;i++) { //shifting gives big endian encoded number
        numberByteArray[numberByteArray.length-1-i] = (number >>(8*i))&0xFF; //only save last byte
      }
    }
    return numberByteArray;
  };
  //build APDU
  let apdu = new Uint8Array(apduLength);
  let i=0;
  apdu[i] = CLA;i++;
  apdu[i] = INS;i++;
  apdu[i] = P1;i++;
  apdu[i] = P2;i++;
  if(hasCommandData) {
    apdu[i] = 0x00;i++; // extended-length marker before the 2-byte Lc
    let commandDataLength = encodeNumber(commandData.length,2,false);
    apdu.set(commandDataLength,i);i+=2;
    apdu.set(commandData,i);i+=commandData.length;
  }
  if(responseLength>0) {
    if(responseLength==65536) responseLength=0; //65536 is encoded as 0x0000
    if(hasCommandData) { //2 Byte Le (marker already written with Lc)
      apdu.set(encodeNumber(responseLength,2,false),i);i+=2;
    } else { //3 Byte Le (0x00 marker + 2 bytes)
      apdu[i] = 0x00;i++;
      apdu.set(encodeNumber(responseLength,2,false),i);i+=2;
    }
  }
  return apdu;
}
let apdus = {
  /**
   * GET CHALLENGE (INS 0x84): request `length` random bytes from the card.
   * (Fixed: the `length` parameter was previously ignored — the call
   * hard-coded commandData=0 and responseLength=1.)
   * @param {Number} length expected number of challenge bytes
   * @return {Uint8Array} command APDU
   */
  GET_CHALLENGE: (length) => {
    return buildExtendedAPDU(0x00, 0x84, 0x00, 0x00, new Uint8Array(0), length);
  },
};
export {buildExtendedAPDU, apdus};
|
def triangle_area(side1, side2, side3):
    """Return the area of a triangle given its three side lengths.

    Uses Heron's formula.

    Raises:
        ValueError: if any side is non-positive or the sides violate the
            triangle inequality. (The original silently produced a complex
            number for invalid inputs, because a negative value was raised
            to the power 0.5.)
    """
    shortest, middle, longest = sorted((side1, side2, side3))
    if shortest <= 0 or shortest + middle < longest:
        raise ValueError("side lengths do not form a valid triangle")
    s = (side1 + side2 + side3) / 2
    area = (s * (s - side1) * (s - side2) * (s - side3)) ** 0.5
    return area
<gh_stars>10-100
package mqtt
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"sync"
"time"
g "github.com/chryscloud/video-edge-ai-proxy/globals"
"github.com/chryscloud/video-edge-ai-proxy/models"
"github.com/chryscloud/video-edge-ai-proxy/services"
"github.com/chryscloud/video-edge-ai-proxy/utils"
badger "github.com/dgraph-io/badger/v2"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/events"
"github.com/docker/docker/api/types/filters"
"github.com/docker/docker/client"
qtt "github.com/eclipse/paho.mqtt.golang"
"github.com/go-redis/redis/v7"
)
// Connection constants for the Google Cloud MQTT bridge.
const (
	mqttBrokerURL   = "tls://mqtt.googleapis.com:8883"
	protocolVersion = 4 // corresponds to MQTT 3.1.1
)
// mqttManager maintains the MQTT connection to the cloud broker and the
// background workers that relay local events and report gateway state.
// NOTE(review): the original comment here read "ProcessManager - start,
// stop of docker containers", which appears copied from another type.
type mqttManager struct {
	rdb                      *redis.Client               // local redis, used for pubsub with process workers
	settingsService          *services.SettingsManager   // persisted gateway/cloud settings
	processService           *services.ProcessManager
	appService               *services.AppProcessManager
	client                   *qtt.Client        // active MQTT client (pointer to the interface value)
	clientOpts               *qtt.ClientOptions // options reused on JWT refresh
	stop                     chan bool          // signals background goroutines to exit
	gatewayID                string
	projectID                string
	jwt                      string // current auth JWT used as the MQTT password
	processEvents            sync.Map
	lastProcessEventNotified sync.Map
	mutex                    sync.Mutex
}
// NewMqttManager wires up an mqttManager with its service dependencies.
// The stop channel is created here; previously it was left nil, so sending
// on it (see StopGateway) would block forever.
func NewMqttManager(rdb *redis.Client, settingsService *services.SettingsManager, processService *services.ProcessManager, appService *services.AppProcessManager) *mqttManager {
	return &mqttManager{
		rdb:                      rdb,
		settingsService:          settingsService,
		processService:           processService,
		appService:               appService,
		stop:                     make(chan bool),
		processEvents:            sync.Map{},
		lastProcessEventNotified: sync.Map{},
		mutex:                    sync.Mutex{},
	}
}
// onConnect logs a successful (re)connection to the MQTT broker.
func (mqtt *mqttManager) onConnect(client qtt.Client) {
	g.Log.Info("MQTT client connected", client.IsConnected())
}
// onMessage handles commands pushed from Chrysalis Cloud: it maps the cloud
// operation codes to local device operations and republishes the raw payload
// on the local redis channel for the process workers to act on.
func (mqtt *mqttManager) onMessage(client qtt.Client, msg qtt.Message) {
	g.Log.Info("Command received from Chrysalis Cloud:", msg.Topic())
	var edgeConfig models.EdgeCommandPayload
	err := json.Unmarshal(msg.Payload(), &edgeConfig)
	if err != nil {
		g.Log.Error("failed to unmarshal config payload", err, string(msg.Payload()))
		return
	}
	// mapping to local process types for cameras
	operation := ""
	if edgeConfig.Type == models.ProcessTypeRTSP {
		// "a" -> start, "r" -> delete; anything else is rejected
		if edgeConfig.Operation == "a" {
			operation = models.DeviceOperationStart
		} else if edgeConfig.Operation == "r" {
			operation = models.DeviceOperationDelete
		} else {
			g.Log.Error("camera command operation not supported: ", edgeConfig.Name, edgeConfig.ImageTag, edgeConfig.Operation)
			return
		}
	} else {
		// applications pass their operation through unchanged
		operation = edgeConfig.Operation
	}
	err = utils.PublishToRedis(mqtt.rdb, edgeConfig.Name, models.MQTTProcessOperation(operation), edgeConfig.Type, msg.Payload())
	if err != nil {
		g.Log.Error("failed to process starting of the new device on the edge", err)
	}
}
// onConnectionLost logs broker disconnects; reconnection is handled by the
// client's auto-reconnect option (see gatewayInit).
func (mqtt *mqttManager) onConnectionLost(client qtt.Client, err error) {
	g.Log.Error("MQTT connection lost", err)
}
// configHandler logs config messages from the broker; the payload is not
// acted upon here.
func (mqtt *mqttManager) configHandler(client qtt.Client, msg qtt.Message) {
	g.Log.Info("Received config request: ", msg.Topic())
	g.Log.Info("Message: ", string(msg.Payload()))
}
// StartGatewayListener checks every 15 seconds if there are any settings for connection to gateway.
// Once settings are available it invokes run() exactly once and stops polling.
func (mqtt *mqttManager) StartGatewayListener() error {
	delay := time.Second * 15
	go func() {
		for {
			_, err := mqtt.getMQTTSettings()
			if err == nil {
				mqttErr := mqtt.run()
				if mqttErr != nil {
					g.Log.Error("Failed to init mqtt", mqttErr)
				}
				// exit the waiting function
				break
			}
			// wait for the next poll tick or a stop signal
			select {
			case <-time.After(delay):
			case <-mqtt.stop:
				g.Log.Info("MQTT cron job stopped")
				return
			}
		}
	}()
	return nil
}
// run connects to the cloud broker (gatewayInit) and spawns the background
// workers: a redis pubsub listener translating local device/app events into
// MQTT actions, a docker container-event listener, a 60-second gateway-state
// reporter, and container-stats reporting (once after 10s, then every 5min).
// Returns nil immediately when MQTT is not configured yet.
func (mqtt *mqttManager) run() error {
	err := mqtt.gatewayInit()
	if err != nil {
		if err == ErrNoMQTTSettings {
			return nil
		}
		g.Log.Error("failed to connect gateway and report presence", err)
		return err
	}
	// init redis listener for local messages (this is only for active local changes)
	// e.g. Device/process added, removed, ...
	sub := mqtt.rdb.Subscribe(models.RedisLocalMQTTChannel)
	go func(sub *redis.PubSub) {
		defer sub.Close()
		for {
			val, err := sub.ReceiveMessage()
			if err != nil {
				// NOTE(review): no backoff here — a persistent receive error
				// will spin this loop.
				g.Log.Error("failed to receive mqtt local pubsub message", err)
			} else {
				g.Log.Info("redis message received: ", val)
				payload := []byte(val.Payload)
				var localMsg models.MQTTMessage
				err := json.Unmarshal(payload, &localMsg)
				if err != nil {
					g.Log.Error("failed to unmarshal internal redis pubsub message", err)
				} else {
					g.Log.Info("Received message object from redis pubsub for mqtt: ", localMsg.DeviceID)
					var opErr error
					// Camera (RTSP) operations
					if localMsg.ProcessType == models.MQTTProcessType(models.ProcessTypeRTSP) {
						if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationAdd) {
							opErr = mqtt.bindDevice(localMsg.DeviceID, models.MQTTProcessType(models.ProcessTypeRTSP))
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationRemove) {
							opErr = mqtt.unbindDevice(localMsg.DeviceID, models.MQTTProcessType(models.ProcessTypeRTSP))
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationUpgradeAvailable) {
							// TODO: TBD
							g.Log.Warn("TBD: process operation upgrade available")
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationUpgradeFinished) {
							// TODO: TBD
							g.Log.Warn("TBD: process operation upgrade completed/finished")
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationStart) {
							opErr = mqtt.StartCamera(localMsg.Message)
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationDelete) {
							opErr = mqtt.StopCamera(localMsg.Message)
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceInternalTesting) {
							// **********
							// internal testing operations
							// **********
							testErr := mqtt.reportDeviceStateChange(localMsg.DeviceID, models.ProcessStatusRestarting)
							if testErr != nil {
								g.Log.Error("TEST FAILED ------------------> ", testErr)
							}
						} else {
							opErr = errors.New("local message operation not recognized")
							g.Log.Error("message operation not recognized: ", localMsg.ProcessOperation, localMsg.DeviceID, localMsg.ProcessType)
						}
					} else if localMsg.ProcessType == models.MQTTProcessType(models.ProcessTypeApplication) {
						// INSTALL APPLICATION
						if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationAdd) {
							payload, siErr := mqtt.PullApplication(localMsg.Message)
							if siErr != nil {
								opErr = siErr
							} else {
								opErr = mqtt.StartApplication(payload)
							}
						} else if localMsg.ProcessOperation == models.MQTTProcessOperation(models.DeviceOperationRemove) {
							// DELETE APPLICATION
							opErr = mqtt.StopApplication(localMsg.Message)
						} else {
							opErr = errors.New("local message application operation not recognized")
							g.Log.Error("message application operation not recognized: ", localMsg.ProcessOperation, localMsg.DeviceID, localMsg.ProcessType)
						}
					}
					if opErr != nil {
						g.Log.Error("local pubsub gateway msg failed", opErr)
					}
				}
			}
		}
	}(sub)
	// reporting device changes
	go func() {
		cl, err := client.NewClient("unix:///var/run/docker.sock", "1.40", nil, nil)
		if err != nil {
			g.Log.Error("failed to initialize docker event listener")
			return
		}
		filterArgs := filters.NewArgs()
		filterArgs.Add("type", events.ContainerEventType)
		opts := types.EventsOptions{
			Filters: filterArgs,
		}
		// listening to events of docker
		messages, errs := cl.Events(context.Background(), opts)
		for {
			select {
			case err := <-errs:
				if err != nil && err != io.EOF {
					g.Log.Error(err)
				}
			case e := <-messages:
				dsErr := mqtt.changedDeviceState(mqtt.gatewayID, e)
				if dsErr != nil {
					g.Log.Error("failed to update device state", e, dsErr)
				}
			}
		}
	}()
	// report gateway state 60 seconds
	delay := time.Second * 60
	go func() {
		for {
			err := mqtt.gatewayState(mqtt.gatewayID)
			if err != nil {
				g.Log.Error("failed to report gateway state: ", err)
			}
			select {
			case <-time.After(delay):
			case <-mqtt.stop:
				g.Log.Info("MQTT cron job stopped")
				return
			}
		}
	}()
	// reporting very first container stats right after first 10 seconds (sort of a ground truth)
	time.AfterFunc(time.Second*10, func() {
		err := mqtt.ReportContainersStats()
		if err != nil {
			g.Log.Error("failed to retrieve all device stats", err)
		}
	})
	// report system wide info every 5 minutes
	sysDelay := time.Minute * 5
	go func() {
		for {
			select {
			case <-time.After(sysDelay):
				err := mqtt.ReportContainersStats()
				if err != nil {
					g.Log.Error("failed to retrieve all device stats", err)
				}
			case <-mqtt.stop:
				g.Log.Info("Syscron stopped")
				return
			}
		}
	}()
	return nil
}
// Start the MQTT communication gateway
func (mqtt *mqttManager) gatewayInit() error {
// check settings if they exist
settings, err := mqtt.settingsService.Get()
if err != nil {
if err == badger.ErrKeyNotFound {
return nil
}
g.Log.Error("failed to retrieve edge settings", err)
return err
}
if settings.ProjectID == "" || settings.Region == "" || settings.GatewayID == "" || settings.RegistryID == "" || settings.PrivateRSAKey == nil {
g.Log.Warn("ProjectID: ", settings.ProjectID, "Region: ", settings.Region, "GatewayID: ", settings.GatewayID, "RegistryID: ", settings.RegistryID)
return ErrNoMQTTSettings
}
// rotate it every day at least (JWT token must expire sooner)
jwt, ccErr := utils.CreateJWT(settings.ProjectID, settings.PrivateRSAKey, time.Hour*1)
if ccErr != nil {
g.Log.Error("Failed to create JWT key for communication with ChrysCloud MQTT", ccErr)
return ccErr
}
clientID := fmt.Sprintf("projects/%s/locations/%s/registries/%s/devices/%s", settings.ProjectID, settings.Region, settings.RegistryID, settings.GatewayID)
opts := qtt.NewClientOptions()
opts.AddBroker(mqttBrokerURL)
opts.SetClientID(clientID)
opts.SetUsername("unused")
opts.SetPassword(<PASSWORD>)
opts.SetProtocolVersion(protocolVersion)
opts.SetOnConnectHandler(mqtt.onConnect)
opts.SetDefaultPublishHandler(mqtt.onMessage)
opts.SetConnectionLostHandler(mqtt.onConnectionLost)
opts.SetCleanSession(false)
opts.SetAutoReconnect(true)
opts.SetMaxReconnectInterval(time.Second * 15)
mqtt.gatewayID = settings.GatewayID
mqtt.projectID = settings.ProjectID
mqtt.jwt = jwt
mqtt.clientOpts = opts
cl, cErr := mqtt.connectClient(opts, settings, jwt)
if cErr != nil {
g.Log.Error("failed to connect client", cErr)
return cErr
}
mqtt.client = cl
mqtt.monitorTokenExpiration()
return nil
}
// connectClient connects a new MQTT client with the given options and then
// registers the gateway subscribers, retrying registration every 5 seconds.
// NOTE(review): the retry loop never gives up, so this call can block
// indefinitely when subscriptions keep failing.
func (mqtt *mqttManager) connectClient(opts *qtt.ClientOptions, settings *models.Settings, jwt string) (*qtt.Client, error) {
	// Create and connect a client using the above options.
	client := qtt.NewClient(opts)
	if token := client.Connect(); token.Wait() && token.Error() != nil {
		g.Log.Error("failed to connect with mqtt ChrysCloud broker", token.Error())
		return nil, token.Error()
	}
	// store the client on the manager before subscribing
	mqtt.client = &client
	for {
		time.Sleep(time.Second * 5)
		// register subscribers
		err := mqtt.gatewaySubscribers()
		if err == nil {
			break
		}
		g.Log.Error("failed to initialize subscribers", err)
	}
	return &client, nil
}
// StopGateway disconnects the MQTT client and signals every background
// goroutine to exit.
func (mqtt *mqttManager) StopGateway() error {
	g.Log.Info("mqtt disconnect")
	if mqtt.client != nil {
		(*mqtt.client).Disconnect(20)
	}
	// Closing the channel broadcasts to all goroutines selecting on it.
	// (Fixed: the original did a single `mqtt.stop <- true`, which only wakes
	// one receiver and deadlocks forever when the channel was never made.)
	if mqtt.stop != nil {
		close(mqtt.stop)
	}
	return nil
}
// monitoring the connection state every 15 seconds (also handles jwt expired tokens)
// When the current JWT is within 60 seconds of expiry (or already expired),
// a fresh 1-hour token is minted and the client reconnects with it.
func (mqtt *mqttManager) monitorTokenExpiration() error {
	delay := time.Second * 15
	go func() {
		for {
			expirationTime, err := utils.ParseJWTTokenExpirationTime(mqtt.jwt)
			if err != nil {
				g.Log.Error("failed ot parse jwt tokens expiration time: ", err)
				return
			}
			// diff is milliseconds of now minus expiry (negative while valid)
			today := time.Now().UTC().Unix() * 1000
			diff := today - (expirationTime.Unix() * 1000)
			if diff >= -(60 * 1000) {
				g.Log.Info("Re-issuing JWT token and re-connecting MQTT client", diff)
				sett, err := mqtt.settingsService.Get()
				if err != nil {
					g.Log.Error("failed to retrieve settings", sett)
					return
				}
				cl := (*mqtt.client)
				cl.Disconnect(300)
				jwt, ccErr := utils.CreateJWT(sett.ProjectID, sett.PrivateRSAKey, time.Hour*1)
				if ccErr != nil {
					g.Log.Error("Failed to create JWT key for communication with ChrysCloud MQTT", ccErr)
					return
				}
				mqtt.clientOpts.SetPassword(jwt)
				mqtt.jwt = jwt
				_, cErr := mqtt.connectClient(mqtt.clientOpts, sett, jwt)
				if cErr != nil {
					g.Log.Error("failed to reconnect client", cErr)
					return
				}
			}
			// wait for the next check or a stop signal
			select {
			case <-time.After(delay):
			case <-mqtt.stop:
				g.Log.Info("mqtt stopped")
				return
			}
		}
	}()
	return nil
}
|
#!/bin/sh
# Container entrypoint: install Python dependencies, then serve the Flask
# app on all interfaces at port 8080.
cd /app
pip install -r requirements.txt
python -m flask run -h 0.0.0.0 -p 8080
#!/bin/bash
# Load the mulle-domain compose library and initialize URL resolution.
# shellcheck source=mulle-domain-compose.sh
. "${MULLE_DOMAIN_LIBEXEC_DIR}/mulle-domain-compose.sh" || exit 1
domain::resolve::initialize
<filename>src/api/index.js
// Environment-driven configuration (see env-config).
const {
  checkUrl,
  ISSUER_BASE_URL, // Auth0 Tenant Url
  AUDIENCE,
  API_PORT,
  API_URL, // URL for Expenses API
  REQUIRED_SCOPES,
} = require("./env-config");
const express = require("express");
const cors = require("cors");
const { createServer } = require("http");
const { auth , requiredScopes, } = require("express-oauth2-jwt-bearer");
const morgan = require("morgan");
const logger = require("./winston");
const app = express();
// Used to normalize URL
app.use(checkUrl());
// Request logging routed into winston via morgan's stream hook.
app.use(morgan('":method :url :status :res[content-length] - :response-time ms"', { stream: logger.stream }));
app.use(cors());
// In-memory demo data; no persistence.
const expenses = [
  {
    date: new Date(),
    description: "Pizza for a Coding Dojo session.",
    value: 102,
  },
  {
    date: new Date(),
    description: "Coffee for a Coding Dojo session.",
    value: 42,
  },
];
/****************************
 * This method is here to allow a
 * successful response on root requests.
 * This stops content security policy
 * from preventing the user to make
 * requests via the browsers console.
 ****************************/
app.get("/", (req, res) => {
  res.status(200).end("OK");
});
/****************************/
// Public aggregate endpoint: sum and count of all expenses.
app.get("/total", (req, res) => {
  const total = expenses.reduce((accum, expense) => accum + expense.value, 0);
  res.send({ total, count: expenses.length });
});
// 👆 public routes above 👆
// Issuer and Audience can be obtained from env vars, but better make it explicit
app.use(auth({
  issuerBaseURL: ISSUER_BASE_URL,
  audience: AUDIENCE
}));
// 👇 private routes below 👇
// Requires a valid token carrying REQUIRED_SCOPES.
app.get("/reports", requiredScopes(REQUIRED_SCOPES), (req, res) => {
  logger.info(`Valid token with scopes ${REQUIRED_SCOPES}`);
  res.send(expenses);
});
// Error handler: Express recognizes it by the 4-argument signature, so the
// unused `next` parameter must stay.
app.use((err, req, res, next) => {
  res.status(err.status || 500);
  res.json({
    status: err.status,
    message: err.message,
  });
});
createServer(app).listen(API_PORT, () => {
  logger.info(`API server listening at: ${API_URL}`);
});
|
import React from 'react';
import { BrowserRouter as Router, Route, Link } from 'react-router-dom';
// Root router (react-router v5 style: Routes rendered with children).
const App = () => {
  return (
    <Router>
      <div>
        <nav>
          <ul>
            <li><Link to="/">View Posts</Link></li>
            <li><Link to="/create">Create Post</Link></li>
          </ul>
        </nav>
        {/* `exact` keeps "/" from matching every other path */}
        <Route path="/" exact>
          <h1>View Posts</h1>
        </Route>
        <Route path="/create">
          <h1>Create Post</h1>
        </Route>
        {/* no nav link renders /edit/:postId — reachable by direct URL only */}
        <Route path="/edit/:postId">
          <h1>Edit Post</h1>
        </Route>
      </div>
    </Router>
  )
}
export default App;
'use strict';
import Reflux from 'reflux';
import LiveListActions from 'app/actions/live-list';
const debug = require('debug')('AiC:Stores:LiveList');
// Reflux store for the "live list" screen: holds the AVM list, available
// images, a coarse status string, and an optional error message.
const LiveListStore = Reflux.createStore({
  // Base Store //
  listenables: LiveListActions,
  init() {
    this.state = {};
    this.state.liveList = {};
  },
  // Actions //
  // Replace the AVM list and mark it as listed.
  onNotifyList(requestInfo, avms) {
    debug('onNotifyList', avms);
    this.state.liveList.avms = avms;
    this.state.liveList.status = 'LIVE_STATUS_LISTED';
    this.updateState();
  },
  // Live list
  onListImages() {
    debug('onListImages');
  },
  // Store the fetched image list.
  onListImagesCompleted(images) {
    debug('onListImagesCompleted', images);
    this.state.liveList.images = images;
    this.updateState();
  },
  // Record the failure status and message for the UI to display.
  onListImagesFailed(errorMessage) {
    debug('onListImagesFailed');
    this.state.liveList.status = 'LIVE_STATUS_LIST_IMAGES_FAILED';
    this.state.liveList.message = errorMessage;
    this.updateState();
  },
  // State update
  // Notify all subscribed components of the new state.
  updateState() {
    debug('updateState', 'new state', this.state);
    this.trigger(this.state);
  }
});
// NOTE(review): CommonJS export in a file using ES imports — works with
// Babel interop, but consider `export default` for consistency.
module.exports = LiveListStore;
|
'use strict';
var angular = require('angular');
var scopeTimeout = require('../util/scope-timeout');
var annotationMetadata = require('../annotation-metadata');
var memoize = require('../util/memoize');
// @ngInject
function AnnotationShareDialogController($element, $scope, analytics, session, store) {
var self = this;
var shareLinkInput = $element.find('input')[0];
$scope.$watch('vm.isOpen', function (isOpen) {
if (isOpen) {
// Focus the input and select it once the dialog has become visible
scopeTimeout($scope, function () {
shareLinkInput.focus();
shareLinkInput.select();
});
}
});
// Copy the share link (the sibling <input> of the clicked element) to the
// clipboard, reporting success or fallback instructions for ~1 second.
this.copyToClipboard = function (event) {
  var $container = angular.element(event.currentTarget).parent();
  var shareLinkInput = $container.find('input')[0];
  try {
    shareLinkInput.select();
    // In some browsers, execCommand() returns false if it fails,
    // in others, it may throw an exception instead.
    if (!document.execCommand('copy')) {
      throw new Error('Copying link failed');
    }
    self.copyToClipboardMessage = 'Link copied to clipboard!';
  } catch (ex) {
    self.copyToClipboardMessage = 'Select and copy to share.';
  } finally {
    // Clear the message after a beat; $digest is needed because setTimeout
    // runs outside Angular's change detection.
    setTimeout(function () {
      self.copyToClipboardMessage = null;
      $scope.$digest();
    }, 1000);
  }
};
// Record an analytics event when a share target is clicked.
this.onShareClick = function(target){
  if(target){
    analytics.track(analytics.events.ANNOTATION_SHARED, target);
  }
};
function checkAuthenticated(callback) {
var xhr = new XMLHttpRequest();
var url = "https://10.241.109.147:5000/api/getuser/"+session.state.userid;
xhr.open("GET", url, true);
xhr.setRequestHeader("Content-type", "application/json");
xhr.onreadystatechange = function () {
if (xhr.status === 200 && xhr.readyState === 4) {
var data = JSON.parse(xhr.responseText);
console.log(data)
callback(data);
} else {
callback('err');
}
};
xhr.send();
}
function shareDiscussion(authCode,infos) {
var xhr = new XMLHttpRequest();
var url = "https://plazza.orange.com/api/core/v3/contents";
xhr.open("POST", url, true);
xhr.setRequestHeader("Content-type", "application/json");
xhr.setRequestHeader("Authorization", "Basic " +authCode );
xhr.setRequestHeader("X-JCAPI-Token", "<PASSWORD>");
xhr.onreadystatechange = function () {
if (xhr.readyState === 4 && xhr.status === 201) {
alert("Publié sur Plazza avec succés");
window.location.reload();
} else {
console.log('text err '+xhr.responseText);
}
};
var content = "<body>"+
"<p><h2>"+infos.title+"</h2></p>"+
"<p><b>Tags : </b>"+infos.tags+
"</p>"+
"<p><b>Publié sur : </b><a href="+infos.uri+" target='_blanck'>"+infos.uri+"</a></p>"+
"</body>"
var discussion = { "content": { "type": "text/html", "text": content },
"subject": "Discussion depuis Annotons nos contenus",
"type": "discussion"
};
var data = JSON.stringify(discussion);
xhr.send(data);
}
this.onSharePlazza = function () {
var uri = this.uri;
var fields = uri.split('/a/');
var annotation_id = fields[1];
var infos = {};
store.annotation.get({ id: annotation_id }).then(function (annot) {
infos.title = annot.title;
infos.uri = annot.uri;
infos.tags=[];
for (var i = 0; i < annot.tags.length; i++) {
infos.tags.push(" "+annot.tags[i]+" ");
}
checkAuthenticated(function (data) {
//traitement post
if(data!='err'){
shareDiscussion(data.password,infos);
}else{
console.log('error');
}
});
});
};
}
// Angular component definition for the annotation share dialog.
// All bindings are one-way inputs ('<') except onClose, which is an output
// callback ('&') invoked when the dialog should be dismissed.
module.exports = {
  controller: AnnotationShareDialogController,
  controllerAs: 'vm',
  bindings: {
    annotation: '<',
    group: '<',
    uri: '<',
    isPrivate: '<',
    isOpen: '<',
    onClose: '&',
  },
  template: require('../templates/annotation-share-dialog.html'),
};
|
/**
* NOTE: This class is auto generated by the swagger code generator program (3.0.18).
* https://github.com/swagger-api/swagger-codegen
* Do not edit the class manually.
*/
package io.swagger.api.core;
import io.swagger.model.Model202AcceptedSearchResponse;
import io.swagger.model.core.PersonListResponse;
import io.swagger.model.core.PersonNewRequest;
import io.swagger.model.core.PersonSearchRequest;
import io.swagger.model.core.PersonSingleResponse;
import io.swagger.annotations.*;
import org.brapi.test.BrAPITestServer.exceptions.BrAPIServerException;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import javax.validation.Valid;
import java.util.List;
@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2020-03-20T16:31:52.030Z[GMT]")
@Api(value = "people", description = "the people API")
// NOTE: auto-generated interface (see file header) — prefer regenerating over
// hand-editing; the comments below are navigational aids only.
public interface PeopleApi {

    // GET /people — paged list of people, filterable by name, person/user id
    // and external-reference fields.
    @ApiOperation(value = "Get filtered list of People", nickname = "peopleGet", notes = "Get filtered list of people", response = PersonListResponse.class, authorizations = {
    @Authorization(value = "AuthorizationToken") }, tags = { "People", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "OK", response = PersonListResponse.class),
    @ApiResponse(code = 400, message = "Bad Request", response = String.class),
    @ApiResponse(code = 401, message = "Unauthorized", response = String.class),
    @ApiResponse(code = 403, message = "Forbidden", response = String.class) })
    @RequestMapping(value = "/people", produces = { "application/json" }, method = RequestMethod.GET)
    ResponseEntity<PersonListResponse> peopleGet(
    @ApiParam(value = "A persons first name") @Valid @RequestParam(value = "firstName", required = false) String firstName,
    @ApiParam(value = "A persons last name") @Valid @RequestParam(value = "lastName", required = false) String lastName,
    @ApiParam(value = "The unique ID of a person") @Valid @RequestParam(value = "personDbId", required = false) String personDbId,
    @ApiParam(value = "A systems user ID associated with this person. Different from personDbId because you could have a person who is not a user of the system.") @Valid @RequestParam(value = "userID", required = false) String userID,
    @ApiParam(value = "Search for Germplasm by an external reference") @Valid @RequestParam(value = "externalReferenceID", required = false) String externalReferenceID,
    @ApiParam(value = "Search for Germplasm by an external reference") @Valid @RequestParam(value = "externalReferenceSource", required = false) String externalReferenceSource,
    @ApiParam(value = "Used to request a specific page of data to be returned. The page indexing starts at 0 (the first page is 'page'= 0). Default is `0`.") @Valid @RequestParam(value = "page", required = false) Integer page,
    @ApiParam(value = "The size of the pages to be returned. Default is `1000`.") @Valid @RequestParam(value = "pageSize", required = false) Integer pageSize,
    @ApiParam(value = "HTTP HEADER - Token used for Authorization <strong> Bearer {token_string} </strong>") @RequestHeader(value = "Authorization", required = false) String authorization)
    throws BrAPIServerException;

    // GET /people/{personDbId} — details of a single person.
    @ApiOperation(value = "Get the details for a specific Person", nickname = "peoplePersonDbIdGet", notes = "Get the details for a specific Person", response = PersonSingleResponse.class, authorizations = {
    @Authorization(value = "AuthorizationToken") }, tags = { "People", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "OK", response = PersonSingleResponse.class),
    @ApiResponse(code = 400, message = "Bad Request", response = String.class),
    @ApiResponse(code = 401, message = "Unauthorized", response = String.class),
    @ApiResponse(code = 403, message = "Forbidden", response = String.class),
    @ApiResponse(code = 404, message = "Not Found", response = String.class) })
    @RequestMapping(value = "/people/{personDbId}", produces = { "application/json" }, method = RequestMethod.GET)
    ResponseEntity<PersonSingleResponse> peoplePersonDbIdGet(
    @ApiParam(value = "The unique ID of a person", required = true) @PathVariable("personDbId") String personDbId,
    @ApiParam(value = "HTTP HEADER - Token used for Authorization <strong> Bearer {token_string} </strong>") @RequestHeader(value = "Authorization", required = false) String authorization)
    throws BrAPIServerException;

    // PUT /people/{personDbId} — update an existing person.
    @ApiOperation(value = "Update an existing Person", nickname = "peoplePersonDbIdPut", notes = "Update an existing Person", response = PersonSingleResponse.class, authorizations = {
    @Authorization(value = "AuthorizationToken") }, tags = { "People", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "OK", response = PersonSingleResponse.class),
    @ApiResponse(code = 400, message = "Bad Request", response = String.class),
    @ApiResponse(code = 401, message = "Unauthorized", response = String.class),
    @ApiResponse(code = 403, message = "Forbidden", response = String.class),
    @ApiResponse(code = 404, message = "Not Found", response = String.class) })
    @RequestMapping(value = "/people/{personDbId}", produces = { "application/json" }, consumes = {
    "application/json" }, method = RequestMethod.PUT)
    ResponseEntity<PersonSingleResponse> peoplePersonDbIdPut(
    @ApiParam(value = "The unique ID of a person", required = true) @PathVariable("personDbId") String personDbId,
    @ApiParam(value = "") @Valid @RequestBody PersonNewRequest body,
    @ApiParam(value = "HTTP HEADER - Token used for Authorization <strong> Bearer {token_string} </strong>") @RequestHeader(value = "Authorization", required = false) String authorization) throws BrAPIServerException;

    // POST /people — bulk-create people; the server assigns personDbId.
    @ApiOperation(value = "Create new People", nickname = "peoplePost", notes = "Create new People entities. `personDbId` is generated and managed by the server.", response = PersonListResponse.class, authorizations = {
    @Authorization(value = "AuthorizationToken") }, tags = { "People", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "OK", response = PersonListResponse.class),
    @ApiResponse(code = 400, message = "Bad Request", response = String.class),
    @ApiResponse(code = 401, message = "Unauthorized", response = String.class),
    @ApiResponse(code = 403, message = "Forbidden", response = String.class) })
    @RequestMapping(value = "/people", produces = { "application/json" }, consumes = {
    "application/json" }, method = RequestMethod.POST)
    ResponseEntity<PersonListResponse> peoplePost(@ApiParam(value = "") @Valid @RequestBody List<PersonNewRequest> body,
    @ApiParam(value = "HTTP HEADER - Token used for Authorization <strong> Bearer {token_string} </strong>") @RequestHeader(value = "Authorization", required = false) String authorization) throws BrAPIServerException;

    // POST /search/people — submit an asynchronous search request
    // (may respond 202 Accepted with a search-results id).
    @ApiOperation(value = "Submit a search request for People", nickname = "searchPeoplePost", notes = "Advanced searching for the programs resource. See Search Services for additional implementation details.", response = PersonListResponse.class, authorizations = {
    @Authorization(value = "AuthorizationToken") }, tags = { "People", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "OK", response = PersonListResponse.class),
    @ApiResponse(code = 202, message = "Accepted", response = Model202AcceptedSearchResponse.class),
    @ApiResponse(code = 400, message = "Bad Request", response = String.class),
    @ApiResponse(code = 401, message = "Unauthorized", response = String.class),
    @ApiResponse(code = 403, message = "Forbidden", response = String.class) })
    @RequestMapping(value = "/search/people", produces = { "application/json" }, consumes = {
    "application/json" }, method = RequestMethod.POST)
    ResponseEntity<PersonListResponse> searchPeoplePost(
    @ApiParam(value = "") @Valid @RequestBody PersonSearchRequest body,
    @ApiParam(value = "HTTP HEADER - Token used for Authorization <strong> Bearer {token_string} </strong>") @RequestHeader(value = "Authorization", required = false) String authorization) throws BrAPIServerException;

    // GET /search/people/{searchResultsDbId} — fetch (paged) results of a
    // previously submitted search.
    @ApiOperation(value = "Get the results of a People search request", nickname = "searchPeopleSearchResultsDbIdGet", notes = "Advanced searching for the people resource. See Search Services for additional implementation details.", response = PersonListResponse.class, authorizations = {
    @Authorization(value = "AuthorizationToken") }, tags = { "People", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "OK", response = PersonListResponse.class),
    @ApiResponse(code = 202, message = "Accepted", response = Model202AcceptedSearchResponse.class),
    @ApiResponse(code = 400, message = "Bad Request", response = String.class),
    @ApiResponse(code = 401, message = "Unauthorized", response = String.class),
    @ApiResponse(code = 403, message = "Forbidden", response = String.class) })
    @RequestMapping(value = "/search/people/{searchResultsDbId}", produces = {
    "application/json" }, method = RequestMethod.GET)
    ResponseEntity<PersonListResponse> searchPeopleSearchResultsDbIdGet(
    @ApiParam(value = "Permanent unique identifier which references the search results", required = true) @PathVariable("searchResultsDbId") String searchResultsDbId,
    @ApiParam(value = "Used to request a specific page of data to be returned. The page indexing starts at 0 (the first page is 'page'= 0). Default is `0`.") @Valid @RequestParam(value = "page", required = false) Integer page,
    @ApiParam(value = "The size of the pages to be returned. Default is `1000`.") @Valid @RequestParam(value = "pageSize", required = false) Integer pageSize,
    @ApiParam(value = "HTTP HEADER - Token used for Authorization <strong> Bearer {token_string} </strong>") @RequestHeader(value = "Authorization", required = false) String authorization) throws BrAPIServerException;
}
|
#!/bin/bash
# pmc
# 2019-03-25
# regex in bash
# scripts from the regex-bash lecture
#
# Demonstrates [[ str =~ pattern ]] with two equivalent ways of matching
# "exactly two alphabetic characters": a {2} interval quantifier versus two
# explicit character classes.
# if [[ arg =~ pattern ]]
# match on > 2 also
#if [[ "$str" =~ [[:alpha:]]{2} ]] ; then
#if [[ "$str" =~ [[:alpha:]][[:alpha:]] ]] ; then

read -p "string " str

# Exactly two alphabetic characters, via the interval quantifier.
if [[ "$str" =~ ^[[:alpha:]]{2}$ ]] ; then
    # Bug fix: quote the expansion. Unquoted `echo $str ...` would
    # glob-expand user input such as "*" against the current directory.
    echo "$str 2 alpha only"
else
    echo "$str not 2 alpha only"
fi

# The same check written with two explicit character classes.
if [[ "$str" =~ ^[[:alpha:]][[:alpha:]]$ ]] ; then
    echo "$str 2 alpha only"
else
    echo "$str not 2 alpha only"
fi
|
#!/bin/bash
# Deploy an artifact to one or more nodes over SSH.
#
# Arguments:
#   $1 - path to the SSH private key
#   $2 - path to the artifact to deploy
#   $3 - whitespace-separated list of target hostnames
# shellcheck disable=SC2006
# shellcheck disable=SC2086
set -e
# ANSI escape sequences used to colorize the progress output.
BOLD_FONT="\e[1m"
GREEN_FONT="\e[92m"
RED_FONT="\e[31m"
PURPLE_FONT="\e[95m"
YELLOW_FONT="\e[93m"
RESET_FONT="\e[0m"
echo -e "$BOLD_FONT"
# Positional parameters (see header comment).
SSH_KEY=${1}
ARTIFACT=${2}
NODES=${3}
# Force IPv4 and skip host-key prompts for non-interactive runs.
SSH_FLAGS="-4 -oStrictHostKeyChecking=no"
echo -e "$YELLOW_FONT"
echo "##################################################################"
echo "##################################################################"
echo "Start work on:"
echo " Node -> ${NODES} "
echo " Artifacts -> ${ARTIFACT} "
echo "##################################################################"
echo "##################################################################"
# NODES is intentionally left unquoted so it word-splits into one host per
# iteration (this is why SC2086 is disabled above).
for node in ${NODES}
do
echo ""
echo ""
echo "Start deployment on ${node}."
echo -e "$PURPLE_FONT"
# Recreate a clean scratch directory on the node, copy the upgrade script
# and the artifact into it, then run the upgrade remotely.
ssh ${SSH_FLAGS} -i ${SSH_KEY} rpkideploy@${node} -C "rm -rf deploy_work_dir && mkdir deploy_work_dir"
scp ${SSH_FLAGS} -i ${SSH_KEY} src/main/scripts/upgrade.sh ${ARTIFACT} rpkideploy@${node}:./deploy_work_dir
ssh ${SSH_FLAGS} -i ${SSH_KEY} rpkideploy@${node} -C "cd ./deploy_work_dir && ./upgrade.sh ${ARTIFACT} /export/bad/apps/rpki-ta-0"
echo -e "$YELLOW_FONT"
echo "Deployment done on ${node}."
echo "------------------------------------------------"
echo "------------------------------------------------"
echo ""
echo ""
done
echo -e "$RESET_FONT"
exit 0
|
#!/usr/bin/env bash
# This script runs clang-format and fixes copyright headers on all relevant files in the repo.
# This is the primary script responsible for fixing style violations.
set -uo pipefail
IFS=$'\n\t'

# File extensions that clang-format should process.
CLANG_FORMAT_FILE_EXTS=(".c" ".h" ".cpp" ".hpp" ".cc" ".hh" ".cxx" ".m" ".mm" ".inc" ".java" ".glsl")

# Loops through all text files tracked by Git.
git grep -zIl '' |
while IFS= read -rd '' f; do
    # Exclude 3rd party and generated directories (same prefixes as before,
    # collapsed into one case statement).
    case "$f" in
        thirdparty*|godot-cpp*|demo/addons/godot-xr-tools*|gradle*|build*|android*|.github*)
            continue
            ;;
    esac
    # Bug fix: quote the array expansion (ShellCheck SC2068). Unquoted,
    # every element is re-split and glob-expanded — harmless for the current
    # extension list but fragile if an entry ever contains spaces or globs.
    for extension in "${CLANG_FORMAT_FILE_EXTS[@]}"; do
        if [[ "$f" == *"$extension" ]]; then
            # Run clang-format in-place.
            clang-format -i "$f"
        fi
    done
done

git diff > patch.patch

# If no patch has been generated all is OK, clean up, and exit.
if [ ! -s patch.patch ] ; then
    printf "Files in this commit comply with the clang-format style rules.\n"
    rm -f patch.patch
    exit 0
fi

# A patch has been created, notify the user, clean up, and exit.
printf "\n*** The following differences were found between the code "
printf "and the formatting rules:\n\n"
cat patch.patch
printf "\n*** Aborting, please fix your commit(s) with 'git commit --amend' or 'git rebase -i <hash>'\n"
rm -f patch.patch
exit 1
|
package com.inner.lovetao.loginregister.mvp.contract;
import com.inner.lovetao.config.UserInfo;
import com.inner.lovetao.core.TaoResponse;
import com.jess.arms.mvp.IModel;
import com.jess.arms.mvp.IView;
import io.reactivex.Observable;
/**
* desc:
* Created by xcz
* on 2019/01/28
*/
public interface BindPhoneActivityContract {

    // Frequently used UI methods can be declared on IView, e.g. showing or
    // hiding a progress indicator and displaying text messages.
    interface View extends IView {
        // Callback for a successful verification-code request.
        void getPhoneCodeSu();

        // Callback for successfully binding the phone number; receives the
        // updated user record.
        void bindPhoneNumSu(UserInfo userInfo);
    }

    // The Model layer defines this interface; callers only depend on the data
    // the Model returns, not on internals such as whether caching is used.
    interface Model extends IModel {
        // Request a verification code for the given phone number.
        Observable<TaoResponse> getPhoneCode(String phone);

        // Bind a phone number using the verification code, plus invitation
        // code and profile data (nickname, avatar URL, openId, sid).
        Observable<TaoResponse<UserInfo>> bindPhone(String phone,
        String verifyCode,
        String InvitationCode,
        String nike, String imgUrl, String openId, String sid);
    }
}
|
<filename>pkg/k8s/clusterresourcefactory_test.go<gh_stars>1-10
package k8s
import (
"strings"
"testing"
. "github.com/onsi/gomega"
)
// TestCloudFactoryValidation exercises ClusterResourceFactoryOptions
// identifier counting and validation with a table-driven test: each case
// sets some combination of AccountName/AccountID/AccountNamespace/ClusterID
// and states the expected identifier count, validity, and error outcome.
func TestCloudFactoryValidation(t *testing.T) {
	g := NewGomegaWithT(t)
	testCases := []struct {
		title         string
		option        *ClusterResourceFactoryOptions
		errExpected   bool
		isValid       bool
		countExpected int
		errContent    string
	}{
		{
			title: "account name, namespace, id, and cluster id empty at the same time",
			option: &ClusterResourceFactoryOptions{
				AccountName:      "",
				AccountID:        "",
				AccountNamespace: "",
				ClusterID:        "",
			},
			errExpected:   false,
			isValid:       false,
			countExpected: 0,
			// NOTE(review): errContent is never checked for this case since
			// errExpected is false — confirm whether an error was intended.
			errContent: "cannot be empty at the same time",
		},
		{
			title: "account name and id set at the same time",
			option: &ClusterResourceFactoryOptions{
				AccountName: "foo",
				AccountID:   "bar",
			},
			errExpected:   true,
			isValid:       false,
			countExpected: 2,
			errContent:    "cannot be combined",
		},
		{
			title: "account name and cluster id set at the same time",
			option: &ClusterResourceFactoryOptions{
				AccountName: "foo",
				ClusterID:   "bar",
			},
			errExpected:   true,
			isValid:       false,
			countExpected: 2,
			errContent:    "cannot be combined",
		},
		{
			title: "account id and cluster id set at the same time",
			option: &ClusterResourceFactoryOptions{
				AccountID: "foo",
				ClusterID: "bar",
			},
			errExpected:   true,
			isValid:       false,
			countExpected: 2,
			errContent:    "cannot be combined",
		},
		{
			title: "succeed",
			option: &ClusterResourceFactoryOptions{
				AccountName: "foo",
			},
			errExpected:   false,
			isValid:       true,
			countExpected: 1,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.title, func(t *testing.T) {
			// Identifier count must match before validation is attempted.
			count := tc.option.countAccountIdentifiers()
			g.Expect(count).Should(Equal(tc.countExpected), "count of identifiers doesn't match")
			valid, err := tc.option.ValidateIdentifiers()
			g.Expect(valid).Should(Equal(tc.isValid), "Boolean response doesn't match")
			if tc.errExpected {
				g.Expect(err).Should(HaveOccurred())
				// When the case names expected error text, assert it appears.
				if tc.errContent != "" {
					g.Expect(true).Should(Equal(strings.Contains(err.Error(), tc.errContent)), "Error string does not contain content")
				}
			} else {
				g.Expect(err).ShouldNot(HaveOccurred())
			}
		})
	}
}
|
<gh_stars>10-100
package io.opensphere.core.pipeline.processor;
import io.opensphere.core.pipeline.renderer.AbstractRenderer;
import io.opensphere.core.pipeline.util.TextureGroup;
import io.opensphere.core.util.Utilities;
/**
* The model data for a texture geometry. This comprises the texture handles as
* well as the coordinates required to render the texture.
*/
public class TextureModelData implements AbstractRenderer.ModelData
{
    /** The non-texture data (may be {@code null}). */
    private final AbstractRenderer.ModelData myModelData;

    /** The textures for the geometry. */
    private final TextureGroup myTextureGroup;

    /**
     * Constructor.
     *
     * @param modelData The non-texture model data.
     * @param texture The textures for the geometry.
     */
    public TextureModelData(AbstractRenderer.ModelData modelData, TextureGroup texture)
    {
        myModelData = modelData;
        myTextureGroup = texture;
    }

    /**
     * Equality is based solely on the identity of the wrapped non-texture
     * model data; the texture group is not considered.
     *
     * @param obj The object to compare against.
     * @return {@code true} when {@code obj} wraps the same model-data
     *         instance.
     */
    @Override
    public boolean equals(Object obj)
    {
        if (obj instanceof TextureModelData)
        {
            TextureModelData model = (TextureModelData)obj;
            if (Utilities.sameInstance(myModelData, model.getModelData()))
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Get the non-texture model data.
     *
     * @return The model data.
     */
    public AbstractRenderer.ModelData getModelData()
    {
        return myModelData;
    }

    /**
     * Get the texture group.
     *
     * @return The texture group.
     */
    public TextureGroup getTextureGroup()
    {
        return myTextureGroup;
    }

    @Override
    public int hashCode()
    {
        // Bug fix: two instances that both wrap a null model datum compare
        // equal via equals() (assuming Utilities.sameInstance is reference
        // equality, which holds for two nulls), so they must share a hash
        // code. The previous fall-back to super.hashCode() returned a
        // distinct identity hash per instance, violating the
        // equals()/hashCode() contract and breaking hash-based collections.
        return myModelData != null ? myModelData.hashCode() : 0;
    }
}
|
"""
This function takes the head node of a singly linked list and returns the middle element of the list.
"""
def FindMiddle(head):
    """Return the middle node of a singly linked list.

    Uses the classic two-pointer technique: ``runner`` advances two links per
    step while ``walker`` advances one, so when ``runner`` falls off the end,
    ``walker`` sits at the middle. For even-length lists the second of the
    two middle nodes is returned; for an empty list the result is ``None``.
    """
    runner = walker = head
    while runner is not None and runner.next is not None:
        runner = runner.next.next
        walker = walker.next
    return walker
# Node class
class Node:
    """One element of a singly linked list."""

    def __init__(self, data):
        self.data = data  # payload carried by this node
        self.next = None  # link to the following node; None marks the tail


# Linked List class
class LinkedList:
    """Minimal singly linked list that tracks only its head node."""

    def __init__(self):
        self.head = None

    def insert_at_beginning(self, data):
        """Prepend a node carrying ``data``; it becomes the new head.

        Bug fix: the original also executed ``self.data = data``, leaving a
        stray, never-read ``data`` attribute on the list object itself.
        """
        node = Node(data)
        node.next = self.head
        self.head = node

    # print Linked List
    def printList(self):
        """Print each node's payload, head first, one per line."""
        temp = self.head
        while (temp):
            print(temp.data)
            temp = temp.next
# Build the demo list 1 -> 2 -> 3 -> 4 -> 5 by hand.
llist = LinkedList()
chain = [Node(value) for value in (1, 2, 3, 4, 5)]
for left, right in zip(chain, chain[1:]):
    left.next = right
llist.head = chain[0]

# Locate and display the middle element (node 3 for this five-node list).
middle = FindMiddle(llist.head)
print("Middle Element:", middle.data)
<?php
/**
 * Return true when $number is an Armstrong (narcissistic) number: a number
 * equal to the sum of its digits each raised to the power of the digit
 * count.
 *
 * Generalization: the original hard-coded the cube, which is only correct
 * for 3-digit input; using the digit count keeps every 3-digit result
 * identical (153, 370, 371, 407, ...) while also handling 1-digit numbers
 * (all of 1-9 are Armstrong) and longer ones such as 9474.
 *
 * @param int $number value to test (non-positive values return false/true
 *                    exactly as before: only 0 tests true).
 * @return bool
 */
function isArmstrong($number)
{
    $digits = strlen((string)$number);
    $total = 0;
    $remaining = $number;
    while ($remaining > 0)
    {
        $digit = $remaining % 10;
        $total = $total + pow($digit, $digits);
        $remaining = (int)($remaining / 10);
    }
    return $total == $number;
}
// Exercise the check with the classic 3-digit Armstrong number 153.
$number = 153;
echo isArmstrong($number) ? "True" : "False";
?> |
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script builds SwiftShader's Vulkan ICD. By default, it creates a
# `.swiftshader` installation directory in your OS's home directory (`HOME` on
# not-windows, `USERPROFILE` on windows):
#
# bash build_tools/third_party/swiftshader/build_vk_swiftshader.sh
#
# The parent directory for the installation can be overridden using the first
# positional argument:
#
# bash build_tools/third_party/swiftshader/build_vk_swiftshader.sh <parent-dir>
#
# If the `.swiftshader` installation dir already exists, it will be deleted and
# rebuilt.
#
# Note that you will need a working CMake installation for this script to
# succeed. On Windows, Visual Studio 2019 is recommended.
#
# Afterward, you'll need to set the `VK_ICD_FILENAMES` environment variable to
# the absolute path of the `vk_swiftshader_icd.json` manifest file. This tells
# the Vulkan loader on your system to load it. Assuming you use the default
# installation directory this can be done on not-Windows via:
#
# export VK_ICD_FILENAMES="${HOME?}/.swiftshader/Linux/vk_swiftshader_icd.json"
#
# or on Windows via:
#
# set VK_ICD_FILENAMES=%USERPROFILE%\.swiftshader\Windows\vk_swiftshader_icd.json
#
# If you used a custom installation directory then the correct path will be
# printed to stdout.
#
# See https://vulkan.lunarg.com/doc/view/1.1.70.1/windows/loader_and_layer_interface.html
# for further details about the Vulkan loader and ICDs.
set +e  # Ignore errors if not found.
CYGPATH="$(which cygpath 2>/dev/null)"
set -e

# cygpath only exists on Windows (Cygwin/Git-Bash); use its presence to pick
# the platform-appropriate install location.
if [[ -z "${CYGPATH?}" ]]; then
    # Anything that isn't Windows.
    BASE_DIR="${1:-${HOME?}}"
    SWIFTSHADER_INSTALL_DIR="${BASE_DIR?}/.swiftshader"
else
    # Windows.
    BASE_DIR="${1:-${USERPROFILE?}}"
    SWIFTSHADER_INSTALL_DIR="${BASE_DIR?}"'\.swiftshader'
fi

# Pinned SwiftShader revision; bump deliberately.
SWIFTSHADER_COMMIT=6287c18b1d249152563f0cb2d5cb0c6d0eb9e3d6
SWIFTSHADER_DIR="$(mktemp --directory --tmpdir swiftshader_XXXXXX)"

# Clone swiftshader and checkout the appropriate commit.
git clone https://github.com/google/swiftshader "${SWIFTSHADER_DIR?}"
cd "${SWIFTSHADER_DIR?}"
# NOTE(review): this pull is a no-op immediately after a fresh clone into a
# brand-new temp directory — candidate for removal.
git pull origin master --ff-only
git checkout "${SWIFTSHADER_COMMIT?}"

# Install swiftshader in SWIFTSHADER_INSTALL_DIR.
# Options:
#   - 64 bit platform and host compiler
#   - Build Vulkan only, don't build GL
#   - Don't build samples or tests
echo "Installing to ${SWIFTSHADER_INSTALL_DIR?}"
if [[ -d "${SWIFTSHADER_INSTALL_DIR?}" ]]; then
    echo " Install directory already exists, cleaning it"
    rm -rf "${SWIFTSHADER_INSTALL_DIR?}"
fi

cmake -B "${SWIFTSHADER_INSTALL_DIR?}" \
    -GNinja \
    -DSWIFTSHADER_BUILD_VULKAN=ON \
    -DSWIFTSHADER_BUILD_EGL=OFF \
    -DSWIFTSHADER_BUILD_GLESv2=OFF \
    -DSWIFTSHADER_BUILD_GLES_CM=OFF \
    -DSWIFTSHADER_BUILD_PVR=OFF \
    -DSWIFTSHADER_BUILD_TESTS=OFF \
    "${SWIFTSHADER_DIR?}"

# Build the project, choosing just the vk_swiftshader target.
cmake --build "${SWIFTSHADER_INSTALL_DIR?}" --config Release --target vk_swiftshader

echo
# Bug fix: "enviroment" -> "environment" in the user-facing message.
echo "Ensure the following variable is set in your environment:"
if [[ -d "${SWIFTSHADER_INSTALL_DIR?}/Linux/" ]]; then
    echo " export VK_ICD_FILENAMES=${SWIFTSHADER_INSTALL_DIR?}/Linux/vk_swiftshader_icd.json"
else
    echo ' set VK_ICD_FILENAMES='"${SWIFTSHADER_INSTALL_DIR?}"'\Windows\vk_swiftshader_icd.json'
fi
|
//
// Created by kepler-br on 6/12/20.
//
#ifndef WOLFENSTEIN_WORLD_H
#define WOLFENSTEIN_WORLD_H
#include "types.h"
#include <glm/vec2.hpp>
// Grid of blocks making up the game map, with per-block access by linear
// index or 2-D position plus lighting/visibility helpers.
class World
{
private:
    // Pointer to the block storage; presumably a flat array owned by this
    // class — confirm allocation/ownership in World.cpp.
    Block *world;
    // Number of blocks reachable through `world`.
    size_t world_length;
    // 2-D extent used to translate (x, y) positions to indices — assumed;
    // confirm the indexing convention in the get_block implementations.
    glm::ivec2 world_dimensions;
    // Size of one block; defaults to 512 (units assumed — confirm usage).
    int block_size = 512;

public:
    World();
    // Number of blocks in the world.
    const size_t &get_world_length() const;
    // Block access by linear index (const and mutable overloads).
    const Block &get_block(const size_t &index) const;
    Block &get_block(const size_t &index);
    // Block access by 2-D grid position (const and mutable overloads).
    const Block &get_block(const glm::ivec2 &position) const;
    Block &get_block(const glm::ivec2 &position);
    const glm::ivec2 &get_world_dimensions() const;
    const int &get_block_size() const;
    void set_block_size(const int &block_size);
    // Mark one block's "seen" flag — semantics assumed from the name;
    // confirm in World.cpp.
    void set_block_seen(const bool seen, const size_t index);
    // Precompute lighting data — implementation not visible here; confirm.
    void bake_light();
};
#endif //WOLFENSTEIN_WORLD_H
|
<filename>src/app/pages/factures/liste-factures-fournisseurs/liste-factures-fournisseurs.component.ts
import { Component, OnInit } from '@angular/core';
import { UtilsServiceService } from '../../../utils-service.service';
import { DialogService } from 'primeng/dynamicdialog';
import { ConfirmationService } from 'primeng/api';
import jsPDF from "jspdf";
import {DatePipe} from "@angular/common";
@Component({
  selector: 'ngx-liste-factures-fournisseurs',
  templateUrl: './liste-factures-fournisseurs.component.html',
  styleUrls: ['./liste-factures-fournisseurs.component.scss'],
})
export class ListeFacturesFournisseursComponent implements OnInit {
  /** Controls visibility of the create/edit invoice dialog. */
  showProviderInvoiceWindow = false;
  /** Provider invoices loaded from the backend. */
  invoices = [];
  loading = false;
  /** Invoice currently being created, edited or deleted. */
  invoice = null;
  /** Controls visibility of the delete-confirmation dialog. */
  displayDeleteProviderInvoice = false;
  /** Flattened rows consumed by the PDF/Excel exports. */
  invoicesToExport: any[] = [];
  /** Column definitions (dataKey/title) for the PDF export. */
  exportColumns: any[] = [];

  constructor(private UtilsService: UtilsServiceService,
    public dialogService: DialogService, private confirmationService: ConfirmationService, private datePipe: DatePipe) { }

  ngOnInit(): void {
    this.initInvoice();
    this.getAllInvoices();
    this.exportColumns = [
      { dataKey: 'invoiceNumber', title: 'Numéro' },
      { dataKey: 'providerLabel', title: 'Fournisseur' },
      { dataKey: 'invoiceDate', title: 'Date facture' },
      { dataKey: 'invoiceDeadlineDate', title: 'Date d\'échéance' },
      { dataKey: 'invoiceTotalAmount', title: 'Montant facture'},
      { dataKey: 'invoiceNet', title: 'Montant NET à payer' },
      { dataKey: 'invoicePayment', title: 'Montant payé' },
      { dataKey: 'invoiceStatus', title: 'Statut' },
    ];
  }

  /** Rebuild the flattened export rows from the currently loaded invoices. */
  getExportColums() {
    // Bug fix: reset the buffer before rebuilding. getAllInvoices() calls
    // this after every save/delete/close, and the previous code only ever
    // appended, so the exported PDF/Excel accumulated duplicate rows.
    this.invoicesToExport = [];
    this.invoices.forEach(invoice => {
      let invoiceToExport = {
        'invoiceNumber': invoice.invoiceNumber,
        'providerLabel': invoice.provider.providerLabel,
        'invoiceDate': this.datePipe.transform(invoice.invoiceDate, 'dd-MM-yyyy'),
        'invoiceDeadlineDate': this.datePipe.transform(invoice.invoiceDeadlineDate, 'dd-MM-yyyy'),
        'invoiceTotalAmount': invoice.invoiceTotalAmountS,
        'invoicePayment': invoice.invoicePaymentS,
        'invoiceNet': invoice.invoiceNetS,
        'invoiceNetExcel': invoice.invoiceNet,
        'invoiceTotalAmountExcel': invoice.invoiceTotalAmount,
        'invoicePaymentExcel': invoice.invoicePayment,
        'invoiceStatus': '',
      };
      // Translate the backend status code into the displayed French label.
      if (invoice.invoiceStatus === 'CLOSED') {
        invoiceToExport.invoiceStatus = 'Fermée'
      } else if (invoice.invoiceStatus === 'OPENED') {
        invoiceToExport.invoiceStatus = 'Ouverte';
      }
      this.invoicesToExport.push(invoiceToExport);
    })
  }

  /** Create or update an invoice, then reload the list. */
  saveInvoice(invoice) {
    const context = this;
    // Round to 3 decimals (TND uses millimes).
    invoice.invoiceTotalAmount = Math.round(invoice.invoiceTotalAmount * 1000) / 1000
    this.UtilsService.post(UtilsServiceService.API_PROVIDER_INVOICE, invoice).subscribe(response => {
      this.hideInvoiceWindow();
      if (invoice.invoiceId == null) {
        this.UtilsService.showToast('success',
          'Facture ajoutée avec succés',
          `La facture fournisseur numéro ${invoice.invoiceNumber} a été ajoutée avec succcés`);
      } else {
        this.UtilsService.showToast('success',
          'Facture modfiée avec succés',
          `La facture fournisseur numéro ${invoice.invoiceNumber} a été modifiée avec succcés`);
      }
      context.getAllInvoices();
      context.initInvoice();
    },
      error => {
        this.UtilsService.showToast('danger',
          'Erreur interne',
          `Un erreur interne a été produit lors de la souvegar de facture fournisseur numéro ${invoice.invoiceNumber}`);
      });
  }

  hideInvoiceWindow() {
    this.showProviderInvoiceWindow = false;
  }

  /** Load all provider invoices and refresh the export rows. */
  getAllInvoices() {
    const context = this;
    this.UtilsService.get(UtilsServiceService.API_PROVIDER_INVOICE).subscribe(response => {
      context.invoices = response;
      this.getExportColums();
    },
      error => {
        this.UtilsService.showToast('danger',
          'Erreur interne',
          `Un erreur interne a été produit lors du chargement des factures`);
      });
  }

  /** Open the edit dialog pre-filled with the given invoice. */
  editInvoice(invoice) {
    this.invoice = invoice;
    this.showProviderInvoiceWindow = true;
  }

  /** Permanently delete the invoice selected via deleteInvoice(). */
  delInvoice() {
    const context = this;
    // NOTE(review): uses API_INVOICE while load/save use
    // API_PROVIDER_INVOICE — confirm the endpoint is intentional.
    const url = UtilsServiceService.API_INVOICE + '/' + this.invoice.invoiceId;
    this.UtilsService.delete(url).subscribe(response => {
      this.UtilsService.showToast('success',
        'Facture supprimée avec succés',
        `La facture fournisseur numéro ${this.invoice.invoiceNumber} a été supprimée avec succcés`);
      context.getAllInvoices();
      this.displayDeleteProviderInvoice = false;
    },
      error => {
        this.UtilsService.showToast('danger',
          'Erreur interne',
          `Un erreur interne a été produit lors de la suppression de facture fournisseur numéro ${this.invoice.invoiceNumber}`);
        this.displayDeleteProviderInvoice = false;
      });
  }

  /** Remember the invoice and show the delete-confirmation dialog. */
  deleteInvoice(invoice) {
    this.invoice = invoice;
    this.displayDeleteProviderInvoice = true;
  }

  /** Reset the working invoice to a blank template. */
  initInvoice() {
    this.invoice = {
      invoiceId: null,
      invoiceCurrency: 'TND',
      invoiceNumber: '',
      provider: null,
      invoiceDate: null,
      invoiceDeadlineDate: null,
      invoiceNet: 0,
      invoiceRs: 0,
      invoiceRsType: 'VALUE',
      invoiceTotalAmount: 0,
      invoiceDeadlineInNumberOfDays: 0,
    };
  }

  /** Mark an invoice as closed, then reload the list. */
  closeInvoice(invoice) {
    const context = this;
    // NOTE(review): same API_INVOICE vs API_PROVIDER_INVOICE question as in
    // delInvoice() — confirm.
    const url = UtilsServiceService.API_INVOICE + '/' + invoice.invoiceId;
    this.UtilsService.put(url, null).subscribe(response => {
      this.UtilsService.showToast('success',
        'Facture fermée avec succés',
        `La facture fournisseur numéro ${invoice.invoiceNumber} a été fermée avec succcés`);
      context.getAllInvoices();
    },
      error => {
        this.UtilsService.showToast('danger',
          'Erreur interne',
          `Un erreur interne a été produit lors de la fermeture de facture fournisseur numéro ${invoice.invoiceNumber}`);
      });
  }

  /** Export the current rows as a PDF table (requires the autoTable plugin). */
  exportPdf() {
    const doc = new jsPDF('p', 'pt');
    doc['autoTable'](this.exportColumns, this.invoicesToExport);
    // doc.autoTable(this.exportColumns, this.products);
    doc.save("factures-fournisseur.pdf");
  }

  /** Export the current rows as an Excel workbook (lazy-loads xlsx). */
  exportExcel() {
    let invoicesToExport = [];
    this.invoicesToExport.forEach(invoice => {
      let invoiceToExport = {
        'Numéro': invoice.invoiceNumber,
        'fournisseur': invoice.providerLabel,
        "Date facture": invoice.invoiceDate,
        "Date d'échéance": invoice.invoiceDeadlineDate,
        'Montant facture': invoice.invoiceTotalAmountExcel,
        'Montant Net à payé': invoice.invoiceNetExcel,
        'Montant payé': invoice.invoicePaymentExcel,
        'Statut': invoice.invoiceStatus,
      };
      invoicesToExport.push(invoiceToExport);
    })
    import('xlsx').then(xlsx => {
      const worksheet = xlsx.utils.json_to_sheet(invoicesToExport);
      const workbook = { Sheets: { 'data': worksheet }, SheetNames: ['data'] };
      const excelBuffer: any = xlsx.write(workbook, { bookType: 'xlsx', type: 'array' });
      this.saveAsExcelFile(excelBuffer, "factures-fournisseur");
    });
  }

  /** Wrap a raw xlsx buffer in a Blob and trigger a browser download. */
  saveAsExcelFile(buffer: any, fileName: string): void {
    import("file-saver").then(FileSaver => {
      let EXCEL_TYPE = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=UTF-8';
      let EXCEL_EXTENSION = '.xlsx';
      const data: Blob = new Blob([buffer], {
        type: EXCEL_TYPE
      });
      FileSaver.saveAs(data, fileName + '_export_' + new Date().getTime() + EXCEL_EXTENSION);
    });
  }
}
|
<gh_stars>0
/*
* Copyright (c) 2019 Ford Motor Company
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package com.ford.labs.daab.subscribers.slack;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Value;
import java.util.List;
/**
 * Request payload for posting a Slack message; serialized to JSON by Jackson.
 * {@link JsonProperty} remaps the fields whose JSON keys are snake_case.
 * Lombok's {@code @Value} makes the class immutable with an all-args
 * constructor, getters, equals/hashCode and toString.
 */
@Value
class PostMessageRequest {
    String channel;
    String text;
    /** Serialized as "as_user" in the JSON body. */
    @JsonProperty("as_user") boolean asUser;
    List<Attachment> attachments;

    /** One attachment of the message. */
    @Value
    static class Attachment {
        String fallback;
        String color;
        String title;
        /** Serialized as "title_link" in the JSON body. */
        @JsonProperty("title_link") String titleLink;
        long ts;
    }
}
|
#include<stdio.h>
/*
 * Prints a two-part star pattern of size n:
 *   - top section: rows of paired star runs separated by spaces
 *     (i steps from n/2 to n by 2, so for n=5 it prints two rows),
 *   - bottom section: an inverted triangle of width 2*i-1.
 *
 * FIX: `void main()` is not a standard signature; hosted C requires
 * `int main(void)` and a return value (C11 5.1.2.2.1).
 */
int main(void)
{
    int i, j;
    int n = 5; /* controls the overall size of the pattern */

    /* Top section: left padding, i stars, middle padding, i stars. */
    for (i = n / 2; i <= n; i += 2)
    {
        for (j = 1; j < n - i; j += 2)
        {
            printf(" ");
        }
        for (j = 1; j <= i; j++)
        {
            printf("*");
        }
        for (j = 1; j <= n - i; j++)
        {
            printf(" ");
        }
        for (j = 1; j <= i; j++)
        {
            printf("*");
        }
        printf("\n");
    }
    /* Bottom section: inverted, left-padded triangle of odd widths. */
    for (i = n; i >= 1; i--)
    {
        for (j = i; j < n; j++)
        {
            printf(" ");
        }
        for (j = 1; j <= (i * 2) - 1; j++)
        {
            printf("*");
        }
        printf("\n");
    }
    return 0;
}
|
#!/usr/bin/env bash
# Posts each sample image to the local answer/blank endpoint and pretty-prints
# the JSON response with jq.
#
# The images are listed once and iterated, replacing 21 duplicated curl lines;
# request parameters and ordering are unchanged.
images=(
  02264090.jpg 02263227.jpg 0252ir.jpg 02261092.jpg 02261509.jpg
  02260745.jpg 02262875.jpg 02263620.jpg 02263989.jpg 02261607.jpg
  0722aat.jpg 02261832.jpg 02260601.jpg 02260148.jpg 02260739.jpg
  02264061.jpg 02264074.jpg 0706aad.jpg 02260725.jpg 0686zj.jpg
  02264054.jpg
)
for img in "${images[@]}"; do
  curl -F "image=@./${img}" -F 'id=999' http://localhost:8686/answer/blank | jq .
done
|
import { Either, left, right } from '@core/logic/Either'
import { InvalidBodyLengthError } from './errors/InvalidBodyLengthError'
export class Body {
  private readonly body: string

  private constructor(body: string) {
    this.body = body
  }

  get value(): string {
    return this.body
  }

  /** A body is valid when present and its trimmed length exceeds 20 characters. */
  static validate(body: string): boolean {
    return Boolean(body) && body.trim().length > 20
  }

  /** Builds a Body, or an InvalidBodyLengthError when validation fails. */
  static create(body: string): Either<InvalidBodyLengthError, Body> {
    return this.validate(body)
      ? right(new Body(body))
      : left(new InvalidBodyLengthError())
  }
}
|
# Hook run before a repo dir is added: if the dir is inside a git checkout
# that is not yet registered, register its toplevel with _do_git_repo_add.
function _do_git_hook_before_repo_dir_add() {
  local dir=${1?'dir arg required'}
  # FIX: repo is the second positional argument; the original read ${1}
  # twice, so 'repo' silently aliased 'dir'.
  local repo=${2?'repo arg required'}

  _do_log_debug 'git' "_do_git_hook_before_repo_dir_add ${dir} ${repo}"

  _do_dir_push "${dir}"

  local git_dir
  # FIX: the stderr redirect must be inside the command substitution;
  # placed after the assignment it redirected nothing useful and let
  # git's "not a git repository" errors leak to the console.
  if git_dir=$(git rev-parse --show-toplevel 2>/dev/null); then
    if ! _do_repo_dir_exists "${dir}"; then
      _do_git_repo_add "${git_dir}"
    fi
  fi

  _do_dir_pop
}
|
<filename>api/v1beta1/foundationdb_version.go
/*
* foundationdb_version.go
*
* This source file is part of the FoundationDB open source project
*
* Copyright 2021 Apple Inc. and the FoundationDB project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package v1beta1
import (
"fmt"
"regexp"
"strconv"
)
// FdbVersion represents a version of FoundationDB.
//
// This provides convenience methods for checking features available in
// different versions.
//
// All fields are plain ints, so the zero value means "0.0.0" and two
// FdbVersion values can be compared with ==.
type FdbVersion struct {
	// Major is the major version
	Major int

	// Minor is the minor version
	Minor int

	// Patch is the patch version
	Patch int
}
// FDBVersionRegex describes the format of a FoundationDB version.
//
// The pattern is unanchored, so it extracts the leftmost major.minor.patch
// triple found anywhere in the input string.
var FDBVersionRegex = regexp.MustCompile(`(\d+)\.(\d+)\.(\d+)`)
// ParseFdbVersion parses a version from its string representation.
//
// It returns an error when no major.minor.patch triple is present in the
// string.
func ParseFdbVersion(version string) (FdbVersion, error) {
	matches := FDBVersionRegex.FindStringSubmatch(version)
	if matches == nil {
		return FdbVersion{}, fmt.Errorf("could not parse FDB version from %s", version)
	}
	// The regex guarantees each submatch is a run of digits, so Atoi is not
	// expected to fail; its error is still propagated defensively. The loop
	// replaces the three duplicated conversion blocks of the original.
	parts := make([]int, 3)
	for index := range parts {
		value, err := strconv.Atoi(matches[index+1])
		if err != nil {
			return FdbVersion{}, err
		}
		parts[index] = value
	}
	return FdbVersion{Major: parts[0], Minor: parts[1], Patch: parts[2]}, nil
}
// String gets the string representation of an FDB version, e.g. "6.2.20".
func (version FdbVersion) String() string {
	return strconv.Itoa(version.Major) + "." + strconv.Itoa(version.Minor) + "." + strconv.Itoa(version.Patch)
}
// Compact prints the version in the major.minor format, e.g. "6.2".
func (version FdbVersion) Compact() string {
	return strconv.Itoa(version.Major) + "." + strconv.Itoa(version.Minor)
}
// IsAtLeast determines if a version is greater than or equal to another version.
//
// Components are compared lexicographically: major first, then minor,
// then patch.
func (version FdbVersion) IsAtLeast(other FdbVersion) bool {
	if version.Major != other.Major {
		return version.Major > other.Major
	}
	if version.Minor != other.Minor {
		return version.Minor > other.Minor
	}
	return version.Patch >= other.Patch
}
// IsProtocolCompatible determines whether two versions of FDB are protocol
// compatible.
//
// Compatibility only requires matching major and minor components; the
// patch level is ignored.
func (version FdbVersion) IsProtocolCompatible(other FdbVersion) bool {
	sameMajor := version.Major == other.Major
	sameMinor := version.Minor == other.Minor
	return sameMajor && sameMinor
}
// HasInstanceIDInSidecarSubstitutions determines if a version has
// FDB_INSTANCE_ID supported natively in the variable substitutions in the
// sidecar.
func (version FdbVersion) HasInstanceIDInSidecarSubstitutions() bool {
	minimum := FdbVersion{Major: 6, Minor: 2, Patch: 15}
	return version.IsAtLeast(minimum)
}
// PrefersCommandLineArgumentsInSidecar determines if a version has
// support for configuring the sidecar exclusively through command-line
// arguments.
func (version FdbVersion) PrefersCommandLineArgumentsInSidecar() bool {
	minimum := FdbVersion{Major: 6, Minor: 2, Patch: 15}
	return version.IsAtLeast(minimum)
}
// SupportsUsingBinariesFromMainContainer determines if a version has
// support for having the sidecar dynamically switch between using binaries
// from the main container and binaries provided by the sidecar.
func (version FdbVersion) SupportsUsingBinariesFromMainContainer() bool {
	minimum := FdbVersion{Major: 6, Minor: 2, Patch: 15}
	return version.IsAtLeast(minimum)
}
// HasRatekeeperRole determines if a version has a dedicated role for
// ratekeeper.
func (version FdbVersion) HasRatekeeperRole() bool {
	minimum := FdbVersion{Major: 6, Minor: 2, Patch: 0}
	return version.IsAtLeast(minimum)
}
// HasMaxProtocolClientsInStatus determines if a version has the
// max_protocol_clients field in the cluster status.
func (version FdbVersion) HasMaxProtocolClientsInStatus() bool {
	minimum := FdbVersion{Major: 6, Minor: 2, Patch: 0}
	return version.IsAtLeast(minimum)
}
// HasSidecarCrashOnEmpty determines if a version has the flag to have the
// sidecar crash on a file being empty.
func (version FdbVersion) HasSidecarCrashOnEmpty() bool {
	minimum := FdbVersion{Major: 6, Minor: 2, Patch: 20}
	return version.IsAtLeast(minimum)
}
// HasNonBlockingExcludes determines if a version has support for non-blocking
// exclude commands.
//
// This is currently set to false across the board, pending investigation into
// potential bugs with non-blocking excludes.
//
// NOTE: the receiver is deliberately unused until that investigation lands;
// callers should not special-case any version.
func (version FdbVersion) HasNonBlockingExcludes() bool {
	return false
}
// NextMajorVersion returns the next major version of FoundationDB
// (minor and patch reset to zero).
func (version FdbVersion) NextMajorVersion() FdbVersion {
	next := FdbVersion{Major: version.Major + 1}
	return next
}
// NextMinorVersion returns the next minor version of FoundationDB
// (patch reset to zero).
func (version FdbVersion) NextMinorVersion() FdbVersion {
	next := FdbVersion{
		Major: version.Major,
		Minor: version.Minor + 1,
	}
	return next
}
// NextPatchVersion returns the next patch version of FoundationDB.
func (version FdbVersion) NextPatchVersion() FdbVersion {
	// The value receiver is already a copy, so it can be bumped in place.
	version.Patch++
	return version
}
// Equal checks if two FdbVersion are the same.
func (version FdbVersion) Equal(other FdbVersion) bool {
	// FdbVersion has only comparable int fields, so struct equality is
	// exactly the field-by-field comparison.
	return version == other
}
// Versions provides a shorthand for known versions.
// This is only to be used in testing.
//
// Each With*/Without* pair brackets one of the feature gates above: the
// "With" entry satisfies the corresponding Has*/Prefers*/Supports* check
// and the "Without" entry predates it.
var Versions = struct {
	NextMajorVersion, NextPatchVersion,
	WithSidecarInstanceIDSubstitution, WithoutSidecarInstanceIDSubstitution,
	WithCommandLineVariablesForSidecar, WithEnvironmentVariablesForSidecar,
	WithBinariesFromMainContainer, WithoutBinariesFromMainContainer,
	WithRatekeeperRole, WithoutRatekeeperRole,
	WithSidecarCrashOnEmpty, WithoutSidecarCrashOnEmpty,
	Default FdbVersion
}{
	Default:                              FdbVersion{Major: 6, Minor: 2, Patch: 20},
	NextPatchVersion:                     FdbVersion{Major: 6, Minor: 2, Patch: 21},
	NextMajorVersion:                     FdbVersion{Major: 7, Minor: 0, Patch: 0},
	WithSidecarInstanceIDSubstitution:    FdbVersion{Major: 6, Minor: 2, Patch: 15},
	WithoutSidecarInstanceIDSubstitution: FdbVersion{Major: 6, Minor: 2, Patch: 11},
	WithCommandLineVariablesForSidecar:   FdbVersion{Major: 6, Minor: 2, Patch: 15},
	WithEnvironmentVariablesForSidecar:   FdbVersion{Major: 6, Minor: 2, Patch: 11},
	WithBinariesFromMainContainer:        FdbVersion{Major: 6, Minor: 2, Patch: 15},
	WithoutBinariesFromMainContainer:     FdbVersion{Major: 6, Minor: 2, Patch: 11},
	WithRatekeeperRole:                   FdbVersion{Major: 6, Minor: 2, Patch: 15},
	WithoutRatekeeperRole:                FdbVersion{Major: 6, Minor: 1, Patch: 12},
	WithSidecarCrashOnEmpty:              FdbVersion{Major: 6, Minor: 2, Patch: 20},
	WithoutSidecarCrashOnEmpty:           FdbVersion{Major: 6, Minor: 2, Patch: 15},
}
|
<reponame>dj-1087/MJU_Club_HomePage<gh_stars>0
import PropTypes from "prop-types";
import React from 'react';
import {Link} from "react-router-dom";
import BlogDetails from '../../components/Blog/BlogDetails.jsx';
import Comment from '../../components/Comment/Comment.jsx';
import SidebarWrap from '../../components/Sidebar/SidebarWrap.jsx';
import SidebarWidget from '../../components/Sidebar/SidebarWidget.jsx';
import SidebarBanner from '../../components/Sidebar/SidebarBanner.jsx';
import SidebarTitle from '../../components/Sidebar/SidebarTitle';
import SidebarSearch from '../../components/Sidebar/SidebarSearch.jsx';
import SidebarCategories from '../../components/Sidebar/SidebarCategories.jsx';
import SidebarPost from '../../components/Sidebar/SidebarPost.jsx';
import SidebarTag from '../../components/Sidebar/SidebarTag.jsx';
const BlogDetailsContainer = ({data}) => {
return (
<div className="section section-padding fix">
<div className="container">
<div className="row mb-n10">
<div className="col-lg-8 col-12 order-lg-1 mb-10">
<div className="row row-cols-1 no-gutters">
<BlogDetails data={data} />
<div className="entry-author">
<div className="author-info">
<div className="author-avatar">
<img src={process.env.PUBLIC_URL + "/images/author/blog-author.png"} alt="" />
</div>
<div className="author-description">
<h6 className="author-name"><NAME></h6>
<span className="designation">CEO at Flow</span>
<div className="author-biographical-info">
She is a lawyer, podcaster, speaker, and writer. As an educational content director, she helps develop HasThemes premium training products.
</div>
</div>
</div>
</div>
<div className="blog-nav-links">
<h4 className="title">Related Posts </h4>
<div className="nav-list">
<div className="nav-item prev">
<div className="inner">
<Link to={process.env.PUBLIC_URL + `/blog-details/${data.id}`}>
<div className="hover-bg has-thumbnail" style={{backgroundImage: `url(${process.env.PUBLIC_URL}/images/pagination/blog-pagination.jpg)`}}></div>
<span className="cate">Marketing</span>
<h6>Eleven top tips for developing agile marketing strategies that work</h6>
</Link>
</div>
</div>
<div className="nav-item next">
<div className="inner">
<Link to={process.env.PUBLIC_URL + `/blog-details/${data.id}`}>
<div className="hover-bg has-thumbnail" style={{backgroundImage: `url(${process.env.PUBLIC_URL}/images/pagination/blog-pagination-2.jpg)`}}></div>
<span className="cate">Startup</span>
<h6>Growing a startup involves balancing out the financial stack</h6>
</Link>
</div>
</div>
</div>
</div>
<div className="comment-form-wrap">
<div className="comment-respond">
<h3 className="title">Leave a Reply</h3>
<Comment
url=""
id={data.id}
title={data.title}
/>
</div>
</div>
</div>
</div>
<div className="col-lg-4 col-12 order-lg-2 mb-10">
<SidebarWrap>
<SidebarWidget>
<SidebarSearch />
</SidebarWidget>
<SidebarWidget>
<SidebarTitle title="Categories" />
<SidebarCategories />
</SidebarWidget>
<SidebarWidget>
<SidebarTitle classOption="mb-2" title="Popular Posts" />
<SidebarPost />
</SidebarWidget>
<SidebarWidget>
<SidebarBanner />
</SidebarWidget>
<SidebarWidget>
<SidebarTitle title="Popular tags" />
<SidebarTag />
</SidebarWidget>
</SidebarWrap>
</div>
</div>
</div>
</div>
)
}
// Runtime prop validation: `data` is the blog post record (id, title, ...)
// passed through to BlogDetails and Comment above.
BlogDetailsContainer.propTypes = {
    data: PropTypes.object
};

export default BlogDetailsContainer;
|
#!/bin/sh
# Style-check every Python file in the tree, printing a total offense count.
# Ignored rules: W293 (whitespace on blank line), E201/E202 (whitespace
# inside brackets), E501 (line too long).
# NOTE(review): the 'pep8' tool has been renamed 'pycodestyle' upstream.
pep8 --filename=*.py --count --ignore=W293,E201,E202,E501 .
# Byte-compile all sources (force, quiet) as a quick syntax check.
python -m compileall -f -q .
|
#!/usr/bin/env bash
# Build helper: removes stale build artefacts, rebuilds the sdist/wheel,
# and refreshes the requirements file.

dirs="./build ./dist ./utils3.egg-info"

# Check for existing build/dist directories.
printf "\nChecking for existing build directories ...\n\n"

for d in ${dirs}; do
    # Delete the directory if it exists.
    if [ -d "${d}" ]; then
        # FIX: quote ${d} so a path containing spaces or glob characters is
        # passed to printf as a single argument.
        printf "Deleting %s\n" "${d}"
        rm -rf "${d}"
    fi
done

# Create the package and wheel file.
python ./setup.py sdist bdist_wheel

# Update requirements file.
printf "\nUpdating the requirements file ...\n"
pipreqs . --force

# Notification.
printf "\nAll done.\n\n"
|
#!/bin/bash
#
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -e

# Needed because if it is set, cd may print the path it changed to.
unset CDPATH

# On Mac OS, readlink -f doesn't work, so follow_links traverses the path one
# link at a time, and then cds into the link destination and find out where it
# ends up.
#
# The function is enclosed in a subshell to avoid changing the working directory
# of the caller.
function follow_links() (
  cd -P "$(dirname -- "$1")"
  file="$PWD/$(basename -- "$1")"
  while [[ -h "$file" ]]; do
    cd -P "$(dirname -- "$file")"
    file="$(readlink -- "$file")"
    cd -P "$(dirname -- "$file")"
    file="$PWD/$(basename -- "$file")"
  done
  echo "$file"
)

SCRIPT_DIR=$(follow_links "$(dirname -- "${BASH_SOURCE[0]}")")
FLUTTER_DIR="$(cd "$SCRIPT_DIR/.."; pwd -P)"
cd "$FLUTTER_DIR"

# Prefer the 'upstream' remote when this checkout is a fork; otherwise use
# the local master branch.
if git remote get-url upstream >/dev/null 2>&1; then
  UPSTREAM=upstream/master
else
  UPSTREAM=master
fi;

# The engine commit currently rolled into the flutter/flutter framework.
FLUTTER_VERSION="$(curl -s https://raw.githubusercontent.com/flutter/flutter/master/bin/internal/engine.version)"
# How many engine commits on $UPSTREAM are not yet part of that roll.
BEHIND="$(git rev-list "$FLUTTER_VERSION".."$UPSTREAM" --oneline | wc -l)"
MAX_BEHIND=16 # no more than 4 bisections to identify the issue
if [[ $BEHIND -le $MAX_BEHIND ]]; then
  echo "OK, the flutter/engine to flutter/flutter roll is only $BEHIND commits behind."
else
  echo "ERROR: The flutter/engine to flutter/flutter roll is $BEHIND commits behind!"
  echo "  It exceeds our max allowance of $MAX_BEHIND. Unless that this commit fixes the roll,"
  echo "  please roll engine into flutter first before merging more commits into engine."
  exit 1
fi
|
#!/bin/bash
# Terraform external-data helper: polls a GCP interconnect attachment until
# both BGP peering addresses are assigned, then emits them as JSON.

# Exit if any of the intermediate steps fail
set -e

# Extract arguments from the input into shell variables.
# jq will ensure that the values are properly quoted
# and escaped for consumption by the shell.
eval "$(jq -r '@sh "INTERCONNECT_ATTACHMENT=\(.interconnect_name) REGION=\(.region) PROJECT_ID=\(.project_id)"')"

max_iterations=20
wait_seconds=6

cloud_router_ip=""
customer_router_ip=""

iterations=0
while true
do
  ((++iterations))
  sleep $wait_seconds
  interconnect=$(gcloud compute interconnects attachments describe $INTERCONNECT_ATTACHMENT --region $REGION --project=$PROJECT_ID --format=json)
  # FIX: use '// empty' so a JSON null becomes an empty string. Plain 'jq -r'
  # prints the literal string "null" for missing fields, which made the
  # emptiness checks below pass prematurely.
  cloud_router_ip=$(echo $interconnect | jq -r '.cloudRouterIpAddress // empty')
  customer_router_ip=$(echo $interconnect | jq -r '.customerRouterIpAddress // empty')
  if [ ! -z "$cloud_router_ip" ] && [ ! -z "$customer_router_ip" ]; then
    break
  fi
  if [ "$iterations" -ge "$max_iterations" ]; then
    exit 1
  fi
done

# FIX: removed a stray top-level 'break' (invalid outside a loop); the loop
# above already guarantees both addresses are set, or has exited with 1.
[ -z "$cloud_router_ip" ] && echo "GCP - Cloud router is null"
[ -z "$customer_router_ip" ] && echo "GCP - Customer router is null"

# Safely produce a JSON object containing the result value.
# jq will ensure that the value is properly quoted
# and escaped to produce a valid JSON string.
jq -n --arg cloud_router_ip "$cloud_router_ip" --arg customer_router_ip "$customer_router_ip" '{"cloud_router_ip":$cloud_router_ip,"customer_router_ip":$customer_router_ip}'
|
#ifndef _TOS_VFS_ERR_H_
#define _TOS_VFS_ERR_H_
typedef enum vfs_err_en {
VFS_ERR_NONE,
VFS_ERR_BUFFER_NULL,
VFS_ERR_DEVICE_NOT_REGISTERED,
VFS_ERR_DEVICE_ALREADY_REGISTERED,
VFS_ERR_FILE_NO_AVAILABLE,
VFS_ERR_FILE_NOT_OPEN,
VFS_ERR_FS_ALREADY_MOUNTED,
VFS_ERR_FS_ALREADY_REGISTERED,
VFS_ERR_FS_NOT_REGISTERED,
VFS_ERR_FS_NOT_MOUNT,
VFS_ERR_OPS_NULL,
VFS_ERR_OPS_FAILED,
VFS_ERR_INODE_NAME_TOO_LONG,
VFS_ERR_INODE_CREATE_FAILED,
VFS_ERR_INODE_NOT_FOUND,
VFS_ERR_INODE_INVALID,
VFS_ERR_INODE_BUSY,
VFS_ERR_INODE_INAVALIABLE,
VFS_ERR_OPEN_DIR,
VFS_ERR_OUT_OF_MEMORY,
VFS_ERR_PARA_INVALID,
VFS_ERR_PATH_TOO_LONG,
} vfs_err_t;
#endif /* _TOS_VFS_ERR_H_ */
|
#!/bin/sh
# Copyright 2005-2019 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities granted to it by
# virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction.
#
. ./include.sh

REDIRECT=/dev/null

dir="${data_dir}/tigge/"

# check tigge global
for file in ${dir}tigge_*.grib
do
  ${tigge_dir}/tigge_check ${file} 2> $REDIRECT > $REDIRECT
done

# check tigge-lam
for file in ${dir}tiggelam_*.grib
do
  ${tigge_dir}/tigge_check -l ${file} 2> $REDIRECT > $REDIRECT
done

# Test non-TIGGE files too. We now expect tigge_check to fail!
# All the GRIB files in the samples are non-TIGGE
for file in ${ECCODES_SAMPLES_PATH}/regular_*.tmpl; do
  # Temporarily disable errexit so the expected failure does not abort us.
  set +e
  ${tigge_dir}/tigge_check ${file} 2> $REDIRECT > $REDIRECT
  status=$?
  set -e
  if [ $status -eq 0 ]; then
    # should have failed and returned a non-zero exit code
    exit 1
  fi
done

# GRIB-531
TEMP=temp.$$.tigge
${tools_dir}/grib_get -nparameter ${data_dir}/tigge_pf_ecmwf.grib2 > $TEMP
diff ${data_dir}/tigge_pf_ecmwf.grib2.ref $TEMP

# GRIB-205. Changing productionStatusOfProcessedData should not change
# anything else
input=${dir}/tigge_ecmf_sfc_sd.grib
${tools_dir}/grib_set -s productionStatusOfProcessedData=5 $input $TEMP
# Compare while blacklisting the key that was deliberately changed.
${tools_dir}/grib_compare -bproductionStatusOfProcessedData $input $TEMP
rm -f $TEMP

# GRIB-757 validity date/time check fails for the following:
tigge_bad_validity="
tigge_kwbc_sfc_sf.grib
tigge_kwbc_sfc_slhf.grib
tigge_kwbc_sfc_sshf.grib
tigge_kwbc_sfc_ssr.grib
tigge_kwbc_sfc_str.grib
tigge_kwbc_sfc_ttr.grib
"
for file in $tigge_bad_validity; do
  set +e
  ${tigge_dir}/tigge_check -w ${dir}${file} > $TEMP
  status=$?
  set -e
  # Expect exit code 1 and the specific warning in the output.
  [ $status -eq 1 ]
  cat $TEMP
  grep -q "invalid validity Date/Time" $TEMP
done
rm -f $TEMP
|
#!/bin/bash

# Required parameters:
# @raycast.schemaVersion 1
# @raycast.icon images/devutils.png
# @raycast.title String Inspector
# @raycast.mode silent
# @raycast.packageName DevUtils.app

# Documentation:
# @raycast.description Inspect your current clipboard string (length, words count, unicode, etc.)
# @raycast.author DevUtils.app
# @raycast.authorURL https://devutils.app

# Launch the app first so the URL scheme has a running handler.
open -a DevUtils
# FIX: quote the URL — an unquoted '?' is a glob character and would be
# expanded if a matching filename exists in the working directory.
open "devutils://stringinspect?clipboard"
|
/*
* Copyright 1&1 Internet AG, https://github.com/1and1/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.oneandone.pommes.descriptor;
import net.oneandone.pommes.cli.Environment;
import net.oneandone.pommes.database.Gav;
import net.oneandone.pommes.database.Project;
import net.oneandone.pommes.scm.Scm;
import net.oneandone.sushi.fs.file.FileNode;
import java.io.IOException;
import java.net.URISyntaxException;
/** Descriptor without meta information like from poms; only the scm url is known. */
public class RawDescriptor extends Descriptor {
    /**
     * Probes the given node for an scm checkout.
     *
     * @return a descriptor for the checkout, or null when the node is not a
     *         directory or no scm checkout is detected
     */
    public static RawDescriptor createOpt(FileNode node) throws IOException {
        if (!node.isDirectory()) {
            return null;
        }
        Scm probed = Scm.probeCheckout(node);
        return probed == null ? null : new RawDescriptor(probed, node);
    }

    private final Scm scm;
    private final FileNode directory;

    public RawDescriptor(Scm scm, FileNode directory) {
        this.scm = scm;
        this.directory = directory;
    }

    //--

    @Override
    protected Project doLoad(Environment environment, String zone, String origin, String revision, String foundScm) throws IOException {
        // Sanity check: the scm url passed in must match this checkout.
        if (!foundScm.equals(scm.getUrl(directory))) {
            throw new IllegalArgumentException(foundScm + " " + scm.getUrl(directory));
        }
        Gav artifact;
        try {
            artifact = scm.defaultGav(foundScm);
        } catch (URISyntaxException e) {
            throw new IOException(e);
        }
        // No pom metadata available, so name and url stay null.
        return new Project(zone, origin, revision, null, artifact, foundScm, null);
    }
}
|
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
function initialize() {
export AWS_AZ=$(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone/)
export AWS_REGION=$(echo ${AWS_AZ} | sed -e 's/[a-z]$//')
export INSTANCE_ID=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)
export EBS_AUTOSCALE_CONFIG_FILE=/etc/ebs-autoscale.json
}
function detect_init_system() {
# detects the init system in use
# based on the following:
# https://unix.stackexchange.com/a/164092
if [[ `/sbin/init --version` =~ upstart ]]; then echo upstart;
elif [[ `systemctl` =~ -\.mount ]]; then echo systemd;
elif [[ -f /etc/init.d/cron && ! -h /etc/init.d/cron ]]; then echo sysv-init;
else echo unknown; fi
}
function get_config_value() {
local filter=$1
jq -r $filter $EBS_AUTOSCALE_CONFIG_FILE
}
function get_metadata() {
local key=$1
echo `curl -s http://169.254.169.254/latest/meta-data/$key`
}
function logthis() {
echo "[`date`] $1" >> $(get_config_value .logging.log_file)
}
function starting() {
logthis "Starting EBS Autoscale"
}
function stopping() {
logthis "Stopping EBS Autoscale"
}
|
<gh_stars>10-100
//
// AppDelegate.h
// CIE ID
//
// Created by <NAME> on 11/12/2018. http://www.ugochirico.com
// Copyright © 2018 IPZS. All rights reserved.
//
#import <Cocoa/Cocoa.h>
/** Application delegate for the macOS app; conforms to NSApplicationDelegate. */
@interface AppDelegate : NSObject <NSApplicationDelegate>

@end
|
#!@RCD_SCRIPTS_SHELL@
#
# $NetBSD: policyd_weight.sh,v 1.1.1.1 2007/07/06 13:49:46 xtraeme Exp $
#
# PROVIDE: policyd_weight
# BEFORE: mail
# REQUIRE: DAEMON LOGIN
. /etc/rc.subr

name="policyd_weight"
rcvar=$name
pidfile="@VARBASE@/run/policyd-weight.pid"
command_interpreter="@PREFIX@/bin/perl"
command="@PREFIX@/sbin/policyd-weight"
extra_commands="reload"
required_files="@PKG_SYSCONFDIR@/policyd-weight.conf"

reload_cmd="policyd_weight_op"
restart_cmd="policyd_weight_op"
start_cmd="policyd_weight_op"
stop_cmd="policyd_weight_stop"

# Forward the rc.d action verb (start/reload/restart, held in ${rc_arg})
# directly to the daemon binary.
policyd_weight_op()
{
	${command} ${rc_arg}
}

# Stopping uses the daemon's own '-k stop' form rather than a plain verb.
policyd_weight_stop()
{
	${command} -k stop
}

# Use the rc.subr machinery when present; otherwise start the daemon
# directly (fallback for systems without /etc/rc.subr or /etc/rc.conf).
if [ -f /etc/rc.subr -a -f /etc/rc.conf ]; then
	load_rc_config $name
	run_rc_command "$1"
else
	@ECHO@ -n " ${name}"
	${command} start
fi
|
/*
* Copyright 2018 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
import React from 'react';
import { HexGrid } from './hex';
import Token from './token';
import Enzyme from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
Enzyme.configure({ adapter: new Adapter() });
test('render correctly', () => {
const grid = Enzyme.mount(<HexGrid levels={5} />);
expect(grid.html()).toContain('svg');
});
test('outline', () => {
const grid = Enzyme.mount(<HexGrid levels={5} outline={false} />);
const hex = grid.find('polygon');
expect(hex.length).toBe(0);
});
test('click handler', () => {
{
const onClick = jest.fn();
const grid = Enzyme.mount(<HexGrid layers={4} onClick={onClick} />);
grid
.find('Hex')
.at(0)
.simulate('click');
expect(onClick).toHaveBeenCalled();
}
// No crash when onClick is not provided.
{
const grid = Enzyme.mount(<HexGrid layers={4} />);
grid
.find('Hex')
.at(0)
.simulate('click');
}
});
test('mouse over handler', () => {
{
const onMouseOver = jest.fn();
const grid = Enzyme.mount(<HexGrid layers={4} onMouseOver={onMouseOver} />);
grid
.find('Hex')
.at(0)
.simulate('mouseOver');
expect(onMouseOver).toHaveBeenCalled();
}
// No crash when onMouseOver is not provided.
{
const grid = Enzyme.mount(<HexGrid layers={4} />);
grid
.find('Hex')
.at(0)
.simulate('mouseOver');
}
});
test('mouse out handler', () => {
{
const onMouseOut = jest.fn();
const grid = Enzyme.mount(<HexGrid layers={4} onMouseOut={onMouseOut} />);
grid
.find('Hex')
.at(0)
.simulate('mouseOut');
expect(onMouseOut).toHaveBeenCalled();
}
// No crash when onMouseOut is not provided.
{
const grid = Enzyme.mount(<HexGrid layers={4} />);
grid
.find('Hex')
.at(0)
.simulate('mouseOut');
}
});
test('child', () => {
{
const grid = Enzyme.mount(
<HexGrid layers={2} outline={false}>
<Token />
</HexGrid>
);
expect(grid.html()).toContain('polygon');
}
{
const grid = Enzyme.mount(
<HexGrid layers={2} outline={false}>
<Token>
<div />
</Token>
</HexGrid>
);
expect(grid.html()).not.toContain('polygon');
}
});
test('colorMap', () => {
const colorMap = { '0,0,0': '#123' };
const grid = Enzyme.mount(<HexGrid layers={1} colorMap={colorMap} />);
expect(grid.html()).toContain('fill: #123');
});
|
#!/bin/bash

# Pre-commit style gate: passes only when RuboCop's Layout cops are clean.
echo -e "[RUBOCOP] --> Init (wait a second)"

# For now, only check Layout cops
# TODO: add other cops (and fix related issues), eg. Lint
if bundle exec rubocop --only 'Layout' 2>/dev/null | grep 'no offenses detected' >/dev/null; then
  echo -e "[RUBOCOP] --> 👍 approved."
  exit 0
fi

# Re-run without the filter so the developer sees the offense report.
bundle exec rubocop --only 'Layout'
echo -e "[RUBOCOP] --> ✋ You've got some offenses."
echo -e "Run \"bundle exec rubocop --only 'Layout' -a\" to fix them."
exit 1
|
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHopper-v1_doule_ddpg_softcopy_action_noise_seed4_run4_%N-%j.out # %N for node name, %j for jobID

# Load the toolchain modules the job depends on.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
# Activate the pre-built TensorFlow (CPU) virtualenv.
source ~/tf_cpu/bin/activate
# One Double-DDPG training run: RoboschoolHopper-v1, seed 4, action-noise
# exploration, results written under Double_DDPG_Results_no_monitor.
python ./ddpg_discrete_action.py --env RoboschoolHopper-v1 --random-seed 4 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHopper-v1/doule_ddpg_softcopy_action_noise_seed4_run4 --continuous-act-space-flag
|
package users
import (
"net/http"
"strconv"
"github.com/gin-gonic/gin"
"github.com/egnimos/book_store_users_api/domain/users"
"github.com/egnimos/book_store_users_api/services"
"github.com/egnimos/book_store_users_api/utils/errors"
"github.com/egnimos/bookstore-oauth-shared-library/oauth"
)
/*
NOTE: user IDs are parsed as int64. On 64-bit platforms Go's plain int is
also 64 bits wide, so both types cover the same ID range there; int64 is used
explicitly so the width does not depend on the target platform.
*/
//CreateUser : this function will create the user in data base
func CreateUser(c *gin.Context) {
	var user users.User

	// Decode the JSON payload straight into the domain struct.
	if bindErr := c.ShouldBindJSON(&user); bindErr != nil {
		restErr := errors.NewBadRequestError("invalid json request while creating a user")
		c.JSON(restErr.Status, restErr)
		return
	}

	// Delegate persistence to the service layer.
	result, createErr := services.UsersService.CreateUser(user)
	if createErr != nil {
		c.JSON(createErr.Status, createErr)
		return
	}

	// The X-Public header selects the public (redacted) representation.
	isPublic := c.GetHeader("X-Public") == "true"
	c.JSON(http.StatusCreated, result.Marshall(isPublic))
}
//GetUser : this function will get the user info of given ID
func GetUser(c *gin.Context) {
	// Reject callers that do not present a valid auth token.
	if authErr := oauth.AuthenticateRequest(c.Request); authErr != nil {
		c.JSON(authErr.Status, authErr)
		return
	}

	userID, parseErr := strconv.ParseInt(c.Param("user_id"), 10, 64)
	if parseErr != nil {
		badIDErr := errors.NewBadRequestError("user id should be a number")
		c.JSON(badIDErr.Status, badIDErr)
		return
	}

	user, getErr := services.UsersService.GetUser(userID)
	if getErr != nil {
		c.JSON(getErr.Status, getErr)
		return
	}

	// Owners get their full record; other callers get the view implied by
	// the request's public/private flag.
	if oauth.GetCallerId(c.Request) == user.ID {
		c.JSON(http.StatusOK, user.Marshall(false))
		return
	}
	c.JSON(http.StatusOK, user.Marshall(oauth.IsPublic(c.Request)))
}
// UpdateUser updates the user identified by the user_id path parameter.
// PATCH performs a partial update; PUT replaces the whole record.
func UpdateUser(c *gin.Context) {
	// The route parameter must parse as a 64-bit integer id.
	userID, userErr := strconv.ParseInt(c.Param("user_id"), 10, 64)
	if userErr != nil {
		err := errors.NewBadRequestError("user id should be a number")
		c.JSON(err.Status, err)
		return
	}
	var user users.User
	// BUG FIX: a malformed request body is a client error, so answer 400
	// (bad request) as CreateUser does — the original returned a 500
	// internal-server error for invalid client JSON.
	if err := c.ShouldBindJSON(&user); err != nil {
		invalidErr := errors.NewBadRequestError("invalid json body")
		c.JSON(invalidErr.Status, invalidErr)
		return
	}
	// The id always comes from the URL, never from the body.
	user.ID = userID
	// PATCH means partial update; anything else (PUT) is a full replace.
	isPartial := c.Request.Method == http.MethodPatch
	result, err := services.UsersService.UpdateUser(isPartial, user)
	if err != nil {
		c.JSON(err.Status, err)
		return
	}
	c.JSON(http.StatusOK, result.Marshall(c.GetHeader("X-Public") == "true"))
}
// DeleteUser removes the user identified by the user_id path parameter and
// returns the service layer's deletion result.
func DeleteUser(c *gin.Context) {
	// The route parameter must parse as a 64-bit integer id.
	targetID, parseErr := strconv.ParseInt(c.Param("user_id"), 10, 64)
	if parseErr != nil {
		badID := errors.NewBadRequestError("user id should be a number")
		c.JSON(badID.Status, badID)
		return
	}
	// Ask the service layer to perform the deletion.
	outcome, svcErr := services.UsersService.DeleteUser(targetID)
	if svcErr != nil {
		c.JSON(svcErr.Status, svcErr)
		return
	}
	c.JSON(http.StatusOK, outcome)
}
// SearchUser lists users matching the "status" query parameter; the result
// set is redacted unless the caller asks for the public view.
func SearchUser(c *gin.Context) {
	// Only the status filter is wired through to the service layer here.
	statusFilter := c.Query("status")
	found, svcErr := services.UsersService.SearchUser(statusFilter)
	if svcErr != nil {
		c.JSON(svcErr.Status, svcErr)
		return
	}
	showPublic := c.GetHeader("X-Public") == "true"
	c.JSON(http.StatusOK, found.Marshall(showPublic))
}
// Login : authenticates a user with the email and password supplied in the
// JSON body and returns the matching user record on success.
// (Note: the exported name is Login, not LoginUser as the old comment said.)
func Login(c *gin.Context) {
	var request users.LoginRequest
	if err := c.ShouldBindJSON(&request); err != nil {
		restErr := errors.NewBadRequestError("invalid json body")
		c.JSON(restErr.Status, restErr)
		return
	}
	//sending the data to the service file loginUser method
	user, err := services.UsersService.LoginUser(request)
	if err != nil {
		c.JSON(err.Status, err)
		return
	}
	c.JSON(http.StatusOK, user.Marshall(c.GetHeader("X-Public") == "true"))
}
|
<filename>Express Js Session/Members.js
// In-memory seed data for the members API — stands in for a database in this
// Express tutorial. Each member has a unique numeric id, a display name, an
// email address, and a status of either 'Active' or 'Inactive'.
const members = [
  {
    id: 1,
    name: '<NAME>',
    email: '<EMAIL>',
    status: 'Active'
  },
  {
    id: 2,
    name: '<NAME>',
    email: '<EMAIL>',
    status: 'Inactive'
  },
  {
    id: 3,
    name: '<NAME>',
    email: '<EMAIL>',
    status: 'Active'
  }
];
// Export the array directly so route handlers can read and mutate it.
module.exports = members;
|
<reponame>christinefeng/phoenix<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.expression;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.math.BigDecimal;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.function.CosFunction;
import org.apache.phoenix.expression.function.SinFunction;
import org.apache.phoenix.expression.function.TanFunction;
import org.apache.phoenix.query.BaseTest;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PFloat;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PNumericType;
import org.apache.phoenix.schema.types.PSmallint;
import org.apache.phoenix.schema.types.PTinyint;
import org.apache.phoenix.schema.types.PUnsignedDouble;
import org.apache.phoenix.schema.types.PUnsignedFloat;
import org.apache.phoenix.schema.types.PUnsignedInt;
import org.apache.phoenix.schema.types.PUnsignedLong;
import org.apache.phoenix.schema.types.PUnsignedSmallint;
import org.apache.phoenix.schema.types.PUnsignedTinyint;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.google.common.collect.Lists;
/**
 * Parameterized unit tests for {@link SinFunction}, {@link CosFunction} and
 * {@link TanFunction}. Every supported Phoenix numeric type is evaluated in
 * both sort orders and compared against java.lang.Math as the reference.
 */
@RunWith(Parameterized.class)
public class MathTrigFunctionTest {

    private Number[] value;
    private PNumericType dataType;

    public MathTrigFunctionTest(Number[] value, PNumericType dataType) {
        this.value = value;
        this.dataType = dataType;
    }

    /**
     * One row per Phoenix numeric type: boundary values plus representative
     * positives/negatives. Unsigned types only get non-negative values.
     */
    @Parameters(name = "{0} {1}")
    public static synchronized Collection<Object> data() {
        return Arrays.asList(new Object[][]{
            {
                new BigDecimal[]{BigDecimal.valueOf(1.0), BigDecimal.valueOf(0.0),
                    BigDecimal.valueOf(-1.0), BigDecimal.valueOf(123.1234),
                    BigDecimal.valueOf(-123.1234)},
                PDecimal.INSTANCE
            },
            {
                new Float[]{1.0f, 0.0f, -1.0f, Float.MAX_VALUE, Float.MIN_VALUE,
                    -Float.MAX_VALUE, -Float.MIN_VALUE, 123.1234f, -123.1234f},
                PFloat.INSTANCE
            },
            {
                new Float[]{1.0f, 0.0f, Float.MAX_VALUE, Float.MIN_VALUE, 123.1234f},
                PUnsignedFloat.INSTANCE
            },
            {
                new Double[]{1.0, 0.0, -1.0, Double.MAX_VALUE, Double.MIN_VALUE,
                    -Double.MAX_VALUE, -Double.MIN_VALUE, 123.1234, -123.1234},
                PDouble.INSTANCE
            },
            {
                new Double[]{1.0, 0.0, Double.MAX_VALUE, Double.MIN_VALUE, 123.1234},
                PUnsignedDouble.INSTANCE
            },
            {
                new Long[]{(long) 1, (long) 0, (long) -1, Long.MAX_VALUE,
                    Long.MIN_VALUE, (long) 123, (long) -123},
                PLong.INSTANCE
            },
            {
                new Long[]{(long) 1, (long) 0, Long.MAX_VALUE, (long) 123},
                PUnsignedLong.INSTANCE
            },
            {
                new Integer[]{1, 0, -1, Integer.MAX_VALUE, Integer.MIN_VALUE, 123, -123},
                PInteger.INSTANCE
            },
            {
                new Integer[]{1, 0, Integer.MAX_VALUE, 123},
                PUnsignedInt.INSTANCE
            },
            {
                new Short[]{(short) 1, (short) 0, (short) -1, Short.MAX_VALUE,
                    Short.MIN_VALUE, (short) 123, (short) -123},
                PSmallint.INSTANCE
            },
            {
                // BUG FIX: this non-negative row previously reused
                // PSmallint.INSTANCE; per the signed/unsigned pairing used for
                // every other type above, it should test PUnsignedSmallint.
                new Short[]{(short) 1, (short) 0, Short.MAX_VALUE, (short) 123},
                PUnsignedSmallint.INSTANCE
            },
            {
                new Byte[]{(byte) 1, (byte) 0, (byte) -1, Byte.MAX_VALUE,
                    Byte.MIN_VALUE, (byte) 123, (byte) -123},
                PTinyint.INSTANCE
            },
            {
                // BUG FIX: likewise, this row previously reused
                // PTinyint.INSTANCE instead of PUnsignedTinyint.
                new Byte[]{(byte) 1, (byte) 0, Byte.MAX_VALUE, (byte) 123},
                PUnsignedTinyint.INSTANCE
            }
        });
    }

    /**
     * Evaluates the named trig function over the literal and, when evaluation
     * succeeds, checks the result against the expected double.
     *
     * @return whether the expression evaluated to a value
     */
    private boolean testExpression(LiteralExpression literal, double expectedResult,
            String testedFunction) throws SQLException {
        List<Expression> expressions = Lists.newArrayList((Expression) literal);
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        Expression mathFunction = null;
        if (testedFunction.equals("SIN")) {
            mathFunction = new SinFunction(expressions);
        } else if (testedFunction.equals("COS")) {
            mathFunction = new CosFunction(expressions);
        } else if (testedFunction.equals("TAN")) {
            mathFunction = new TanFunction(expressions);
        }
        boolean ret = mathFunction.evaluate(null, ptr);
        if (ret) {
            Double result =
                (Double) mathFunction.getDataType().toObject(ptr, mathFunction.getSortOrder());
            assertTrue(BaseTest.twoDoubleEquals(result.doubleValue(), expectedResult));
        }
        return ret;
    }

    /**
     * Runs the expression in ASC and DESC sort orders; both must agree on
     * whether they produced a value.
     */
    private void test(Number value, PNumericType dataType, double expectedResult,
            String testedFunction)
            throws SQLException {
        LiteralExpression literal = LiteralExpression.newConstant(value, dataType, SortOrder.ASC);
        boolean ret1 = testExpression(literal, expectedResult, testedFunction);
        literal = LiteralExpression.newConstant(value, dataType, SortOrder.DESC);
        boolean ret2 = testExpression(literal, expectedResult, testedFunction);
        assertEquals(ret1, ret2);
    }

    @Test
    public void testBatch()
            throws SQLException {
        for (int i = 0; i < value.length; ++i) {
            test(value[i], dataType, Math.sin(value[i].doubleValue()), "SIN");
            test(value[i], dataType, Math.cos(value[i].doubleValue()), "COS");
            test(value[i], dataType, Math.tan(value[i].doubleValue()), "TAN");
        }
    }
}
|
#!/bin/bash
# Re-run pocolog over every Traverse log file and record the paths of the
# logs that fail to parse in failed_logs.txt.
rm -f failed_logs.txt
for log in /media/heimdal/Dataset1/*/Traverse/*/*.log; do
    # BUG FIX: quote the path so filenames containing spaces or glob
    # characters are passed to pocolog (and echoed) as a single word.
    pocolog "$log" || echo "$log" >> failed_logs.txt
done
<gh_stars>0
// Central route table for next-routes: maps friendly URLs onto the page
// components that live under /pages/portal.
const nextRoutes = require('next-routes');
const routes = nextRoutes();

// pattern -> page component
routes.add('/MemberPortal', '/portal/MemberPortal');
routes.add('/AgencyPortal', '/portal/AgencyPortal');

module.exports = routes;
|
#include <iostream>

// Reads two integers from stdin, then prints their sum and product.
int main() {
    int num1, num2;
    std::cout << "Enter two numbers: ";
    std::cin >> num1 >> num2;
    // print the sum
    std::cout << "The sum is: " << num1 + num2 << std::endl;
    // BUG FIX: main's return value is the process exit status (truncated to
    // 8 bits on POSIX), so returning num1 * num2 made any nonzero product
    // look like a failure to the caller. Print the product instead and
    // return 0 to signal success.
    std::cout << "The product is: " << num1 * num2 << std::endl;
    return 0;
}
<reponame>gajduk/WirelessCooperation
package results_formatter;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
/**
 * Reads experiment output from results.txt (alternating description line and
 * numeric value line) and prints it as Matlab vector assignments, one vector
 * per description, each holding the values for N = 10, 20, ..., 100.
 */
public class ParseResultsToMatlab {
    public static void main(String[] args) {
        try (BufferedReader jin = new BufferedReader(new FileReader("results.txt"))) {
            int N = 10;
            String descr = "";
            System.out.print("N = [");
            for (N = 10; N < 101; N += 10) {
                System.out.print(N);
                // BUG FIX: the original condition (N <= 100) was always true
                // inside the loop, emitting a trailing comma: "...,100,];".
                // Only print the separator between values.
                if (N < 100) System.out.print(",");
            }
            System.out.println("];");
            N = 10;
            while (jin.ready()) {
                // Sanitize the description into a legal Matlab identifier.
                descr = jin.readLine();
                descr = descr.replaceAll(" <\\|> ", "").replaceAll("N:10", "").replaceAll("\\.", "_").replaceAll(":", "_");
                if (N == 10)
                    System.out.print(descr + " = [");
                double e = Double.parseDouble(jin.readLine());
                // Values are stored per-node; scale back up by N.
                System.out.printf("%.2f", e * N);
                N += 10;
                // Here N has already been advanced, so N <= 100 correctly
                // means "more values follow in this vector".
                if (N <= 100) System.out.print(",");
                if (N > 100) {
                    System.out.println("];");
                    N = 10;
                }
            }
        } catch (FileNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
|
<filename>client/templates/mall/mall.js<gh_stars>0
// Resolves which mall the visitor is browsing and stores it in the session.
// Priority: an explicit ?mall=XXXX query parameter wins (and is persisted to
// the logged-in user's location via the userlocation_set method); otherwise
// the logged-in user's saved location from UserDataExts is used; otherwise
// whatever was already in the session is kept. Runs in an autorun so it
// re-resolves reactively as login state / route / collections change.
Template.mall.created = function () {
    //console.log("mall.created:"+Router.current().url);
    this.autorun(function () {
        var mallParams = Session.get("mallParams");
        var tmpmallParams = Router.current().params.query.mall;//for /?mall=0001
        //console.log("mall.created: tmpmallParams="+tmpmallParams);
        if(tmpmallParams)
        {
            mallParams=tmpmallParams;
            if(Meteor.userId())
            {
                //console.log("mall.created: userid="+Meteor.userId());
                Meteor.call("userlocation_set",tmpmallParams,Meteor.userId());
                //console.log("mall.created: location="+tmpmallParams);
                //console.log("mall.created: profile.mallParams="+_(Meteor.user().profile.mallParams));
                //Meteor.user().profile.mallParams=tmpmallParams;
            }
        }
        else {
            if(Meteor.userId())
            {
                // console.log("mall.created: mallParams="+Meteor.user().profile.mallParams);
                // tmpmallParams=Meteor.user().profile.mallParams;
                //console.log("mall.created: userid="+Meteor.userId());
                //console.log("mall.created: findone="+UserDataExts.findOne({userID: Meteor.userId()}));
                // UserDataExts.findOne can legitimately return nothing here,
                // mainly because the database has not finished initializing yet.
                if(UserDataExts.findOne({userID: Meteor.userId()}))
                {
                    mallParams=UserDataExts.findOne({userID: Meteor.userId()}).location;
                    //console.log("mall debug 001: mallParams="+mallParams);
                }
                else {
                    //console.log("mall debug 001: mallParams=NOT find");
                }
            }
        }
        //console.log("mall.created: mallParams="+mallParams);
        Session.set("mallParams", mallParams);
        //this.subscription = Meteor.subscribe('Malls',Session.get("mallParams"));
    }.bind(this));
};
// Rendered hook is intentionally empty: the loading-indicator logic tied to
// the (also disabled) Malls subscription above is kept here, commented out,
// in case the subscription is re-enabled.
Template.mall.rendered = function () {
    // this.autorun(function () {
    //     if (!this.subscription.ready()) {
    //         IonLoading.show();
    //     } else {
    //         IonLoading.hide();
    //     }
    // }.bind(this));
};
// Template helpers exposing the current mall's display fields. Both look the
// mall up by the session's mallParams index and return undefined-ish when no
// matching mall document exists yet.
Template.mall.helpers({
    // Display name of the mall currently selected in the session.
    mallname: function () {
        var currentMall = Malls.findOne({index: Session.get("mallParams")});
        return currentMall && currentMall.name;
    },
    // Descriptive "about" text of the current mall.
    mallabout: function () {
        var currentMall = Malls.findOne({index: Session.get("mallParams")});
        return currentMall && currentMall.about;
    },
});
|
List<int> array = [3, 5, 2, 7];
// Reverse the array; `reversed` is a lazy Iterable, so materialize it.
List<int> reversedArray = array.reversed.toList();
for (int element in reversedArray){
  print(element);
}
// Output (one element per line): 7 2 5 3
// (The old comment claimed "[7, 2, 5, 3]", but the loop prints elements
// individually, not the list literal.)
// Minimal Express server for the movie API.
const express = require('express');
const bodyParser = require('body-parser');
const movieRoutes = require("./routes/movie");

const app = express();
// Parse JSON request bodies before any route runs.
app.use(bodyParser.json());
// Permissive CORS: any origin/header, but only GET and preflight OPTIONS.
app.use((req, res, next) => {
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Methods', 'GET, OPTIONS');
    res.setHeader('Access-Control-Allow-Headers', '*');
    next();
});
// All movie endpoints are mounted under /movie.
app.use("/movie", movieRoutes);
app.listen(8000);
#pragma once
#include <vector>
#include <string>

namespace qbus {
namespace pulsar {

// Renders a vector of strings as "[a, b, c]" — elements joined by ", " and
// wrapped in square brackets; an empty vector renders as "[]".
inline std::string to_string(const std::vector<std::string>& items) {
    std::string rendered = "[";
    const char* separator = "";
    for (const auto& item : items) {
        rendered += separator;
        rendered += item;
        separator = ", ";
    }
    rendered += "]";
    return rendered;
}

}  // namespace pulsar
}  // namespace qbus
/**
*/
package tdt4250.mush.model;
/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Value Exchange</b></em>'.
 * An {@link Expression} node holding two containment children: a
 * collection expression and a value expression.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * </p>
 * <ul>
 *   <li>{@link tdt4250.mush.model.ValueExchange#getCollection <em>Collection</em>}</li>
 *   <li>{@link tdt4250.mush.model.ValueExchange#getValue <em>Value</em>}</li>
 * </ul>
 *
 * @see tdt4250.mush.model.MushPackage#getValueExchange()
 * @model
 * @generated
 */
public interface ValueExchange extends Expression {
    /**
     * Returns the value of the '<em><b>Collection</b></em>' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the value of the '<em>Collection</em>' containment reference.
     * @see #setCollection(Expression)
     * @see tdt4250.mush.model.MushPackage#getValueExchange_Collection()
     * @model containment="true"
     * @generated
     */
    Expression getCollection();

    /**
     * Sets the value of the '{@link tdt4250.mush.model.ValueExchange#getCollection <em>Collection</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Collection</em>' containment reference.
     * @see #getCollection()
     * @generated
     */
    void setCollection(Expression value);

    /**
     * Returns the value of the '<em><b>Value</b></em>' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the value of the '<em>Value</em>' containment reference.
     * @see #setValue(Expression)
     * @see tdt4250.mush.model.MushPackage#getValueExchange_Value()
     * @model containment="true"
     * @generated
     */
    Expression getValue();

    /**
     * Sets the value of the '{@link tdt4250.mush.model.ValueExchange#getValue <em>Value</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Value</em>' containment reference.
     * @see #getValue()
     * @generated
     */
    void setValue(Expression value);

} // ValueExchange
|
#!/bin/bash
# Deletes one dataset copy from a spandex Elasticsearch cluster.
# Usage: $0 <local|staging|rc|eu-prod|fedramp-prod> <dataset_id> <copy_number>
STACK=$1
DCU=alpha # data coordinator universe or whatever
DSID=$2
COPY_NUMBER=$3
# Map the stack name to the cluster URL (and, for local, the DCU).
case $STACK in
    "local" )
        DCU=primus
        CLUSTER="http://localhost:9200" ;;
    "staging" )
        CLUSTER="http://spandex.elasticsearch.aws-us-west-2-staging.socrata.net" ;;
    "rc" )
        CLUSTER="http://spandex.elasticsearch.aws-us-west-2-rc.socrata.net" ;;
    "eu-prod" )
        CLUSTER="http://spandex.elasticsearch.aws-eu-west-1-prod.socrata.net" ;;
    "fedramp-prod" )
        CLUSTER="http://spandex-6.elasticsearch.aws-us-east-1-fedramp-prod.socrata.net" ;;
    *)
        echo "Did not recognize stack name $STACK"
esac
# An unrecognized stack leaves CLUSTER empty and falls into the usage check.
if [ "$CLUSTER" == "" ] || [ "$DSID" == "" ] || [ "$COPY_NUMBER" == "" ]; then
    echo "Usage $0 <local|staging|rc|eu-prod|fedramp-prod> <dataset_id> <copy_number>"
    exit 1
fi
delete_query="{\"query\":{\"bool\":{\"must\":[{\"term\":{\"dataset_id\":\"$DCU.$DSID\"}},{\"term\":{\"copy_number\":$COPY_NUMBER}}]}}}"
echo "$delete_query"
# BUG FIX: quote $delete_query (and the URLs) — the unquoted expansion was
# subject to word splitting/globbing and only worked because the JSON happens
# to contain no spaces. Also send an explicit JSON Content-Type header, which
# Elasticsearch 6+ requires for request bodies.
copy_count=$(curl -s -H 'Content-Type: application/json' "$CLUSTER/spandex/dataset_copy/_search?size=0" -d "$delete_query" | jq '.hits.total')
column_count=$(curl -s -H 'Content-Type: application/json' "$CLUSTER/spandex/column_map/_search?size=0" -d "$delete_query" | jq '.hits.total')
value_count=$(curl -s -H 'Content-Type: application/json' "$CLUSTER/spandex/field_value/_search?size=0" -d "$delete_query" | jq '.hits.total')
echo "found $copy_count copies, $column_count columns, $value_count values; deleting $DSID"
curl -XDELETE -H 'Content-Type: application/json' "$CLUSTER/spandex/_query" -d "$delete_query"
echo
|
<filename>apps/bfd-pipeline/bfd-pipeline-rda-bridge/src/main/java/gov/cms/bfd/pipeline/bridge/io/RifSource.java<gh_stars>0
package gov.cms.bfd.pipeline.bridge.io;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
/**
 * Streams the lines of a RIF file one at a time. The source reads one line
 * ahead so callers can check {@link #hasInput()} before consuming via
 * {@link #read()}.
 */
public class RifSource implements Source<String> {

    private final BufferedReader lineReader;
    // Look-ahead buffer; becomes null once end-of-file is reached.
    private String bufferedLine;

    /**
     * Opens the given file as UTF-8 and primes the look-ahead buffer.
     *
     * @param inputPath file to stream
     * @throws IOException if the file cannot be opened or read
     */
    public RifSource(Path inputPath) throws IOException {
        lineReader = Files.newBufferedReader(inputPath, StandardCharsets.UTF_8);
        bufferedLine = lineReader.readLine();
    }

    @Override
    public boolean hasInput() {
        return bufferedLine != null;
    }

    @Override
    public String read() throws IOException {
        if (bufferedLine == null) {
            throw new IOException("End of source reached.");
        }
        String current = bufferedLine;
        bufferedLine = lineReader.readLine();
        return current;
    }

    @Override
    public void close() throws IOException {
        lineReader.close();
    }
}
|
"use strict";
const fs = require("fs");
const path = require("path");
const readMd = (page) =>
fs.readFileSync(path.join(__dirname, `${page}.md`), { encoding: "utf-8" });
const aboutPageAspect = {
pages: {
home: {
content: readMd("about"),
},
privacy: {
content: readMd("privacy"),
},
},
};
fs.writeFileSync(path.join(__dirname, 'terria-saas-about-pages.json'), JSON.stringify(aboutPageAspect, null, 2));
|
use pairing::bls12_381::{Bls12, Fr, G1, G2, Fq12};
use pairing::{CurveAffine, PrimeField, Field, Engine};
use bls12_381::{Signature, PublicKey, verify};
/// Builds a "no-op" operation with an empty signature payload.
/// NOTE(review): the `tree` and `acc_id` parameters are never used in the
/// body below — confirm whether the operation is supposed to reference them.
pub fn noop_operation(tree: &CircuitAccountTree, acc_id: u32) -> Operation<Bn256> {
    let signature_data = SignatureData::init_empty();
    let first_sig_msg = Fr::zero();
    let second_sig_msg = Fr::zero();
    let third_sig_msg = Fr::zero();
    let signer_pub_key_packed = [Some(false); 256];
    // Unpack the signer_pub_key_packed into a valid public key
    // NOTE(review): iterating over `[Some(false); 256]` yields `&Option<bool>`,
    // so `*bit` is an `Option<bool>`, not a `bool` — this condition does not
    // type-check as written; verify the intended unpacking of the Option.
    let mut signer_pub_key = G2::zero();
    for (i, bit) in signer_pub_key_packed.iter().enumerate() {
        if *bit {
            signer_pub_key.add_assign(&G2::one());
        }
        if i < 255 {
            signer_pub_key.double();
        }
    }
    // Create a signature from the provided data
    // NOTE(review): fixed generator points (G1::one / G2::one) are placeholder
    // values, not a signature derived from `signature_data` — confirm intent.
    let signature = Signature {
        r: G1::one(),
        s: G2::one(),
    };
    // Verify the signature using the unpacked public key and the message
    // NOTE(review): `Fr::from_str` over the concatenated Display output of
    // three field elements is not a hash; `G1::from_repr` of an Fr repr is
    // also suspect — this whole derivation needs to be confirmed.
    let message = format!("{}{}{}", first_sig_msg, second_sig_msg, third_sig_msg);
    let message_hash = Fr::from_str(&message).unwrap().into_repr();
    let message_hash_g1 = G1::from_repr(message_hash).unwrap();
    let public_key = PublicKey::from_affine(signer_pub_key.into_affine());
    let result = verify(&signature, &[message_hash_g1], &public_key);
    // Return the result of the signature validation
    Operation::new(result)
}
<reponame>matscus/preconfigured
package main
import (
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"github.com/joho/godotenv"
log "github.com/sirupsen/logrus"
)
var (
logLevel, service, path string
isServices bool
)
// main walks the service directory tree, expands ${VAR} environment
// references in every file, and writes the results under the output path,
// preserving the directory structure.
func main() {
	flag.StringVar(&service, "service", ".", "PATH service dir")
	flag.StringVar(&path, "path", "result", "PATH from create result files")
	flag.StringVar(&logLevel, "loglevel", "INFO", "log level, default INFO")
	flag.Parse()
	setLogLevel(logLevel)
	err := godotenv.Load()
	if err != nil {
		log.Warn("Error loading .env file")
	}
	dirs, err := os.ReadDir(service)
	if err != nil {
		log.Fatal(err)
	}
	if service != "." {
		isServices = true
	}
	for _, v := range dirs {
		if v.IsDir() {
			name := v.Name()
			if isServices {
				name = fmt.Sprintf("%s/%s", service, v.Name())
			}
			// Skip vendored code, the output dir itself, and .git.
			if v.Name() != "vendor" && v.Name() != path && v.Name() != ".git" {
				err = filepath.WalkDir(name,
					func(filePath string, info os.DirEntry, err error) error {
						if err != nil {
							return err
						}
						if info.IsDir() {
							return os.MkdirAll(fmt.Sprintf("%s/%s", path, filePath), 0777)
						}
						// BUG FIX: writer's error was previously discarded and a
						// stale outer err was checked instead; propagate it so a
						// failed copy actually aborts the walk.
						return writer(filePath, fmt.Sprintf("%s/%s", path, filePath))
					})
				if err != nil {
					log.Error(err)
				}
			}
		} else {
			if isServices {
				err := os.MkdirAll(fmt.Sprintf("%s/%s", path, service), 0777)
				if err != nil {
					log.Error(err)
				}
				// BUG FIX: capture writer's return value here too instead of
				// re-checking the stale err from MkdirAll above.
				if wErr := writer(fmt.Sprintf("%s/%s", service, v.Name()), fmt.Sprintf("%s/%s/%s", path, service, v.Name())); wErr != nil {
					log.Error(wErr)
				}
			}
		}
	}
	if err != nil {
		log.Fatal(err)
	}
}
// writer reads file r, expands ${VAR} environment references in its
// contents, and writes the result to file w.
func writer(r string, w string) error {
	readFile, err := ioutil.ReadFile(r)
	if err != nil {
		return err
	}
	data := []byte(os.ExpandEnv(string(readFile)))
	writeFile, err := os.Create(w)
	if err != nil {
		return err
	}
	// BUG FIX: the original ignored the errors from both Write and Close, so
	// a short/failed write (e.g. disk full) went unnoticed.
	if _, err := writeFile.Write(data); err != nil {
		writeFile.Close()
		return err
	}
	return writeFile.Close()
}
// setLogLevel applies the requested logrus verbosity (case-insensitive).
// Unrecognized values leave the logger at its current level, matching the
// original switch's behavior of doing nothing on no match.
func setLogLevel(level string) {
	levels := map[string]log.Level{
		"INFO":  log.InfoLevel,
		"WARN":  log.WarnLevel,
		"ERROR": log.ErrorLevel,
		"DEBUG": log.DebugLevel,
		"TRACE": log.TraceLevel,
	}
	if parsed, ok := levels[strings.ToUpper(level)]; ok {
		log.SetLevel(parsed)
	}
}
|
function filterByType(people: (Person | Dependent)[], type: number): Dependent[] {
return people.filter((person) => {
if ('type' in person && person.type === type) {
return true;
}
return false;
}) as Dependent[];
} |
#!/usr/bin/env bash

# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# -*- mode: jinja-shell -*-

# Echo commands, fail fast, treat unset variables as errors.
set -xeuo pipefail
export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}"
source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh

( endgroup "Start Docker" ) 2> /dev/null

( startgroup "Configuring conda" ) 2> /dev/null

export PYTHONUNBUFFERED=1
export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}"
export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support"
export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml"

# Point conda-build's work area at the feedstock's build_artifacts dir.
cat >~/.condarc <<CONDARC
conda-build:
  root-dir: ${FEEDSTOCK_ROOT}/build_artifacts
CONDARC

mamba install --update-specs --yes --quiet "conda-forge-ci-setup=3" conda-build pip boa -c conda-forge
mamba update --update-specs --yes --quiet "conda-forge-ci-setup=3" conda-build pip boa -c conda-forge

# set up the condarc
setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

source run_conda_forge_build_setup

# Install the yum requirements defined canonically in the
# "recipe/yum_requirements.txt" file. After updating that file,
# run "conda smithy rerender" and this line will be updated
# automatically.
/usr/bin/sudo -n yum install -y xorg-x11-server-Xvfb gtk2-devel

# make the build number clobber
make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

( endgroup "Configuring conda" ) 2> /dev/null

# Either drop into a conda-debug shell, or do a full build + validate + upload.
if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then
    if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then
        EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}"
    fi
    conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
        ${EXTRA_CB_OPTIONS:-} \
        --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"

    # Drop into an interactive shell
    /bin/bash
else
    conda mambabuild "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
        --suppress-variables ${EXTRA_CB_OPTIONS:-} \
        --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"
    ( startgroup "Validating outputs" ) 2> /dev/null

    validate_recipe_outputs "${FEEDSTOCK_NAME}"

    ( endgroup "Validating outputs" ) 2> /dev/null

    ( startgroup "Uploading packages" ) 2> /dev/null

    # Only upload from non-PR builds when uploads are enabled.
    if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then
        upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
    fi

    ( endgroup "Uploading packages" ) 2> /dev/null
fi

( startgroup "Final checks" ) 2> /dev/null

# Marker file signalling a completed build for this config.
touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}"
# Activate the ATLAS conda environment, then compute a full pairwise
# distance matrix for the given multiple-sequence alignment with Clustal Omega.
source activate ../../conda_env/ATLAS/
MSA=$1 #Sequences_cluster_smoking.txt
OUT=$2 #Smoking_probes.mat
#MSA=Sequences_cluster.txt
#OUT=Cluster3.mat
# BUG FIX: quote the arguments so paths containing spaces or glob characters
# survive word splitting.
clustalo -i "$MSA" --distmat-out="$OUT" --full
|
/**
 * Validates every [required] field inside the given jQuery element.
 * Alerts on the first empty (or placeholder-only) field and stops there —
 * returning false from .each() breaks the loop. Returns true when all
 * required fields are filled in.
 */
function validateForm(element) {
    var result = true;
    element.find('[required]').each(
        function () {
            var fieldElement = $(this);
            // If val() returned null/undefined, fall back to ''.
            var value = fieldElement.val() || '';
            if (value) {
                value = value.trim();
            }
            // A value equal to the field's placeholder counts as empty.
            if (!value || value === fieldElement.attr('data-placeholder')) {
                alert((fieldElement.attr('data-name') || this.name) + "不能为空!");
                result = false;
                return result;
            }
        }
    );
    return result;
}
// Read the current value of the form element with the given element id.
function getValue(idName) {
    var element = document.getElementById(idName);
    return element.value;
}
$("#btn-submitsqlora").click(function (){
//获取form对象,判断输入,通过则提交
var formSubmit = $("#form-submitsqlora");
var sqlContent = $("#sql_content");
var clusterName = $("#cluster_name");
var review_man = $('#review_man');
var message = $('#message');
var $check = $('#cluster_name_checkbox').find('input[type="checkbox"]:checked');
var checkedArr = [];
var message = $("#message");
$check.each(function() {
var item = $(this);
checkedArr.push(item.val());
})
clusterName.val(checkedArr.join());
var $check_review = $('#review_man_checkbox').find('input[type="checkbox"]:checked');
var viewArr = [];
$check_review.each(function() {
var item = $(this);
viewArr.push(item.val());
})
review_man.val(viewArr.join());
var type = $('#data_change_type').val();
if(validateForm(formSubmit)){
if (clusterName.val() == '') {
alert('实例名不能为空');
}else if(review_man.val() == '') {
alert('审核人不能为空');
}else if((type === '数据修订'|| type === '数据初始化')&&sqlContent.val()===''){
alert('sql文本不能为空');
}else if((type === '数据迁移'|| type === '其他')&&message.val()==='') {
alert('备注不能为空');
}else {
$('#btn-submitsqlora').prop('disabled',true);
formSubmit.submit();
}
}
});
$("#review_man").change(function review_man(){
var review_man = $(this).val();
$("div#" + review_man).hide();
});
// Replaces the bootstrap-select button's native tooltip on #cluster_name with
// a custom hover popup (#groupContent) showing the current selection, plus a
// close icon. The popup is also removed when the button is clicked.
function deleteTitle() {
    var $cluster_name = $('#cluster_name');
    var $realSelect = $cluster_name.next().find('button');
    // clean title
    $realSelect.attr('title','');
    // add new block
    $realSelect.on('mouseover',function(){
        let $realSelect = $cluster_name.next();
        let content = $realSelect.find('.filter-option').html();
        // Only create the popup once per hover cycle.
        if(!$('#groupContent').length){
            $(this).parent().append('<div id="groupContent" class="group-content"><span class="close-group-icon" id="closeGroupIcon">x</span>'+content+'</div>')
            // after append can remove block
            $('#closeGroupIcon').on('click',function() {
                $('#groupContent').remove();
            });
        }
    }).on('click',function() {
        $('#groupContent').remove();
    })
}
// Wires the attachment input so that choosing a file loads its text content
// into the sql_content textarea; alerts when the change event fires with no
// file selected.
function readFile() {
    $('#btn-addAttachment').on('change', function () {
        var chosen = document.getElementById("btn-addAttachment").files[0];
        if (!chosen) {
            alert('please add attachment');
            return;
        }
        var reader = new FileReader();
        reader.onload = function (e) {
            $('#sql_content').val(e.target.result);
        };
        reader.readAsText(chosen, 'UTF-8');
    });
}
// Page bootstrap: only the attachment reader is active; the edit-mode
// session-storage restore and the bootstrap-select title cleanup remain
// disabled below.
$(document).ready(function () {
    // var pathname = window.location.pathname;
    // if (pathname == "/editsql/") {
    //     document.getElementById('workflowid').value = sessionStorage.getItem('editWorkflowDetailId');
    //     document.getElementById('workflow_name').value = sessionStorage.getItem('editWorkflowNname');
    //     document.getElementById('sql_content').value = sessionStorage.getItem('editSqlContent');
    //     document.getElementById('cluster_name').value = sessionStorage.getItem('editClustername');
    //     document.getElementById('is_backup').value = sessionStorage.getItem('editIsbackup');
    //     document.getElementById('review_man').value = sessionStorage.getItem('editReviewman');
    //     var sub_review_name = sessionStorage.getItem('editSubReviewman');
    //     $("input[name='sub_review_man'][value=\'"+sub_review_name+"\']").attr("checked", true);
    //     // getValue('workflowid') = sessionStorage.getItem('editWorkflowDetailId');
    //     // getValue('workflow_name') = sessionStorage.getItem('editWorkflowNname');
    //     // getValue('sql_content') = sessionStorage.getItem('editSqlContent');
    //     // getValue('cluster_name') = sessionStorage.getItem('editClustername');
    //     // getValue('is_backup') = sessionStorage.getItem('editIsbackup');
    //     // getValue('review_man') = sessionStorage.getItem('editReviewman');
    //     // var sub_review_name = sessionStorage.getItem('editSubReviewman');
    //     // $("input[name='sub_review_man'][value=\'"+sub_review_name+"\']").attr("checked", true);
    // }
    readFile();
    // delete title
    // var $cluster_name = $('#cluster_name');
    // $cluster_name.on('hidden.bs.select', function () {
    //     deleteTitle()
    // })
    // // for exec after bs-select
    // setTimeout(function(){
    //     $cluster_name.trigger('hidden.bs.select')
    // },2000)
});
|
<gh_stars>0
/*
txtConfirmPassword: {
required: true,
equalTo: "#txtPassword",
minlength: 4,
maxlength: 32
},
*/
// On DOM ready: register custom jQuery-validate methods and attach
// validation rules to the password-token, account-recovery and registration
// forms. User-facing messages are Portuguese and are left as-is.
$( function() {
    // Allows letters, digits, dash, underscore and dot.
    jQuery.validator.addMethod("alphanumeric", function(value, element) {
        return this.optional(element) || /^[-a-z0-9_.]+$/i.test(value);
    }, "Caracteres inválidos!");
    // Same character set, but rejects uppercase letters.
    jQuery.validator.addMethod("lowercase", function(value, element) {
        return this.optional(element) || /^[-a-z0-9_.]+$/.test(value);
    }, "Não use maiúsculas!");
    // Password reset: both fields required and must match.
    $("#tokenpass").validate(
        {
            rules: {
                'pass1': { required: true },
                'pass2': { required: true,
                           equalTo: "#pass1" },
            },
            messages: {
                'pass1': { required: "" },
                'pass2': { required: "",
                           equalTo: "<br/>As senhas introduzidas são diferentes." },
            },
        }
    );
    // Account recovery: the identifier must exist server-side (remote check).
    $("#recoverForm").validate(
        {
            rules: { recover: { required: true, remote: "/ajax/userOrEmailExists" }},
            messages: { recover: {
                required: "",
                remote: "<br/>Nenhum utilizador com esse nome de utilizador ou e-mail."
            }}
        }
    );
    // Registration: lowercase alphanumeric username, checked for availability
    // server-side, plus a syntactically valid e-mail.
    $("#registerForm").validate(
        {
            rules: {
                username: {
                    required: true,
                    alphanumeric: true,
                    lowercase: true,
                    remote: "/ajax/userAvailable",
                },
                email: {
                    required: true,
                    email: true,
                },
            },
            messages: {
                username: {
                    required: "Obrigatório!",
                    remote: "Indisponível!",
                },
                email: {
                    required: "Obrigatório!",
                    email: "Inválido!",
                },
            },
        }
    );
});
|
# Evaluate the 512+0+512-STWS checkpoint 13 on wikitext-103 validation with
# the remove-all-but-nouns-first-half-quarter augmentation and the
# penultimate-quarter eval function (batch size 1, drop last batch).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-STWS/13-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-STWS/13-512+0+512-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_first_half_quarter --eval_function penultimate_quarter_eval
<filename>packages/react-core/src/components/ApplicationLauncher/examples/ApplicationLauncherFavoritesAndSearch.tsx
import React from 'react';
import {
ApplicationLauncher,
ApplicationLauncherItem,
ApplicationLauncherGroup,
ApplicationLauncherSeparator
} from '@patternfly/react-core';
import pfLogoSm from './pf-logo-small.svg';
// Shared icon rendered next to every launcher item below.
const icon: JSX.Element = <img src={pfLogoSm} />;

// Static launcher contents: one untitled group plus two labeled groups, each
// mixing button items and external anchor-link items. React keys are required
// because the groups live in a plain array; separators delimit the groups.
const appLauncherItems: React.ReactElement[] = [
  <ApplicationLauncherGroup key="group 1c">
    <ApplicationLauncherItem key="group 1a" id="item-1" icon={icon}>
      Item without group title
    </ApplicationLauncherItem>
    <ApplicationLauncherSeparator key="separator" />
  </ApplicationLauncherGroup>,
  <ApplicationLauncherGroup label="Group 2" key="group 2c">
    <ApplicationLauncherItem key="group 2a" id="item-2" isExternal icon={icon} component="button">
      Group 2 button
    </ApplicationLauncherItem>
    <ApplicationLauncherItem key="group 2b" id="item-3" isExternal href="#" icon={icon}>
      Group 2 anchor link
    </ApplicationLauncherItem>
    <ApplicationLauncherSeparator key="separator" />
  </ApplicationLauncherGroup>,
  <ApplicationLauncherGroup label="Group 3" key="group 3c">
    <ApplicationLauncherItem key="group 3a" id="item-4" isExternal icon={icon} component="button">
      Group 3 button
    </ApplicationLauncherItem>
    <ApplicationLauncherItem key="group 3b" id="item-5" isExternal href="#" icon={icon}>
      Group 3 anchor link
    </ApplicationLauncherItem>
  </ApplicationLauncherGroup>
];
// Demo of an ApplicationLauncher with favoriting and text search over
// grouped items.
export const ApplicationLauncherFavoritesAndSearch: React.FunctionComponent = () => {
  const [isOpen, setIsOpen] = React.useState(false);
  // Ids of the currently favorited items, passed straight through below.
  const [favorites, setFavorites] = React.useState<string[]>([]);
  // null means "no active search": the full appLauncherItems list is shown.
  // NOTE(review): useState<React.ReactNode[]>(null) only type-checks with
  // strictNullChecks off — confirm the docs build configuration.
  const [filteredItems, setFilteredItems] = React.useState<React.ReactNode[]>(null);
  const onToggle = (isOpen: boolean) => setIsOpen(isOpen);
  // `isFavorite` is the item's state at click time: clicking an already
  // favorited item removes it, otherwise the id appended here is kept.
  const onFavorite = (itemId: string, isFavorite: boolean) => {
    let updatedFavorites: string[] = [...favorites, itemId];
    if (isFavorite) {
      updatedFavorites = favorites.filter(id => id !== itemId);
    }
    setFavorites(updatedFavorites);
  };
  const onSearch = (textInput: string) => {
    if (textInput === '') {
      setFilteredItems(null);
    } else {
      const filteredGroups = appLauncherItems
        .map((group: React.ReactElement) => {
          // Keep separators unconditionally; match item text case-insensitively.
          const filteredGroup = React.cloneElement(group, {
            children: group.props.children.filter((item: React.ReactElement) => {
              if (item.type === ApplicationLauncherSeparator) {
                return item;
              }
              return item.props.children.toLowerCase().includes(textInput.toLowerCase());
            })
          });
          // Groups whose only surviving child is their separator fall through
          // (map yields undefined), and are removed by the .filter below.
          if (
            filteredGroup.props.children.length > 0 &&
            filteredGroup.props.children[0].type !== ApplicationLauncherSeparator
          ) {
            return filteredGroup;
          }
        })
        .filter(newGroup => newGroup);
      if (filteredGroups.length > 0) {
        // Strip the trailing separator from the last surviving group.
        let lastGroup = filteredGroups.pop();
        lastGroup = React.cloneElement(lastGroup, {
          children: lastGroup.props.children.filter(item => item.type !== ApplicationLauncherSeparator)
        });
        filteredGroups.push(lastGroup);
      }
      setFilteredItems(filteredGroups);
    }
  };
  return (
    <ApplicationLauncher
      onToggle={onToggle}
      onFavorite={onFavorite}
      onSearch={onSearch}
      isOpen={isOpen}
      items={filteredItems || appLauncherItems}
      favorites={favorites}
      isGrouped
    />
  );
};
|
<gh_stars>10-100
package network
import (
"bufio"
"fmt"
"net/http"
"testing"
"time"
)
// TestHttpServer starts the demo server on :8099 in a goroutine and
// exercises the /hello and /headers handlers over a real TCP connection.
// Relies on assertEq and pShow helpers defined elsewhere in this package.
func TestHttpServer(t *testing.T) {
	go func() {
		http.HandleFunc("/hello", hello)
		http.HandleFunc("/headers", headers)
		http.ListenAndServe(":8099", nil)
	}()
	// Crude startup synchronization: give the listener a second to bind.
	time.Sleep(time.Second)
	resp, err := http.Get("http://localhost:8099/hello")
	assertEq(nil, err)
	assertEq("200 OK", resp.Status)
	scanner := bufio.NewScanner(resp.Body)
	// Before the first Scan, Text() returns the empty string.
	assertEq("", scanner.Text())
	scanner.Scan()
	assertEq("hello", scanner.Text())
	resp.Body.Close()
	resp, err = http.Get("http://localhost:8099/headers")
	assertEq(nil, err)
	defer resp.Body.Close()
	assertEq("200 OK", resp.Status)
	scanner = bufio.NewScanner(resp.Body)
	assertEq("", scanner.Text())
	scanner.Scan()
	// NOTE(review): assumes User-Agent is echoed first — Go map iteration
	// order over req.Header is not guaranteed; this may be flaky.
	assertEq("User-Agent: Go-http-client/1.1", scanner.Text())
	for scanner.Scan() {
		pShow(scanner.Text())
	}
	assertEq(nil, scanner.Err())
}
func hello(w http.ResponseWriter, req *http.Request) {
fmt.Fprintf(w, "hello\n")
}
func headers(w http.ResponseWriter, req *http.Request) {
for name, headers := range req.Header {
for _, h := range headers {
fmt.Fprintf(w, "%v: %v\n", name, h)
}
}
}
// httpServerTest registers the demo handlers and serves on :8090.
// ListenAndServe blocks until the server fails, so this never returns
// in normal operation; any bind error is silently discarded.
func httpServerTest() {
	http.HandleFunc("/hello", hello)
	http.HandleFunc("/headers", headers)
	http.ListenAndServe(":8090", nil)
}
|
#!/bin/sh
# Serialization test drivers: each case regenerates output via `update`
# and then compares the produced XML files with `move_xml`. Both helpers
# are provided by the harness that sources this script.
array_enum()
{
	update "-p $PWD" array_enum
	move_xml ArrayEnumE1Enum ArrayEnumE2Enum ArrayEnum1Array ArrayEnum2Array
}
array_ok()
{
	update "-p $PWD" array_ok
	# NOTE(review): `indices` is unused in this function — presumably read
	# as a global by update/move_xml; confirm before removing.
	indices=`seq 1 5`
	move_xml ArrayOK1Array ArrayOK2Array ArrayOK3Array ArrayOK4Array ArrayOK5Array
}
array_struct()
{
	update "-p $PWD" array_struct
	move_xml ArrayStructS1Serializable ArrayStructS2Serializable ArrayStruct1Array ArrayStruct2Array
}
array_struct_member_array()
{
	update "-p $PWD" array_struct_member_array
	move_xml ArrayStructMemberArraySerializable ArrayStructMemberArrayArray
}
built_in_type()
{
	update "-p $PWD" built_in_type
	move_xml BuiltInTypeArray
}
|
import plusnew from "@plusnew/core";
import enzymeAdapterPlusnew, { mount } from "@plusnew/enzyme-adapter";
import { configure } from "enzyme";
import stateFactory from "../../index";
import { promiseHandler, registerRequestIdleCallback } from "testHelper";
configure({ adapter: new enzymeAdapterPlusnew() });
// Shape of a blog-post record as stored in the test repository.
type blogPostType = {
  id: string;
  // Literal tag discriminating the record within the model map.
  model: "blogPost";
  attributes: {
    name: string;
    counter: number;
  };
  relationships: {
    // To-one relationship; some tests below shadow this type locally
    // with a to-many `authors` array instead.
    author: {
      model: "user";
      id: number;
    };
  };
};
describe("test Merge", () => {
it("commitAttributes should add changes to Merge, and remove them if it is equal with repo", async () => {
const callIdleCallbacks = registerRequestIdleCallback();
const { Repository, Branch, Item, Merge } =
stateFactory<{
blogPost: {
listParameter: {
sort: "asc" | "desc";
};
item: blogPostType;
};
}>();
const list = promiseHandler((_parameter: { sort: "asc" | "desc" }) => ({
items: [
{
id: "1",
model: "blogPost" as const,
},
{
id: "2",
model: "blogPost" as const,
},
],
totalCount: 5,
}));
const item = promiseHandler((id: string) => ({
id: id,
model: "blogPost" as const,
attributes: {
name: `foo-${id}`,
counter: 0,
},
relationships: {
author: {
model: "user" as const,
id: 1,
},
},
}));
const wrapper = mount(
<Repository
requests={{
blogPost: {
readList: list.fn,
readItem: item.fn,
},
}}
>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitAttributes }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h1>
<span>{view.item.attributes.counter}</span>
<button
key="increment"
onclick={() =>
commitAttributes({
counter: view.item.attributes.counter + 1,
})
}
/>
<button
key="decrement"
onclick={() =>
commitAttributes({
counter: view.item.attributes.counter - 1,
})
}
/>
</h1>
)
}
</Item>
<Item model="blogPost" id={"2"}>
{(view) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h2>
<span>{view.item.attributes.counter}</span>
</h2>
)
}
</Item>
<Merge>
{({ changes, merge }) => (
<button
key="submit"
disabled={Object.keys(changes).length === 0}
onclick={() => merge(changes)}
/>
)}
</Merge>
</Branch>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitAttributes }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h3>
<span>{view.item.attributes.counter}</span>
<button
onclick={() =>
commitAttributes({
counter: view.item.attributes.counter + 1,
})
}
/>
</h3>
)
}
</Item>
</Branch>
</Repository>
);
expect(wrapper.contains(<span>item-loading</span>)).toBe(true);
await item.resolve();
callIdleCallbacks();
expect(wrapper.contains(<span>item-loading</span>)).toBe(false);
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
wrapper.find("h1").find({ key: "decrement" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
wrapper.find({ key: "submit" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
});
it("commitAttributes should add changes to Merge, and remove them if it is equal with repo", async () => {
const callIdleCallbacks = registerRequestIdleCallback();
const { Repository, Branch, Item, Merge } =
stateFactory<{
blogPost: {
listParameter: {
sort: "asc" | "desc";
};
item: blogPostType;
};
}>();
const list = promiseHandler((_parameter: { sort: "asc" | "desc" }) => ({
items: [
{
id: "1",
model: "blogPost" as const,
},
{
id: "2",
model: "blogPost" as const,
},
],
totalCount: 5,
}));
const item = promiseHandler((id: string) => ({
id: id,
model: "blogPost" as const,
attributes: {
name: `foo-${id}`,
counter: 0,
},
relationships: {
author: {
model: "user" as const,
id: 1,
},
},
}));
const wrapper = mount(
<Repository
requests={{
blogPost: {
readList: list.fn,
readItem: item.fn,
},
}}
>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitAttributes }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h1>
<span>{view.item.attributes.counter}</span>
<button
key="increment"
onclick={() =>
commitAttributes({
counter: view.item.attributes.counter + 1,
})
}
/>
</h1>
)
}
</Item>
<Merge>
{({ changes }) => (
<span data-test-id="isDeleted">
{changes.blogPost?.map((change) => {
if (change.isDeleted === true) {
return "isDeleted";
} else if (change.isDeleted === false) {
return "notDeleted";
}
throw new Error("isDeleted needs to be either");
})}
</span>
)}
</Merge>
</Branch>
</Repository>
);
expect(wrapper.contains(<span>item-loading</span>)).toBe(true);
await item.resolve();
callIdleCallbacks();
expect(wrapper.contains(<span>item-loading</span>)).toBe(false);
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find({ "data-test-id": "isDeleted" }).text()).toBe(
"notDeleted"
);
});
it("commitRelationships should add changes to Merge, and remove them if it is equal with repo, for multiple relationship", async () => {
const callIdleCallbacks = registerRequestIdleCallback();
type blogPostType = {
id: string;
model: "blogPost";
attributes: {
name: string;
counter: number;
};
relationships: {
authors: {
model: "user";
id: number;
}[];
};
};
const { Repository, Branch, Item, Merge } =
stateFactory<{
blogPost: {
listParameter: {
sort: "asc" | "desc";
};
item: blogPostType;
};
}>();
const list = promiseHandler((_parameter: { sort: "asc" | "desc" }) => ({
items: [
{
id: "1",
model: "blogPost" as const,
},
{
id: "2",
model: "blogPost" as const,
},
],
totalCount: 5,
}));
const item = promiseHandler((id: string) => ({
id: id,
model: "blogPost" as const,
attributes: {
name: `foo-${id}`,
counter: 0,
},
relationships: {
authors: [
{
model: "user" as const,
id: 0,
},
],
},
}));
const wrapper = mount(
<Repository
requests={{
blogPost: {
readList: list.fn,
readItem: item.fn,
},
}}
>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitRelationships }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h1>
<span>{view.item.relationships.authors.length}</span>
<button
key="increment"
onclick={() =>
commitRelationships({
authors: [
...view.item.relationships.authors,
{
model: "user",
id: view.item.relationships.authors.length,
},
],
})
}
/>
<button
key="decrement"
onclick={() =>
commitRelationships({
authors: view.item.relationships.authors.slice(0, -1),
})
}
/>
</h1>
)
}
</Item>
<Item model="blogPost" id={"2"}>
{(view) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h2>
<span>{view.item.relationships.authors.length}</span>
</h2>
)
}
</Item>
<Merge>
{({ changes, merge }) => (
<button
key="submit"
disabled={Object.keys(changes).length === 0}
onclick={() => merge(changes)}
/>
)}
</Merge>
</Branch>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitRelationships }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h3>
<span>{view.item.relationships.authors.length}</span>
<button
onclick={() =>
commitRelationships({
authors: [
...view.item.relationships.authors,
{
model: "user",
id: view.item.relationships.authors.length,
},
],
})
}
/>
</h3>
)
}
</Item>
</Branch>
</Repository>
);
expect(wrapper.contains(<span>item-loading</span>)).toBe(true);
await item.resolve();
callIdleCallbacks();
expect(wrapper.contains(<span>item-loading</span>)).toBe(false);
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
wrapper.find("h1").find({ key: "decrement" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
wrapper.find({ key: "submit" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{3}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
});
it("commitRelationships should add changes to Merge, and remove them if it is equal with repo, for multiple relationship, with different quantity", async () => {
const callIdleCallbacks = registerRequestIdleCallback();
type blogPostType = {
id: string;
model: "blogPost";
attributes: {
name: string;
counter: number;
};
relationships: {
authors: {
model: "user";
id: number;
}[];
};
};
const { Repository, Branch, Item, Merge } =
stateFactory<{
blogPost: {
listParameter: {
sort: "asc" | "desc";
};
item: blogPostType;
};
}>();
const list = promiseHandler((_parameter: { sort: "asc" | "desc" }) => ({
items: [
{
id: "1",
model: "blogPost" as const,
},
{
id: "2",
model: "blogPost" as const,
},
],
totalCount: 5,
}));
const item = promiseHandler((id: string) => ({
id: id,
model: "blogPost" as const,
attributes: {
name: `foo-${id}`,
counter: 0,
},
relationships: {
authors: [
{
model: "user" as const,
id: 0,
},
],
},
}));
const wrapper = mount(
<Repository
requests={{
blogPost: {
readList: list.fn,
readItem: item.fn,
},
}}
>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitRelationships }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h1>
<span>{view.item.relationships.authors[0].id}</span>
<button
key="increment"
onclick={() =>
commitRelationships({
authors: [
{
model: "user",
id: view.item.relationships.authors[0].id + 1,
},
],
})
}
/>
<button
key="decrement"
onclick={() =>
commitRelationships({
authors: [
{
model: "user",
id: view.item.relationships.authors[0].id - 1,
},
],
})
}
/>
</h1>
)
}
</Item>
<Item model="blogPost" id={"2"}>
{(view) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h2>
<span>{view.item.relationships.authors[0].id}</span>
</h2>
)
}
</Item>
<Merge>
{({ changes, merge }) => (
<button
key="submit"
disabled={Object.keys(changes).length === 0}
onclick={() => merge(changes)}
/>
)}
</Merge>
</Branch>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitRelationships }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h3>
<span>{view.item.relationships.authors[0].id}</span>
<button
onclick={() =>
commitRelationships({
authors: [
{
model: "user",
id: view.item.relationships.authors[0].id + 1,
},
],
})
}
/>
</h3>
)
}
</Item>
</Branch>
</Repository>
);
expect(wrapper.contains(<span>item-loading</span>)).toBe(true);
await item.resolve();
callIdleCallbacks();
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
wrapper.find("h1").find({ key: "decrement" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
wrapper.find({ key: "submit" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
});
it("commitRelationships should add changes to Merge, and remove them if it is equal with repo, for single relationships", async () => {
const callIdleCallbacks = registerRequestIdleCallback();
const { Repository, Branch, Item, Merge } =
stateFactory<{
blogPost: {
listParameter: {
sort: "asc" | "desc";
};
item: blogPostType;
};
}>();
const list = promiseHandler((_parameter: { sort: "asc" | "desc" }) => ({
items: [
{
id: "1",
model: "blogPost" as const,
},
{
id: "2",
model: "blogPost" as const,
},
],
totalCount: 5,
}));
const item = promiseHandler((id: string) => ({
id: id,
model: "blogPost" as const,
attributes: {
name: `foo-${id}`,
counter: 0,
},
relationships: {
author: {
model: "user" as const,
id: 0,
},
},
}));
const wrapper = mount(
<Repository
requests={{
blogPost: {
readList: list.fn,
readItem: item.fn,
},
}}
>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitRelationships }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h1>
<span>{view.item.relationships.author.id}</span>
<button
key="increment"
onclick={() =>
commitRelationships({
author: {
model: "user",
id: view.item.relationships.author.id + 1,
},
})
}
/>
<button
key="decrement"
onclick={() =>
commitRelationships({
author: {
model: "user",
id: view.item.relationships.author.id - 1,
},
})
}
/>
</h1>
)
}
</Item>
<Item model="blogPost" id={"2"}>
{(view) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h2>
<span>{view.item.relationships.author.id}</span>
</h2>
)
}
</Item>
<Merge>
{({ changes, merge }) => (
<button
key="submit"
disabled={Object.keys(changes).length === 0}
onclick={() => merge(changes)}
/>
)}
</Merge>
</Branch>
<Branch>
<Item model="blogPost" id={"1"}>
{(view, { commitRelationships }) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<h3>
<span>{view.item.relationships.author.id}</span>
<button
onclick={() =>
commitRelationships({
author: {
model: "user",
id: view.item.relationships.author.id + 1,
},
})
}
/>
</h3>
)
}
</Item>
</Branch>
</Repository>
);
expect(wrapper.contains(<span>item-loading</span>)).toBe(true);
await item.resolve();
callIdleCallbacks();
expect(wrapper.contains(<span>item-loading</span>)).toBe(false);
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
wrapper.find("h1").find({ key: "decrement" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
wrapper.find({ key: "submit" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(true);
wrapper.find("h1").find({ key: "increment" }).simulate("click");
expect(wrapper.find("h1").contains(<span>{2}</span>)).toBe(true);
expect(wrapper.find("h2").contains(<span>{0}</span>)).toBe(true);
expect(wrapper.find("h3").contains(<span>{1}</span>)).toBe(true);
expect(wrapper.find({ key: "submit" }).prop("disabled")).toBe(false);
});
it("merge invalidates list cache", async () => {
const callIdleCallbacks = registerRequestIdleCallback();
const { Repository, Branch, Item, List, Merge } =
stateFactory<{
blogPost: {
listParameter: {
sort: "asc" | "desc";
};
item: blogPostType;
};
}>();
const items = [
{
id: "0",
model: "blogPost" as const,
},
{
id: "1",
model: "blogPost" as const,
},
];
const list = promiseHandler((_parameter: { sort: "asc" | "desc" }) => ({
items,
totalCount: 5,
}));
const item = promiseHandler((id: string) => ({
id: id,
model: "blogPost" as const,
attributes: {
name: `foo-${id}`,
counter: 0,
},
relationships: {
author: {
model: "user" as const,
id: 0,
},
},
}));
const wrapper = mount(
<Repository
requests={{
blogPost: {
readList: list.fn,
readItem: item.fn,
},
}}
>
<Branch>
<List model="blogPost" parameter={{ sort: "asc" }}>
{(listState) => (
<>
{listState.isLoading && <div>list-loading</div>}
{listState.items.map((item) => (
<Item model="blogPost" id={item.id}>
{(view) =>
view.isLoading ? (
<span>item-loading</span>
) : (
<span class="name">{view.item.attributes.name}</span>
)
}
</Item>
))}
<Merge>
{({ merge }) => (
<button
onclick={() => {
const id = `${items.length}`;
items.push({
id: id,
model: "blogPost",
});
merge({
blogPost: [
{
id: id,
model: "blogPost",
attributes: {
name: `bar-${id}`,
counter: 0,
},
relationships: {
author: {
model: "user",
id: 0,
},
},
},
],
});
}}
/>
)}
</Merge>
</>
)}
</List>
</Branch>
</Repository>
);
expect(wrapper.contains(<div>list-loading</div>)).toBe(true);
expect(wrapper.contains(<span>item-loading</span>)).toBe(false);
await list.resolve();
expect(wrapper.contains(<div>list-loading</div>)).toBe(false);
expect(wrapper.contains(<span>item-loading</span>)).toBe(true);
await item.resolve();
callIdleCallbacks();
expect(wrapper.contains(<div>list-loading</div>)).toBe(false);
expect(wrapper.contains(<span>item-loading</span>)).toBe(false);
expect(wrapper.containsMatchingElement(<span>foo-0</span>)).toBe(true);
expect(wrapper.containsMatchingElement(<span>foo-1</span>)).toBe(true);
expect(wrapper.find(".name").length).toBe(2);
wrapper.find("button").simulate("click");
expect(wrapper.contains(<div>list-loading</div>)).toBe(true);
expect(wrapper.containsMatchingElement(<span>item-loading</span>)).toBe(
false
);
expect(wrapper.containsMatchingElement(<span>foo-0</span>)).toBe(true);
expect(wrapper.containsMatchingElement(<span>foo-1</span>)).toBe(true);
expect(wrapper.find(".name").length).toBe(2);
await list.resolve();
expect(wrapper.contains(<div>list-loading</div>)).toBe(false);
expect(wrapper.containsMatchingElement(<span>item-loading</span>)).toBe(
false
);
expect(wrapper.containsMatchingElement(<span>foo-0</span>)).toBe(true);
expect(wrapper.containsMatchingElement(<span>foo-1</span>)).toBe(true);
expect(wrapper.containsMatchingElement(<span>bar-2</span>)).toBe(true);
expect(wrapper.find(".name").length).toBe(3);
});
});
|
<gh_stars>0
const express = require('express');
const router = express.Router();
var mongoose = require('mongoose');
const Food = require('../../../models/FNDDS/Food');
const FoodNut = require('../../../models/FNDDS/FoodNut');
// @route   POST api/foodnut/create
// @desc    Create a food nutrient entry linked to an existing food
// @access  Public
router.post('/create/', async (req, res) => {
  try {
    // Look up the parent food by its FNDDS code.
    const food = await Food.findOne({ Code: req.body.Code });
    if (!food) {
      // FIX: without this guard, `food.id` below throws a TypeError and the
      // client gets a misleading 500 instead of a 404.
      return res.status(404).json({ msg: 'Food not found' });
    }
    const newFoodNut = new FoodNut({
      Food: food.id,
      Desc: req.body.Desc,
      Val: req.body.Val,
      Unit: req.body.Unit
    });
    const foodnut = await newFoodNut.save();
    res.json(foodnut);
  } catch (err) {
    console.error(err.message);
    res.status(500).send('Server Error');
  }
});
// @route   GET api/foodnut/getall
// @desc    Get up to 65 food nutrient records
// @access  Public
// Return the first 65 food-nutrient documents as JSON.
router.get('/getall/', async (req, res) => {
  try {
    const records = await FoodNut.find().limit(65);
    return res.json(records);
  } catch (err) {
    console.error(err.message);
    res.status(500).send('Server Error');
  }
});
// @route   GET api/foodnut/foodid/:foodid
// @desc    Get food nutrient records for the given food id
// @access  Public
router.get('/foodid/:foodid', async (req, res) => {
  try {
    // FIX: the id arrives in the URL path, not in the request body
    // (GET requests normally carry no body).
    const food = await Food.findById(req.params.foodid);
    if (!food) {
      return res.status(404).json({ msg: 'Food not found' });
    }
    // FIX: the schema stores the reference under `Food` (see the create
    // route above); querying lowercase `food` matched nothing.
    const foodnut = await FoodNut.find({ Food: req.params.foodid });
    // FIX: find() resolves to an array, which is always truthy — check
    // for emptiness instead of `!foodnut`.
    if (foodnut.length === 0) {
      return res.status(404).json({ msg: 'Food not found' });
    }
    res.json(foodnut);
  } catch (err) {
    console.error(err.message);
    res.status(500).send('Server Error');
  }
});
module.exports = router;
|
#!/usr/bin/env bash
set -o errexit
# Defaults (overridable via environment): silence flag and the local
# port Tiller listens on.
: "${HELM_TILLER_SILENT:='false'}"
: "${HELM_TILLER_PORT:=44134}"
# Remember the caller's directory; the plugin itself runs from its
# installation directory so relative ./bin and ./scripts paths resolve.
CURRENT_FOLDER=$(pwd)
cd "$HELM_PLUGIN_DIR"
# Print plugin usage help; an optional first argument is printed first
# as an error message.
function usage() {
  if [[ -n "$1" ]]; then
    printf "%s\\n\\n" "$1"
  fi
  # FIX: the heredoc delimiter was written as <<' EOF' — after quote
  # removal that delimiter is " EOF" (leading space), which the closing
  # `EOF` line never matches, so bash swallowed the rest of the script
  # as heredoc input. The delimiter stays quoted so backticks and $ in
  # the help text are not expanded.
  cat <<'EOF'
Helm plugin for using Tiller locally
Usage:
  helm tiller install
  helm tiller start [tiller_namespace]
  helm tiller start-ci [tiller_namespace]
  helm tiller stop
  helm tiller run [tiller_namespace] -- [command] [args]
Available Commands:
  install   Manually install/upgrade Tiller binary
  start     Start Tiller and open new pre-set shell
  start-ci  Start Tiller without opening new shell
  run       Start Tiller and run arbitrary command within the environment
  stop      Stop Tiller
Example use with the set namespace:
  $ helm tiller start my-tiller-namespace
Example use of `run`, that starts/stops tiller before/after the specified command:
  $ helm tiller run helm list
  $ helm tiller run my-tiller-namespace -- helm list
  $ helm tiller run my-tiller-namespace -- bash -c 'echo running helm; helm list'
EOF
}
# Bail out (status 0, matching the original behavior) when the helm
# client binary is not available on PATH.
check_helm() {
  command -v helm >/dev/null 2>&1 || {
    echo "Helm client is not installed!"
    exit 0
  }
}
# Ensure a Tiller binary matching the installed helm client exists at
# ./bin/tiller, copying a PATH-installed tiller or running the install
# script when versions differ.
check_install_tiller() {
  INSTALLED_HELM=$(helm version -c --short | awk -F[:+] '{print $2}' | cut -d ' ' -f 2)
  if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
    echo "Installed Helm version $INSTALLED_HELM"
  fi
  # check if the tiller binary exists
  if [ ! -f ./bin/tiller ]; then
    # check if tiller binary is already installed in the path
    if command -v tiller >/dev/null 2>&1; then
      EXISTING_TILLER=$(command -v tiller)
      mkdir -p ./bin
      cp "${EXISTING_TILLER}" ./bin/
      INSTALLED_TILLER=$(./bin/tiller --version)
      echo "Copied found $EXISTING_TILLER to helm-tiller/bin"
    else
      # Sentinel version guaranteeing the mismatch branch below runs.
      INSTALLED_TILLER=v0.0.0
    fi
  else
    INSTALLED_TILLER=$(./bin/tiller --version)
    if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
      echo "Installed Tiller version $INSTALLED_TILLER"
    fi
  fi
  # check if tiller and helm versions match
  if [[ "${INSTALLED_HELM}" == "${INSTALLED_TILLER}" ]]; then
    if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
      echo "Helm and Tiller are the same version!"
    fi
  else
    # Versions differ: download/install the matching Tiller binary.
    ./scripts/install.sh "$INSTALLED_HELM"
  fi
}
# Emit `export` statements (consumed via eval by the dispatcher) that
# configure the optional Tiller namespace ($1) and the local HELM_HOST.
helm_env() {
  local ns="$1"
  if [[ -n "$ns" ]]; then
    echo export TILLER_NAMESPACE="${ns}"
  fi
  echo export HELM_HOST=localhost:${HELM_TILLER_PORT}
}
# Start Tiller in the background, listening only on localhost with
# secret-based release storage. Stderr/job-control noise is suppressed.
start_tiller() {
  if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
    echo "Starting Tiller..."
  fi
  { ./bin/tiller --storage=secret --listen=localhost:${HELM_TILLER_PORT} & } 2>/dev/null
  if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
    echo "Tiller namespace: $TILLER_NAMESPACE"
  fi
}
# Same as start_tiller but returns to the caller's original directory so
# the arbitrary command run afterwards executes where the user invoked us.
run_tiller() {
  if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
    echo "Starting Tiller..."
  fi
  { ./bin/tiller --storage=secret --listen=localhost:${HELM_TILLER_PORT} & } 2>/dev/null
  cd "${CURRENT_FOLDER}"
}
# Terminate any background Tiller started by this plugin (matches the
# ./bin/tiller command line).
stop_tiller() {
  if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
    echo "Stopping Tiller..."
  fi
  pkill -f ./bin/tiller
}
# Subcommand dispatcher.
COMMAND=$1
# do shift only if some argument is provided
if [[ -n "$1" ]]; then
  shift
fi
case $COMMAND in
install)
  check_helm
  check_install_tiller
  ;;
start)
  check_helm
  check_install_tiller
  # NOTE(review): eval of a single-quoted '$(helm_env "$@")' defers the
  # command substitution to eval time; the export statements helm_env
  # prints are then executed. Confirm multi-line output behaves as
  # intended under word splitting.
  eval '$(helm_env "$@")'
  start_tiller
  cd "${CURRENT_FOLDER}"
  # open user's preferred shell
  # shellcheck disable=SC2236
  if [[ ! -z "$SHELL" ]]; then
    $SHELL
  else
    bash
  fi
  ;;
start-ci)
  check_helm
  check_install_tiller
  eval '$(helm_env "$@")'
  start_tiller
  ;;
run)
  check_helm
  check_install_tiller
  # Split argv at "--": words before it configure the namespace,
  # words after it form the command to run.
  start_args=()
  args=()
  while [[ $# -gt 0 ]]; do
    case "$1" in
      -- ) start_args=( "${args[@]}" ); args=(); shift ;;
      * ) args+=("${1}"); shift ;;
    esac
  done
  # Always stop the background Tiller when the command finishes.
  trap stop_tiller EXIT
  eval '$(helm_env "${start_args[@]}")'
  run_tiller "${start_args[@]}"
  # shellcheck disable=SC2145
  if [[ "${HELM_TILLER_SILENT}" == "false" ]]; then
    echo Running: "${args[@]}"
    echo
  fi
  "${args[@]}"
  ;;
stop)
  stop_tiller
  ;;
*)
  usage "$@"
  ;;
esac
|
#!/bin/bash
# Install android-connect either per-user (--locally) or system-wide.
if [ "$1" == '--locally' ]; then # Local installation
	# Creates the target directories
	mkdir -p ~/.local/bin
	# FIX: the .desktop file is copied into ~/.local/share/applications
	# below, but the directory created here was ~/.local/applications,
	# so the copy failed on a fresh system.
	mkdir -p ~/.local/share/applications
	# Copies the files
	cp android-connect ~/.local/bin/
	cp interface/android-connect-pygtk ~/.local/bin/
	cp rsc/android-connect.desktop ~/.local/share/applications/
else # Root installation
	# Copies the files
	cp android-connect /bin/
	cp interface/android-connect-pygtk /bin/
	cp rsc/android-connect.desktop /usr/share/applications/
fi
|
cat << EOF
▄▄▄▄ ▓█████ ██▀███ ██▓ ██▓ ▒█████ ▒███████▒
▓█████▄ ▓█ ▀ ▓██ ▒ ██▒▓██▒ ▓██▒▒██▒ ██▒▒ ▒ ▒ ▄▀░
▒██▒ ▄██▒███ ▓██ ░▄█ ▒▒██░ ▒██▒▒██░ ██▒░ ▒ ▄▀▒░
▒██░█▀ ▒▓█ ▄ ▒██▀▀█▄ ▒██░ ░██░▒██ ██░ ▄▀▒ ░
░▓█ ▀█▓░▒████▒░██▓ ▒██▒░██████▒░██░░ ████▓▒░▒███████▒
░▒▓███▀▒░░ ▒░ ░░ ▒▓ ░▒▓░░ ▒░▓ ░░▓ ░ ▒░▒░▒░ ░▒▒ ▓░▒░▒
▒░▒ ░ ░ ░ ░ ░▒ ░ ▒░░ ░ ▒ ░ ▒ ░ ░ ▒ ▒░ ░░▒ ▒ ░ ▒
░ ░ ░ ░░ ░ ░ ░ ▒ ░░ ░ ░ ▒ ░ ░ ░ ░ ░
░ ░ ░ ░ ░ ░ ░ ░ ░ ░ ░
░ ░
EOF
# Resolve this script's absolute path/directory so the build works no
# matter where it is invoked from.
MY_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/$(basename "${BASH_SOURCE[0]}")"
# FIX: quote all $MY_PATH/$MY_DIR expansions — unquoted, paths containing
# spaces undergo word splitting and the dirname/cd/cygpath calls break.
MY_DIR="$(dirname "$MY_PATH")"
echo "My Dir: $MY_DIR"
if [[ "$(uname -s)" == CYGWIN* ]]; then
  echo 'CYGWIN DETECTED'
  # Convert to a Windows-style path for the Docker toolchain on Cygwin.
  MY_DIR="$(cygpath -w "$MY_DIR")"
  echo "My Dir: $MY_DIR"
fi
cd "$MY_DIR/berlioz-cli"
docker build . -t berlioz-circleci-cli
docker tag berlioz-circleci-cli berliozcloud/circleci-cli
docker push berliozcloud/circleci-cli
#!/bin/bash
# One Dark color palette used throughout the tmux theme below.
onedark_black="#282c34"
onedark_blue="#61afef"
onedark_yellow="#e5c07b"
onedark_red="#e06c75"
onedark_white="#aab2bf"
onedark_green="#98c379"
onedark_visual_grey="#3e4452"
onedark_comment_grey="#5c6370"
# Read a global tmux option ($1), falling back to $2 when it is unset
# or empty.
get() {
  local value
  value="$(tmux show-option -gqv "$1")"
  echo "${value:-$2}"
}
# Set a global tmux session option ($1) to $2.
# NOTE: intentionally shadows the `set` builtin for the rest of this script.
set() {
  tmux set-option -gq "$1" "$2"
}
# Set a global tmux window option ($1) to $2.
setw() {
  tmux set-window-option -gq "$1" "$2"
}
# Status bar layout.
set "status" "on"
set "status-justify" "left"
set "status-left-length" "100"
set "status-right-length" "100"
set "status-right-attr" "none"
# Message / command-prompt colors.
set "message-fg" "$onedark_white"
set "message-bg" "$onedark_black"
set "message-command-fg" "$onedark_white"
set "message-command-bg" "$onedark_black"
set "status-attr" "none"
set "status-left-attr" "none"
# Window status entries (inactive).
setw "window-status-fg" "$onedark_black"
setw "window-status-bg" "$onedark_black"
setw "window-status-attr" "none"
setw "window-status-activity-bg" "$onedark_black"
setw "window-status-activity-fg" "$onedark_black"
setw "window-status-activity-attr" "none"
setw "window-status-separator" ""
# Dim inactive panes; full-color text in the active one.
set "window-style" "fg=$onedark_comment_grey"
set "window-active-style" "fg=$onedark_white"
set "pane-border-fg" "$onedark_white"
set "pane-border-bg" "$onedark_black"
set "pane-active-border-fg" "$onedark_green"
set "pane-active-border-bg" "$onedark_black"
set "display-panes-active-colour" "$onedark_yellow"
set "display-panes-colour" "$onedark_blue"
set "status-bg" "$onedark_black"
set "status-fg" "$onedark_white"
# User-configurable widget slots (@onedark_widgets_*), interpolated into
# the status-right format below.
status_widgets_x=$(get "@onedark_widgets_x")
status_widgets_y=$(get "@onedark_widgets_y")
status_widgets_z=$(get "@onedark_widgets_z")
set "status-right" "#[fg=$onedark_white,bg=$onedark_black,nounderscore,noitalics]${status_widgets_x} #[fg=$onedark_visual_grey,bg=$onedark_black]#[fg=$onedark_visual_grey,bg=$onedark_visual_grey]#[fg=$onedark_white, bg=$onedark_visual_grey]${status_widgets_y} #[fg=$onedark_green,bg=$onedark_visual_grey,nobold,nounderscore,noitalics]#[fg=$onedark_black,bg=$onedark_green,bold] ${status_widgets_z} #[fg=$onedark_yellow, bg=$onedark_green]#[fg=$onedark_red,bg=$onedark_yellow]"
set "status-left" "#[fg=$onedark_black,bg=$onedark_green,bold]#{?client_prefix,#[bg=${onedark_red}],} #S #[fg=$onedark_green,bg=$onedark_black,nobold,nounderscore,noitalics]#{?client_prefix,#[fg=${onedark_red}],}"
set "window-status-format" "#[fg=$onedark_black,bg=$onedark_black,nobold,nounderscore,noitalics]#[fg=$onedark_white,bg=$onedark_black] #I #W #[fg=$onedark_black,bg=$onedark_black,nobold,nounderscore,noitalics]"
set "window-status-current-format" "#[fg=$onedark_black,bg=$onedark_visual_grey,nobold,nounderscore,noitalics]#[fg=$onedark_white,bg=$onedark_visual_grey,nobold] #I #W #[fg=$onedark_visual_grey,bg=$onedark_black,nobold,nounderscore,noitalics]"
|
<reponame>jneuendorf/pyllute
// function property(g, s, d) {
// console.log('this:', this)
// console.log(arguments.callee.caller)
// return {
// get: g,
// set: s,
// deleteProperty: d
// }
// }
//
// class A {
// x = property(
// function() {return this._x},
// function(x) {this._x = x}
// )
// y = 2
//
// // x = Object.defineProperty()
//
// m() {
// console.log(2)
// }
// }
// Demo class: `x` is an accessor pair over the backing field `_x`,
// logging each read and write so the property traps can be observed.
class A {
  _x = 2;

  set x(value) {
    console.log('set');
    this._x = value;
  }

  get x() {
    console.log('get');
    return this._x;
  }
}
// const a = new A()
// console.log(a.x)
// a.x = 4
// console.log(a.x)
// delete a.x
// console.log(a.x)
// Scratch driver for pyllute: install the Python-like builtins, then
// exercise hash() on a class and on an instance.
const {install} = require('./src/index')
// install() presumably defines globals such as hash/print/map used below --
// TODO confirm against src/index.
install()
console.log(hash(A))
console.log(hash(new A()))
// Legal only because Node wraps CommonJS modules in a function; this stops
// execution of the rest of the module body.
return
// const a = {}
// const b = {}
// print(id(a))
// print(id(a))
// print(id(b))
// input('enter something: ').then(output =>
// console.log('you typed', output)
// )
// print(int('2'))
// print(int('2.3'))
// Generator yielding the fixed sequence 2, 3, 4 (delegates to an array).
function* g1() {
  yield* [2, 3, 4];
}
// Generator yielding 1, then everything g1 produces, then 5.
function* g2() {
  yield* [1];
  yield* g1();
  yield* [5];
}
// NOTE(review): unreachable at runtime -- the top-level `return` above ends
// the CommonJS module body before these statements execute.
print(g1()[Symbol.iterator])
// print/map are presumably globals provided by install() -- TODO confirm.
const iterator = map((x, y) => [x, y], g1(), g2())
print(iterator.next())
print(iterator.next())
print(iterator.next())
print(iterator.next())
// const A = type('A', [], {
// prop: 'myprop',
// arrowMethod: (ref) => {
// console.log('arrowMethod', this === ref)
// },
// method: function(ref) {
// console.log('method', this.prop)
// },
// })
// const a = new A()
// print(type(a) === A)
// a.method(a)
// a.arrowMethod(a)
// print(list(zip([1,2,3], [4,5,6], [7,8,9,10])))
// // from Python's docs:
// const x = [1, 2, 3]
// const y = [4, 5, 6]
// zipped = zip(x, y)
// print(list(zipped))
// const [x2, y2] = zip(...zip(x, y))
// print(x.toString() == list(x2).toString() && y.toString() == list(y2).toString())
|
#! /usr/bin/bash
# Launch the blog API under gunicorn: bind to localhost:8000, 4 workers using
# the uvicorn ASGI worker class, logging under /root/.config/blogfolio/logs.
# NOTE(review): the hard-coded /root paths and --user root are machine-specific
# -- confirm before reusing on another host.
gunicorn -b 127.0.0.1:8000 -w 4 -k uvicorn.workers.UvicornWorker blog.main:app --name blogfolio_api --chdir /root/Documents/blogfolio/ --access-logfile /root/.config/blogfolio/logs/access.log --error-logfile /root/.config/blogfolio/logs/error.log --user root
|
package au.org.noojee.irrigation.weather.bureaus;
import java.util.ArrayList;
import java.util.List;
import au.org.noojee.irrigation.weather.bureaus.australia.BureauOfMeterologyAustralia;
/**
 * Static registry of the {@link WeatherBureau} implementations known to the
 * application, plus the bureau currently selected as the default.
 */
public class WeatherBureaus {
	private static final List<WeatherBureau> bureaus = new ArrayList<>();

	private static WeatherBureau defaultBureau;

	static {
		// Register your bureau here.
		// This static block runs as the application loads, so a WeatherBureau
		// ctor must not do anything that could break when run very early; a
		// bureau shouldn't do real work until it is actually used.
		register(new BureauOfMeterologyAustralia());
	}

	/** Adds a bureau to the registry (package-private by design). */
	static void register(WeatherBureau bureau) {
		bureaus.add(bureau);
	}

	/** Selects the bureau returned by {@link #getDefaultBureau()}. */
	public static void setDefaultBureau(WeatherBureau defaultBureau) {
		WeatherBureaus.defaultBureau = defaultBureau;
	}

	/** @return the currently selected default bureau, or null when unset. */
	public static WeatherBureau getDefaultBureau() {
		return defaultBureau;
	}

	/** @return the live (mutable) list of every registered bureau. */
	public static List<WeatherBureau> getBureaus() {
		return bureaus;
	}
}
|
<filename>engine/api/notification/permission_test.go
package notification
import (
"context"
"testing"
"github.com/ovh/cds/engine/api/bootstrap"
"github.com/ovh/cds/engine/api/group"
"github.com/ovh/cds/engine/api/project"
"github.com/ovh/cds/engine/api/test"
"github.com/ovh/cds/engine/api/test/assets"
"github.com/ovh/cds/sdk"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Test_projectPermissionUserIDs test the usernames selected to send notifications
func Test_projectPermissionUserIDs(t *testing.T) {
	db, cache, end := test.SetupPG(t, bootstrap.InitiliazeDB)
	defer end()

	// Three fresh groups: g1 becomes the default group; g2 and g3 are linked
	// to the project below with RWX.
	g1 := assets.InsertTestGroup(t, db, sdk.RandomString(10))
	g2 := assets.InsertTestGroup(t, db, sdk.RandomString(10))
	g3 := assets.InsertTestGroup(t, db, sdk.RandomString(10))

	// u1 (discarded) is only in g1 -- the default group -- so it must NOT be
	// selected; u2 is created in g2 and also added to g3 further down.
	_, _ = assets.InsertLambdaUser(t, db, g1)
	u2, _ := assets.InsertLambdaUser(t, db, g2)

	pkey := sdk.RandomString(10)
	proj := assets.InsertTestProject(t, db, cache, pkey, pkey)

	require.NoError(t, group.InsertLinkGroupProject(context.TODO(), db, &group.LinkGroupProject{
		GroupID:   g2.ID,
		ProjectID: proj.ID,
		Role:      sdk.PermissionReadWriteExecute,
	}))
	require.NoError(t, group.InsertLinkGroupProject(context.TODO(), db, &group.LinkGroupProject{
		GroupID:   g3.ID,
		ProjectID: proj.ID,
		Role:      sdk.PermissionReadWriteExecute,
	}))
	// u2 is now reachable through both g2 and g3; the assertions below check
	// it is still returned only once.
	require.NoError(t, group.InsertLinkGroupUser(context.TODO(), db, &group.LinkGroupUser{
		GroupID:            g3.ID,
		AuthentifiedUserID: u2.ID,
		Admin:              false,
	}), "unable to insert user in group")

	assert.NoError(t, project.Update(db, proj))

	// Members of the default group alone are expected to be excluded.
	group.DefaultGroup = g1

	userList, err := projectPermissionUserIDs(context.Background(), db, cache, proj.ID, sdk.PermissionRead)
	assert.NoError(t, err)
	assert.NotEmpty(t, userList)
	assert.Equal(t, 1, len(userList))
	assert.Equal(t, u2.ID, userList[0], "Only user 2 have to be here. u1, in the default group only should not be here.")
}
|
<reponame>MurmurationsNetwork/MurmurationsProfileGenerator
import {
Button,
HStack,
Image,
Input,
InputGroup,
InputRightElement,
Modal,
ModalBody,
ModalCloseButton,
ModalContent,
ModalFooter,
ModalHeader,
ModalOverlay,
Switch,
Text,
useToast,
VStack
} from '@chakra-ui/react'
import { useState } from 'react'
import { useAuth } from '@/lib/auth'
export default function SignIn({ isOpen, onClose }) {
const [email, setEmail] = useState('')
const [password, setPassword] = useState('')
const [signup, setSignup] = useState(false)
const handleEmailChange = event => setEmail(event.target.value)
const handlePasswordChange = event => setPassword(event.target.value)
const { signinWithGithub, signinWithGoogle, signinWithEmail, signupWithEmail } = useAuth()
const toast = useToast()
const [show, setShow] = useState(false)
function handleMask() {
setShow(!show)
}
function signinGithub() {
signinWithGithub()
onClose()
}
function signinGoogle() {
signinWithGoogle()
onClose()
}
async function signinEmail(email, password) {
const error = await signinWithEmail(email, password)
if (error.code) {
if (error.code == 'auth/wrong-password') {
toast({
title: 'Wrong Password',
description: 'The password you have entered is invalid.',
status: 'error',
position: 'top',
duration: 5000,
isClosable: true
})
} else if (error.code == 'auth/user-not-found') {
toast({
title: 'User Not Found',
description:
'There is no user record corresponding to this email address. The user may have been deleted.',
status: 'error',
position: 'top',
duration: 5000,
isClosable: true
})
} else {
toast({
title: 'Sign In Error',
description: error.code,
status: 'error',
position: 'top',
duration: 5000,
isClosable: true
})
}
} else {
toast({
title: 'Sign In Completed',
description: 'You are now signed in.',
status: 'success',
position: 'top',
duration: 5000,
isClosable: true
})
setEmail('')
setPassword('')
onClose()
}
}
async function signupEmail(email, password) {
const error = await signupWithEmail(email, password)
if (error.code) {
if (error.code == 'auth/email-already-in-use') {
toast({
title: 'Already In Use',
description: 'The email address is already in use by another account.',
status: 'error',
position: 'top',
duration: 5000,
isClosable: true
})
} else {
toast({
title: 'Sign Up Error',
description: error.code,
status: 'error',
position: 'top',
duration: 5000,
isClosable: true
})
}
} else {
toast({
title: 'Sign Up Completed',
description: 'You are now signed in.',
status: 'success',
position: 'top',
duration: 5000,
isClosable: true
})
setEmail('')
setPassword('')
onClose()
}
}
function handleToggle() {
setSignup(!signup)
}
return (
<Modal isOpen={isOpen} onClose={onClose}>
<ModalOverlay />
<ModalContent>
<ModalHeader>
<Text>Sign in to manage your profiles</Text>
</ModalHeader>
<ModalCloseButton />
<ModalBody my={8}>
<VStack spacing={8}>
<HStack spacing={8}>
<Image height={8} src="github-yellow.svg" alt="GitHub" />
<Button
colorScheme="yellow"
color="white"
borderRadius="2xl"
onClick={() => signinGithub()}
>
Sign in with GitHub
</Button>
</HStack>
<HStack spacing={8}>
<Image height={8} src="google-yellow.svg" alt="Google" />
<Button
colorScheme="yellow"
color="white"
borderRadius="2xl"
onClick={() => signinGoogle()}
>
Sign in with Google
</Button>
</HStack>
<HStack spacing={4}>
<Text>Or Sign in/Sign up with Email</Text>
</HStack>
</VStack>
<VStack spacing={4} margin={4}>
<HStack spacing={4} isInline justifyContent="flex-start" alignItems="flex-start">
<Text fontWeight="600">Sign In</Text>
<Switch size="lg" colorScheme="yellow" onChange={handleToggle} />
<Text fontWeight="600">Sign up</Text>
</HStack>
{signup ? (
<>
<Input value={email} onChange={handleEmailChange} placeholder="Email" />
<InputGroup>
<Input
value={password}
onChange={handlePasswordChange}
placeholder="Password"
type={show ? 'text' : 'password'}
/>
<InputRightElement width="4.5rem">
<Button h="1.75rem" size="sm" onClick={handleMask}>
{show ? 'Hide' : 'Show'}
</Button>
</InputRightElement>
</InputGroup>
<Button
colorScheme="yellow"
color="white"
borderRadius="2xl"
onClick={() => signupEmail(email, password)}
>
Sign up with Email
</Button>
</>
) : (
<>
<Input value={email} onChange={handleEmailChange} placeholder="Email" />
<InputGroup>
<Input
value={password}
onChange={handlePasswordChange}
placeholder="Password"
type={show ? 'text' : 'password'}
/>
<InputRightElement width="4.5rem">
<Button h="1.75rem" size="sm" onClick={handleMask}>
{show ? 'Hide' : 'Show'}
</Button>
</InputRightElement>
</InputGroup>
<Button
colorScheme="yellow"
color="white"
borderRadius="2xl"
onClick={() => signinEmail(email, password)}
>
Sign in with Email
</Button>
</>
)}
</VStack>
</ModalBody>
<ModalFooter></ModalFooter>
</ModalContent>
</Modal>
)
}
|
#!/bin/sh
# Generate a self-signed root CA plus a server certificate signed by it, and
# convert both server artifacts to PEM for node.js.
# Fixes: `$subj` is now quoted (a hostname containing spaces no longer breaks
# `openssl req`), and legacy backticks were replaced with $(...).
set -x
set -e
# create CA
# step 1 private key
openssl genrsa -out rootCA.key 2048
# step 2 certificate (self-signed, valid 2 years)
openssl req -x509 -new -key rootCA.key -out rootCA.cer -days 730 -subj /CN="Example Custom CA"
# create certificate (authorised by CA)
# step 1 private key
openssl genrsa -out server.key 2048
subj="/CN=$(hostname)"
# step 2 CSR (Client Signing Request)
openssl req -new -out server.req -key server.key -subj "$subj"
# step 3 certificate (valid 1 year, serial tracked in ./serial)
openssl x509 -req -in server.req -out server.cer -CAkey rootCA.key -CA rootCA.cer -days 365 -CAcreateserial -CAserial serial
# adaptions for node.js
# convert crt and key to pem
openssl x509 -in server.cer -out server.crt.pem -outform PEM
openssl rsa -in server.key -out server.key.pem -outform PEM
|
#!/bin/bash
#
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
#
# usage: ./gen_crypto_cfg.sh [opt] [value]
#
# Print CLI usage for gen_crypto_cfg.sh and terminate the script.
function printHelp {
   echo "Usage: "
   echo " ./gen_crypto_cfg.sh [opt] [value] "
   echo " -o: number of orderers, default=1"
   echo " -p: number of peers per organization, default=1"
   echo " -r: number of organization, default=1"
   echo " -C: company name, default=example.com"
   echo " -M: JSON file containing organization and MSP name mappings (optional) "
   echo " "
   echo "Example:"
   echo " ./gen_crypto_cfg.sh -o 1 -p 2 -r 2"
   # Exits with the status of the last echo (0).
   exit
}
CWD=$PWD

# default output file
cfgOutFile=$CWD"/crypto-config.yaml"

# default values
nOrderer=1
peersPerOrg=1
nOrg=1
comName="example.com"
orgMap=

# parse CLI options
while getopts ":o:p:r:C:M:" opt; do
    case $opt in
    # number of orderers
    o)
        nOrderer=$OPTARG
        echo "nOrderer: $nOrderer"
        ;;
    # number of peers per org
    p)
        peersPerOrg=$OPTARG
        echo "peersPerOrg: $peersPerOrg"
        ;;
    # number of orgs
    r)
        nOrg=$OPTARG
        echo "nOrg: $nOrg"
        ;;
    # company name
    C)
        comName=$OPTARG
        echo "comName: $comName"
        ;;
    # filename containing organization and MSP name mappings
    M)
        orgMap=$OPTARG
        echo "orgMap: $orgMap"
        ;;
    # else
    \?)
        echo "Invalid option: -$OPTARG" >&2
        printHelp
        ;;
    :)
        echo "Option -$OPTARG requires an argument." >&2
        printHelp
        ;;
    esac
done

echo "nOrderer=$nOrderer, peersPerOrg=$peersPerOrg, nOrg=$nOrg"
echo "cfgOutFile=$cfgOutFile"

# start from an empty output file
rm -f "$cfgOutFile"

# lookupName KEY FALLBACK
# Echo the value mapped to KEY in the optional JSON mapping file ($orgMap), or
# FALLBACK when the file is absent or has no non-null entry for KEY.
# `jq -r` emits the raw value, which replaces the manual quote-stripping that
# was previously duplicated for both MSP and org names.
lookupName() {
    local key="$1"
    local fallback="$2"
    local val
    if [ -n "$orgMap" ] && [ -f "$orgMap" ]; then
        val=$(jq -r ".$key" "$orgMap")
        if [ -n "$val" ] && [ "$val" != "null" ]; then
            echo "$val"
            return
        fi
    fi
    echo "$fallback"
}

#begin process
# Orderer section: a single "Orderer" org with $nOrderer instances.
echo "OrdererOrgs:" >> "$cfgOutFile"
echo " - Name: Orderer" >> "$cfgOutFile"
tt=$comName
echo " Domain: $tt" >> "$cfgOutFile"
echo " Template:" >> "$cfgOutFile"
echo " Count: $nOrderer" >> "$cfgOutFile"

# Peer org sections: one entry per organization, with optional name overrides
# taken from the $orgMap JSON file.
echo "PeerOrgs:" >> "$cfgOutFile"
for (( i=1; i<=$nOrg; i++ ))
do
    orgMSP=$(lookupName "PeerOrg$i" "PeerOrg$i")
    echo " - Name: $orgMSP" >> "$cfgOutFile"
    orgName=$(lookupName "org$i" "org$i")
    tt=$orgName"."$comName
    echo " Domain: $tt" >> "$cfgOutFile"
    echo " EnableNodeOUs: true" >> "$cfgOutFile"
    echo " Template:" >> "$cfgOutFile"
    echo " Count: $peersPerOrg" >> "$cfgOutFile"
    echo " Users:" >> "$cfgOutFile"
    echo " Count: 1" >> "$cfgOutFile"
done
exit
|
#!/usr/bin/env bash
set -eo pipefail
# Opt-in shell tracing via the TRACE environment variable.
[[ $TRACE ]] && set -x

# A script to bootstrap dokku.
# It expects to be run on Ubuntu 18.04/20.04, or CentOS 7 via 'sudo'
# If installing a tag higher than 0.3.13, it may install dokku via a package (so long as the package is higher than 0.3.13)
# It checks out the dokku source code from Github into ~/dokku and then runs 'make install' from dokku source.

# We wrap this whole script in functions, so that we won't execute
# until the entire script is downloaded.
# That's good because it prevents our output overlapping with wget's.
# It also means that we can't run a partially downloaded script.

# Human-readable list used in unsupported-distro error messages.
SUPPORTED_VERSIONS="Debian [9, 10], CentOS [7], Fedora (partial) [33, 34], Ubuntu [18.04, 20.04]"
log-fail() {
  declare desc="log fail formatter"
  # Echo all arguments to stderr, then abort the installer with status 1.
  echo "$@" >&2
  exit 1
}
ensure-environment() {
  # Sanity-check the host before installing: announce what will be installed,
  # require a resolvable hostname, and warn when RAM looks too small to build
  # containers.
  local TOTAL_MEMORY_KB
  if [[ -z "$DOKKU_TAG" ]]; then
    echo "Preparing to install $DOKKU_REPO..."
  else
    echo "Preparing to install $DOKKU_TAG from $DOKKU_REPO..."
  fi

  hostname -f >/dev/null 2>&1 || {
    log-fail "This installation script requires that you have a hostname set for the instance. Please set a hostname for 127.0.0.1 in your /etc/hosts"
  }

  # Reads MemTotal (total RAM in kB) from /proc/meminfo; the local was
  # previously (misleadingly) named FREE_MEMORY.
  TOTAL_MEMORY_KB=$(grep MemTotal /proc/meminfo | awk '{print $2}')
  if [[ "$TOTAL_MEMORY_KB" -lt 1003600 ]]; then
    echo "For dokku to build containers, it is strongly suggested that you have 1024 megabytes or more of free memory"
    echo "If necessary, please consult this document to setup swap: https://dokku.com/docs/getting-started/advanced-installation/#vms-with-less-than-1-gb-of-memory"
  fi
}
install-requirements() {
  # Install distro-specific prerequisites; software-properties-common
  # provides add-apt-repository.
  echo "--> Ensuring we have the proper dependencies"

  case "$DOKKU_DISTRO" in
    debian)
      if ! dpkg -l | grep -q software-properties-common; then
        apt-get update -qq >/dev/null
        apt-get -qq -y --no-install-recommends install software-properties-common
      fi
      ;;
    ubuntu)
      if ! dpkg -l | grep -q software-properties-common; then
        apt-get update -qq >/dev/null
        apt-get -qq -y --no-install-recommends install software-properties-common
      fi
      # Some dokku dependencies live in the universe component.
      add-apt-repository universe >/dev/null
      apt-get update -qq >/dev/null
      ;;
  esac
}
install-dokku() {
  # Top-level installer: warns first-time users, then chooses between a
  # source build (branch / pre-package tags) and a package install.
  if ! command -v dokku &>/dev/null; then
    echo "--> Note: Installing dokku for the first time will result in removal of"
    echo " files in the nginx 'sites-enabled' directory. Please manually"
    echo " restore any files that may be removed after the installation and"
    echo " web setup is complete."
    echo ""
    echo " Installation will continue in 10 seconds."
    sleep 10
  fi

  if [[ -n $DOKKU_BRANCH ]]; then
    # Branch installs always build from source.
    install-dokku-from-source "origin/$DOKKU_BRANCH"
  elif [[ -n $DOKKU_TAG ]]; then
    # Strip the leading "v" to get a bare semver, then split it.
    local DOKKU_SEMVER="${DOKKU_TAG//v/}"
    major=$(echo "$DOKKU_SEMVER" | awk '{split($0,a,"."); print a[1]}')
    minor=$(echo "$DOKKU_SEMVER" | awk '{split($0,a,"."); print a[2]}')
    patch=$(echo "$DOKKU_SEMVER" | awk '{split($0,a,"."); print a[3]}')

    use_plugin=false
    # 0.4.0 implemented a `plugin` plugin
    if [[ "$major" -eq "0" ]] && [[ "$minor" -ge "4" ]] && [[ "$patch" -ge "0" ]]; then
      use_plugin=true
    elif [[ "$major" -ge "1" ]]; then
      use_plugin=true
    fi

    # 0.3.13 was the first version with a debian package
    if [[ "$major" -eq "0" ]] && [[ "$minor" -eq "3" ]] && [[ "$patch" -ge "13" ]]; then
      install-dokku-from-package "$DOKKU_SEMVER"
      echo "--> Running post-install dependency installation"
      dokku plugins-install-dependencies
    elif [[ "$use_plugin" == "true" ]]; then
      install-dokku-from-package "$DOKKU_SEMVER"
      echo "--> Running post-install dependency installation"
      sudo -E dokku plugin:install-dependencies --core
    else
      # Tags older than 0.3.13 predate packaging entirely.
      install-dokku-from-source "$DOKKU_TAG"
    fi
  else
    # No branch/tag requested: install the latest package.
    install-dokku-from-package
    echo "--> Running post-install dependency installation"
    sudo -E dokku plugin:install-dependencies --core
  fi
}
install-dokku-from-source() {
  # Clone (once) and build dokku from git; $1 is the ref to check out.
  local DOKKU_CHECKOUT="$1"

  if ! command -v apt-get &>/dev/null; then
    log-fail "This installation script requires apt-get. For manual installation instructions, consult https://dokku.com/docs/getting-started/advanced-installation/"
  fi

  apt-get -qq -y --no-install-recommends install sudo git make software-properties-common
  cd /root
  if [[ ! -d /root/dokku ]]; then
    git clone "$DOKKU_REPO" /root/dokku
  fi

  cd /root/dokku
  git fetch origin
  [[ -n $DOKKU_CHECKOUT ]] && git checkout "$DOKKU_CHECKOUT"
  make install
}
install-dokku-from-package() {
  # Dispatch to the distro-appropriate package installer, forwarding any
  # version argument unchanged.
  local distro="$DOKKU_DISTRO"
  if [[ "$distro" == "debian" ]] || [[ "$distro" == "ubuntu" ]]; then
    install-dokku-from-deb-package "$@"
  elif [[ "$distro" == "centos" ]] || [[ "$distro" == "fedora" ]] || [[ "$distro" == "rhel" ]]; then
    install-dokku-from-rpm-package "$@"
  else
    log-fail "Unsupported Linux distribution. For manual installation instructions, consult https://dokku.com/docs/getting-started/advanced-installation/"
  fi
}
in-array() {
  declare desc="return true if value ($1) is in list (all other arguments)"
  # Linear scan over the remaining arguments for an exact string match.
  local needle="$1"
  shift
  local candidate
  for candidate in "$@"; do
    if [[ "$candidate" == "$needle" ]]; then
      return 0
    fi
  done
  return 1
}
install-dokku-from-deb-package() {
  # Install dokku (and docker, if missing) from the packagecloud apt repo.
  # $1 optionally pins an exact dokku package version.
  local DOKKU_CHECKOUT="$1"
  local NO_INSTALL_RECOMMENDS=${DOKKU_NO_INSTALL_RECOMMENDS:=""}
  local OS_ID

  if ! in-array "$DOKKU_DISTRO_VERSION" "18.04" "20.04" "9" "10"; then
    log-fail "Unsupported Linux distribution. Only the following versions are supported: $SUPPORTED_VERSIONS"
  fi

  # Inside a Dockerfile build, keep the image slim.
  if [[ -n $DOKKU_DOCKERFILE ]]; then
    NO_INSTALL_RECOMMENDS=" --no-install-recommends "
  fi

  echo "--> Initial apt-get update"
  apt-get update -qq >/dev/null
  apt-get -qq -y --no-install-recommends install apt-transport-https

  if ! command -v docker &>/dev/null; then
    echo "--> Installing docker"
    if uname -r | grep -q linode; then
      echo "--> NOTE: Using Linode? Docker may complain about missing AUFS support."
      echo " You can safely ignore this warning."
      echo ""
      echo " Installation will continue in 10 seconds."
      sleep 10
    fi
    export CHANNEL=stable
    wget -nv -O - https://get.docker.com/ | sh
  fi

  # Resolve the apt codename; fall back to bionic for unknown combinations.
  OS_ID="$(lsb_release -cs 2>/dev/null || echo "bionic")"
  if ! in-array "$DOKKU_DISTRO" "debian" "ubuntu"; then
    DOKKU_DISTRO="ubuntu"
    OS_ID="bionic"
  fi
  if [[ "$DOKKU_DISTRO" == "ubuntu" ]]; then
    OS_IDS=("bionic" "focal")
    if ! in-array "$OS_ID" "${OS_IDS[@]}"; then
      OS_ID="bionic"
    fi
  elif [[ "$DOKKU_DISTRO" == "debian" ]]; then
    OS_IDS=("stretch" "buster")
    if ! in-array "$OS_ID" "${OS_IDS[@]}"; then
      OS_ID="buster"
    fi
  fi

  echo "--> Installing dokku"
  wget -nv -O - https://packagecloud.io/dokku/dokku/gpgkey | apt-key add -
  echo "deb https://packagecloud.io/dokku/dokku/$DOKKU_DISTRO/ $OS_ID main" | tee /etc/apt/sources.list.d/dokku.list
  apt-get update -qq >/dev/null

  # Pre-seed debconf answers so the dokku package installs non-interactively.
  [[ -n $DOKKU_VHOST_ENABLE ]] && echo "dokku dokku/vhost_enable boolean $DOKKU_VHOST_ENABLE" | sudo debconf-set-selections
  [[ -n $DOKKU_WEB_CONFIG ]] && echo "dokku dokku/web_config boolean $DOKKU_WEB_CONFIG" | sudo debconf-set-selections
  [[ -n $DOKKU_HOSTNAME ]] && echo "dokku dokku/hostname string $DOKKU_HOSTNAME" | sudo debconf-set-selections
  [[ -n $DOKKU_SKIP_KEY_FILE ]] && echo "dokku dokku/skip_key_file boolean $DOKKU_SKIP_KEY_FILE" | sudo debconf-set-selections
  [[ -n $DOKKU_KEY_FILE ]] && echo "dokku dokku/key_file string $DOKKU_KEY_FILE" | sudo debconf-set-selections
  [[ -n $DOKKU_NGINX_ENABLE ]] && echo "dokku dokku/nginx_enable string $DOKKU_NGINX_ENABLE" | sudo debconf-set-selections

  if [[ -n $DOKKU_CHECKOUT ]]; then
    # shellcheck disable=SC2086
    apt-get -qq -y $NO_INSTALL_RECOMMENDS install "dokku=$DOKKU_CHECKOUT"
  else
    # shellcheck disable=SC2086
    apt-get -qq -y $NO_INSTALL_RECOMMENDS install dokku
  fi
}
install-dokku-from-rpm-package() {
  # Install dokku via rpm packages; $1 optionally pins a version suffix.
  local DOKKU_CHECKOUT="$1"

  if ! in-array "$DOKKU_DISTRO_VERSION" "7"; then
    log-fail "Unsupported Linux distribution. Only the following versions are supported: $SUPPORTED_VERSIONS"
  fi

  echo "--> Installing docker"
  curl -fsSL https://get.docker.com/ | sh

  # nginx lives in EPEL on CentOS.
  echo "--> Installing epel for nginx packages to be available"
  yum install -y epel-release

  echo "--> Installing herokuish and dokku"
  curl -s https://packagecloud.io/install/repositories/dokku/dokku/script.rpm.sh | bash
  if [[ -n $DOKKU_CHECKOUT ]]; then
    yum -y install herokuish "dokku-$DOKKU_CHECKOUT"
  else
    yum -y install herokuish dokku
  fi

  echo "--> Enabling docker and nginx on system startup"
  systemctl enable docker
  systemctl enable nginx

  echo "--> Starting nginx"
  systemctl start nginx
}
main() {
  # Detect the distro/version, set install-wide environment, then run the
  # three installation phases in order.
  export DOKKU_DISTRO DOKKU_DISTRO_VERSION
  # Source os-release in a subshell so its variables don't leak here.
  # shellcheck disable=SC1091
  DOKKU_DISTRO=$(. /etc/os-release && echo "$ID")
  # shellcheck disable=SC1091
  DOKKU_DISTRO_VERSION=$(. /etc/os-release && echo "$VERSION_ID")

  export DEBIAN_FRONTEND=noninteractive
  export DOKKU_REPO=${DOKKU_REPO:-"https://github.com/dokku/dokku.git"}

  ensure-environment
  install-requirements
  install-dokku
}

# Entry point (the whole script is wrapped in functions; see header comment).
main "$@"
|
<reponame>mkmozgawa/luxmed-bot
package com.lbs.server.conversation
import java.time.format.TextStyle
import java.time.{LocalTime, ZonedDateTime}
import akka.actor.ActorSystem
import com.lbs.bot.model.Button
import com.lbs.bot.{Bot, _}
import com.lbs.server.conversation.DatePicker._
import com.lbs.server.conversation.Login.UserId
import com.lbs.server.conversation.base.{Conversation, Interactional}
import com.lbs.server.lang.{Localizable, Localization}
import com.lbs.server.util.DateTimeUtil._
import com.lbs.server.util.MessageExtractors.{CallbackCommand, TextCommand}
import scala.util.control.NonFatal
/**
* Date picker Inline Keyboard
*
* ⬆ ⬆ ⬆
* dd MM yyyy
* ⬇ ⬇ ⬇
*
*/
class DatePicker(val userId: UserId, val bot: Bot, val localization: Localization, originator: Interactional)
  (val actorSystem: ActorSystem) extends Conversation[ZonedDateTime] with Localizable {

  // Whether the picked date is a range start (DateFromMode) or end (DateToMode).
  private var mode: Mode = DateFromMode

  entryPoint(configure)

  // First step: optionally accept a Mode switch, then an initial date which
  // seeds the keyboard and advances the conversation to requestDate.
  def configure: Step =
    monologue {
      case Msg(newMode: Mode, _) =>
        mode = newMode
        stay()
      case Msg(initialDate: ZonedDateTime, _) =>
        goto(requestDate) using initialDate
    }

  // Shows the day/month/year inline keyboard and loops on button presses
  // until "Done" is pressed or a dd-MM date is typed; the resulting date is
  // sent back to `originator` and the conversation ends.
  def requestDate: Step =
    ask { initialDate =>
      val message = mode match {
        case DateFromMode => lang.chooseDateFrom(initialDate)
        case DateToMode => lang.chooseDateTo(initialDate)
      }
      bot.sendMessage(userId.source, message, inlineKeyboard = dateButtons(initialDate))
    } onReply {
      case Msg(cmd@CallbackCommand(Tags.Done), finalDate) =>
        // "Done" pressed: adjust the date per mode, confirm in chat, and hand
        // the result to the originator.
        val (message, updatedDate) = mode match {
          case DateFromMode =>
            val startOfTheDay = finalDate.`with`(LocalTime.MIN)
            // Don't rewind a "from" date into the past.
            val dateFrom = if (startOfTheDay.isBefore(ZonedDateTime.now())) finalDate else startOfTheDay
            lang.dateFromIs(dateFrom) -> dateFrom
          case DateToMode =>
            // End of day minus 2 hours -- presumably a booking cut-off; TODO confirm.
            val dateTo = finalDate.`with`(LocalTime.MAX).minusHours(2)
            lang.dateToIs(dateTo) -> dateTo
        }
        bot.sendEditMessage(userId.source, cmd.message.messageId, message)
        originator ! updatedDate
        end()
      case Msg(TextCommand(dayMonth), finalDate) =>
        // Date typed as free text (dd-MM); parse failures re-prompt the user.
        try {
          val updatedDate = applyDayMonth(dayMonth, finalDate)
          val message = mode match {
            case DateFromMode =>
              lang.dateFromIs(updatedDate)
            case DateToMode =>
              lang.dateToIs(updatedDate)
          }
          bot.sendMessage(userId.source, message)
          originator ! updatedDate
          end()
        } catch {
          case NonFatal(ex) =>
            error("Unable to parse date", ex)
            bot.sendMessage(userId.source, "Incorrect date. Please use format dd-MM")
            goto(requestDate)
        }
      case Msg(cmd@CallbackCommand(tag), date) =>
        // Any other button: shift the date and redraw the keyboard in place.
        val modifiedDate = modifyDate(date, tag)
        bot.sendEditMessage(userId.source, cmd.message.messageId, inlineKeyboard = dateButtons(modifiedDate))
        stay() using modifiedDate
    }

  // Applies a one-unit day/month/year increment or decrement for the given
  // callback tag. Tags other than the six handled here throw a MatchError.
  private def modifyDate(date: ZonedDateTime, tag: String) = {
    val dateModifier = tag match {
      case Tags.DayInc => date.plusDays _
      case Tags.MonthInc => date.plusMonths _
      case Tags.YearInc => date.plusYears _
      case Tags.DayDec => date.minusDays _
      case Tags.MonthDec => date.minusMonths _
      case Tags.YearDec => date.minusYears _
    }
    dateModifier(1)
  }

  // Builds the arrows / current-value / arrows / Done inline keyboard,
  // localizing the day-of-week and month names.
  private def dateButtons(date: ZonedDateTime) = {
    val day = date.getDayOfMonth.toString
    val dayOfWeek = date.getDayOfWeek.getDisplayName(TextStyle.SHORT, lang.locale)
    val month = date.getMonth.getDisplayName(TextStyle.SHORT, lang.locale)
    val year = date.getYear.toString
    createInlineKeyboard(Seq(
      Seq(Button("⬆", Tags.DayInc), Button("⬆", Tags.MonthInc), Button("⬆", Tags.YearInc)),
      Seq(Button(s"$day ($dayOfWeek)"), Button(month), Button(year)),
      Seq(Button("⬇", Tags.DayDec), Button("⬇", Tags.MonthDec), Button("⬇", Tags.YearDec)),
      Seq(Button("Done", Tags.Done))
    ))
  }
}
object DatePicker {

  // Distinguishes whether the picked date is a range start or a range end.
  trait Mode

  object DateFromMode extends Mode

  object DateToMode extends Mode

  // Callback-data tags carried by the inline keyboard buttons.
  object Tags {
    val DayInc = "day_inc"
    val MonthInc = "month_inc"
    val YearInc = "year_inc"
    val DayDec = "day_dec"
    val MonthDec = "month_dec"
    val YearDec = "year_dec"
    val Done = "done"
  }
}
<reponame>mausinixterra/letsencrypt<filename>certbot/client.py
"""Certbot client API."""
import logging
import os
import platform
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import OpenSSL
import zope.component
from acme import client as acme_client
from acme import crypto_util as acme_crypto_util
from acme import errors as acme_errors
from acme import jose
from acme import messages
import certbot
from certbot import account
from certbot import auth_handler
from certbot import cli
from certbot import constants
from certbot import crypto_util
from certbot import eff
from certbot import error_handler
from certbot import errors
from certbot import interfaces
from certbot import reverter
from certbot import storage
from certbot import util
from certbot.display import ops as display_ops
from certbot.display import enhancements
from certbot.plugins import selection as plugin_selection
logger = logging.getLogger(__name__)
def acme_from_config_key(config, key):
    """Build an ACME `Client` for the given configuration and account key."""
    # TODO: Allow for other alg types besides RS256
    verify = not config.no_verify_ssl
    network = acme_client.ClientNetwork(
        key, verify_ssl=verify, user_agent=determine_user_agent(config))
    return acme_client.Client(config.server, key=key, net=network)
def determine_user_agent(config):
    """
    Set a user_agent string in the config based on the choice of plugins.
    (this wasn't knowable at construction time)
    :returns: the client's User-Agent string
    :rtype: `str`
    """
    # WARNING: To ensure changes are in line with Certbot's privacy
    # policy, talk to a core Certbot team member before making any
    # changes here.
    if config.user_agent is None:
        # Positional arg {8} is the optional user-agent comment, spliced in
        # right after the OS info.
        ua = ("CertbotACMEClient/{0} ({1}; {2}{8}) Authenticator/{3} Installer/{4} "
              "({5}; flags: {6}) Py/{7}")
        ua = ua.format(certbot.__version__, cli.cli_command, util.get_os_info_ua(),
                       config.authenticator, config.installer, config.verb,
                       ua_flags(config), platform.python_version(),
                       "; " + config.user_agent_comment if config.user_agent_comment else "")
    else:
        # An explicitly configured user agent wins outright.
        ua = config.user_agent
    return ua
def ua_flags(config):
    """Turn some very important CLI flags into clues in the user agent."""
    if isinstance(config, DummyConfig):
        return "FLAGS"
    # (abbreviation, enabled) pairs, in the order they should appear.
    flag_conditions = (
        ("dup", config.duplicate),
        ("frn", config.renew_by_default),
        ("asn", config.allow_subset_of_names),
        ("n", config.noninteractive_mode),
    )
    flags = [abbrev for abbrev, enabled in flag_conditions if enabled]
    # Collapse the presence of any hook into a single "hook" clue.
    hook_names = ("pre", "post", "renew", "manual_auth", "manual_cleanup")
    hooks = [getattr(config, h + "_hook") for h in hook_names]
    if any(hooks):
        flags.append("hook")
    return " ".join(flags)
class DummyConfig(object):
    """Shim config used only for computing a sample user agent string."""

    def __init__(self):
        # Placeholder values that show up in the sample UA string.
        self.authenticator = "XXX"
        self.installer = "YYY"
        self.user_agent = None
        self.verb = "SUBCOMMAND"

    def __getattr__(self, name):
        """Any config properties we might have are None."""
        return None
def sample_user_agent():
    """Document what this Certbot's user agent string will be like."""
    placeholder_config = DummyConfig()
    return determine_user_agent(placeholder_config)
def register(config, account_storage, tos_cb=None):
    """Register new account with an ACME CA.
    This function takes care of generating fresh private key,
    registering the account, optionally accepting CA Terms of Service
    and finally saving the account. It should be called prior to
    initialization of `Client`, unless account has already been created.
    :param .IConfig config: Client configuration.
    :param .AccountStorage account_storage: Account storage where newly
        registered account will be saved to. Save happens only after TOS
        acceptance step, so any account private keys or
        `.RegistrationResource` will not be persisted if `tos_cb`
        returns ``False``.
    :param tos_cb: If ACME CA requires the user to accept a Terms of
        Service before registering account, client action is
        necessary. For example, a CLI tool would prompt the user
        acceptance. `tos_cb` must be a callable that should accept
        `.RegistrationResource` and return a `bool`: ``True`` iff the
        Terms of Service present in the contained
        `.Registration.terms_of_service` is accepted by the client, and
        ``False`` otherwise. ``tos_cb`` will be called only if the
        client action is necessary, i.e. when ``terms_of_service is not
        None``. This argument is optional, if not supplied it will
        default to automatic acceptance!
    :raises certbot.errors.Error: In case of any client problems, in
        particular registration failure, or unaccepted Terms of Service.
    :raises acme.errors.Error: In case of any protocol problems.
    :returns: Newly registered and saved account, as well as protocol
        API handle (should be used in `Client` initialization).
    :rtype: `tuple` of `.Account` and `acme.client.Client`
    """
    # Log non-standard actions, potentially wrong API calls
    if account_storage.find_all():
        logger.info("There are already existing accounts for %s", config.server)
    if config.email is None:
        # Registering without an email must be explicitly opted into.
        if not config.register_unsafely_without_email:
            msg = ("No email was provided and "
                   "--register-unsafely-without-email was not present.")
            logger.warning(msg)
            raise errors.Error(msg)
        if not config.dry_run:
            logger.info("Registering without email!")

    # Each new registration shall use a fresh new key
    key = jose.JWKRSA(key=jose.ComparableRSAKey(
        rsa.generate_private_key(
            public_exponent=65537,
            key_size=config.rsa_key_size,
            backend=default_backend())))
    acme = acme_from_config_key(config, key)
    # TODO: add phone?
    regr = perform_registration(acme, config)

    if regr.terms_of_service is not None:
        # With no callback supplied, ToS are accepted automatically (see
        # docstring above).
        if tos_cb is not None and not tos_cb(regr):
            raise errors.Error(
                "Registration cannot proceed without accepting "
                "Terms of Service.")
        regr = acme.agree_to_tos(regr)

    acc = account.Account(regr, key)
    account.report_new_account(config)
    # Persist only after the ToS step, so unaccepted registrations are
    # never saved.
    account_storage.save(acc, acme)
    eff.handle_subscription(config)
    return acc, acme
def perform_registration(acme, config):
    """
    Actually register new account, trying repeatedly if there are email
    problems
    :param .IConfig config: Client configuration.
    :param acme.client.Client client: ACME client object.
    :returns: Registration Resource.
    :rtype: `acme.messages.RegistrationResource`
    """
    try:
        return acme.register(messages.NewRegistration.from_data(email=config.email))
    except messages.Error as e:
        # Only email-related rejections are recoverable; everything else
        # propagates unchanged.
        if e.code not in ("invalidEmail", "invalidContact"):
            raise
        if config.noninteractive_mode:
            msg = ("The ACME server believes %s is an invalid email address. "
                   "Please ensure it is a valid email and attempt "
                   "registration again." % config.email)
            raise errors.Error(msg)
        # Interactively ask for a replacement address, then retry.
        config.email = display_ops.get_email(invalid=True)
        return perform_registration(acme, config)
class Client(object):
    """Certbot's client.

    :ivar .IConfig config: Client configuration.
    :ivar .Account account: Account registered with `register`.
    :ivar .AuthHandler auth_handler: Authorizations handler that will
        dispatch DV challenges to appropriate authenticators
        (providing `.IAuthenticator` interface).
    :ivar .IAuthenticator auth: Prepared (`.IAuthenticator.prepare`)
        authenticator that can solve ACME challenges.
    :ivar .IInstaller installer: Installer.
    :ivar acme.client.Client acme: Optional ACME client API handle.
        You might already have one from `register`.

    """

    def __init__(self, config, account_, auth, installer, acme=None):
        """Initialize a client."""
        self.config = config
        self.account = account_
        self.auth = auth
        self.installer = installer

        # Initialize ACME if account is provided; the client is derived
        # from the account key so requests are signed correctly.
        if acme is None and self.account is not None:
            acme = acme_from_config_key(config, self.account.key)
        self.acme = acme

        if auth is not None:
            self.auth_handler = auth_handler.AuthHandler(
                auth, self.acme, self.account, self.config.pref_challs)
        else:
            self.auth_handler = None

    def obtain_certificate_from_csr(self, domains, csr, authzr=None):
        """Obtain certificate.

        Internal function with precondition that `domains` are
        consistent with identifiers present in the `csr`.

        :param list domains: Domain names.
        :param .util.CSR csr: PEM-encoded Certificate Signing
            Request. The key used to generate this CSR can be different
            than `authkey`.
        :param list authzr: List of
            :class:`acme.messages.AuthorizationResource`

        :returns: `.CertificateResource` and certificate chain (as
            returned by `.fetch_chain`).
        :rtype: tuple

        :raises .errors.Error: if no authenticator is set or no account
            is registered with the ACME server.

        """
        if self.auth_handler is None:
            msg = ("Unable to obtain certificate because authenticator is "
                   "not set.")
            logger.warning(msg)
            raise errors.Error(msg)
        if self.account.regr is None:
            raise errors.Error("Please register with the ACME server first.")

        logger.debug("CSR: %s, domains: %s", csr, domains)

        if authzr is None:
            authzr = self.auth_handler.get_authorizations(domains)

        certr = self.acme.request_issuance(
            jose.ComparableX509(
                OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr.data)),
            authzr)

        notify = zope.component.getUtility(interfaces.IDisplay).notification
        retries = 0
        chain = None

        # Fetching the chain is network-dependent; retry once after giving
        # the user a chance to fix their connection.
        while retries <= 1:
            if retries:
                notify('Failed to fetch chain, please check your network '
                       'and continue', pause=True)
            try:
                chain = self.acme.fetch_chain(certr)
                break
            except acme_errors.Error:
                logger.debug('Failed to fetch chain', exc_info=True)
                retries += 1

        if chain is None:
            raise acme_errors.Error(
                'Failed to fetch chain. You should not deploy the generated '
                'certificate, please rerun the command for a new one.')

        return certr, chain

    def obtain_certificate(self, domains):
        """Obtains a certificate from the ACME server.

        `.register` must be called before `.obtain_certificate`

        :param list domains: domains to get a certificate

        :returns: `.CertificateResource`, certificate chain (as
            returned by `.fetch_chain`), and newly generated private key
            (`.util.Key`) and DER-encoded Certificate Signing Request
            (`.util.CSR`).
        :rtype: tuple

        """
        authzr = self.auth_handler.get_authorizations(
            domains,
            self.config.allow_subset_of_names)

        # With allow_subset_of_names, fewer names than requested may have
        # been authorized; only put authorized names in the CSR.
        auth_domains = set(a.body.identifier.value for a in authzr)
        domains = [d for d in domains if d in auth_domains]

        # Create CSR from names
        if self.config.dry_run:
            # Dry run: keep key and CSR in memory only (file=None),
            # so nothing touches the filesystem.
            key = util.Key(file=None,
                           pem=crypto_util.make_key(self.config.rsa_key_size))
            csr = util.CSR(file=None, form="pem",
                           data=acme_crypto_util.make_csr(
                               key.pem, domains, self.config.must_staple))
        else:
            key = crypto_util.init_save_key(
                self.config.rsa_key_size, self.config.key_dir)
            csr = crypto_util.init_save_csr(key, domains, self.config.csr_dir)

        certr, chain = self.obtain_certificate_from_csr(
            domains, csr, authzr=authzr)
        return certr, chain, key, csr

    # pylint: disable=no-member
    def obtain_and_enroll_certificate(self, domains, certname):
        """Obtain and enroll certificate.

        Get a new certificate for the specified domains using the specified
        authenticator and installer, and then create a new renewable lineage
        containing it.

        :param list domains: Domains to request.
        :param str certname: Name of new cert

        :returns: A new :class:`certbot.storage.RenewableCert` instance
            referred to the enrolled cert lineage, False if the cert could not
            be obtained, or None if doing a successful dry run.

        """
        certr, chain, key, _ = self.obtain_certificate(domains)

        if (self.config.config_dir != constants.CLI_DEFAULTS["config_dir"] or
                self.config.work_dir != constants.CLI_DEFAULTS["work_dir"]):
            logger.warning(
                "Non-standard path(s), might not work with crontab installed "
                "by your operating system package manager")

        new_name = certname if certname else domains[0]
        if self.config.dry_run:
            logger.debug("Dry run: Skipping creating new lineage for %s",
                         new_name)
            return None
        else:
            return storage.RenewableCert.new_lineage(
                new_name, OpenSSL.crypto.dump_certificate(
                    OpenSSL.crypto.FILETYPE_PEM, certr.body.wrapped),
                key.pem, crypto_util.dump_pyopenssl_chain(chain),
                self.config)

    def save_certificate(self, certr, chain_cert,
                         cert_path, chain_path, fullchain_path):
        """Saves the certificate received from the ACME server.

        :param certr: ACME "certificate" resource.
        :type certr: :class:`acme.messages.Certificate`

        :param list chain_cert:
        :param str cert_path: Candidate path to a certificate.
        :param str chain_path: Candidate path to a certificate chain.
        :param str fullchain_path: Candidate path to a full cert chain.

        :returns: cert_path, chain_path, and fullchain_path as absolute
            paths to the actual files
        :rtype: `tuple` of `str`

        :raises IOError: If unable to find room to write the cert files

        """
        for path in cert_path, chain_path, fullchain_path:
            util.make_or_verify_dir(
                os.path.dirname(path), 0o755, os.geteuid(),
                self.config.strict_permissions)

        cert_pem = OpenSSL.crypto.dump_certificate(
            OpenSSL.crypto.FILETYPE_PEM, certr.body.wrapped)
        cert_file, abs_cert_path = _open_pem_file('cert_path', cert_path)
        try:
            cert_file.write(cert_pem)
        finally:
            cert_file.close()
        logger.info("Server issued certificate; certificate written to %s",
                    abs_cert_path)

        if not chain_cert:
            # No chain was provided; signal the missing paths to the caller.
            return abs_cert_path, None, None
        else:
            chain_pem = crypto_util.dump_pyopenssl_chain(chain_cert)

            chain_file, abs_chain_path =\
                _open_pem_file('chain_path', chain_path)
            fullchain_file, abs_fullchain_path =\
                _open_pem_file('fullchain_path', fullchain_path)

            _save_chain(chain_pem, chain_file)
            # Fullchain = leaf certificate followed by the intermediates.
            _save_chain(cert_pem + chain_pem, fullchain_file)

            return abs_cert_path, abs_chain_path, abs_fullchain_path

    def deploy_certificate(self, domains, privkey_path,
                           cert_path, chain_path, fullchain_path):
        """Install certificate

        :param list domains: list of domains to install the certificate
        :param str privkey_path: path to certificate private key
        :param str cert_path: certificate file path (optional)
        :param str chain_path: chain file path
        :param str fullchain_path: full chain file path

        :raises .errors.Error: if no installer is available

        """
        if self.installer is None:
            # Bug fix: the two implicitly concatenated literals previously
            # rendered as "unable to deploythe certificate" (missing space).
            logger.warning("No installer specified, client is unable to deploy "
                           "the certificate")
            raise errors.Error("No installer available")

        chain_path = None if chain_path is None else os.path.abspath(chain_path)

        msg = ("Unable to install the certificate")
        with error_handler.ErrorHandler(self._recovery_routine_with_msg, msg):
            for dom in domains:
                self.installer.deploy_cert(
                    domain=dom, cert_path=os.path.abspath(cert_path),
                    key_path=os.path.abspath(privkey_path),
                    chain_path=chain_path,
                    fullchain_path=fullchain_path)
                self.installer.save()  # needed by the Apache plugin

            self.installer.save("Deployed ACME Certificate")

        msg = ("We were unable to install your certificate, "
               "however, we successfully restored your "
               "server to its prior configuration.")
        with error_handler.ErrorHandler(self._rollback_and_restart, msg):
            # sites may have been enabled / final cleanup
            self.installer.restart()

    def enhance_config(self, domains, chain_path):
        """Enhance the configuration.

        :param list domains: list of domains to configure
        :param chain_path: chain file path
        :type chain_path: `str` or `None`

        :raises .errors.Error: if no installer is specified in the
            client.

        """
        if self.installer is None:
            logger.warning("No installer is specified, there isn't any "
                           "configuration to enhance.")
            raise errors.Error("No installer available")

        enhanced = False
        # (config attribute, installer enhancement name, enhancement option)
        enhancement_info = (
            ("hsts", "ensure-http-header", "Strict-Transport-Security"),
            ("redirect", "redirect", None),
            ("staple", "staple-ocsp", chain_path),
            ("uir", "ensure-http-header", "Upgrade-Insecure-Requests"),)
        supported = self.installer.supported_enhancements()

        for config_name, enhancement_name, option in enhancement_info:
            config_value = getattr(self.config, config_name)
            if enhancement_name in supported:
                # "redirect" left unset on the CLI means: ask the user.
                if config_name == "redirect" and config_value is None:
                    config_value = enhancements.ask(enhancement_name)
                if config_value:
                    self.apply_enhancement(domains, enhancement_name, option)
                    enhanced = True
            elif config_value:
                logger.warning(
                    "Option %s is not supported by the selected installer. "
                    "Skipping enhancement.", config_name)

        msg = ("We were unable to restart web server")
        if enhanced:
            with error_handler.ErrorHandler(self._rollback_and_restart, msg):
                self.installer.restart()

    def apply_enhancement(self, domains, enhancement, options=None):
        """Applies an enhancement on all domains.

        :param list domains: list of ssl_vhosts (as strings)
        :param str enhancement: name of enhancement, e.g. ensure-http-header
        :param str options: options to enhancement, e.g. Strict-Transport-Security

            .. note:: When more `options` are needed, make options a list.

        :raises .errors.PluginError: If Enhancement is not supported, or if
            there is any other problem with the enhancement.

        """
        msg = ("We were unable to set up enhancement %s for your server, "
               "however, we successfully installed your certificate."
               % (enhancement))
        with error_handler.ErrorHandler(self._recovery_routine_with_msg, msg):
            for dom in domains:
                try:
                    self.installer.enhance(dom, enhancement, options)
                except errors.PluginEnhancementAlreadyPresent:
                    # Already configured: not an error, just note and move on.
                    logger.warning("Enhancement %s was already set.",
                                   enhancement)
                except errors.PluginError:
                    logger.warning("Unable to set enhancement %s for %s",
                                   enhancement, dom)
                    raise
            self.installer.save("Add enhancement %s" % (enhancement))

    def _recovery_routine_with_msg(self, success_msg):
        """Calls the installer's recovery routine and prints success_msg

        :param str success_msg: message to show on successful recovery

        """
        self.installer.recovery_routine()
        reporter = zope.component.getUtility(interfaces.IReporter)
        reporter.add_message(success_msg, reporter.HIGH_PRIORITY)

    def _rollback_and_restart(self, success_msg):
        """Rollback the most recent checkpoint and restart the webserver

        :param str success_msg: message to show on successful rollback

        """
        logger.critical("Rolling back to previous server configuration...")
        reporter = zope.component.getUtility(interfaces.IReporter)
        try:
            self.installer.rollback_checkpoints()
            self.installer.restart()
        # Formerly a bare "except:"; Exception avoids intercepting
        # SystemExit/KeyboardInterrupt just to print a bug-report message
        # (the exception is re-raised either way).
        except Exception:
            # TODO: suggest letshelp-letsencrypt here
            reporter.add_message(
                "An error occurred and we failed to restore your config and "
                "restart your server. Please submit a bug report to "
                "https://github.com/letsencrypt/letsencrypt",
                reporter.HIGH_PRIORITY)
            raise
        reporter.add_message(success_msg, reporter.HIGH_PRIORITY)
def validate_key_csr(privkey, csr=None):
    """Validate Key and CSR files.

    Verifies that the client key and csr arguments are valid and correspond to
    one another. This does not currently check the names in the CSR due to
    the inability to read SANs from CSRs in python crypto libraries.

    If csr is left as None, only the key will be validated.

    :param privkey: Key associated with CSR
    :type privkey: :class:`certbot.util.Key`

    :param .util.CSR csr: CSR

    :raises .errors.Error: when validation fails

    """
    # TODO: Handle all of these problems appropriately
    # The client can eventually do things like prompt the user
    # and allow the user to take more appropriate actions

    # Key must be readable and valid.
    if privkey.pem and not crypto_util.valid_privkey(privkey.pem):
        raise errors.Error("The provided key is not a valid key")

    if csr:
        if csr.form == "der":
            csr_obj = OpenSSL.crypto.load_certificate_request(
                OpenSSL.crypto.FILETYPE_ASN1, csr.data)
            # Bug fix: a CSR (X509Req) must be serialized with
            # dump_certificate_request; dump_certificate expects an X509
            # certificate and fails on X509Req objects.
            csr = util.CSR(csr.file, OpenSSL.crypto.dump_certificate_request(
                OpenSSL.crypto.FILETYPE_PEM, csr_obj), "pem")

        # If CSR is provided, it must be readable and valid.
        if csr.data and not crypto_util.valid_csr(csr.data):
            raise errors.Error("The provided CSR is not a valid CSR")

        # If both CSR and key are provided, the key must be the same key used
        # in the CSR.
        if csr.data and privkey.pem:
            if not crypto_util.csr_matches_pubkey(
                    csr.data, privkey.pem):
                raise errors.Error("The key and CSR do not match")
def rollback(default_installer, checkpoints, config, plugins):
    """Revert configuration the specified number of checkpoints.

    :param default_installer: Name of the installer the user prefers.
    :param int checkpoints: Number of checkpoints to revert.

    :param config: Configuration.
    :type config: :class:`certbot.interfaces.IConfig`

    :param plugins: Available plugins from which an installer is picked.

    """
    # A misconfiguration is only a minor problem; let the user roll back.
    installer = plugin_selection.pick_installer(
        config, default_installer, plugins,
        question="Which installer should be used for rollback?")

    # No installer found means nothing was ever modified by one,
    # so there is nothing to roll back.
    if installer is None:
        return

    # Init succeeded without errors; proceed normally.
    installer.rollback_checkpoints(checkpoints)
    installer.restart()
def view_config_changes(config, num=None):
    """View checkpoints and associated configuration changes.

    .. note:: This assumes that the installation is using a Reverter object.

    :param config: Configuration.
    :type config: :class:`certbot.interfaces.IConfig`

    :param num: Maximum number of checkpoints to display (all if None).

    """
    # Make sure any interrupted operation is recovered before displaying.
    checkpoint_viewer = reverter.Reverter(config)
    checkpoint_viewer.recovery_routine()
    checkpoint_viewer.view_config_changes(num)
def _open_pem_file(cli_arg_path, pem_path):
    """Open a pem file.

    If cli_arg_path was set by the client, open that.
    Otherwise, uniquify the file path.

    :param str cli_arg_path: the cli arg name, e.g. cert_path
    :param str pem_path: the pem file path to open

    :returns: a tuple of file object and its absolute file path

    """
    # When the user did not pin the path on the CLI, avoid clobbering an
    # existing file by generating a unique variant of pem_path.
    if not cli.set_by_cli(cli_arg_path):
        handle, unique_path = util.unique_file(pem_path, 0o644, "wb")
        return handle, os.path.abspath(unique_path)
    handle = util.safe_open(pem_path, chmod=0o644, mode="wb")
    return handle, os.path.abspath(pem_path)
def _save_chain(chain_pem, chain_file):
    """Write a certificate chain to an already-open file and close it.

    :param str chain_pem: certificate chain in PEM format
    :param chain_file: open, writable file object for the chain

    """
    # Always close the handle, even if the write fails mid-way.
    try:
        chain_file.write(chain_pem)
    finally:
        chain_file.close()

    logger.info("Cert chain written to %s", chain_file.name)
|
<filename>docs/reference/source/conf.py
# Configuration file for the Sphinx documentation builder.
#
# For a full list of all Sphinx configuration options see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
import os
import sys
import datetime
import importlib
import sphinx_rtd_theme  # imported for availability; registered via extensions below
from sphinx.builders.html import StandaloneHTMLBuilder

# Set up paths for import
file_path = os.path.realpath(__file__)  # Obtain path of this config file
# Project root is assumed to be 3 directories above this file
# (docs/reference/source/conf.py) — TODO confirm if the docs tree moves.
root_path = (os.sep).join(file_path.split(os.sep)[:-4])  # Obtain project root path
sys.path.insert(1, root_path)  # Import from root path

# -- Project information -----------------------------------------------------
# Per-project settings (name, author, code location, report metadata, logo)
# live in a separate project.py next to this file.
from docs.reference.source.project import project, author, codename, codedir, report_title, report_author, logo

# Scan the project to generate documentation (see run_apidoc/setup below)
scan = True

# Source: prefix helper — yields "<codedir><sep>" unless the code lives in
# the project root ('.'), in which case it yields the empty string.
src = lambda sep: codedir + f'{sep}' if codedir != '.' else ''

# Obtain the project's release version, which must be stored in a
# __version__ variable inside the main project script or package.
# WARNING: the script or the package's __init__.py WILL BE RUN
# on import.
# In the case of single-script projects, the following architecture
# is suggested:
#
#   # My single-script project
#
#   __version__ = <version of your project>
#
#   if __name__ == '__main__':
#       <body of your project>
release = importlib.import_module(f'{src(".")}{codename}').__version__  # Get project version
sys.path.remove(root_path)  # Remove root path from search

# Copyright: current year + author from project.py
copyright = f'{datetime.datetime.now().date().strftime("%Y")}, {author}'

# Language
language = 'en'

# -- Text editing ------------------------------------------------------------
# Replacements available in every document (|project|, |version|, |codename|)
rst_epilog = f'''
.. |project| replace:: {project}
.. |version| replace:: {release}
.. |codename| replace:: {codename}
'''

# Custom roles (none defined yet; placeholder kept for future use)
rst_prolog = '''
'''

# Cross-referencing: number figures/tables/listings/sections
numfig = True
numfig_format = {'figure': 'Figure %s',
                 'table': 'Table %s',
                 'code-block': 'Listing %s',
                 'section': 'Section %s'}
autosectionlabel_maxdepth = 1  # Automatically label top level sections only

# BibTeX citations
# In text: :cite:t:`key`
# Parenthetical: :cite:p:`key`
extensions = ['sphinxcontrib.bibtex']
bibtex_bibfiles = ['bibliography.bib']

# -- General configuration ---------------------------------------------------
# Extract documentation from the __init__ function of classes
autoclass_content = 'init'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions += ['sphinx.ext.autodoc',
               'sphinx.ext.autosummary',
               'sphinx.ext.imgmath',
               'sphinx.ext.autosectionlabel'
               ]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['source/_templates']

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False

# A list of prefixes that are ignored for sorting the Python module index
# (e.g., if this is set to ['foo.'], then foo.bar is shown under B, not F).
# This can be handy if you document a project that consists of a single package.
# Works only for the HTML builder currently. Default is [].
modindex_common_prefix = [f'{codename}.']
def run_apidoc(app):
    """Generate API documentation for the project with better_apidoc.

    Connected to Sphinx's 'builder-inited' event (see setup below).

    :param app: the Sphinx application object.

    """
    import better_apidoc

    # Make the package importable for better_apidoc.
    sys.path.insert(0, os.path.abspath(f'../../{src("/")}.'))
    better_apidoc.APP = app

    # Equivalent of running:
    #   better-apidoc -t <templates> -fMeET -o source <package dir>
    apidoc_args = [
        'better-apidoc',
        '-t', templates_path[0],
        '-fMeET',
        '-o', 'source',
        f'../../{src("/")}{codename}',
    ]
    better_apidoc.main(apidoc_args)
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_templates/*']  # Exclude templates from rendering

# -- HTML SETTINGS -------------------------------------------------------------
root_doc = 'index'

# Figure format priority for .. image:: <name>.*
StandaloneHTMLBuilder.supported_image_types = [
    'image/svg+xml',
    'image/gif',
    'image/png',
    'image/jpeg'
]

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
extensions += ['sphinx_rtd_theme']

# Logo (file lives under figures/, name supplied by project.py)
html_logo = f'figures/{logo}'
html_theme_options = {
    'logo_only': False,
    'display_version': True,
}

# Math: render formulas to SVG images via LaTeX
imgmath_image_format = 'svg'
imgmath_latex = 'latex'
imgmath_latex_preamble = ''

# -- LATEX SETTINGS ------------------------------------------------------------
report_doc = 'report'

# Collect every file under figures/ so they are shipped with the LaTeX build.
# (dirnames from os.walk is unused, hence the underscore.)
figures = [os.path.join(dp, f) for dp, _dn, filenames in os.walk('figures') for f in filenames]
latex_additional_files = ['report.sty', 'project.sty'] + figures
latex_engine = 'lualatex'
latex_elements = {
    'preamble': r'''
\RequirePackage{project}
''',
    'releasename': 'Version',
    'papersize': 'a4paper',
    'pointsize': '11pt',
    'classoptions': ',openany,oneside',
    # Bug fix: use a raw string — '\maketitle' contains the invalid escape
    # sequence '\m' (SyntaxWarning since Python 3.12, a future SyntaxError).
    'maketitle': r'\maketitle',
    'tableofcontents': '',
    'figure_align': 'H',
    'sphinxsetup': r'''
hmargin={2cm,2cm},
vmargin={4cm,3cm},
''',
}
latex_documents = [
    (report_doc, 'main.tex', report_title, report_author, 'manual'),
]

# Document __init__ files
special_members = '__init__'

# -- Generate documentation ----------------------------------------------------
# Only define setup() (and thus hook apidoc generation) when scanning is on.
if scan:
    def setup(app):
        app.connect('builder-inited', run_apidoc)
|
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/master/script-library/docs/kubectl-helm.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./kubectl-helm-debian.sh

set -e

# Root privileges are required for apt-get and writing to /usr/local/bin.
if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi

export DEBIAN_FRONTEND=noninteractive

# Ensure curl and CA certificates are present; refresh the package
# lists first only if they are missing or empty.
if ! dpkg -s curl ca-certificates > /dev/null 2>&1; then
    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
        apt-get update
    fi
    apt-get -y install --no-install-recommends curl ca-certificates
fi

# Install the latest stable kubectl release.
echo "Downloading kubectl..."
kubectl_version="$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)"
curl -sSL -o /usr/local/bin/kubectl "https://storage.googleapis.com/kubernetes-release/release/${kubectl_version}/bin/linux/amd64/kubectl"
chmod +x /usr/local/bin/kubectl

# Install Helm 3 via the official installer script.
# NOTE(review): this pipes a remote script into bash without checksum
# verification — acceptable per upstream docs, but confirm this matches
# your security policy.
echo "Installing Helm..."
curl -s https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash -

echo "Done!"
#!/bin/bash

# Cleanly terminate each managed GNU screen session.
for session in GamemodeChanger MinecraftServer StopWatcher; do
    screen -p 0 -S "$session" -X quit
done
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.