repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
IceBotYT/cas
|
pkg/extractor/wildcard/wildcard_test.go
|
<filename>pkg/extractor/wildcard/wildcard_test.go
package wildcard
import (
"log"
file2 "github.com/codenotary/cas/pkg/extractor/file"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/codenotary/cas/pkg/uri"
"github.com/stretchr/testify/assert"
)
// TestWildcard verifies that a URI with no explicit scheme falls back to the
// wildcard extractor and resolves to a file artifact with the expected
// kind, name, hash and content type.
func TestWildcard(t *testing.T) {
	tmpFile, err := ioutil.TempFile("", "cas-test-scheme-file")
	if err != nil {
		log.Fatal(err)
	}
	defer os.Remove(tmpFile.Name())

	err = ioutil.WriteFile(tmpFile.Name(), []byte("123\n"), 0644)
	if err != nil {
		log.Fatal(err)
	}

	// Default empty schema is wildcard.
	tmpURI, _ := uri.Parse(tmpFile.Name())
	artifacts, err := Artifact(tmpURI)
	assert.NoError(t, err)
	assert.NotNil(t, artifacts[0])
	assert.Equal(t, file2.Scheme, artifacts[0].Kind)
	assert.Equal(t, filepath.Base(tmpFile.Name()), artifacts[0].Name)
	// SHA-256 of "123\n".
	assert.Equal(t, "181210f8f9c779c26da1d9b2075bde0127302ee0e3fca38c9a83f5b1dd8e5d3b", artifacts[0].Hash)

	// A scheme-less relative path goes through the same wildcard handling.
	readmeURI, _ := uri.Parse("../../../README.md")
	artifacts, err = Artifact(readmeURI)
	assert.NoError(t, err)
	assert.NotNil(t, artifacts)
	assert.Equal(t, artifacts[0].ContentType, "text/plain; charset=utf-8")
}
|
spacekpe/SoftHSMv2
|
src/bin/util/softhsm2-util-ossl.cpp
|
/*
* Copyright (c) 2010 .SE (The Internet Infrastructure Foundation)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*****************************************************************************
softhsm2-util-ossl.cpp
Code specific for OpenSSL
*****************************************************************************/
#include <config.h>
#define UTIL_OSSL
#include "softhsm2-util.h"
#include "softhsm2-util-ossl.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <iostream>
#include <fstream>
#include <openssl/pem.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/pkcs12.h>
// Init OpenSSL.
// Nothing is needed at start-up for the PKCS#8 operations in this file,
// so algorithm registration is intentionally skipped.
void crypto_init()
{
	// We do not need to do this one
	// OpenSSL_add_all_algorithms();
}
// Final OpenSSL.
// Releases OpenSSL's global ex_data state. EVP_cleanup() is skipped to
// match crypto_init(), which never registered the algorithm tables.
void crypto_final()
{
	// EVP_cleanup();
	CRYPTO_cleanup_all_ex_data();
}
// Import a key pair from the PKCS#8 file at filePath into the token.
//
// hSession     open read/write PKCS#11 session
// filePIN      PIN for an encrypted PKCS#8 file, or NULL
// label/objID  CKA_LABEL and CKA_ID for the created objects
// noPublicKey  when non-zero, the public key is not stored on the token
//
// Returns 0 on success and 1 on failure.
int crypto_import_key_pair
(
	CK_SESSION_HANDLE hSession,
	char* filePath,
	char* filePIN,
	char* label,
	char* objID,
	size_t objIDLen,
	int noPublicKey
)
{
	EVP_PKEY* pkey = crypto_read_file(filePath, filePIN);
	if (pkey == NULL)
	{
		return 1;
	}

	int keyAlgo = EVP_PKEY_type(pkey->type);

	if (keyAlgo == EVP_PKEY_RSA)
	{
		RSA* rsa = EVP_PKEY_get1_RSA(pkey);
		EVP_PKEY_free(pkey);
		if (rsa == NULL)
		{
			fprintf(stderr, "ERROR: Could not get the key material.\n");
			return 1;
		}
		int result = crypto_save_rsa(hSession, label, objID, objIDLen, noPublicKey, rsa);
		RSA_free(rsa);
		return result;
	}

	if (keyAlgo == EVP_PKEY_DSA)
	{
		DSA* dsa = EVP_PKEY_get1_DSA(pkey);
		EVP_PKEY_free(pkey);
		if (dsa == NULL)
		{
			fprintf(stderr, "ERROR: Could not get the key material.\n");
			return 1;
		}
		int result = crypto_save_dsa(hSession, label, objID, objIDLen, noPublicKey, dsa);
		DSA_free(dsa);
		return result;
	}

	// Neither RSA nor DSA.
	fprintf(stderr, "ERROR: Cannot handle this algorithm.\n");
	EVP_PKEY_free(pkey);
	return 1;
}
// Read a private key from a PEM-encoded PKCS#8 file.
//
// filePath  path to the PKCS#8 file
// filePIN   decryption PIN for an encrypted file, or NULL for plaintext
//
// Returns a newly allocated EVP_PKEY (caller must EVP_PKEY_free it),
// or NULL on any error (message already printed to stderr).
EVP_PKEY* crypto_read_file(char* filePath, char* filePIN)
{
	BIO* in = NULL;
	PKCS8_PRIV_KEY_INFO* p8inf = NULL;
	EVP_PKEY* pkey = NULL;
	X509_SIG* p8 = NULL;

	if (!(in = BIO_new_file(filePath, "rb")))
	{
		// Fixed message: was "Could open the PKCS#8 file".
		fprintf(stderr, "ERROR: Could not open the PKCS#8 file: %s\n", filePath);
		return NULL;
	}

	// The PKCS#8 file is encrypted
	if (filePIN)
	{
		// Read the encrypted envelope, then decrypt it with the PIN.
		p8 = PEM_read_bio_PKCS8(in, NULL, NULL, NULL);
		BIO_free(in);
		if (!p8)
		{
			fprintf(stderr, "ERROR: Could not read the PKCS#8 file. "
					"Maybe the file is not encrypted.\n");
			return NULL;
		}
		p8inf = PKCS8_decrypt(p8, filePIN, strlen(filePIN));
		X509_SIG_free(p8);
		if (!p8inf)
		{
			fprintf(stderr, "ERROR: Could not decrypt the PKCS#8 file. "
					"Maybe wrong PIN to file (--file-pin <PIN>)\n");
			return NULL;
		}
	}
	else
	{
		// Plaintext PKCS#8.
		p8inf = PEM_read_bio_PKCS8_PRIV_KEY_INFO(in, NULL, NULL, NULL);
		BIO_free(in);
		if (!p8inf)
		{
			// Fixed message: "encypted" -> "encrypted".
			fprintf(stderr, "ERROR: Could not read the PKCS#8 file. "
					"Maybe it is encrypted (--file-pin <PIN>)\n");
			return NULL;
		}
	}

	if (p8inf->broken)
	{
		fprintf(stderr, "ERROR: Broken key encoding.\n");
		PKCS8_PRIV_KEY_INFO_free(p8inf);
		return NULL;
	}

	// Convert the PKCS#8 to OpenSSL
	pkey = EVP_PKCS82PKEY(p8inf);
	PKCS8_PRIV_KEY_INFO_free(p8inf);
	if (!pkey)
	{
		fprintf(stderr, "ERROR: Could not convert the key.\n");
		return NULL;
	}

	return pkey;
}
// Save the RSA key data in PKCS#11 as a public/private object pair.
//
// hSession     open read/write PKCS#11 session
// label/objID  CKA_LABEL and CKA_ID assigned to both objects
// noPublicKey  when non-zero the public key is created as a session object
//              only (CKA_TOKEN = CK_FALSE); the private key always goes on
//              the token
// rsa          source key (not consumed; caller still owns it)
//
// Returns 0 on success, 1 on failure.
int crypto_save_rsa
(
	CK_SESSION_HANDLE hSession,
	char* label,
	char* objID,
	size_t objIDLen,
	int noPublicKey,
	RSA* rsa
)
{
	rsa_key_material_t* keyMat = crypto_malloc_rsa(rsa);
	if (!keyMat)
	{
		fprintf(stderr, "ERROR: Could not convert the key material to binary information.\n");
		return 1;
	}

	CK_OBJECT_CLASS pubClass = CKO_PUBLIC_KEY, privClass = CKO_PRIVATE_KEY;
	CK_KEY_TYPE keyType = CKK_RSA;
	CK_BBOOL ckTrue = CK_TRUE, ckFalse = CK_FALSE, ckToken = CK_TRUE;
	if (noPublicKey)
	{
		ckToken = CK_FALSE;
	}

	CK_ATTRIBUTE pubTemplate[] = {
		{ CKA_CLASS, &pubClass, sizeof(pubClass) },
		{ CKA_KEY_TYPE, &keyType, sizeof(keyType) },
		{ CKA_LABEL, label, strlen(label) },
		{ CKA_ID, objID, objIDLen },
		{ CKA_TOKEN, &ckToken, sizeof(ckToken) },
		{ CKA_VERIFY, &ckTrue, sizeof(ckTrue) },
		{ CKA_ENCRYPT, &ckFalse, sizeof(ckFalse) },
		{ CKA_WRAP, &ckFalse, sizeof(ckFalse) },
		{ CKA_PUBLIC_EXPONENT, keyMat->bigE, keyMat->sizeE },
		{ CKA_MODULUS, keyMat->bigN, keyMat->sizeN }
	};
	CK_ATTRIBUTE privTemplate[] = {
		{ CKA_CLASS, &privClass, sizeof(privClass) },
		{ CKA_KEY_TYPE, &keyType, sizeof(keyType) },
		{ CKA_LABEL, label, strlen(label) },
		{ CKA_ID, objID, objIDLen },
		{ CKA_SIGN, &ckTrue, sizeof(ckTrue) },
		{ CKA_DECRYPT, &ckFalse, sizeof(ckFalse) },
		{ CKA_UNWRAP, &ckFalse, sizeof(ckFalse) },
		{ CKA_SENSITIVE, &ckTrue, sizeof(ckTrue) },
		{ CKA_TOKEN, &ckTrue, sizeof(ckTrue) },
		{ CKA_PRIVATE, &ckTrue, sizeof(ckTrue) },
		{ CKA_EXTRACTABLE, &ckFalse, sizeof(ckFalse) },
		{ CKA_PUBLIC_EXPONENT, keyMat->bigE, keyMat->sizeE },
		{ CKA_MODULUS, keyMat->bigN, keyMat->sizeN },
		{ CKA_PRIVATE_EXPONENT, keyMat->bigD, keyMat->sizeD },
		{ CKA_PRIME_1, keyMat->bigP, keyMat->sizeP },
		{ CKA_PRIME_2, keyMat->bigQ, keyMat->sizeQ },
		{ CKA_EXPONENT_1, keyMat->bigDMP1, keyMat->sizeDMP1 },
		{ CKA_EXPONENT_2, keyMat->bigDMQ1, keyMat->sizeDMQ1 },
		{ CKA_COEFFICIENT, keyMat->bigIQMP, keyMat->sizeIQMP }
	};

	CK_OBJECT_HANDLE hKey1, hKey2;

	// Derive the attribute counts from the templates instead of hard-coding
	// them (was 19 and 10), so they cannot drift if attributes change.
	CK_RV rv = p11->C_CreateObject(hSession, privTemplate,
			sizeof(privTemplate) / sizeof(privTemplate[0]), &hKey1);
	if (rv != CKR_OK)
	{
		fprintf(stderr, "ERROR: Could not save the private key in the token. "
				"Maybe the algorithm is not supported.\n");
		crypto_free_rsa(keyMat);
		return 1;
	}

	rv = p11->C_CreateObject(hSession, pubTemplate,
			sizeof(pubTemplate) / sizeof(pubTemplate[0]), &hKey2);
	crypto_free_rsa(keyMat);
	if (rv != CKR_OK)
	{
		// Roll back the private key so a half-imported pair is not left behind.
		p11->C_DestroyObject(hSession, hKey1);
		fprintf(stderr, "ERROR: Could not save the public key in the token.\n");
		return 1;
	}

	printf("The key pair has been imported.\n");
	return 0;
}
// Convert the OpenSSL key to binary.
// Extracts every RSA component (e, n, d, p, q, dmp1, dmq1, iqmp) into
// big-endian byte buffers sized by BN_num_bytes, suitable for use as
// PKCS#11 CK_ATTRIBUTE values.
// NOTE(review): accesses RSA struct fields directly, which requires an
// OpenSSL version exposing the struct layout (pre-1.1) — confirm the
// build's OpenSSL version.
// Returns a heap-allocated rsa_key_material_t that the caller must release
// with crypto_free_rsa(), or NULL on NULL input or allocation failure.
rsa_key_material_t* crypto_malloc_rsa(RSA* rsa)
{
	if (rsa == NULL)
	{
		return NULL;
	}
	rsa_key_material_t* keyMat = (rsa_key_material_t*)malloc(sizeof(rsa_key_material_t));
	if (keyMat == NULL)
	{
		return NULL;
	}
	// Record the byte length of each component first ...
	keyMat->sizeE = BN_num_bytes(rsa->e);
	keyMat->sizeN = BN_num_bytes(rsa->n);
	keyMat->sizeD = BN_num_bytes(rsa->d);
	keyMat->sizeP = BN_num_bytes(rsa->p);
	keyMat->sizeQ = BN_num_bytes(rsa->q);
	keyMat->sizeDMP1 = BN_num_bytes(rsa->dmp1);
	keyMat->sizeDMQ1 = BN_num_bytes(rsa->dmq1);
	keyMat->sizeIQMP = BN_num_bytes(rsa->iqmp);
	// ... then allocate all buffers before checking, so every pointer is
	// either valid or NULL when crypto_free_rsa runs below.
	keyMat->bigE = (CK_VOID_PTR)malloc(keyMat->sizeE);
	keyMat->bigN = (CK_VOID_PTR)malloc(keyMat->sizeN);
	keyMat->bigD = (CK_VOID_PTR)malloc(keyMat->sizeD);
	keyMat->bigP = (CK_VOID_PTR)malloc(keyMat->sizeP);
	keyMat->bigQ = (CK_VOID_PTR)malloc(keyMat->sizeQ);
	keyMat->bigDMP1 = (CK_VOID_PTR)malloc(keyMat->sizeDMP1);
	keyMat->bigDMQ1 = (CK_VOID_PTR)malloc(keyMat->sizeDMQ1);
	keyMat->bigIQMP = (CK_VOID_PTR)malloc(keyMat->sizeIQMP);
	if
	(
		!keyMat->bigE ||
		!keyMat->bigN ||
		!keyMat->bigD ||
		!keyMat->bigP ||
		!keyMat->bigQ ||
		!keyMat->bigDMP1 ||
		!keyMat->bigDMQ1 ||
		!keyMat->bigIQMP
	)
	{
		crypto_free_rsa(keyMat);
		return NULL;
	}
	// Serialize each BIGNUM into its buffer (big-endian, no leading zeros).
	BN_bn2bin(rsa->e, (unsigned char*)keyMat->bigE);
	BN_bn2bin(rsa->n, (unsigned char*)keyMat->bigN);
	BN_bn2bin(rsa->d, (unsigned char*)keyMat->bigD);
	BN_bn2bin(rsa->p, (unsigned char*)keyMat->bigP);
	BN_bn2bin(rsa->q, (unsigned char*)keyMat->bigQ);
	BN_bn2bin(rsa->dmp1, (unsigned char*)keyMat->bigDMP1);
	BN_bn2bin(rsa->dmq1, (unsigned char*)keyMat->bigDMQ1);
	BN_bn2bin(rsa->iqmp, (unsigned char*)keyMat->bigIQMP);
	return keyMat;
}
// Free the memory of the RSA key material. Safe to call with NULL.
void crypto_free_rsa(rsa_key_material_t* keyMat)
{
	if (keyMat == NULL) return;

	// free(NULL) is a no-op per the C standard, so no per-field guards
	// are needed.
	free(keyMat->bigE);
	free(keyMat->bigN);
	free(keyMat->bigD);
	free(keyMat->bigP);
	free(keyMat->bigQ);
	free(keyMat->bigDMP1);
	free(keyMat->bigDMQ1);
	free(keyMat->bigIQMP);
	free(keyMat);
}
// Save the DSA key data in PKCS#11 as a public/private object pair.
//
// hSession     open read/write PKCS#11 session
// label/objID  CKA_LABEL and CKA_ID assigned to both objects
// noPublicKey  when non-zero the public key is created as a session object
//              only (CKA_TOKEN = CK_FALSE); the private key always goes on
//              the token
// dsa          source key (not consumed; caller still owns it)
//
// Returns 0 on success, 1 on failure.
int crypto_save_dsa
(
	CK_SESSION_HANDLE hSession,
	char* label,
	char* objID,
	size_t objIDLen,
	int noPublicKey,
	DSA* dsa
)
{
	dsa_key_material_t* keyMat = crypto_malloc_dsa(dsa);
	if (keyMat == NULL)
	{
		fprintf(stderr, "ERROR: Could not convert the key material to binary information.\n");
		return 1;
	}

	CK_OBJECT_CLASS pubClass = CKO_PUBLIC_KEY, privClass = CKO_PRIVATE_KEY;
	CK_KEY_TYPE keyType = CKK_DSA;
	CK_BBOOL ckTrue = CK_TRUE, ckFalse = CK_FALSE, ckToken = CK_TRUE;
	if (noPublicKey)
	{
		ckToken = CK_FALSE;
	}

	CK_ATTRIBUTE pubTemplate[] = {
		{ CKA_CLASS, &pubClass, sizeof(pubClass) },
		{ CKA_KEY_TYPE, &keyType, sizeof(keyType) },
		{ CKA_LABEL, label, strlen(label) },
		{ CKA_ID, objID, objIDLen },
		{ CKA_TOKEN, &ckToken, sizeof(ckToken) },
		{ CKA_VERIFY, &ckTrue, sizeof(ckTrue) },
		{ CKA_ENCRYPT, &ckFalse, sizeof(ckFalse) },
		{ CKA_WRAP, &ckFalse, sizeof(ckFalse) },
		{ CKA_PRIME, keyMat->bigP, keyMat->sizeP },
		{ CKA_SUBPRIME, keyMat->bigQ, keyMat->sizeQ },
		{ CKA_BASE, keyMat->bigG, keyMat->sizeG },
		{ CKA_VALUE, keyMat->bigY, keyMat->sizeY }
	};
	CK_ATTRIBUTE privTemplate[] = {
		{ CKA_CLASS, &privClass, sizeof(privClass) },
		{ CKA_KEY_TYPE, &keyType, sizeof(keyType) },
		{ CKA_LABEL, label, strlen(label) },
		{ CKA_ID, objID, objIDLen },
		{ CKA_SIGN, &ckTrue, sizeof(ckTrue) },
		{ CKA_DECRYPT, &ckFalse, sizeof(ckFalse) },
		{ CKA_UNWRAP, &ckFalse, sizeof(ckFalse) },
		{ CKA_SENSITIVE, &ckTrue, sizeof(ckTrue) },
		{ CKA_TOKEN, &ckTrue, sizeof(ckTrue) },
		{ CKA_PRIVATE, &ckTrue, sizeof(ckTrue) },
		{ CKA_EXTRACTABLE, &ckFalse, sizeof(ckFalse) },
		{ CKA_PRIME, keyMat->bigP, keyMat->sizeP },
		{ CKA_SUBPRIME, keyMat->bigQ, keyMat->sizeQ },
		{ CKA_BASE, keyMat->bigG, keyMat->sizeG },
		{ CKA_VALUE, keyMat->bigX, keyMat->sizeX }
	};

	CK_OBJECT_HANDLE hKey1, hKey2;

	// Derive the attribute counts from the templates instead of hard-coding
	// them (was 15 and 12), so they cannot drift if attributes change.
	CK_RV rv = p11->C_CreateObject(hSession, privTemplate,
			sizeof(privTemplate) / sizeof(privTemplate[0]), &hKey1);
	if (rv != CKR_OK)
	{
		fprintf(stderr, "ERROR: Could not save the private key in the token. "
				"Maybe the algorithm is not supported.\n");
		crypto_free_dsa(keyMat);
		return 1;
	}

	rv = p11->C_CreateObject(hSession, pubTemplate,
			sizeof(pubTemplate) / sizeof(pubTemplate[0]), &hKey2);
	crypto_free_dsa(keyMat);
	if (rv != CKR_OK)
	{
		// Roll back the private key so a half-imported pair is not left behind.
		p11->C_DestroyObject(hSession, hKey1);
		fprintf(stderr, "ERROR: Could not save the public key in the token.\n");
		return 1;
	}

	printf("The key pair has been imported.\n");
	return 0;
}
// Convert the OpenSSL key to binary.
// Extracts the DSA domain parameters (p, q, g) and key values (x = private,
// y = public) into big-endian byte buffers for PKCS#11 CK_ATTRIBUTE use.
// NOTE(review): accesses DSA struct fields directly (pre-1.1 OpenSSL
// layout) — confirm the build's OpenSSL version.
// Returns a heap-allocated dsa_key_material_t that the caller must release
// with crypto_free_dsa(), or NULL on NULL input or allocation failure.
dsa_key_material_t* crypto_malloc_dsa(DSA* dsa)
{
	if (dsa == NULL)
	{
		return NULL;
	}
	dsa_key_material_t* keyMat = (dsa_key_material_t*)malloc(sizeof(dsa_key_material_t));
	if (keyMat == NULL)
	{
		return NULL;
	}
	// Record each component's byte length, then allocate all buffers before
	// checking, so crypto_free_dsa below only sees valid or NULL pointers.
	keyMat->sizeP = BN_num_bytes(dsa->p);
	keyMat->sizeQ = BN_num_bytes(dsa->q);
	keyMat->sizeG = BN_num_bytes(dsa->g);
	keyMat->sizeX = BN_num_bytes(dsa->priv_key);
	keyMat->sizeY = BN_num_bytes(dsa->pub_key);
	keyMat->bigP = (CK_VOID_PTR)malloc(keyMat->sizeP);
	keyMat->bigQ = (CK_VOID_PTR)malloc(keyMat->sizeQ);
	keyMat->bigG = (CK_VOID_PTR)malloc(keyMat->sizeG);
	keyMat->bigX = (CK_VOID_PTR)malloc(keyMat->sizeX);
	keyMat->bigY = (CK_VOID_PTR)malloc(keyMat->sizeY);
	if (!keyMat->bigP || !keyMat->bigQ || !keyMat->bigG || !keyMat->bigX || !keyMat->bigY)
	{
		crypto_free_dsa(keyMat);
		return NULL;
	}
	// Serialize each BIGNUM into its buffer (big-endian, no leading zeros).
	BN_bn2bin(dsa->p, (unsigned char*)keyMat->bigP);
	BN_bn2bin(dsa->q, (unsigned char*)keyMat->bigQ);
	BN_bn2bin(dsa->g, (unsigned char*)keyMat->bigG);
	BN_bn2bin(dsa->priv_key, (unsigned char*)keyMat->bigX);
	BN_bn2bin(dsa->pub_key, (unsigned char*)keyMat->bigY);
	return keyMat;
}
// Free the memory of the DSA key material. Safe to call with NULL.
void crypto_free_dsa(dsa_key_material_t* keyMat)
{
	if (keyMat == NULL) return;

	// free(NULL) is a no-op per the C standard, so no per-field guards
	// are needed.
	free(keyMat->bigP);
	free(keyMat->bigQ);
	free(keyMat->bigG);
	free(keyMat->bigX);
	free(keyMat->bigY);
	free(keyMat);
}
|
C0PEP0D/sheld0n
|
cases/surfers_in_channel_flow_z/param/env/objects/static/surfer__us_0o8__surftimeconst_2o5/group/homogeneous/_member/agent/_behaviour/_sensor/velocity_gradients/choice.h
|
<filename>cases/surfers_in_channel_flow_z/param/env/objects/static/surfer__us_0o8__surftimeconst_2o5/group/homogeneous/_member/agent/_behaviour/_sensor/velocity_gradients/choice.h
#ifndef C0P_PARAM_OBJECTS_SURFER__US_0O8__SURFTIMECONST_2O5_GROUP_HOMOGENEOUS_MEMBER_AGENT_BEHAVIOUR_SENSOR_VELOCITY_GRADIENTS_CHOICE_H
#define C0P_PARAM_OBJECTS_SURFER__US_0O8__SURFTIMECONST_2O5_GROUP_HOMOGENEOUS_MEMBER_AGENT_BEHAVIOUR_SENSOR_VELOCITY_GRADIENTS_CHOICE_H
#pragma once
// THIS FILE SHOULD NOT BE EDITED DIRECTLY BY THE USERS.
// THIS FILE WILL BE AUTOMATICALLY EDITED WHEN THE
// CHOOSE COMMAND IS USED
// choose your behaviour
//
// Generated "choice" header: selects the velocity-gradients sensor
// implementation for this surfer group by aliasing the group's sensor
// type to the chosen ("accurate") core implementation, parameterized by
// this group's parameter set and the agent's active-step type.
#include "core/env/objects/object/agent/behaviour/sensor/velocity_gradients/accurate/core.h"
#include "param/env/objects/static/surfer__us_0o8__surftimeconst_2o5/group/homogeneous/_member/agent/_behaviour/_sensor/velocity_gradients/accurate/parameters.h"
namespace c0p {
template<typename SurferUs0O8Surftimeconst2O5GroupHomogeneousMemberAgentActiveStep>
using SurferUs0O8Surftimeconst2O5GroupHomogeneousMemberAgentBehaviourSensorVelocityGradients = AgentBehaviourSensorVelocityGradientsAccurate<SurferUs0O8Surftimeconst2O5GroupHomogeneousMemberAgentBehaviourSensorVelocityGradientsAccurateParameters, SurferUs0O8Surftimeconst2O5GroupHomogeneousMemberAgentActiveStep>;
}
#endif
|
risnadesmayanti/program2-spk
|
inoerp/modules/inv/serial_transaction/serial_transaction.js
|
<reponame>risnadesmayanti/program2-spk
// Holds the values chosen in the serial-number selection popup so the
// opener page can copy them back into its form.
function setValFromSelectPage(inv_serial_number_id, serial_number) {
this.inv_serial_number_id = inv_serial_number_id;
this.serial_number = serial_number;
}
// Copy the stored values into the matching form fields; falsy values
// (empty/undefined) leave the fields untouched.
setValFromSelectPage.prototype.setVal = function() {
if (this.inv_serial_number_id) {
$("#inv_serial_number_id").val(this.inv_serial_number_id);
}
if (this.serial_number) {
$("#serial_number").val(this.serial_number);
}
};
$(document).ready(function() {
//Popup for selecting
// Opens the serial-number picker in a fixed-size chromeless window.
$(".inv_serial_transaction_header_id.select_popup").on("click", function() {
void window.open('select.php?class_name=inv_serial_number', '_blank',
'width=1000,height=800,TOOLBAR=no,MENUBAR=no,SCROLLBARS=yes,RESIZABLE=yes,LOCATION=no,DIRECTORIES=no,STATUS=no');
});
});
|
franblas/NAOC
|
src/main/scala/handlers/server/Dialog.scala
|
package handlers.server
import handlers.GameClient
import handlers.packets.{PacketWriter, ServerCodes}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/**
  * Created by franblas on 15/04/17.
  *
  * Builds the server "dialog" packet sent to a client. The constructor
  * arguments are written into the packet in a fixed byte order, so the
  * sequence in compute() must not be reordered.
  */
class Dialog(code: Int, data1: Int, data2: Int, data3: Int, data4: Int, dialogType: Int, autoWrapText: Boolean, msg: String, gameClient: GameClient) {
  /** Serializes the dialog, or yields an empty byte array when the client
    * has no player attached. */
  def process(): Future[Array[Byte]] = {
    gameClient.player.map(_ => compute()).getOrElse(Future { Array.emptyByteArray })
  }

  /** Writes the packet fields. Note the narrowing conversions: code and
    * dialogType are truncated to a byte, data1..data4 to a short. */
  private def compute(): Future[Array[Byte]] = {
    val writer = new PacketWriter(ServerCodes.dialog)
    writer.writeByte(0x00)
    writer.writeByte(code.toByte)
    writer.writeShort(data1.toShort)
    writer.writeShort(data2.toShort)
    writer.writeShort(data3.toShort)
    writer.writeShort(data4.toShort)
    writer.writeByte(dialogType.toByte)
    writer.writeByte(if (autoWrapText) 0x01 else 0x00)
    // Empty messages are omitted entirely rather than written as "".
    if (msg.length > 0) writer.writeString(msg)
    writer.writeByte(0x00)
    writer.toFinalFuture()
  }
}
|
CATION-M/X-moe
|
Projects/kirikiri2-master/kirikiri2/src/plugins/win32/javascript/tjsbase.h
|
#ifndef __TJSBASE_H__
#define __TJSBASE_H__
#include <windows.h>
#include "tp_stub.h"
#include <include/v8.h>
using namespace v8;
#define TJSINSTANCENAME L"__tjsinstance__"
// Bridges a TJS variant into the V8 JavaScript world: the wrapped
// tTJSVariant is attached to a V8 object (under TJSINSTANCENAME,
// presumably — confirm in the .cpp) so it can be recovered later.
class TJSBase {
public:
	// Takes a copy of the TJS variant to expose to JavaScript.
	TJSBase(const tTJSVariant &variant) : variant(variant) {}
	virtual ~TJSBase() {};
	// Attach this instance's variant to the given V8 object.
	void wrap(Isolate *isolate, Local<Object> &obj);
	// Recover a variant previously wrapped onto obj into `result`;
	// presumably returns false when obj carries none — confirm in the
	// implementation.
	static bool getVariant(Isolate *isolate, tTJSVariant &result, Local<Object> &obj);
protected:
	// The TJS value this wrapper represents.
	tTJSVariant variant;
};
#endif
|
zegl-testar/sturdy
|
api/pkg/users/module/oss.go
|
<gh_stars>0
//go:build !enterprise && !cloud
// +build !enterprise,!cloud
package module
import (
"getsturdy.com/api/pkg/di"
avatars_module "getsturdy.com/api/pkg/users/avatars/module"
"getsturdy.com/api/pkg/users/db"
"getsturdy.com/api/pkg/users/graphql"
oss_selfhosted_service "getsturdy.com/api/pkg/users/oss/selfhosted/service"
"getsturdy.com/api/pkg/users/service"
)
// Module wires the OSS (self-hosted, non-enterprise/non-cloud) users module
// into the DI container: user storage, GraphQL resolvers, the user service
// bound to its self-hosted implementation, and avatar support.
func Module(c *di.Container) {
	c.Import(db.Module)
	c.Import(graphql.Module)
	c.Import(service.Module)
	// Bind the self-hosted implementation as the service.Service provider.
	c.Register(oss_selfhosted_service.New, new(service.Service))
	c.Import(avatars_module.Module)
}
|
giantswarm/endpoint-operator
|
service/resource/endpoint/update.go
|
package endpoint
import (
"context"
"github.com/giantswarm/microerror"
"github.com/giantswarm/operatorkit/framework"
apiv1 "k8s.io/client-go/pkg/api/v1"
)
// ApplyUpdateChange pushes the computed update state to the Kubernetes API.
// A nil or empty endpoint means there is nothing to update.
func (r *Resource) ApplyUpdateChange(ctx context.Context, obj, updateState interface{}) error {
	endpointToUpdate, err := toK8sEndpoint(updateState)
	if err != nil {
		return microerror.Mask(err)
	}

	// Nothing to do when there is no endpoint or it carries no addresses.
	if endpointToUpdate == nil || isEmptyEndpoint(*endpointToUpdate) {
		return nil
	}

	if _, err := r.k8sClient.CoreV1().Endpoints(endpointToUpdate.Namespace).Update(endpointToUpdate); err != nil {
		return microerror.Mask(err)
	}

	return nil
}
// NewUpdatePatch assembles the full reconciliation patch: the objects that
// must be created plus the objects that must be updated.
func (r *Resource) NewUpdatePatch(ctx context.Context, obj, currentState, desiredState interface{}) (*framework.Patch, error) {
	createChange, err := r.newCreateChange(ctx, obj, currentState, desiredState)
	if err != nil {
		return nil, microerror.Mask(err)
	}

	updateChange, err := r.newUpdateChange(ctx, obj, currentState, desiredState)
	if err != nil {
		return nil, microerror.Mask(err)
	}

	patch := framework.NewPatch()
	patch.SetCreateChange(createChange)
	patch.SetUpdateChange(updateChange)

	return patch, nil
}
// newUpdateChange computes the endpoint object to push as an update: the
// union of current and desired IPs (current first, deduplicated) under the
// desired service identity. Returns nil when no update is needed.
func (r *Resource) newUpdateChange(ctx context.Context, obj, currentState, desiredState interface{}) (*apiv1.Endpoints, error) {
	currentEndpoint, err := toEndpoint(currentState)
	if err != nil {
		return nil, microerror.Mask(err)
	}
	if currentEndpoint == nil {
		// The endpoint does not exist yet; creation handles it instead.
		return nil, nil
	}

	desiredEndpoint, err := toEndpoint(desiredState)
	if err != nil {
		return nil, microerror.Mask(err)
	}
	if desiredEndpoint == nil {
		// Nothing to do.
		return nil, nil
	}

	mergedEndpoint := &Endpoint{
		ServiceName:      desiredEndpoint.ServiceName,
		ServiceNamespace: desiredEndpoint.ServiceNamespace,
	}
	// Union of current and desired IPs without duplicates, current first.
	for _, ip := range currentEndpoint.IPs {
		if !containsIP(mergedEndpoint.IPs, ip) {
			mergedEndpoint.IPs = append(mergedEndpoint.IPs, ip)
		}
	}
	for _, ip := range desiredEndpoint.IPs {
		if !containsIP(mergedEndpoint.IPs, ip) {
			mergedEndpoint.IPs = append(mergedEndpoint.IPs, ip)
		}
	}
	if len(mergedEndpoint.IPs) == 0 {
		// Nothing to do.
		return nil, nil
	}

	updateState, err := r.newK8sEndpoint(mergedEndpoint)
	if err != nil {
		return nil, microerror.Mask(err)
	}

	return updateState, nil
}
|
p2pu/learning-circles
|
api/schema.py
|
import phonenumbers
import re
from datetime import datetime
from decimal import Decimal
# Validators should return a function that takes the value as argument
# and return a tuple (value, error) with error being None if the user
# supplied data is correct
# Validators should also convert the data to the correct type
# but data should only be converted if no information is lost, eg.
# for an email address, return the original text provided by the user
# if it passes the validation
def _required(func):
def decorator(*args, **kwargs):
required = kwargs.get('required', False)
if 'required' in kwargs:
del kwargs['required']
def required_validator(data):
if required and not data and data is not False:
return None, 'Field is required'
if not required and not data and data is not False:
return None, None
# actual validator is now only created during validation
return func(*args, **kwargs)(data)
return required_validator
return decorator
@_required
def integer():
    """Validator factory for integers.

    The produced validator returns (int, None) on success and
    (None, message) when the value cannot be converted.
    """
    def _validate(value):
        error = 'Not a valid integer'
        if value is None:
            return None, error
        try:
            return int(value), None
        except (ValueError, TypeError):
            # ValueError: bad string; TypeError: unconvertible type (list, dict...).
            return None, error
    return _validate
@_required
def floating_point():
    """Validator factory for floats.

    The produced validator returns (float, None) on success and
    (None, message) when the value cannot be converted.
    """
    def _validate(value):
        error = 'Not a valid float'
        if value is None:
            return None, error
        try:
            return float(value), None
        except (ValueError, TypeError):
            # TypeError added: float([]) etc. used to crash instead of
            # reporting an error, unlike integer() which already caught it.
            return None, error
    return _validate
@_required
def boolean():
    """Validator factory for booleans.

    Accepts real bools plus the HTML-form strings 'true', 'on' (-> True)
    and 'false' (-> False). Anything else is an error.
    """
    def _validate(value):
        error = 'Not a boolean value'
        # Checkbox-style string values submitted by HTML forms.
        if value in ('true', 'on'):
            return True, None
        if value == 'false':
            return False, None
        if not isinstance(value, bool):
            return None, error
        return value, None
    return _validate
@_required
def text(length=None):
    """Validator factory for strings, optionally capped at `length`.

    A falsy length (None or 0) disables the length check entirely.
    """
    def _validate(string):
        if not length or len(string) <= length:
            return string, None
        return None, 'String too long'
    return _validate
@_required
def date(format='%Y-%m-%d'):
    """Validator factory for calendar dates in the given strptime format."""
    error = 'Not a valid date'
    def _validate(data):
        try:
            parsed = datetime.strptime(data, format).date()
        except ValueError:
            return None, error
        return parsed, None
    return _validate
@_required
def time(format='%H:%M'):
    """Validator factory for times of day in the given strptime format."""
    error = 'Not a valid time'
    def _validate(data):
        try:
            parsed = datetime.strptime(data, format).time()
        except ValueError:
            return None, error
        return parsed, None
    return _validate
@_required
def email():
    """Validator factory for email addresses.

    Requires exactly one '@'; the user and domain parts are matched against
    the Django user/domain regular expressions. Returns the original string
    unchanged on success.
    """
    # Regular expressions lifted from Django django.core.validators.
    # Compiled once per validator instead of on every validation call.
    user_re = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"  # dot-atom
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)',  # quoted-string
        re.IGNORECASE)
    domain_re = re.compile(
        r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z',
        re.IGNORECASE)

    def _validate(email):
        error = 'Invalid email address'
        if email is None:
            # NOTE(review): _required already intercepts None, so this branch
            # looks unreachable in practice — kept defensively.
            return None, None
        if email.count('@') != 1:
            return None, error
        user, domain = email.split('@')
        if user_re.match(user) is None or domain_re.match(domain) is None:
            return None, error
        return email, None
    return _validate
@_required
def mobile():
    """Validator factory for phone numbers via `phonenumbers`.

    No default region is passed, so numbers must include an international
    prefix to parse. The original string is returned on success.
    """
    def _validate(mobile):
        failure = (None, 'Not a valid phone number')
        try:
            parsed = phonenumbers.parse(mobile, None)
            if phonenumbers.is_valid_number(parsed) is False:
                return failure
            return mobile, None
        except phonenumbers.phonenumberutil.NumberParseException:
            return failure
    return _validate
@_required
def chain(checks):
    """ Chain multiple checks. First error fails, result from last check is returned as data """
    def _validate(data):
        result = data
        for check in checks:
            # Each check receives the previous check's parsed output.
            result, error = check(result)
            if error is not None:
                return None, error
        return result, None
    return _validate
@_required
def schema(schema):
    """Validator factory for nested objects: runs `validate` against the
    given sub-schema. Falsy input is validated as an empty dict."""
    def _validate(data):
        nested = data if data else {}
        return validate(schema, nested)
    return _validate
def validate(schema, data):
    """ Validate `data` against `schema`.

    schema maps field names to validator functions; each validator takes the
    raw value and returns (parsed_value, error). Example:

        schema = {
            'field name 1': validator function,
            'field name 2': chain([validator1, validator2]),
            'example email field': email(),
        }

    Returns (cleaned_data, errors): errors maps field names to lists of
    error messages; cleaned_data only contains fields that parsed to a
    non-None value, so callers can rely on data.get('field', 'default').
    """
    cleaned_data = {}
    errors = {}
    for field, validator in list(schema.items()):
        parsed, error = validator(data.get(field))
        # Empty list/dict errors (e.g. from nested schema()) count as success.
        if error is not None and error != [] and error != {}:
            # Normalize every error into a list of messages.
            errors[field] = error if isinstance(error, list) else [error]
        elif parsed is not None:
            cleaned_data[field] = parsed
    # TODO - errors for extra fields
    return cleaned_data, errors
def django_get_to_dict(get):
    """Flatten a Django QueryDict-style mapping (values are lists) into a
    plain dict: single-element lists collapse to their element, everything
    else is kept as-is."""
    return {
        key: value[0] if len(value) == 1 else value
        for key, value in list(get.items())
    }
|
matty234/client
|
shared/chat/selectable-small-team-container.js
|
// @flow
import * as Constants from '../constants/chat2'
import * as Types from '../constants/types/chat2'
import SelectableSmallTeam from './selectable-small-team'
import {namedConnect} from '../util/container'
// Props supplied by the parent list row; search-hit fields are only set
// while filtering.
type OwnProps = {|
  conversationIDKey: Types.ConversationIDKey,
  filter?: string,
  numSearchHits?: number,
  maxSearchHits?: number,
  isSelected: boolean,
  onSelectConversation: () => void,
|}
const mapStateToProps = (state, ownProps: OwnProps) => {
const conversationIDKey = ownProps.conversationIDKey
return {
_hasBadge: Constants.getHasBadge(state, conversationIDKey),
_hasUnread: Constants.getHasUnread(state, conversationIDKey),
_meta: Constants.getMeta(state, conversationIDKey),
_username: state.config.username,
}
}
// No dispatched actions; selection is handled through ownProps.onSelectConversation.
const mapDispatchToProps = () => ({})
// Combine store state and own props into the final row props.
const mergeProps = (stateProps, dispatchProps, ownProps) => {
  const hasUnread = stateProps._hasUnread
  const styles = Constants.getRowStyles(stateProps._meta, ownProps.isSelected, hasUnread)
  // Row is "locked" when anyone still needs to rekey; "you" only counts
  // when nobody else does.
  const participantNeedToRekey = stateProps._meta.rekeyers.size > 0
  const youNeedToRekey = !participantNeedToRekey && stateProps._meta.rekeyers.has(stateProps._username)
  const isLocked = participantNeedToRekey || youNeedToRekey
  // order participants by hit, if it's set
  const filter = ownProps.filter || ''
  // NOTE(review): this comparator is not a consistent total order — when
  // both names match the filter at a nonzero index it returns -1 for either
  // argument order, so the result for such pairs is engine-dependent.
  // Confirm whether this is intentional before changing it.
  const participants = Constants.getRowParticipants(stateProps._meta, stateProps._username)
    .toArray()
    .sort((a, b) => {
      const ai = a.indexOf(filter)
      const bi = b.indexOf(filter)
      if (ai === -1) {
        return bi === -1 ? -1 : 1
      } else if (bi === -1) {
        return -1
      } else {
        if (bi === 0) {
          return 1
        }
        return -1
      }
    })
  return {
    backgroundColor: styles.backgroundColor,
    isLocked,
    isMuted: stateProps._meta.isMuted,
    isSelected: ownProps.isSelected,
    maxSearchHits: ownProps.maxSearchHits,
    numSearchHits: ownProps.numSearchHits,
    onSelectConversation: ownProps.onSelectConversation,
    participants,
    showBadge: stateProps._hasBadge,
    showBold: styles.showBold,
    teamname: stateProps._meta.teamname,
    usernameColor: styles.usernameColor,
  }
}
// Connect the row component to the store under the debug name
// 'SelectableSmallTeam'.
export default namedConnect<OwnProps, _, _, _, _>(
  mapStateToProps,
  mapDispatchToProps,
  mergeProps,
  'SelectableSmallTeam'
)(SelectableSmallTeam)
|
project-kotinos/mlibrary___fishrappr
|
app/views/catalog/old_home.rb
|
#redirect_to uri -> :url => "#{@publication}/home.rb"
|
openpreserve/scape
|
xa-pit/src/main/java/eu/planets_project/clients/ws/PlanetsServiceExplorer.java
|
package eu.planets_project.clients.ws;
import java.net.URL;
import java.util.HashMap;
import java.util.logging.Logger;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.ws.Service;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import eu.planets_project.services.PlanetsService;
import eu.planets_project.services.characterise.Characterise;
import eu.planets_project.services.compare.CommonProperties;
import eu.planets_project.services.compare.Compare;
import eu.planets_project.services.compare.CompareProperties;
import eu.planets_project.services.datatypes.ServiceDescription;
import eu.planets_project.services.fixity.Fixity;
import eu.planets_project.services.identify.Identify;
import eu.planets_project.services.migrate.Migrate;
import eu.planets_project.services.modify.Modify;
import eu.planets_project.services.validate.Validate;
import eu.planets_project.services.view.CreateView;
/**
 * Explores a Planets service end-point: reads its WSDL to determine the
 * wsdl:service QName and maps that QName onto the matching Planets service
 * interface so the service can be instantiated and described.
 */
public class PlanetsServiceExplorer {

    private static Logger log = Logger.getLogger(PlanetsServiceExplorer.class.getName());

    /** Location of the service's WSDL document. */
    private URL wsdlLocation = null;

    /** QName determined from the WSDL, or null if it could not be parsed. */
    private QName qName = null;

    // Create a static hashmap, mapping QNames to the interfaces:
    private static HashMap<QName, Class<?>> classmap = new HashMap<QName, Class<?>>();
    static {
        classmap.put(CommonProperties.QNAME, CommonProperties.class);
        classmap.put(Identify.QNAME, Identify.class);
        classmap.put(Migrate.QNAME, Migrate.class);
        classmap.put(Modify.QNAME, Modify.class);
        classmap.put(Validate.QNAME, Validate.class);
        classmap.put(Characterise.QNAME, Characterise.class);
        classmap.put(CreateView.QNAME, CreateView.class);
        classmap.put(Compare.QNAME, Compare.class);
        classmap.put(CompareProperties.QNAME, CompareProperties.class);
        classmap.put(Fixity.QNAME, Fixity.class);
    }

    /**
     * Probes for the QName on construction.
     * @param wsdlLocation The location of the WSDL of the service.
     */
    public PlanetsServiceExplorer(URL wsdlLocation) {
        log.fine("Creating new instance");
        this.wsdlLocation = wsdlLocation;
        this.qName = determineServiceQNameFromWsdl();
    }

    /**
     * @return the wsdlLocation
     */
    public URL getWsdlLocation() {
        return wsdlLocation;
    }

    /**
     * @return the qName determined from the WSDL, or null if parsing failed
     */
    public QName getQName() {
        return qName;
    }

    /**
     * Attempts to instantiate a service, and so checks if the thing is
     * essentially working.
     *
     * @return true if an instanstiable PlanetsService
     */
    public boolean isServiceInstanciable() {
        Service service = Service.create(wsdlLocation, qName);
        PlanetsService s = (PlanetsService) service.getPort(getServiceClass());
        // Simplified from if/else returning literals.
        return s != null;
    }

    /**
     * @return the service description for this service, or null.
     */
    public ServiceDescription getServiceDescription() {
        Service service = Service.create(wsdlLocation, qName);
        PlanetsService s = (PlanetsService) service.getPort(getServiceClass());
        return s != null ? s.describe() : null;
    }

    /**
     * @return the service interface class registered for this QName, or null
     *         if the QName is unknown
     */
    public Class<?> getServiceClass() {
        return classmap.get(qName);
    }

    /**
     * This method examines a given service end-point and attempts to determine
     * the QName of the wsdl:service.
     *
     * @return the QName, or null if the WSDL could not be fetched or parsed
     */
    private QName determineServiceQNameFromWsdl() {
        log.fine("determining qname");
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        // Using factory get an instance of document builder
        DocumentBuilder db;
        try {
            log.fine("new doc builder");
            db = dbf.newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            e.printStackTrace();
            return null;
        }
        // parse using builder to get DOM representation of the XML file
        Document dom;
        try {
            log.fine("parsing wsdl");
            dom = db.parse(wsdlLocation.openStream());
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
        // The QName is composed of the definitions element's targetNamespace
        // and name attributes.
        Element root = dom.getDocumentElement();
        log.fine("getting root element");
        return new QName(root.getAttribute("targetNamespace"), root
                .getAttribute("name"));
    }
}
|
shaojiankui/iOS10-Runtime-Headers
|
PrivateFrameworks/StoreServices.framework/SSUpdatesDatabaseSchema.h
|
<filename>PrivateFrameworks/StoreServices.framework/SSUpdatesDatabaseSchema.h
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/StoreServices.framework/StoreServices
*/
@interface SSUpdatesDatabaseSchema : NSObject
// NOTE(review): reverse-engineered private-framework header; method names
// suggest intent but actual behavior is unverified from this declaration.
// Presumably migrates the store from schema version 7000 to 7001.
+ (void)_migrate7000to7001InDatabase:(id)arg1;
// Presumably creates the updates schema in the given database under `name`.
+ (void)createSchemaInDatabase:(id)arg1 withName:(id)arg2;
// Presumably the on-disk path of the updates database.
+ (id)databasePath;
// Presumably reports whether the database schema requires migration.
+ (bool)databaseRequiresMigration:(id)arg1;
@end
|
IAmAFrenchFry/the-thunderhead
|
commands/dev.give.js
|
const Discord = require("discord.js");
const fs = require("graceful-fs");
var eco = require('discord-economy');
const config = require("../static/config.json");
/**
 * Dev-only command: credits a mentioned user's balance and confirms with an
 * embed showing the new balance.
 * Usage: give [@User] [Amount]
 */
module.exports.run = async (client, message, args) => {
    if (config.sudo.indexOf(message.author.id) < 0) return; // Dev Only
    const recipient = message.mentions.members.first();
    // Fixed user-facing typo ("vaild" -> "valid").
    if (!recipient) return message.reply("Please provide a valid Mention.");
    const amount = parseInt(args[1], 10);
    // Rejects NaN and 0 (falsy). NOTE(review): negative amounts pass this
    // check and would debit the account — confirm that is intended for a
    // dev "give" command.
    if (!amount) return message.channel.send(message.author.username + " Please provide an amount to give.");
    const output = await eco.AddToBalance(recipient.id, amount);
    const fundEmbed = new Discord.MessageEmbed()
        .setTitle(`**Balance: **${output.newbalance}`)
        .setFooter(`Thunderhead Banking. ${message.mentions.users.first().username}'s account was funded.`, message.mentions.users.first().avatarURL())
        .setColor(client.colors["discord"]);
    message.channel.send(fundEmbed);
}
// Command metadata consumed by the command loader.
module.exports.config = {
    // Dev commands are namespaced behind the developer prefix.
    name: `${config["developer_prepended_prefix"]}give`,
    aliases: [`${config["developer_prepended_prefix"]}fund`],
    use: `${config["developer_prepended_prefix"]}give [@User] [Amount]`,
    description: "Give a user currency.",
    // NOTE(review): meaning of state "delta" is not visible in this file —
    // confirm against the command loader.
    state : "delta",
    page: 0
};
|
myleandrov/YogeshPateliOS
|
gruul/gruul-common-open/gruul-common-base-open/src/main/java/com/medusa/gruul/common/core/util/DateUtils.java
|
package com.medusa.gruul.common.core.util;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
/**
* @author whh
*/
public class DateUtils {
	/**
	 * Converts an epoch-millisecond timestamp to a LocalDateTime using the
	 * JVM's default time zone.
	 *
	 * @param timestamp epoch milliseconds
	 * @return the corresponding LocalDateTime in the system default zone
	 */
	public static LocalDateTime timestampCoverLocalDateTime(long timestamp) {
		DateTime expiresIn = DateUtil.date(timestamp);
		return expiresIn.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
	}
	/**
	 * Converts a LocalDateTime to an epoch-millisecond timestamp assuming a
	 * fixed UTC+8 offset.
	 *
	 * NOTE(review): asymmetric with timestampCoverLocalDateTime above, which
	 * uses the system default zone — on hosts not running in UTC+8 a round
	 * trip through both methods is not the identity. Confirm the hard-coded
	 * "+8" is intended.
	 *
	 * @param localDateTime the local date-time to convert
	 * @return epoch milliseconds, interpreting the input as UTC+8 wall time
	 */
	public static long localDateTimeCoverTimestamp(LocalDateTime localDateTime) {
		return localDateTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
	}
}
|
gtrdp/social-density-estimation
|
speakercount/src/de/fau/cs/jstk/framed/MVN.java
|
<reponame>gtrdp/social-density-estimation<filename>speakercount/src/de/fau/cs/jstk/framed/MVN.java
/*
Copyright (c) 2009-2011
Speech Group at Informatik 5, Univ. Erlangen-Nuremberg, GERMANY
<NAME>
<NAME>
This file is part of the Java Speech Toolkit (JSTK).
The JSTK is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The JSTK is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the JSTK. If not, see <http://www.gnu.org/licenses/>.
*/
package de.fau.cs.jstk.framed;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import de.fau.cs.jstk.io.FrameInputStream;
import de.fau.cs.jstk.io.FrameOutputStream;
import de.fau.cs.jstk.io.FrameSource;
import de.fau.cs.jstk.io.IOUtil;
import de.fau.cs.jstk.stat.Density;
import de.fau.cs.jstk.stat.Sample;
import de.fau.cs.jstk.stat.Trainer;
import de.fau.cs.jstk.util.Pair;
/**
* Perform a mean and variance normalization to the incoming feature vector.
*
* @author sikoried
*/
public class MVN implements FrameSource {
	private static Logger logger = Logger.getLogger(MVN.class);
	/** FrameSource to read from */
	private FrameSource source;
	/** Creates an MVN with no source and no statistics; configure via setters. */
	public MVN() {
		// nothing to do
	}
	/**
	 * Creates an MVN reading from the given source. Statistics must still be
	 * estimated (extendStatistics) or loaded before read() normalizes frames.
	 */
	public MVN(FrameSource src) {
		setFrameSource(src);
	}
	/**
	 * Creates an MVN reading from the given source, with normalization
	 * parameters loaded from the given parameter file.
	 */
	public MVN(FrameSource src, String parameterFile) throws IOException, ClassNotFoundException {
		setFrameSource(src);
		loadFromFile(parameterFile);
	}
	/** Selects which normalizations read() applies (mean and/or variance). */
	public void setNormalizations(boolean means, boolean variances) {
		this.normalizeMeans = means;
		this.normalizeVars = variances;
	}
	/** Returns the FrameSource currently read from. */
	public FrameSource getSource() {
		return source;
	}
	/**
	 * Sets the source; rejects a source whose frame size differs from the
	 * one already configured.
	 */
	public void setSource(FrameSource src) {
		if (source != null && source.getFrameSize() != src.getFrameSize())
			throw new RuntimeException("MVN.setSource(): FrameSource dimensions don't match!");
		source = src;
	}
	/** number of samples that contributed to the statistics */
	private long samples;
	/** mean values to subtract */
	public double [] means;
	/** variances */
	public double [] variances;
	/** sigmas for normalization (sqrt(var)) */
	public double [] sigmas;
	/**
	 * Return the current frame size
	 */
	public int getFrameSize() {
		return source.getFrameSize();
	}
	/**
	 * Set the FrameSource to read from.
	 * @param src Valid FrameSource instance.
	 */
	public void setFrameSource(FrameSource src) {
		source = src;
	}
	/**
	 * Read the next frame from the source, normalize for zero mean and uniform
	 * standard deviation, and output the frame.
	 */
	public boolean read(double[] buf) throws IOException {
		// read, return false if there wasn't any frame to read.
		if (!source.read(buf))
			return false;
		// mean and variance normalization; when both flags are off the frame
		// passes through unchanged
		if (normalizeMeans && normalizeVars) {
			for (int i = 0; i < buf.length; ++i)
				buf[i] = (buf[i] - means[i]) / sigmas[i];
		} else if (normalizeMeans && !normalizeVars) {
			for (int i = 0; i < buf.length; ++i)
				buf[i] = (buf[i] - means[i]);
		} else if (!normalizeMeans && normalizeVars) {
			for (int i = 0; i < buf.length; ++i)
				buf[i] /= sigmas[i];
		}
		return true;
	}
	// which normalizations read() applies; see setNormalizations()
	private boolean normalizeMeans = true;
	private boolean normalizeVars = true;
	/**
	 * Reset all internal statistics to clear the normalization parameters.
	 */
	public void resetStatistics() {
		samples = 0;
		means = null;
		variances = null;
		sigmas = null;
	}
	/**
	 * Add raw feature vectors to the normalization statistics. Initializes
	 * the parameters if necessary.
	 */
	public void extendStatistics1(List<double []> data) throws IOException {
		if (data.size() < 1)
			return;
		Density stat = Trainer.ml1(data, true);
		extendStatistics(stat, data.size());
	}
	/**
	 * Add samples from the given list to the normalization statistics. Initialize
	 * the parameters if necessary.
	 */
	public void extendStatistics(List<Sample> data) throws IOException {
		if (data.size() < 1)
			return;
		Density stat = Trainer.ml(data, true);
		extendStatistics(stat, data.size());
	}
	/**
	 * Merge a maximum-likelihood estimate (over `size` samples) into the
	 * accumulated statistics, updating means, variances and sigmas.
	 */
	private void extendStatistics(Density stat, int size) throws IOException {
		if (size < 1)
			return;
		if (means == null) {
			// step 2a: set the new statistics
			samples = size;
			means = stat.mue;
			variances = stat.cov;
		} else {
			// step 2b: combine old and new statistics
			if (means.length != stat.fd)
				throw new IOException("frame dimensions do not match: means.length = " + means.length + " input_fs = " + stat.fd);
			for (int i = 0; i < stat.fd; ++i) {
				// merge with the new statistics: the pooled mean is the
				// sample-count weighted average; the pooled variance uses
				// E[x^2] = var + mean^2 per part, then
				// var_pooled = E[x^2]_pooled - mean_pooled^2
				double mean_old = means[i];
				means[i] = (mean_old * samples + stat.mue[i] * size) / (samples + size);
				variances[i] = (
						(variances[i] + mean_old*mean_old) * samples +
						(stat.cov[i] + stat.mue[i]*stat.mue[i]) * size
					) / (samples + size) - means[i] * means[i];
			}
			// don't forget to update the number of samples for these statistics
			samples += size;
		}
		// step 3: compute sigmas
		if (sigmas == null)
			sigmas = new double [variances.length];
		for (int i = 0; i < variances.length; ++i)
			sigmas[i] = Math.sqrt(variances[i]);
	}
	/**
	 * Add samples from the given source to the normalization statistics. Initialize
	 * the parameters if necessary.
	 * @param src
	 * @throws IOException
	 */
	public void extendStatistics(FrameSource src) throws IOException {
		double [] buf = new double [src.getFrameSize()];
		LinkedList<Sample> data = new LinkedList<Sample>();
		// label is irrelevant here, so all samples get class 0
		while (src.read(buf))
			data.add(new Sample((short) 0, buf));
		extendStatistics(data);
	}
	/**
	 * Read the normalization parameters from the referenced file.
	 * @param fileName
	 * @throws IOException
	 * @throws ClassNotFoundException
	 */
	public void loadFromFile(String fileName) throws IOException {
		InputStream is = new FileInputStream(fileName);
		read(is);
		is.close();
	}
	/**
	 * Read the normalization parameters from the given InputStream
	 * (binary little-endian layout: samples, frame dim, means, variances,
	 * sigmas — must match write()).
	 * @param is
	 * @throws IOException
	 */
	public void read(InputStream is) throws IOException {
		samples = IOUtil.readLong(is, ByteOrder.LITTLE_ENDIAN);
		int fd = IOUtil.readInt(is, ByteOrder.LITTLE_ENDIAN);
		means = new double [fd];
		variances = new double [fd];
		sigmas = new double [fd];
		if (!IOUtil.readDouble(is, means, ByteOrder.LITTLE_ENDIAN))
			throw new IOException("Could not read mean values");
		if (!IOUtil.readDouble(is, variances, ByteOrder.LITTLE_ENDIAN))
			throw new IOException("Could not read mean variances");
		if (!IOUtil.readDouble(is, sigmas, ByteOrder.LITTLE_ENDIAN))
			throw new IOException("Could not read mean sigmas");
	}
	/**
	 * Save the normalization parameters to the referenced file.
	 * @param fileName
	 * @throws IOException
	 */
	public void saveToFile(String fileName) throws IOException {
		OutputStream os = new FileOutputStream(new File(fileName));
		write(os);
		os.close();
	}
	/**
	 * Save the normalization parameters to the given OutputStream
	 * (layout mirrors read()).
	 * @param os
	 * @throws IOException
	 */
	public void write(OutputStream os) throws IOException {
		IOUtil.writeLong(os, samples, ByteOrder.LITTLE_ENDIAN);
		IOUtil.writeInt(os, means.length, ByteOrder.LITTLE_ENDIAN);
		IOUtil.writeDouble(os, means, ByteOrder.LITTLE_ENDIAN);
		IOUtil.writeDouble(os, variances, ByteOrder.LITTLE_ENDIAN);
		IOUtil.writeDouble(os, sigmas, ByteOrder.LITTLE_ENDIAN);
	}
	/**
	 * Generate a String represenation of the normalization parameters
	 */
	public String toString() {
		StringBuffer ret = new StringBuffer();
		ret.append("framed.MVN samples = " + samples + "\n");
		ret.append(" m = [");
		for (double m : means)
			ret.append(" " + m);
		ret.append(" ]\n v = [");
		for (double v : variances)
			ret.append(" " + v);
		ret.append(" ]\n");
		return ret.toString();
	}
	// command-line help text for main()
	public static final String synopsis =
		"sikoried, 12-4-2009\n" +
		"Compute a mean and variance normalization for each feature file individually.\n" +
		"Optionally, the normalization parameters can be estimated on all referenced\n" +
		"files (cumulative) or loaded from file. See the options for more details.\n" +
		"\n" +
		"usage: framed.MVN [options]\n" +
		" --io in-file out-file\n" +
		" Use the given files for in and output. This option may be used multiple\n" +
		" times.\n" +
		" --in-out-list list-file\n" +
		" Use a list containing lines \"<in-file> <out-file>\" for batch processing.\n" +
		" This option may be used multiple times.\n" +
		" --in-list list-file directory\n" +
		" Read all files contained in the list and save the output to the given\n" +
		" directory. This option may be used multiple times.\n" +
		" --dir <input-dir>\n" +
		" Expect the input files in the given directory. MUST BE PLACED BEFORE --in-list!!!\n" +
		"\n" +
		" --cumulative\n" +
		" Estimate the MVN parameters on ALL files instead of individual MVN.\n" +
		" --save-parameters file\n" +
		" Save the CMVN parameters. This can only be used for single files or in\n" +
		" combination with --cumulative. In case of --online, the parameters after\n" +
		" are saved after processing all data.\n" +
		" --load-parameters file\n" +
		" Use the CMVN parameters from the given file instead of individual or\n" +
		" cumulative estimates.\n" +
		" --simulate\n" +
		" Only compute the normalization parameters but no data normalization!\n" +
		" --no-variance\n" +
		" Do not do variance normalization\n" +
		" -v\n" +
		" Be verbose\n" +
		"\n" +
		" -h | --help\n" +
		" Display this help text.\n";
	/**
	 * Command line entry point: parses the options documented in synopsis,
	 * estimates/loads normalization parameters, then normalizes each
	 * (in, out) file pair.
	 */
	public static void main(String[] args) throws Exception, IOException {
		if (args.length < 2) {
			System.err.println(synopsis);
			System.exit(1);
		}
		logger.setLevel(Level.WARN);
		boolean cumulative = false;
		boolean simulate = false;
		boolean novar = false;
		String parameterOutputFile = null;
		String parameterInputFile = null;
		String inDir = null;
		// store all files to be processed in a list
		ArrayList<Pair<String, String>> iolist = new ArrayList<Pair<String, String>>();
		// parse the command line arguments
		for (int i = 0; i < args.length; ++i) {
			if (args[i].equals("-h") || args[i].equals("--help")) {
				System.err.println(synopsis);
				System.exit(1);
			} else if (args[i].equals("--simulate"))
				simulate = true;
			else if (args[i].equals("--cumulative"))
				cumulative = true;
			else if (args[i].equals("--no-variance"))
				novar = true;
			else if (args[i].equals("--load-parameters"))
				parameterInputFile = args[++i];
			else if (args[i].equals("--save-parameters"))
				parameterOutputFile = args[++i];
			else if (args[i].equals("--io")) {
				// add single file pair
				iolist.add(new Pair<String, String>(args[i+1], args[i+2]));
				i += 2;
			} else if (args[i].equals("-v"))
				logger.setLevel(Level.ALL);
			else if (args[i].equals("--dir"))
				inDir = args[++i];
			else if (args[i].equals("--in-list")) {
				BufferedReader lr = new BufferedReader(new FileReader(args[++i]));
				// validate output directory
				File outDir = new File(args[++i]);
				if (!outDir.canWrite())
					throw new IOException("Cannot write to directory " + outDir.getAbsolutePath());
				// read in the list
				String line = null;
				int lineCnt = 1;
				while ((line = lr.readLine()) != null) {
					String inf = (inDir == null ? line : inDir + System.getProperty("file.separator") + line);
					String ouf = outDir + System.getProperty("file.separator") + line;
					// check file
					if (!(new File(inf)).canRead())
						throw new IOException(args[i-1] + "(" + lineCnt + "): Cannot read input file " + line);
					iolist.add(new Pair<String, String>(inf, ouf));
					lineCnt++;
				}
			} else if (args[i].equals("--in-out-list")) {
				BufferedReader lr = new BufferedReader(new FileReader(args[++i]));
				String line = null;
				int lineCnt = 1;
				while ((line = lr.readLine()) != null) {
					String [] help = line.split("\\s+");
					if (help.length != 2)
						throw new IOException(args[i] + "(" + lineCnt + "): invalid line format");
					if (!(new File(help[0])).canRead())
						throw new IOException(args[i] + "(" + lineCnt + "): Cannot read input file " + line);
					iolist.add(new Pair<String, String>(help[0], help[1]));
					lineCnt++;
				}
			} else {
				throw new Exception("unknown parameter: " + args[i]);
			}
		}
		// check some parameters -- not all combinations make sense!
		if (cumulative == false && iolist.size() > 1 && parameterOutputFile != null)
			throw new Exception("cannot save CMVN parameters for more than 1 file (use --cumulative)");
		if (cumulative == true && parameterInputFile != null)
			throw new Exception("cumulative and parameterInputFile are exclusive!");
		// system summary
		logger.info("cumulative: " + cumulative);
		logger.info("simulate : " + simulate);
		logger.info("params-in : " + (parameterInputFile == null ? "none" : parameterInputFile));
		logger.info("params-out: " + (parameterOutputFile == null ? "none" : parameterOutputFile));
		logger.info("list-size : " + iolist.size());
		MVN work = new MVN();
		if (parameterInputFile != null) {
			work.loadFromFile(parameterInputFile);
			if (novar)
				work.setNormalizations(true, false);
			logger.info(work.toString());
		}
		if (cumulative) {
			// read all data
			for (Pair<String, String> p : iolist) {
				FrameInputStream fr = new FrameInputStream(new File(p.a));
				work.extendStatistics(fr);
			}
			// save the parameter if required
			if (parameterOutputFile != null)
				work.saveToFile(parameterOutputFile);
			if (simulate)
				System.exit(0);
			if (novar)
				work.setNormalizations(true, false);
		}
		for (Pair<String, String> p : iolist) {
			// for individual CMVN, we need to process the data first -- if not read from file
			if (!cumulative && parameterInputFile == null) {
				work.resetStatistics();
				work.extendStatistics(new FrameInputStream(new File(p.a)));
				if (parameterOutputFile != null)
					work.saveToFile(parameterOutputFile);
			}
			if (simulate)
				continue;
			work.setFrameSource(new FrameInputStream(new File(p.a)));
			FrameOutputStream fw = new FrameOutputStream(work.getFrameSize(), new File(p.b));
			double [] buf = new double [work.getFrameSize()];
			// read and normalize all samples
			while (work.read(buf))
				fw.write(buf);
			fw.close();
		}
	}
}
|
thibaultmeyer/chip8-emulator
|
src/gui/gui_settings_callback_btn_input_key.c
|
#include "gui.h"
#include "gui_image_logo.h"
/*
 * GDK "key_press_event" handler for the binding-capture window.
 *
 * Stores the pressed key as the new binding for the slot described by
 * key_binding_ctx, relabels the originating settings button with the key
 * name, persists the settings, and closes the capture window.
 *
 * Returns FALSE so the event continues to propagate.
 */
static gboolean apply_new_binding(GtkWidget *widget, GdkEventKey *event, s_gui_key_binding_context *key_binding_ctx) {
    // Set new value
    gl_gui_settings.keybinding[key_binding_ctx->key_idx] = event->keyval;
    // Change button label to the human-readable key name
    gtk_button_set_label(GTK_BUTTON(key_binding_ctx->button), gdk_keyval_name(event->keyval));
    // Persists settings
    gui_toolbox_save_settings_to_file();
    // Close current window (widget is the capture window the signal was connected on)
    gtk_window_close(GTK_WINDOW(widget));
    return (FALSE);
}
/*
 * "clicked" callback for a key-binding button in the settings window.
 *
 * Opens a small modal window that waits for a single key press; the
 * key_press_event handler (apply_new_binding) stores the pressed key into
 * the binding slot described by key_binding_ctx and closes the window.
 */
void gui_settings_callback_btn_input_key(GtkButton *button, s_gui_key_binding_context *key_binding_ctx) {
    // Header
    GtkWidget *header = gtk_header_bar_new();
    gtk_header_bar_set_show_close_button(GTK_HEADER_BAR(header), TRUE);
    gtk_header_bar_set_title(GTK_HEADER_BAR(header), "Bind a new key");
    // Window: modal and transient over the settings window so it stays on top
    GtkWidget *window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_window_set_titlebar(GTK_WINDOW(window), header);
    gtk_window_set_resizable(GTK_WINDOW(window), FALSE);
    gtk_window_set_default_size(GTK_WINDOW(window), 385, 60);
    gtk_window_set_modal(GTK_WINDOW(window), TRUE);
    gtk_window_set_position(GTK_WINDOW(window), GTK_WIN_POS_CENTER_ON_PARENT);
    gtk_window_set_transient_for(GTK_WINDOW(window), GTK_WINDOW(gl_gui_components.gtk_window_setting));
    gtk_container_set_border_width(GTK_CONTAINER(window), 10);
    // Window icon
    GdkPixbuf *icon = gui_image_load_from_memory_scale(gui_image_logo_bytes, gui_image_logo_length, 256, 256);
    gtk_window_set_icon(GTK_WINDOW(window), icon);
    // Create fixed grid
    GtkWidget *fixed = gtk_fixed_new();
    gtk_container_add(GTK_CONTAINER(window), fixed);
    // Add help message
    GtkWidget *label_help = gtk_label_new("Please press the key you want to assign");
    gtk_fixed_put(GTK_FIXED(fixed), label_help, 0, 0);
    // Mark some widget/object to be explicitly deleted when window will be destroyed.
    // NOTE(review): g_slist_alloc() creates a head node with NULL data, so this
    // list starts with a NULL sentinel element — confirm the destroy handler
    // (gui_settings_callback_window_destroy) tolerates NULL entries.
    GSList *components = g_slist_alloc();
    components = g_slist_append(components, header);
    components = g_slist_append(components, fixed);
    components = g_slist_append(components, icon);
    // Connect signal
    g_signal_connect(window, "key_press_event", G_CALLBACK(apply_new_binding), key_binding_ctx);
    g_signal_connect(window, "destroy", G_CALLBACK(gui_settings_callback_window_destroy), components);
    // Show the window
    gtk_widget_show_all(window);
}
|
kubeform/provider-oci-api
|
client/informers/externalversions/factory.go
|
/*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package externalversions
import (
reflect "reflect"
sync "sync"
time "time"
versioned "kubeform.dev/provider-oci-api/client/clientset/versioned"
ai "kubeform.dev/provider-oci-api/client/informers/externalversions/ai"
analytics "kubeform.dev/provider-oci-api/client/informers/externalversions/analytics"
apigateway "kubeform.dev/provider-oci-api/client/informers/externalversions/apigateway"
apm "kubeform.dev/provider-oci-api/client/informers/externalversions/apm"
artifacts "kubeform.dev/provider-oci-api/client/informers/externalversions/artifacts"
audit "kubeform.dev/provider-oci-api/client/informers/externalversions/audit"
autoscaling "kubeform.dev/provider-oci-api/client/informers/externalversions/autoscaling"
bastion "kubeform.dev/provider-oci-api/client/informers/externalversions/bastion"
bds "kubeform.dev/provider-oci-api/client/informers/externalversions/bds"
blockchain "kubeform.dev/provider-oci-api/client/informers/externalversions/blockchain"
budget "kubeform.dev/provider-oci-api/client/informers/externalversions/budget"
certificates "kubeform.dev/provider-oci-api/client/informers/externalversions/certificates"
cloud "kubeform.dev/provider-oci-api/client/informers/externalversions/cloud"
containerengine "kubeform.dev/provider-oci-api/client/informers/externalversions/containerengine"
core "kubeform.dev/provider-oci-api/client/informers/externalversions/core"
data "kubeform.dev/provider-oci-api/client/informers/externalversions/data"
database "kubeform.dev/provider-oci-api/client/informers/externalversions/database"
datacatalog "kubeform.dev/provider-oci-api/client/informers/externalversions/datacatalog"
dataflow "kubeform.dev/provider-oci-api/client/informers/externalversions/dataflow"
dataintegration "kubeform.dev/provider-oci-api/client/informers/externalversions/dataintegration"
datascience "kubeform.dev/provider-oci-api/client/informers/externalversions/datascience"
devops "kubeform.dev/provider-oci-api/client/informers/externalversions/devops"
dns "kubeform.dev/provider-oci-api/client/informers/externalversions/dns"
email "kubeform.dev/provider-oci-api/client/informers/externalversions/email"
events "kubeform.dev/provider-oci-api/client/informers/externalversions/events"
file "kubeform.dev/provider-oci-api/client/informers/externalversions/file"
functions "kubeform.dev/provider-oci-api/client/informers/externalversions/functions"
generic "kubeform.dev/provider-oci-api/client/informers/externalversions/generic"
golden "kubeform.dev/provider-oci-api/client/informers/externalversions/golden"
health "kubeform.dev/provider-oci-api/client/informers/externalversions/health"
identity "kubeform.dev/provider-oci-api/client/informers/externalversions/identity"
integration "kubeform.dev/provider-oci-api/client/informers/externalversions/integration"
internalinterfaces "kubeform.dev/provider-oci-api/client/informers/externalversions/internalinterfaces"
jms "kubeform.dev/provider-oci-api/client/informers/externalversions/jms"
kms "kubeform.dev/provider-oci-api/client/informers/externalversions/kms"
limits "kubeform.dev/provider-oci-api/client/informers/externalversions/limits"
loadbalancer "kubeform.dev/provider-oci-api/client/informers/externalversions/loadbalancer"
log "kubeform.dev/provider-oci-api/client/informers/externalversions/log"
logging "kubeform.dev/provider-oci-api/client/informers/externalversions/logging"
management "kubeform.dev/provider-oci-api/client/informers/externalversions/management"
marketplace "kubeform.dev/provider-oci-api/client/informers/externalversions/marketplace"
metering "kubeform.dev/provider-oci-api/client/informers/externalversions/metering"
monitoring "kubeform.dev/provider-oci-api/client/informers/externalversions/monitoring"
mysql "kubeform.dev/provider-oci-api/client/informers/externalversions/mysql"
network "kubeform.dev/provider-oci-api/client/informers/externalversions/network"
nosql "kubeform.dev/provider-oci-api/client/informers/externalversions/nosql"
objectstorage "kubeform.dev/provider-oci-api/client/informers/externalversions/objectstorage"
oce "kubeform.dev/provider-oci-api/client/informers/externalversions/oce"
ocvp "kubeform.dev/provider-oci-api/client/informers/externalversions/ocvp"
oda "kubeform.dev/provider-oci-api/client/informers/externalversions/oda"
ons "kubeform.dev/provider-oci-api/client/informers/externalversions/ons"
opsi "kubeform.dev/provider-oci-api/client/informers/externalversions/opsi"
optimizer "kubeform.dev/provider-oci-api/client/informers/externalversions/optimizer"
osmanagement "kubeform.dev/provider-oci-api/client/informers/externalversions/osmanagement"
sch "kubeform.dev/provider-oci-api/client/informers/externalversions/sch"
service "kubeform.dev/provider-oci-api/client/informers/externalversions/service"
streaming "kubeform.dev/provider-oci-api/client/informers/externalversions/streaming"
vulnerability "kubeform.dev/provider-oci-api/client/informers/externalversions/vulnerability"
waas "kubeform.dev/provider-oci-api/client/informers/externalversions/waas"
waf "kubeform.dev/provider-oci-api/client/informers/externalversions/waf"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
schema "k8s.io/apimachinery/pkg/runtime/schema"
cache "k8s.io/client-go/tools/cache"
)
// SharedInformerOption defines the functional option type for SharedInformerFactory.
type SharedInformerOption func(*sharedInformerFactory) *sharedInformerFactory

// sharedInformerFactory caches one SharedIndexInformer per resource type
// (keyed by reflect.Type) and hands them out on demand; all maps are guarded
// by lock.
type sharedInformerFactory struct {
	client           versioned.Interface
	namespace        string
	tweakListOptions internalinterfaces.TweakListOptionsFunc
	lock             sync.Mutex
	defaultResync    time.Duration
	customResync     map[reflect.Type]time.Duration
	informers        map[reflect.Type]cache.SharedIndexInformer
	// startedInformers is used for tracking which informers have been started.
	// This allows Start() to be called multiple times safely.
	startedInformers map[reflect.Type]bool
}
// WithCustomResyncConfig sets a custom resync period for the specified informer types.
// Map keys are prototype objects; the informer type is derived via
// reflect.TypeOf(k), so the dynamic type of each key must exactly match the
// object type later passed to InformerFor.
func WithCustomResyncConfig(resyncConfig map[v1.Object]time.Duration) SharedInformerOption {
	return func(factory *sharedInformerFactory) *sharedInformerFactory {
		for k, v := range resyncConfig {
			factory.customResync[reflect.TypeOf(k)] = v
		}
		return factory
	}
}
// WithTweakListOptions sets a custom filter on all listers of the configured SharedInformerFactory.
// The filter is applied to every list/watch the factory's informers perform.
func WithTweakListOptions(tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerOption {
	return func(factory *sharedInformerFactory) *sharedInformerFactory {
		factory.tweakListOptions = tweakListOptions
		return factory
	}
}
// WithNamespace limits the SharedInformerFactory to the specified namespace.
func WithNamespace(namespace string) SharedInformerOption {
	return func(factory *sharedInformerFactory) *sharedInformerFactory {
		factory.namespace = namespace
		return factory
	}
}
// NewSharedInformerFactory constructs a new instance of sharedInformerFactory for all namespaces.
// Equivalent to NewSharedInformerFactoryWithOptions with no options.
func NewSharedInformerFactory(client versioned.Interface, defaultResync time.Duration) SharedInformerFactory {
	return NewSharedInformerFactoryWithOptions(client, defaultResync)
}
// NewFilteredSharedInformerFactory constructs a new instance of sharedInformerFactory.
// Listers obtained via this SharedInformerFactory will be subject to the same filters
// as specified here.
// Deprecated: Please use NewSharedInformerFactoryWithOptions instead
func NewFilteredSharedInformerFactory(client versioned.Interface, defaultResync time.Duration, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerFactory {
	return NewSharedInformerFactoryWithOptions(client, defaultResync, WithNamespace(namespace), WithTweakListOptions(tweakListOptions))
}
// NewSharedInformerFactoryWithOptions constructs a new instance of a SharedInformerFactory with additional options.
// Options are applied in the order given, so later options override earlier ones.
func NewSharedInformerFactoryWithOptions(client versioned.Interface, defaultResync time.Duration, options ...SharedInformerOption) SharedInformerFactory {
	factory := &sharedInformerFactory{
		client:           client,
		namespace:        v1.NamespaceAll,
		defaultResync:    defaultResync,
		informers:        make(map[reflect.Type]cache.SharedIndexInformer),
		startedInformers: make(map[reflect.Type]bool),
		customResync:     make(map[reflect.Type]time.Duration),
	}

	// Apply all options
	for _, opt := range options {
		factory = opt(factory)
	}

	return factory
}
// Start initializes all requested informers.
// It is safe to call multiple times: informers already started are skipped.
// Each informer goroutine runs until stopCh is closed.
func (f *sharedInformerFactory) Start(stopCh <-chan struct{}) {
	f.lock.Lock()
	defer f.lock.Unlock()

	for informerType, informer := range f.informers {
		if !f.startedInformers[informerType] {
			go informer.Run(stopCh)
			f.startedInformers[informerType] = true
		}
	}
}
// WaitForCacheSync waits for all started informers' cache were synced.
// Only informers started before this call are considered; the result maps
// each informer type to whether its cache synced before stopCh was closed.
func (f *sharedInformerFactory) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool {
	// Snapshot the started informers under the lock, then sync outside it
	// so a slow sync does not block Start/InformerFor.
	informers := func() map[reflect.Type]cache.SharedIndexInformer {
		f.lock.Lock()
		defer f.lock.Unlock()

		informers := map[reflect.Type]cache.SharedIndexInformer{}
		for informerType, informer := range f.informers {
			if f.startedInformers[informerType] {
				informers[informerType] = informer
			}
		}
		return informers
	}()

	res := map[reflect.Type]bool{}
	for informType, informer := range informers {
		res[informType] = cache.WaitForCacheSync(stopCh, informer.HasSynced)
	}
	return res
}
// InformerFor returns the SharedIndexInformer for obj using an internal
// client. The informer is constructed with newFunc on first use and cached
// for subsequent calls; a per-type resync period set via
// WithCustomResyncConfig overrides the factory default.
func (f *sharedInformerFactory) InformerFor(obj runtime.Object, newFunc internalinterfaces.NewInformerFunc) cache.SharedIndexInformer {
	f.lock.Lock()
	defer f.lock.Unlock()

	informerType := reflect.TypeOf(obj)
	informer, exists := f.informers[informerType]
	if exists {
		return informer
	}

	resyncPeriod, exists := f.customResync[informerType]
	if !exists {
		resyncPeriod = f.defaultResync
	}

	informer = newFunc(f.client, resyncPeriod)
	f.informers[informerType] = informer

	return informer
}
// SharedInformerFactory provides shared informers for resources in all known
// API group versions. One accessor is generated per OCI API group; each
// returns a group-scoped informer collection bound to this factory.
type SharedInformerFactory interface {
	internalinterfaces.SharedInformerFactory
	// ForResource returns a GenericInformer for the given resource, or an
	// error for unknown resources.
	ForResource(resource schema.GroupVersionResource) (GenericInformer, error)
	// WaitForCacheSync blocks until every started informer has synced or
	// stopCh is closed; see the factory implementation for details.
	WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool

	Ai() ai.Interface
	Analytics() analytics.Interface
	Apigateway() apigateway.Interface
	Apm() apm.Interface
	Artifacts() artifacts.Interface
	Audit() audit.Interface
	Autoscaling() autoscaling.Interface
	Bastion() bastion.Interface
	Bds() bds.Interface
	Blockchain() blockchain.Interface
	Budget() budget.Interface
	Certificates() certificates.Interface
	Cloud() cloud.Interface
	Containerengine() containerengine.Interface
	Core() core.Interface
	Data() data.Interface
	Database() database.Interface
	Datacatalog() datacatalog.Interface
	Dataflow() dataflow.Interface
	Dataintegration() dataintegration.Interface
	Datascience() datascience.Interface
	Devops() devops.Interface
	Dns() dns.Interface
	Email() email.Interface
	Events() events.Interface
	File() file.Interface
	Functions() functions.Interface
	Generic() generic.Interface
	Golden() golden.Interface
	Health() health.Interface
	Identity() identity.Interface
	Integration() integration.Interface
	Jms() jms.Interface
	Kms() kms.Interface
	Limits() limits.Interface
	Loadbalancer() loadbalancer.Interface
	Log() log.Interface
	Logging() logging.Interface
	Management() management.Interface
	Marketplace() marketplace.Interface
	Metering() metering.Interface
	Monitoring() monitoring.Interface
	Mysql() mysql.Interface
	Network() network.Interface
	Nosql() nosql.Interface
	Objectstorage() objectstorage.Interface
	Oce() oce.Interface
	Ocvp() ocvp.Interface
	Oda() oda.Interface
	Ons() ons.Interface
	Opsi() opsi.Interface
	Optimizer() optimizer.Interface
	Osmanagement() osmanagement.Interface
	Sch() sch.Interface
	Service() service.Interface
	Streaming() streaming.Interface
	Vulnerability() vulnerability.Interface
	Waas() waas.Interface
	Waf() waf.Interface
}
// Generated API-group accessors: each constructs a group informer collection
// bound to this factory's namespace and list-option tweaks. The collections
// are cheap wrappers; actual informers are created lazily via InformerFor.
func (f *sharedInformerFactory) Ai() ai.Interface {
	return ai.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Analytics() analytics.Interface {
	return analytics.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Apigateway() apigateway.Interface {
	return apigateway.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Apm() apm.Interface {
	return apm.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Artifacts() artifacts.Interface {
	return artifacts.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Audit() audit.Interface {
	return audit.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Autoscaling() autoscaling.Interface {
	return autoscaling.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Bastion() bastion.Interface {
	return bastion.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Bds() bds.Interface {
	return bds.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Blockchain() blockchain.Interface {
	return blockchain.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Budget() budget.Interface {
	return budget.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Certificates() certificates.Interface {
	return certificates.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Cloud() cloud.Interface {
	return cloud.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Containerengine() containerengine.Interface {
	return containerengine.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Core() core.Interface {
	return core.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Data() data.Interface {
	return data.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Database() database.Interface {
	return database.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Datacatalog() datacatalog.Interface {
	return datacatalog.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Dataflow() dataflow.Interface {
	return dataflow.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Dataintegration() dataintegration.Interface {
	return dataintegration.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Datascience() datascience.Interface {
	return datascience.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Devops() devops.Interface {
	return devops.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Dns() dns.Interface {
	return dns.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Email() email.Interface {
	return email.New(f, f.namespace, f.tweakListOptions)
}

func (f *sharedInformerFactory) Events() events.Interface {
	return events.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) File() file.Interface {
return file.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Functions() functions.Interface {
return functions.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Generic() generic.Interface {
return generic.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Golden() golden.Interface {
return golden.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Health() health.Interface {
return health.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Identity() identity.Interface {
return identity.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Integration() integration.Interface {
return integration.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Jms() jms.Interface {
return jms.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Kms() kms.Interface {
return kms.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Limits() limits.Interface {
return limits.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Loadbalancer() loadbalancer.Interface {
return loadbalancer.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Log() log.Interface {
return log.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Logging() logging.Interface {
return logging.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Management() management.Interface {
return management.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Marketplace() marketplace.Interface {
return marketplace.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Metering() metering.Interface {
return metering.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Monitoring() monitoring.Interface {
return monitoring.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Mysql() mysql.Interface {
return mysql.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Network() network.Interface {
return network.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Nosql() nosql.Interface {
return nosql.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Objectstorage() objectstorage.Interface {
return objectstorage.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Oce() oce.Interface {
return oce.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Ocvp() ocvp.Interface {
return ocvp.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Oda() oda.Interface {
return oda.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Ons() ons.Interface {
return ons.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Opsi() opsi.Interface {
return opsi.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Optimizer() optimizer.Interface {
return optimizer.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Osmanagement() osmanagement.Interface {
return osmanagement.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Sch() sch.Interface {
return sch.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Service() service.Interface {
return service.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Streaming() streaming.Interface {
return streaming.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Vulnerability() vulnerability.Interface {
return vulnerability.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Waas() waas.Interface {
return waas.New(f, f.namespace, f.tweakListOptions)
}
func (f *sharedInformerFactory) Waf() waf.Interface {
return waf.New(f, f.namespace, f.tweakListOptions)
}
|
GV-KrishnaTeja/Node-JS
|
webServer/web-1.js
|
// Minimal Express server exposing four static text pages on port 3000.
const express = require('express'); // load the Express framework
const app = express();

// Route table: path -> response body. Declaring routes as data keeps the
// handler logic in one place; response strings are unchanged.
const routes = {
  '': 'HELLO WELCOME TO NODE JS',
  '/help': 'WELCOME TO HELP PAGE !!!!',
  '/about': 'ALL THIS ABOUT PAGE',
  '/weather': 'WEATHER APP WELCOMES U ',
};

Object.entries(routes).forEach(([path, body]) => {
  app.get(path, (req, res) => {
    res.send(body);
  });
});

// Start listening; the log message is emitted once the server is up.
app.listen(3000, () => {
  console.log('server port is upon the 3000');
});
|
geoff5802/liftie
|
lib/resorts/mt-buller/index.js
|
<gh_stars>10-100
// Scraper configuration for the Mt Buller lift-status page.
module.exports = {
// Each lift's status is rendered as a table row whose id starts with "lift".
selector: 'tr[id^="lift"]',
parse: {
// NOTE(review): these look like liftie child-index paths into the matched
// row (e.g. "0/2" = first cell, third child) — confirm against the parser.
name: '0/2',
status: '2/0/1'
}
};
|
gregorias/dfuntest
|
core/src/main/java/me/gregorias/dfuntest/App.java
|
<filename>core/src/main/java/me/gregorias/dfuntest/App.java
package me.gregorias.dfuntest;
import java.io.IOException;
/**
 * This type represents tested applications.
 *
 * It acts as a proxy to a real, possibly remote, application running inside
 * an {@link Environment}. Concrete subclasses implement the life-cycle
 * operations for their particular target.
 *
 * @author <NAME>
 *
 * @param <EnvironmentT> concrete environment type this application runs in
 */
public abstract class App<EnvironmentT extends Environment> {
// Immutable identity, assigned once at construction time.
private final int mId;
private final String mName;
/**
 * @param id numeric identifier of this application instance
 * @param name human readable name of this application
 */
public App(int id, String name) {
mId = id;
mName = name;
}
/**
 * @return Underlying environment
 */
public abstract EnvironmentT getEnvironment();
/**
 * @return Number identifying this application.
 */
public int getId() {
return mId;
}
/**
 * @return Human readable name of this application.
 */
public String getName() {
return mName;
}
/**
 * Starts the application and allows it to run in background.
 */
@SuppressWarnings("unused")
public abstract void startUp() throws CommandException, IOException;
/**
 * Shuts down started application and deallocates all resources associated
 * with running application.
 */
@SuppressWarnings("unused")
public abstract void shutDown() throws IOException, InterruptedException;
}
|
jericks/geoserver-shell
|
src/test/java/org/geoserver/shell/CoverageStoreCommandsTest.java
|
<gh_stars>10-100
package org.geoserver.shell;
import org.glassfish.grizzly.http.Method;
import org.glassfish.grizzly.http.util.HttpStatus;
import org.junit.Test;
import org.springframework.shell.support.util.OsUtils;
import java.io.File;
import java.util.Map;
import static com.xebialabs.restito.builder.stub.StubHttp.whenHttp;
import static com.xebialabs.restito.builder.verify.VerifyHttp.verifyHttp;
import static com.xebialabs.restito.semantics.Action.status;
import static com.xebialabs.restito.semantics.Action.stringContent;
import static com.xebialabs.restito.semantics.Condition.*;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
// Unit tests for CoverageStoreCommands. Each test stubs the GeoServer REST
// endpoint with restito (whenHttp), runs the shell command against a local
// fake server, and then verifies both the returned text and the exact HTTP
// call (method + URI) that was made.
public class CoverageStoreCommandsTest extends BaseTest {
// Listing stores for one explicit workspace ("nurc").
@Test
public void listCoverageStores() throws Exception {
String url = "/geoserver/rest/workspaces/nurc/coveragestores.xml";
whenHttp(server).match(get(url)).then(stringContent(getResourceString("coveragestores.xml")), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
String actual = commands.list("nurc");
String expected = "arcGridSample" + OsUtils.LINE_SEPARATOR + "mosaic" + OsUtils.LINE_SEPARATOR;
assertEquals(expected, actual);
verifyHttp(server).once(method(Method.GET), uri(url));
}
// Passing a null workspace lists stores across every workspace; the
// workspaces themselves come from a stubbed /workspaces.xml call.
@Test
public void listAllCoverageStores() throws Exception {
String url1 = "/geoserver/rest/workspaces/it.geosolutions/coveragestores.xml";
whenHttp(server).match(get(url1)).then(stringContent(getResourceString("coveragestores.xml")), status(HttpStatus.OK_200));
String url2 = "/geoserver/rest/workspaces/topp/coveragestores.xml";
whenHttp(server).match(get(url2)).then(stringContent(getResourceString("coveragestores.xml")), status(HttpStatus.OK_200));
String url3 = "/geoserver/rest/workspaces/cite/coveragestores.xml";
whenHttp(server).match(get(url3)).then(stringContent(getResourceString("coveragestores.xml")), status(HttpStatus.OK_200));
String workspaceUrl = "/geoserver/rest/workspaces.xml";
whenHttp(server).match(get(workspaceUrl)).then(stringContent(getResourceString("workspaces.xml")), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
String actual = commands.list(null);
String expected = "cite" + OsUtils.LINE_SEPARATOR +
"----" + OsUtils.LINE_SEPARATOR +
"arcGridSample" + OsUtils.LINE_SEPARATOR +
"mosaic" + OsUtils.LINE_SEPARATOR +
"" + OsUtils.LINE_SEPARATOR +
"it.geosolutions" + OsUtils.LINE_SEPARATOR +
"---------------" + OsUtils.LINE_SEPARATOR +
"arcGridSample" + OsUtils.LINE_SEPARATOR +
"mosaic" + OsUtils.LINE_SEPARATOR +
"" + OsUtils.LINE_SEPARATOR +
"topp" + OsUtils.LINE_SEPARATOR +
"----" + OsUtils.LINE_SEPARATOR +
"arcGridSample" + OsUtils.LINE_SEPARATOR +
"mosaic" + OsUtils.LINE_SEPARATOR;
assertEquals(expected, actual);
verifyHttp(server).once(method(Method.GET), uri(url1));
verifyHttp(server).once(method(Method.GET), uri(url2));
verifyHttp(server).once(method(Method.GET), uri(url3));
verifyHttp(server).once(method(Method.GET), uri(workspaceUrl));
}
// Fetching a single store's details formats type, URL and enabled flag.
@Test
public void getCoverageStore() throws Exception {
String url = "/geoserver/rest/workspaces/nurc/coveragestores/arcGridSample.xml";
whenHttp(server).match(get(url)).then(stringContent(getResourceString("coveragestore.xml")), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
String actual = commands.get("nurc", "arcGridSample");
String expected = "arcGridSample" + OsUtils.LINE_SEPARATOR +
"   Type: ArcGrid" + OsUtils.LINE_SEPARATOR +
"   URL: file:coverages/arc_sample/precip30min.asc" + OsUtils.LINE_SEPARATOR +
"   Enabled: true" + OsUtils.LINE_SEPARATOR;
assertEquals(expected, actual);
verifyHttp(server).once(method(Method.GET), uri(url));
}
// Deleting issues an HTTP DELETE (recurse flag = true).
@Test
public void deleteCoverageStore() throws Exception {
String url = "/geoserver/rest/workspaces/nurc/coveragestores/arcGridSample.xml";
whenHttp(server).match(delete(url)).then(stringContent("true"), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
boolean result = commands.delete("nurc", "arcGridSample", true);
assertTrue(result);
verifyHttp(server).once(method(Method.DELETE), uri(url));
}
// Creating POSTs a coverageStore XML document; the test pins its exact body.
@Test
public void createCoverageStore() throws Exception {
String url = "/geoserver/rest/workspaces/nurc/coveragestores.xml";
whenHttp(server).match(post(url)).then(stringContent("true"), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
String workspace = "nurc";
String name = "terrain";
String type = "geotiff";
String fileUrl = "file:/terrain.tif";
boolean enabled = true;
boolean result = commands.create(workspace, name, type, fileUrl, enabled);
assertTrue(result);
String actual = server.getCalls().get(0).getPostBody();
String expected = "<coverageStore><name>terrain</name><type>geotiff</type><url>file:/terrain.tif</url><enabled>true</enabled><workspace><name>nurc</name></workspace></coverageStore>";
assertEquals(expected, actual);
verifyHttp(server).once(method(Method.POST), uri(url));
}
// Modifying PUTs only the changed fields (here: enabled=false).
@Test
public void modifyCoverageStore() throws Exception {
String url = "/geoserver/rest/workspaces/nurc/coveragestores/terrain.xml";
whenHttp(server).match(put(url)).then(stringContent("true"), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
String workspace = "nurc";
String coverageStore = "terrain";
String name = null;
String type = null;
String fileUrl = null;
String enabled = "false";
boolean result = commands.modify(workspace, coverageStore, name, type, fileUrl, enabled);
assertTrue(result);
String actual = server.getCalls().get(0).getPostBody();
String expected = "<coverageStore><enabled>false</enabled><workspace><name>nurc</name></workspace></coverageStore>";
assertEquals(expected, actual);
verifyHttp(server).once(method(Method.PUT), uri(url));
}
// Uploading PUTs the file bytes to .../file.<type> and passes configure /
// coverageName / recalculate as query parameters; content type derives from
// the store type (geotiff -> image/tiff).
@Test
public void uploadCoverageStore() throws Exception {
String workspace = "nurc";
String coverageStore = "terrain";
File file = getResourceFile("coveragestore.xml");
String type = "geotiff";
String configure = "first";
String coverage = "myterrain";
String recalculate = "nativebbox";
String url = "/geoserver/rest/workspaces/nurc/coveragestores/terrain/file.geotiff";
whenHttp(server).match(put(url)).then(stringContent("true"), status(HttpStatus.OK_200));
Geoserver geoserver = new Geoserver("http://00.0.0.0:8888/geoserver", "admin", "geoserver");
CoverageStoreCommands commands = new CoverageStoreCommands();
commands.setGeoserver(geoserver);
boolean result = commands.upload(workspace, coverageStore, file, type, configure, coverage, recalculate);
assertTrue(result);
String body = server.getCalls().get(0).getPostBody();
String contentType = server.getCalls().get(0).getContentType();
assertEquals("image/tiff", contentType);
Map<String, String[]> params = server.getCalls().get(0).getParameters();
assertEquals(configure, params.get("configure")[0]);
assertEquals(recalculate, params.get("recalculate")[0]);
assertEquals(coverage, params.get("coverageName")[0]);
verifyHttp(server).once(method(Method.PUT), uri(url));
}
}
|
jpuri/Editr
|
packages/embed/embed.js
|
const axios = require("axios").default;

/**
 * Build an embeddable HTML snippet for a media URL.
 *
 * - vimeo.com links are resolved through Vimeo's oEmbed API (async HTTP).
 * - youtube.com watch links (and youtu.be short links) are rewritten to the
 *   https://www.youtube.com/embed/<id> iframe URL.
 * - Anything else is embedded as-is in a generic 640x360 iframe.
 *
 * Resolves with an HTML string; rejects with an Error when the URL cannot
 * be parsed or the oEmbed lookup fails.
 */
export default (url) =>
  new Promise((resolve, reject) => {
    try {
      const parsedUrl = new URL(url); // throws TypeError on malformed input
      const { host } = parsedUrl;

      if (host.includes("vimeo.com")) {
        // BUG FIX: encode the URL before embedding it in a query string;
        // otherwise any '&' or '#' inside `url` corrupts the oEmbed request.
        axios
          .get(`https://vimeo.com/api/oembed.json?url=${encodeURIComponent(url)}`)
          .then((response) => {
            resolve(response.data.html);
          })
          .catch((err) => {
            reject(new Error(`Unable to get embed details ${err.message}`));
          });
        return;
      }

      let iframeUrl = url;
      if (host.includes("youtube.com")) {
        // Standard watch URL: the video id lives in the ?v= query parameter.
        const videoId = parsedUrl.searchParams.get("v");
        iframeUrl = `https://www.youtube.com/embed/${videoId}`;
      } else if (host.includes("youtu.be")) {
        // Generalization: short-link form, where the id is the path segment.
        const videoId = parsedUrl.pathname.slice(1);
        iframeUrl = `https://www.youtube.com/embed/${videoId}`;
      }
      resolve(
        `<iframe src="${iframeUrl}" width="640" height="360" frameborder="0" allowfullscreen></iframe>`
      );
    } catch (err) {
      // BUG FIX: the original resolve()d here with a bare error string, so a
      // malformed URL looked like a successful embed to callers; reject
      // consistently with the vimeo error path instead.
      reject(new Error(`Unable to get embed details ${err.message}`));
    }
  });
|
okian/servo
|
lg/interface.go
|
<filename>lg/interface.go
package lg
import "context"
// Interface is the logging contract the rest of the code base programs
// against. It mirrors a sugared-logger shape: plain variadic methods,
// printf-style "...f" variants, and structured key/value "...w" variants
// for each level. NOTE(review): level semantics (e.g. whether Fatal* exits
// the process) depend on the registered implementation — confirm against
// the backend before relying on them.
type Interface interface {
// Name identifies the logger implementation/instance.
Name() string
Info(args ...interface{})
Debug(args ...interface{})
Warn(args ...interface{})
Error(args ...interface{})
Panic(args ...interface{})
Fatal(args ...interface{})
Infof(template string, args ...interface{})
Debugf(template string, args ...interface{})
Warnf(template string, args ...interface{})
Errorf(template string, args ...interface{})
Panicf(template string, args ...interface{})
Fatalf(template string, args ...interface{})
Infow(msg string, keysAndValues ...interface{})
Debugw(msg string, keysAndValues ...interface{})
Warnw(msg string, keysAndValues ...interface{})
Errorw(msg string, keysAndValues ...interface{})
// Errorwt is the context-aware structured error variant (e.g. for tracing).
Errorwt(ctx context.Context, msg string, keysAndValues ...interface{})
Panicw(msg string, keysAndValues ...interface{})
Fatalw(msg string, keysAndValues ...interface{})
}
var (
// logger holds the single registered implementation; it stays nil until
// Register is called, and the package-level wrappers delegate to it.
logger Interface
)

// Register installs the package-wide logger implementation. It must be
// called exactly once, before any package-level logging function is used.
// Registering nil or calling Register a second time is a programming error
// and panics immediately, so the failure surfaces at the call site rather
// than as a nil-interface dereference in some later log call.
func Register(i Interface) {
if i == nil {
panic("lg: Register called with nil Interface")
}
if logger != nil {
panic("multiple call")
}
logger = i
}
// Package-level convenience wrappers. Each delegates to the implementation
// installed via Register; calling any of them before Register dereferences
// a nil interface and panics. Note: Errorwt (the context-aware variant) has
// no package-level wrapper — callers must hold the Interface directly.
func Info(args ...interface{}) {
logger.Info(args...)
}
func Debug(args ...interface{}) {
logger.Debug(args...)
}
func Warn(args ...interface{}) {
logger.Warn(args...)
}
func Error(args ...interface{}) {
logger.Error(args...)
}
func Panic(args ...interface{}) {
logger.Panic(args...)
}
func Fatal(args ...interface{}) {
logger.Fatal(args...)
}
func Infof(template string, args ...interface{}) {
logger.Infof(template, args...)
}
func Debugf(template string, args ...interface{}) {
logger.Debugf(template, args...)
}
func Warnf(template string, args ...interface{}) {
logger.Warnf(template, args...)
}
func Errorf(template string, args ...interface{}) {
logger.Errorf(template, args...)
}
func Panicf(template string, args ...interface{}) {
logger.Panicf(template, args...)
}
func Fatalf(template string, args ...interface{}) {
logger.Fatalf(template, args...)
}
func Infow(template string, keysAndValues ...interface{}) {
logger.Infow(template, keysAndValues...)
}
func Debugw(template string, keysAndValues ...interface{}) {
logger.Debugw(template, keysAndValues...)
}
func Warnw(template string, keysAndValues ...interface{}) {
logger.Warnw(template, keysAndValues...)
}
func Errorw(template string, keysAndValues ...interface{}) {
logger.Errorw(template, keysAndValues...)
}
func Panicw(template string, keysAndValues ...interface{}) {
logger.Panicw(template, keysAndValues...)
}
func Fatalw(template string, keysAndValues ...interface{}) {
logger.Fatalw(template, keysAndValues...)
}
|
xiaomingsea/javapasswordsafe
|
PasswordSafeLib/src/test/java/org/pwsafe/lib/file/PwsFileV3Test.java
|
<gh_stars>1-10
/*
* $Id$
* Copyright (c) 2008-2014 <NAME> <<EMAIL>>.
* All rights reserved. Use of the code is allowed under the
* Artistic License 2.0 terms, as specified in the LICENSE file
* distributed with this code, or available from
* http://www.opensource.org/licenses/artistic-license-2.0.php
*/
package org.pwsafe.lib.file;
import java.io.File;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import junit.framework.TestCase;
import org.pwsafe.lib.Log;
import org.pwsafe.lib.exception.EndOfFileException;
import org.pwsafe.lib.exception.UnsupportedFileVersionException;
/**
* Test cases for sample v3 record.
*
* @author <NAME>
*
*/
public class PwsFileV3Test extends TestCase {
private static final Log log = Log.getInstance(PwsFileV3Test.class.getName());
// Absolute path of the per-test temporary safe file.
private String filename;
final private String PASSPHRASE = "Pa$$word";
// Fresh, empty v3 safe created in setUp for each test.
private PwsFileV3 pwsFile;
@Override
public void setUp() throws IOException {
// Reserve a unique temp path, then delete the empty file so the safe
// can be created from scratch at that location.
File tmpFile = File.createTempFile("sample3-", ".psafe3");
assertTrue(tmpFile.delete());
filename = tmpFile.getAbsolutePath();
try {
pwsFile = TestUtils.createPwsFileV3(filename, new StringBuilder(PASSPHRASE));
} catch (final IOException e) {
throw new RuntimeException(e);
}
assertTrue("testsafe creation failed", new File(filename).exists());
}
@Override
public void tearDown() {
deletePwsFile(filename);
}
// Removes the test safe and the "~" backup that saving over an existing
// file creates.
private static void deletePwsFile(final String filename) {
final File file = new File(filename);
if (file.exists()) {
assertTrue("Couldn't delete testfile", file.delete());
}
// when a file exists and a change is saved, a backup file gets created. Let's remove
// that too
final File backupFileName = new File(filename + "~");
if (backupFileName.exists()) {
assertTrue(backupFileName.delete());
}
}
// Changing the passphrase must allow reopening with the new one, fail with
// the old one, and dispose() must wipe both the file's and the caller's
// passphrase material.
public void testPassphrase() throws EndOfFileException, IOException,
UnsupportedFileVersionException, NoSuchAlgorithmException {
final String myPassphrase = "<PASSWORD>";
assertEquals(0, pwsFile.getRecordCount());
log.info("New file records: " + pwsFile.getRecordCount());
final StringBuilder savedPassphrase = new StringBuilder(myPassphrase);
pwsFile.setPassphrase(savedPassphrase);
pwsFile.save();
pwsFile.close();
PwsFileStorage storage2 = new PwsFileStorage(filename);
PwsFileV3 pwsFile2 = new PwsFileV3(storage2, myPassphrase);
assertEquals(myPassphrase, pwsFile2.getPassphrase());
pwsFile2.close();
// should fail with old PASSPHRASE:
storage2 = new PwsFileStorage(filename);
try {
pwsFile2 = new PwsFileV3(storage2, PASSPHRASE);
fail("PASSPHRASE change failed !?");
} catch (final IOException anEx) {
// ok
}
// dispose has to clean everything:
pwsFile.dispose();
assertEquals(null, pwsFile.getPassphrase());
// the original Stringbuilder is also deleted:
assertEquals("", savedPassphrase.toString());
}
// Saving a read-only file must be refused with an IOException.
public void testReadOnly() throws Exception {
pwsFile.setReadOnly(true);
try {
pwsFile.save();
fail("save on Read-only file without exception");
} catch (final IOException anEx) {
// ok
}
}
// Saving must detect that the on-disk file is newer than the loaded copy
// (last-modified bumped into the future) and refuse with CME — both before
// and after an attempted save.
public void testConcurrentMod() throws Exception {
final File file = new File(filename);
file.setLastModified(System.currentTimeMillis() + 1000);
pwsFile.setModified();
try {
pwsFile.save();
fail("save concurrently modified file without exception");
} catch (final ConcurrentModificationException e) {
// ok
}
// and after save:
file.setLastModified(System.currentTimeMillis() + 2000);
pwsFile.setModified();
try {
pwsFile.save();
fail("save concurrently modified file without exception");
} catch (final ConcurrentModificationException e) {
// ok
}
}
// Unknown field types must round-trip through save/load unchanged.
public void testUnknownField() throws Exception {
final int unknownFieldType = 213;
final String payload = "unknown";
TestUtils.addDummyRecords(pwsFile, 2);
final PwsRecordV3 rec = (PwsRecordV3) pwsFile.getRecord(0);
final PwsUnknownField field = new PwsUnknownField(unknownFieldType, payload.getBytes());
assertEquals(payload, new String(field.getBytes()));
assertEquals(payload, new String((byte[]) field.getValue()));
rec.setField(field);
pwsFile.set(0, rec);
pwsFile.setModified();
pwsFile.save();
pwsFile.close();
pwsFile.dispose();
final PwsFileStorage storage2 = new PwsFileStorage(filename);
final PwsFileV3 pwsFile2 = new PwsFileV3(storage2, PASSPHRASE);
pwsFile2.readAll();
pwsFile2.close();
final PwsRecordV3 savedRec = (PwsRecordV3) pwsFile2.getRecord(0);
final PwsField noField = savedRec.getField(unknownFieldType - 1);
assertNull(noField);
final PwsField savedField = savedRec.getField(unknownFieldType);
assertNotNull(savedField);
assertTrue(savedField instanceof PwsUnknownField);
assertEquals(payload, new String(savedField.getBytes()));
}
// A safe with 1000 records must save and reload completely.
public void testLargeFile() throws Exception {
final PwsFileV3 file = (PwsFileV3) PwsFileFactory.newFile();
file.setPassphrase(new StringBuilder(PASSPHRASE));
final int amount = 1000;
TestUtils.addDummyRecords(file, amount);
final PwsFileStorage storage = new PwsFileStorage(filename);
file.setStorage(storage);
System.out.println("\nDone.");
file.save();
System.out.println("Wrote records: " + file.getRecordCount());
file.close();
final PwsFileStorage storage2 = new PwsFileStorage(filename);
final PwsFileV3 file2 = new PwsFileV3(storage2, PASSPHRASE.toString());
file2.readAll();
assertEquals(1000,file2.getRecordCount());
}
/**
 * Checks if a record with a new PASSPHRASE policy field (#16) can be
 * loaded.
 *
 * @throws Exception
 */
public void testNewPasswordPolicyField() throws Exception {
final PwsFileV3 theFile = (PwsFileV3) PwsFileFactory.loadFile("new_policy_bug.psafe3",
new StringBuilder("test"));
assertEquals(1, theFile.getRecordCount());
final Iterator i = theFile.getRecords();
assertTrue(i.hasNext());
final PwsRecordV3 theRow = (PwsRecordV3) i.next();
final PwsField theField = theRow.getField(16);
assertNotNull(theField);
}
}
|
hao-wang/Montage
|
js-test-suite/testsuite/6f7acfa2223c0f2e869de5275a5deb39.js
|
// JavaScriptCore test-suite style regression test.
// fiatInt52 is a JSC-internal test intrinsic, not standard JavaScript —
// presumably it coerces the value through the engine's Int52 representation;
// confirm against the JSC test harness.
function foo() {
return fiatInt52(Math.sqrt(2)) + 1;
}
// noInline is a JSC harness hook that stops the JIT from inlining foo, so
// the function itself gets compiled and exercised.
noInline(foo);
// Iterate enough times to tier up through the JIT and check the result
// stays correct after optimization.
for (var i = 0; i < 1000000; ++i) {
var result = foo();
if (result != Math.sqrt(2) + 1)
throw "Error: bad result: " + result;
}
|
devent/anl-opencl
|
anlopencl-jme3-app-view/src/main/java/com/anrisoftware/anlopencl/jmeapp/view/actors/ViewActor.java
|
<filename>anlopencl-jme3-app-view/src/main/java/com/anrisoftware/anlopencl/jmeapp/view/actors/ViewActor.java<gh_stars>1-10
/*
* Copyright (C) 2021 <NAME> <<EMAIL>>
* Released as open-source under the Apache License, Version 2.0.
*
* ****************************************************************************
* ANL-OpenCL :: JME3 - App - View
* ****************************************************************************
*
* Copyright (C) 2021 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ****************************************************************************
* ANL-OpenCL :: JME3 - App - View is a derivative work based on Josua Tippetts' C++ library:
* http://accidentalnoise.sourceforge.net/index.html
* ****************************************************************************
*
* Copyright (C) 2011 <NAME>
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*
*
* ****************************************************************************
* ANL-OpenCL :: JME3 - App - View bundles and uses the RandomCL library:
* https://github.com/bstatcomp/RandomCL
* ****************************************************************************
*
* BSD 3-Clause License
*
* Copyright (c) 2018, <NAME>, <NAME>, <NAME>. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.anrisoftware.anlopencl.jmeapp.view.actors;
import static com.anrisoftware.anlopencl.jmeapp.messages.CreateActorMessage.createNamedActor;
import static com.jme3.texture.Image.Format.RGBA8;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.CompletionStage;
import javax.inject.Inject;
import org.eclipse.collections.impl.factory.Maps;
import org.lwjgl.system.MemoryStack;
import com.anrisoftware.anlopencl.jme.opencl.MappingRanges;
import com.anrisoftware.anlopencl.jmeapp.actors.ActorSystemProvider;
import com.anrisoftware.anlopencl.jmeapp.messages.BuildStartMessage.BuildFinishedMessage;
import com.anrisoftware.anlopencl.jmeapp.messages.MessageActor.Message;
import com.anrisoftware.anlopencl.jmeapp.messages.ResetCameraMessage;
import com.anrisoftware.anlopencl.jmeapp.model.GameMainPanePropertiesProvider;
import com.anrisoftware.anlopencl.jmeapp.model.ObservableGameMainPaneProperties;
import com.anrisoftware.anlopencl.jmeapp.view.components.ImageComponent;
import com.anrisoftware.anlopencl.jmeapp.view.components.KernelComponent;
import com.anrisoftware.anlopencl.jmeapp.view.messages.AttachViewAppStateDoneMessage;
import com.anrisoftware.anlopencl.jmeapp.view.states.CameraPanningAppState;
import com.anrisoftware.anlopencl.jmeapp.view.states.ViewAppState;
import com.badlogic.ashley.core.Engine;
import com.badlogic.ashley.core.Entity;
import com.google.inject.Injector;
import com.google.inject.assistedinject.Assisted;
import com.jme3.app.Application;
import com.jme3.opencl.lwjgl.LwjglBuffer;
import com.jme3.opencl.lwjgl.LwjglContext;
import com.jme3.texture.Texture2D;
import akka.actor.typed.ActorRef;
import akka.actor.typed.Behavior;
import akka.actor.typed.javadsl.ActorContext;
import akka.actor.typed.javadsl.Behaviors;
import akka.actor.typed.javadsl.StashBuffer;
import akka.actor.typed.receptionist.ServiceKey;
import lombok.extern.slf4j.Slf4j;
/**
 * Attaches the {@link ViewAppState} and the {@link CameraPanningAppState} to
 * the jME {@link Application} and reacts to view related messages
 * (camera reset, kernel build finished).
 *
 * @author <NAME> {@literal <EMAIL>}
 */
@Slf4j
public class ViewActor {

    /**
     * Factory to create the {@link ViewActor}.
     *
     * @author <NAME>
     */
    public interface ViewActorFactory {

        ViewActor create(StashBuffer<Message> stash, ActorContext<Message> context);
    }

    /** Receptionist service key under which this actor is registered. */
    public static final ServiceKey<Message> KEY = ServiceKey.create(Message.class, ViewActor.class.getSimpleName());

    /** Actor name. */
    public static final String NAME = ViewActor.class.getSimpleName();

    /** Numeric actor id, derived from the service key. */
    public static final int ID = KEY.hashCode();

    /**
     * Creates the behavior of the {@link ViewActor}.
     *
     * @param injector the {@link Injector}.
     * @return the {@link Behavior} of the {@link ViewActor}.
     */
    public static Behavior<Message> create(Injector injector) {
        // up to 100 messages can be stashed while the app states attach
        return Behaviors.withStash(100, stash -> Behaviors.setup((context) -> {
            return injector.getInstance(ViewActorFactory.class).create(stash, context).start();
        }));
    }

    /**
     * Creates and registers the {@link ViewActor} on the actor system.
     *
     * @param injector the {@link Injector}.
     * @param timeout the creation timeout.
     * @return the created {@link ActorRef}, completed asynchronously.
     */
    public static CompletionStage<ActorRef<Message>> create(Injector injector, Duration timeout) {
        var system = injector.getInstance(ActorSystemProvider.class).getActorSystem();
        return createNamedActor(system, timeout, ID, KEY, NAME, ViewActor.create(injector));
    }

    // Observable UI properties (kernel name, image size, mapping ranges, ...).
    private final ObservableGameMainPaneProperties gmpp;

    // LWJGL OpenCL context; used to create the CL buffer for the mapping ranges.
    private final LwjglContext clContext;

    // Noise image entities, keyed by kernel name.
    private final Map<String, Entity> noiseImageEntities;

    @Assisted
    @Inject
    private ActorContext<Message> context;

    @Assisted
    @Inject
    private StashBuffer<Message> buffer;

    @Inject
    private Application app;

    @Inject
    private ViewAppState viewAppState;

    @Inject
    private Engine engine;

    @Inject
    private CameraPanningAppState cameraPanningAppState;

    @Inject
    public ViewActor(GameMainPanePropertiesProvider gpp, com.jme3.opencl.Context openclContext) {
        this.gmpp = gpp.get();
        // assumes the jME OpenCL context is the LWJGL implementation — TODO confirm
        this.clContext = (LwjglContext) openclContext;
        this.noiseImageEntities = Maps.mutable.empty();
    }

    /**
     * Attaches the {@link ViewAppState}. Returns a new behavior that responds to:
     * <ul>
     * <li>{@link AttachViewAppStateDoneMessage}
     * </ul>
     * All other messages are stashed until the app state reports it is attached.
     */
    public Behavior<Message> start() {
        app.enqueue(() -> {
            viewAppState.setActor(context.getSelf());
            // attach each app state only once; it may already be present
            if (!app.getStateManager().hasState(viewAppState.getId())) {
                app.getStateManager().attach(viewAppState);
            }
            if (!app.getStateManager().hasState(cameraPanningAppState.getId())) {
                app.getStateManager().attach(cameraPanningAppState);
            }
        });
        return Behaviors.receive(Message.class)//
                .onMessage(AttachViewAppStateDoneMessage.class, this::onAttachViewAppStateDone)//
                .onMessage(Message.class, (m) -> {
                    log.debug("stashOtherCommand: {}", m);
                    buffer.stash(m);
                    return Behaviors.same();
                })//
                .build();
    }

    /**
     * Creates the initial noise image entity and unstashes all buffered
     * messages. Returns a new behavior that responds to:
     * <ul>
     * <li>{@link ResetCameraMessage}
     * <li>{@link BuildFinishedMessage}
     * </ul>
     */
    private Behavior<Message> onAttachViewAppStateDone(AttachViewAppStateDoneMessage m) {
        log.debug("onAttachViewAppStateDone");
        app.enqueue(() -> {
            var entity = engine.createEntity().add(new ImageComponent(10, 10));
            noiseImageEntities.put(gmpp.kernelName.get(), entity);
            engine.addEntity(entity);
        });
        return buffer.unstashAll(Behaviors.receive(Message.class)//
                .onMessage(ResetCameraMessage.class, this::onResetCamera)//
                .onMessage(BuildFinishedMessage.class, this::onBuildFinished)//
                .build());
    }

    // Resets the camera; the actual work runs on the render thread.
    private Behavior<Message> onResetCamera(ResetCameraMessage m) {
        log.debug("onResetCamera {}", m);
        app.enqueue(() -> {
            cameraPanningAppState.resetCamera();
        });
        return Behaviors.same();
    }

    // Rebuilds the output texture after a kernel build finished.
    private Behavior<Message> onBuildFinished(BuildFinishedMessage m) {
        log.debug("onBuildFinished {}", m);
        app.enqueue(() -> {
            updateTexture();
        });
        return Behaviors.same();
    }

    /**
     * Replaces the {@link KernelComponent} of the current noise image entity:
     * disposes the previous texture and CL buffer (if any), then creates a new
     * texture and mapping-ranges buffer from the current UI properties.
     * Must run on the render thread (enqueued by the caller).
     */
    private void updateTexture() {
        log.debug("updateTexture");
        var entity = noiseImageEntities.get(gmpp.kernelName.get());
        if (KernelComponent.m.has(entity)) {
            // release old GPU resources before replacing the component
            var kc = entity.remove(KernelComponent.class);
            kc.tex.getImage().dispose();
            kc.ranges.release();
        }
        try (var s = MemoryStack.stackPush()) {
            int width = gmpp.width.get();
            int height = gmpp.height.get();
            var tex = new Texture2D(width, height, 1, RGBA8);
            var ranges = MappingRanges.createWithBuffer(s);
            if (gmpp.map3d.get()) {
                setMap3D(ranges);
            } else {
                setMap2D(ranges);
            }
            var rangesb = new LwjglBuffer(ranges.getClBuffer(s, clContext.getContext()));
            entity.add(new KernelComponent(tex, rangesb));
        }
    }

    // Copies the 2D mapping range from the UI properties into ranges.
    private void setMap2D(MappingRanges ranges) {
        ranges.setMap2D(gmpp.mapx0.get(), gmpp.mapx1.get(), gmpp.mapy0.get(), gmpp.mapy1.get());
    }

    // Copies the 3D mapping range from the UI properties into ranges.
    private void setMap3D(MappingRanges ranges) {
        ranges.setMap3D(gmpp.mapx0.get(), gmpp.mapx1.get(), gmpp.mapy0.get(), gmpp.mapy1.get(), gmpp.mapz0.get(),
                gmpp.mapz1.get());
    }
}
|
galperins4/ARK-Python
|
park/api/delegate.py
|
<filename>park/api/delegate.py<gh_stars>1-10
#!/usr/bin/env python
from park.api.api import API
class Delegate(API):
    """Wrapper around the ARK node ``api/delegates`` HTTP endpoints.

    Each method forwards to :class:`API` via ``self.get``/``self.post`` and
    returns the decoded node response.  Optional ``parameters`` dicts carry
    extra query arguments and may override the method-supplied keys, matching
    the original merge order.

    Note: the original code used mutable default arguments (``parameters={}``),
    a well-known Python pitfall; ``None`` sentinels are used instead, with
    identical behavior for callers.
    """

    def count(self):
        """Return the total number of registered delegates."""
        return self.get('api/delegates/count')

    def search(self, query, parameters=None):
        """Search delegates by username fragment ``query``."""
        return self.get('api/delegates/search', {"q": query, **(parameters or {})})

    def voters(self, publicKey):
        """List the accounts voting for the delegate with ``publicKey``."""
        return self.get('api/delegates/voters', {"publicKey": publicKey})

    def delegate(self, parameters=None):
        """Fetch a single delegate; filter via ``parameters`` (e.g. username)."""
        return self.get('api/delegates/get', parameters or {})

    def delegates(self, parameters=None):
        """List delegates, optionally filtered/paged by ``parameters``."""
        return self.get('api/delegates', parameters or {})

    def fee(self):
        """Return the delegate-registration fee."""
        return self.get('api/delegates/fee')

    def forgedByAccount(self, generatorPublicKey):
        """Return forging totals for the delegate with ``generatorPublicKey``."""
        return self.get('api/delegates/forging/getForgedByAccount',
                        {"generatorPublicKey": generatorPublicKey})

    def create(self, secret, username, secondSecret=None):
        """Build and broadcast a delegate-registration transaction."""
        transaction = self.client.delegateBuilder().create(
            secret, username, secondSecret)
        return self.client.transport().createTransaction(transaction)

    def nextForgers(self):
        """Return the delegates scheduled to forge next."""
        return self.get('api/delegates/getNextForgers')

    def enableForging(self, secret, parameters=None):
        """Enable forging on the node for the delegate with ``secret``."""
        return self.post('api/delegates/forging/enable',
                         {"secret": secret, **(parameters or {})})

    def disableForging(self, secret, parameters=None):
        """Disable forging on the node for the delegate with ``secret``."""
        return self.post('api/delegates/forging/disable',
                         {"secret": secret, **(parameters or {})})

    def forgingStatus(self, publicKey, parameters=None):
        """Return the node's forging status for ``publicKey``."""
        return self.get('api/delegates/forging/status',
                        {"publicKey": publicKey, **(parameters or {})})
|
DeltaEngine/DeltaEngineCpp
|
Hypodermic/ContainerBuilder.hpp
|
<gh_stars>1-10
#ifdef HYPODERMIC_CONTAINER_BUILDER_H_
# ifndef HYPODERMIC_CONTAINER_BUILDER_HPP_
# define HYPODERMIC_CONTAINER_BUILDER_HPP_

// Out-of-line definitions of ContainerBuilder's template methods.
// NOTE: the outer guard is #ifdef (not #ifndef), so this file only takes
// effect once the matching ContainerBuilder.h header has been included.

# include <type_traits>

# include <Hypodermic/RegistrationBuilderFactory.h>
# include <Hypodermic/RootScopeLifetime.h>


namespace Hypodermic
{

    // Registers T through its autowired constructor: T must expose an
    // AutowiredSignature typedef from which a creation delegate is derived.
    template <class T>
    std::shared_ptr< typename ContainerBuilder::RegistrationBuilderInterface< T >::Type >
    ContainerBuilder::autowireType()
    {
        typedef typename T::AutowiredSignature AutowiredSignature;

        // fail at compile time if the constructor signature cannot be autowired
        static_assert(AutowiredSignature::IsSignatureRecognized::value, "Unable to use this autowired constructor.");

        auto rb = RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::forDelegate(AutowiredSignature::createDelegate());

        // actual registration is deferred until the container is built
        registerCallback(
            [rb](std::shared_ptr< IComponentRegistry > cr) -> void
            {
                RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::registerSingleComponent< T >(cr, rb);
            });

        return rb;
    }

    // Registers T to be created by a caller-supplied factory delegate.
    template <class T>
    std::shared_ptr< typename ContainerBuilder::RegistrationBuilderInterface< T >::Type >
    ContainerBuilder::registerType(std::function< T*(IComponentContext&) > delegate)
    {
        static_assert(!std::is_pod< T >::value || std::is_empty< T >::value || std::is_class< T >::value,
                      "ContainerBuilder::registerType< T >() is incompatible with POD types.");

        auto rb = RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::forDelegate(delegate);

        // actual registration is deferred until the container is built
        registerCallback(
            [rb](std::shared_ptr< IComponentRegistry > cr) -> void
            {
                RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::registerSingleComponent< T >(cr, rb);
            });

        return rb;
    }

    // Registers T to be constructed by the container itself.
    template <class T>
    std::shared_ptr< typename ContainerBuilder::RegistrationBuilderInterface< T >::Type >
    ContainerBuilder::registerType()
    {
        static_assert(!std::is_pod< T >::value || std::is_empty< T >::value || std::is_class< T >::value,
                      "ContainerBuilder::registerType< T >() is incompatible with POD types.");

        auto rb = RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::forType< T >();

        // actual registration is deferred until the container is built
        registerCallback(
            [rb](std::shared_ptr< IComponentRegistry > cr) -> void
            {
                RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::registerSingleComponent< T >(cr, rb);
            });

        return rb;
    }

    // Registers an already-created instance; instance registrations are
    // forced to single-instance (shared, root-scope) lifetime.
    template <class T>
    std::shared_ptr< typename ContainerBuilder::RegistrationBuilderInterface< T >::Type >
    ContainerBuilder::registerInstance(std::shared_ptr< T > instance)
    {
        static_assert(!std::is_pod< T >::value || std::is_empty< T >::value || std::is_class< T >::value,
                      "ContainerBuilder::registerType< T >(std::shared_ptr< T > instance) is incompatible with POD types.");

        auto rb = RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::forInstance(instance);
        rb->singleInstance();

        registerCallback(
            [rb](std::shared_ptr< IComponentRegistry > cr) -> void
            {
                // guard: reject any attempt to re-scope an instance registration
                auto rootScopeLifetime = std::dynamic_pointer_cast< RootScopeLifetime >(rb->registrationData().lifetime());
                if (rootScopeLifetime == nullptr || rb->registrationData().sharing() != InstanceSharing::Shared)
                    throw std::logic_error("Instance registration is single instance only.");

                RegistrationBuilderFactory< ContainerBuilder::RegistrationBuilderInterface >::registerSingleComponent< T >(cr, rb);
            });

        return rb;
    }

} // namespace Hypodermic

# endif /* !HYPODERMIC_CONTAINER_BUILDER_HPP_ */
#endif /* HYPODERMIC_CONTAINER_BUILDER_H_ */
|
bozhidarbozhilov/softuni-projects-java
|
Databases Frameworks - Hibernate & Spring Data October 2018/jsonprocessingexercises/src/main/java/app/models/dto/Query4Dto.java
|
<filename>Databases Frameworks - Hibernate & Spring Data October 2018/jsonprocessingexercises/src/main/java/app/models/dto/Query4Dto.java
package app.models.dto;
import app.models.entities.User;
import org.modelmapper.AbstractConverter;
import org.modelmapper.Converter;
import org.modelmapper.ModelMapper;
import javax.xml.bind.annotation.*;
import java.util.Collection;
import java.util.List;
import java.util.Set;
@XmlRootElement(name="users")
@XmlAccessorType(XmlAccessType.FIELD)
public class Query4Dto {

    /** Number of users, serialized as the {@code count} XML attribute. */
    @XmlAttribute(name="count")
    private int usersCount;

    /** User entries, each serialized as a {@code user} XML element. */
    @XmlElement(name="user")
    private List<Query4UsersDto> users;

    /** No-args constructor required by JAXB. */
    public Query4Dto() {
    }

    /**
     * Creates a fully populated DTO.
     *
     * @param count the number of users.
     * @param userList the user entries.
     */
    public Query4Dto(int count, List<Query4UsersDto> userList) {
        this.usersCount = count;
        this.users = userList;
    }

    public int getUsersCount() {
        return this.usersCount;
    }

    public void setUsersCount(int count) {
        this.usersCount = count;
    }

    public List<Query4UsersDto> getUsers() {
        return this.users;
    }

    public void setUsers(List<Query4UsersDto> userList) {
        this.users = userList;
    }
}
|
ScalablyTyped/SlinkyTyped
|
a/activex-libreoffice/src/main/scala/typingsSlinky/activexLibreoffice/com_/sun/star/frame/XDocumentTemplates.scala
|
<reponame>ScalablyTyped/SlinkyTyped
package typingsSlinky.activexLibreoffice.com_.sun.star.frame
import typingsSlinky.activexLibreoffice.`type`
import typingsSlinky.activexLibreoffice.com_.sun.star.ucb.XContent
import typingsSlinky.activexLibreoffice.com_.sun.star.uno.XInterface
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
 * provides a high level API to organize document templates
 *
 * Template information are saved as links to the original content and organized in groups. This data should be persistent and can be updated by calling
 * special method {@link XDocumentTemplates.update()} . A real implementation of this interface can do that on top of an ucb content provider. Method
 * {@link XDocumentTemplates.getContent()} force that.
 */
// js.native facade: every member is implemented by the underlying UNO runtime
// object; the Scala bodies are stubs.
@js.native
trait XDocumentTemplates extends XInterface {

  /**
    * provides access to the root of internal used hierarchy
    *
    * This content can be used for accessing the groups directly.
    * @returns the ucb content for template configuration
    */
  val Content: XContent = js.native

  /**
    * creates a new group
    * @param GroupName the name of the group to be created
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    */
  def addGroup(GroupName: String): Boolean = js.native

  /**
    * creates the template with the given name in the given group using the given URL
    * @param GroupName specifies the group
    * @param TemplateName specifies the template
    * @param SourceURL specifies the position of template
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    * @see XDocumentTemplates.storeTemplate()
    */
  def addTemplate(GroupName: String, TemplateName: String, SourceURL: String): Boolean = js.native

  /**
    * provides access to the root of internal used hierarchy
    *
    * This content can be used for accessing the groups directly.
    * @returns the ucb content for template configuration
    */
  def getContent(): XContent = js.native

  /**
    * remove an existing group
    * @param GroupName the name of the group to be removed
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    */
  def removeGroup(GroupName: String): Boolean = js.native

  /**
    * remove a template from specified group
    * @param GroupName specifies the group which include the template
    * @param TemplateName specifies the template for delete
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    */
  def removeTemplate(GroupName: String, TemplateName: String): Boolean = js.native

  /**
    * rename an existing group
    * @param OldGroupName the old name of the group
    * @param NewGroupName the new name of the group
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    */
  def renameGroup(OldGroupName: String, NewGroupName: String): Boolean = js.native

  /**
    * rename a template inside specified group
    * @param GroupName specifies the group which include the template
    * @param OldTemplateName specifies the template for renaming
    * @param NewTemplateName specifies the new name for the template
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    */
  def renameTemplate(GroupName: String, OldTemplateName: String, NewTemplateName: String): Boolean = js.native

  /**
    * creates the template with the given name in the given group using the data from the storable
    * @param GroupName specifies the group
    * @param TemplateName specifies the template
    * @param Storable specifies the target
    * @returns `TRUE` if operation was successful ; `FALSE` otherwise
    * @see XDocumentTemplates.addTemplate()
    */
  def storeTemplate(GroupName: String, TemplateName: String, Storable: XStorable): Boolean = js.native

  /**
    * force an update for internal structures
    *
    * Because the templates are well known by links and not as direct content they can be outdated. An update force actualization of that to find wrong
    * links.
    */
  def update(): Unit = js.native
}
object XDocumentTemplates {

  /**
    * Builds a structurally conforming [[XDocumentTemplates]] instance from
    * plain Scala functions by assembling a JS dynamic literal and casting it.
    * Mainly useful for tests and stubs on the Scala side.
    */
  @scala.inline
  def apply(
    Content: XContent,
    acquire: () => Unit,
    addGroup: String => Boolean,
    addTemplate: (String, String, String) => Boolean,
    getContent: () => XContent,
    queryInterface: `type` => js.Any,
    release: () => Unit,
    removeGroup: String => Boolean,
    removeTemplate: (String, String) => Boolean,
    renameGroup: (String, String) => Boolean,
    renameTemplate: (String, String, String) => Boolean,
    storeTemplate: (String, String, XStorable) => Boolean,
    update: () => Unit
  ): XDocumentTemplates = {
    val __obj = js.Dynamic.literal(Content = Content.asInstanceOf[js.Any], acquire = js.Any.fromFunction0(acquire), addGroup = js.Any.fromFunction1(addGroup), addTemplate = js.Any.fromFunction3(addTemplate), getContent = js.Any.fromFunction0(getContent), queryInterface = js.Any.fromFunction1(queryInterface), release = js.Any.fromFunction0(release), removeGroup = js.Any.fromFunction1(removeGroup), removeTemplate = js.Any.fromFunction2(removeTemplate), renameGroup = js.Any.fromFunction2(renameGroup), renameTemplate = js.Any.fromFunction3(renameTemplate), storeTemplate = js.Any.fromFunction3(storeTemplate), update = js.Any.fromFunction0(update))
    __obj.asInstanceOf[XDocumentTemplates]
  }

  /**
    * Fluent mutators for an existing instance; each setter overwrites exactly
    * one member of the underlying JS object in place and returns the receiver.
    * (ScalablyTyped generated builder pattern.)
    */
  @scala.inline
  implicit class XDocumentTemplatesMutableBuilder[Self <: XDocumentTemplates] (val x: Self) extends AnyVal {

    @scala.inline
    def setAddGroup(value: String => Boolean): Self = StObject.set(x, "addGroup", js.Any.fromFunction1(value))

    @scala.inline
    def setAddTemplate(value: (String, String, String) => Boolean): Self = StObject.set(x, "addTemplate", js.Any.fromFunction3(value))

    @scala.inline
    def setContent(value: XContent): Self = StObject.set(x, "Content", value.asInstanceOf[js.Any])

    @scala.inline
    def setGetContent(value: () => XContent): Self = StObject.set(x, "getContent", js.Any.fromFunction0(value))

    @scala.inline
    def setRemoveGroup(value: String => Boolean): Self = StObject.set(x, "removeGroup", js.Any.fromFunction1(value))

    @scala.inline
    def setRemoveTemplate(value: (String, String) => Boolean): Self = StObject.set(x, "removeTemplate", js.Any.fromFunction2(value))

    @scala.inline
    def setRenameGroup(value: (String, String) => Boolean): Self = StObject.set(x, "renameGroup", js.Any.fromFunction2(value))

    @scala.inline
    def setRenameTemplate(value: (String, String, String) => Boolean): Self = StObject.set(x, "renameTemplate", js.Any.fromFunction3(value))

    @scala.inline
    def setStoreTemplate(value: (String, String, XStorable) => Boolean): Self = StObject.set(x, "storeTemplate", js.Any.fromFunction3(value))

    @scala.inline
    def setUpdate(value: () => Unit): Self = StObject.set(x, "update", js.Any.fromFunction0(value))
  }
}
|
imaxfp/practical-understanding-concurrency
|
src/main/java/com/understand/concurrency/task2_1/Main.java
|
package com.understand.concurrency.task2_1;
import java.util.LinkedList;
import java.util.Queue;
import java.util.logging.Logger;
public class Main {

    /**
     * Entry point: wires one producer and one consumer to a shared queue and
     * starts both threads. The threads coordinate access to the queue
     * themselves via {@code wait()}/{@code notify()}.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Logger.getGlobal().info("Solving Producer/Consumer Problem with 'wait()' 'notify()' implementation");

        // shared queue; Producer/Consumer are responsible for synchronizing on it
        Queue<Integer> sharedBuffer = new LinkedList<>();

        Thread producerThread = new Producer(sharedBuffer, 10, "Producer");
        Thread consumerThread = new Consumer(sharedBuffer, "Consumer");

        producerThread.start();
        consumerThread.start();
    }
}
|
brianchowlab/BcLOV4-FEM
|
FELICITY/Static_Codes/AHF/src_code/Mesh.cc
|
/*
============================================================================================
Class for array based half-facet (AHF) data structure. This is a sub-class of BaseMesh
which stores a *pointer* to a BasePtCoord object. This way, many different meshes,
of varying topological dimension, can share the same vertex (point) coordinates. This
also means that we can implement various methods in a more general way.
Note: Everything is indexed starting at 0!
Also note: using a vector of structs is 2 x faster than using a vector of integers.
Copyright (c) 05-23-2020, <NAME>
============================================================================================
*/
#define _MESH_CC
#ifndef _BASICMATH_H
#include "BasicMath.h" // simple math operations
#endif
#ifndef _BASEMESH_CC
#include "BaseMesh.cc" // base class for all mesh topology stuff
#endif
#ifndef _BASEPTCOORD_CC
#include "BasePtCoord.cc" // base class for all vertex coordinates
#endif
/* C++ class definition */
#define MMC Mesh
// template the topological and geometric dimension
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
class MMC: public BaseMesh<CELL_DIM>
{
public:
    MMC();                       // default: no vertex data attached yet
    MMC(BasePtCoord<GEO_DIM>*);  // attach shared vertex coordinates and open for editing
    ~MMC();

    // open Mesh for modification (both topology and vertex coordinates)
    inline void Open()
    {
        BaseMesh<CELL_DIM>::Open();
        Vtx->Open();
    };
    // close Mesh; modification is no longer allowed
    inline void Close()
    {
        BaseMesh<CELL_DIM>::Close();
        Vtx->Close();
    };
    // clear all data (cell connectivity and vertex coordinates)
    void Clear()
    {
        BaseMesh<CELL_DIM>::Clear();
        Vtx->Clear();
    };

    // set pointer to vertex coordinates (shared, not owned)
    void Set_Vertex_Data(BasePtCoord<GEO_DIM>*);

    // coordinate conversion between reference, barycentric, and cartesian
    // coordinates; all operate on arrays of points, one point per given cell
    void Reference_To_Cartesian(const CellIndType&, const CellIndType*, const PointType*, PointType*);
    void Cartesian_To_Reference(const CellIndType&, const CellIndType*, const PointType*, PointType*);
    void Barycentric_To_Reference(const CellIndType&, const PointType*, PointType*);
    void Reference_To_Barycentric(const CellIndType&, const PointType*, PointType*);
    void Barycentric_To_Cartesian(const CellIndType&, const CellIndType*, const PointType*, PointType*);
    void Cartesian_To_Barycentric(const CellIndType&, const CellIndType*, const PointType*, PointType*);

    // per-cell quantities
    void Diameter(const CellIndType&, const CellIndType*, RealType*);
    void Bounding_Box(const CellIndType&, const CellIndType*, PointType*, PointType*);
    void Bounding_Box(PointType*, PointType*);
    void Volume(const CellIndType&, const CellIndType*, RealType*);
    // TODO: angle computations

    // simplex centers
    void Barycenter(const CellIndType&, const CellIndType*, PointType*);
    void Circumcenter(const CellIndType&, const CellIndType*, PointType*, RealType*);
    void Incenter(const CellIndType&, const CellIndType*, PointType*, RealType*);

    // others
    void Shape_Regularity(const CellIndType&, const CellIndType*, RealType*);

private:
    /* keep a pointer to the set of global vertex coordinates
       (shared among meshes of different topological dimension) */
    BasePtCoord<GEO_DIM>* Vtx;

    // basic internal method: null the vertex pointer and validate dimensions
    void init();
};
/***************************************************************************************/
/* constructor */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
MMC<CELL_DIM, GEO_DIM>::MMC() : BaseMesh<CELL_DIM>()
{
    // basic initialization; vertex pointer stays NULL until Set_Vertex_Data
    init();
}
/***************************************************************************************/
/* constructor */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
MMC<CELL_DIM, GEO_DIM>::MMC(BasePtCoord<GEO_DIM>* Input_Vtx) : BaseMesh<CELL_DIM>()
{
    init();                      // basic initialization
    Set_Vertex_Data(Input_Vtx);  // attach the shared vertex coordinate object
    Open();                      // init to open: the mesh starts out modifiable
}
/***************************************************************************************/
/* basic initialization: null the vertex pointer and validate the dimensions */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::init()
{
    // initialize pointer to vertex coordinates
    Vtx = (BasePtCoord<GEO_DIM>*) NULL;

    // A cell of topological dimension CELL_DIM cannot live in a lower
    // dimensional ambient space, so reject that combination up front.
    // NOTE(review): both values are template parameters, so this could be a
    // static_assert — confirm the desired failure mode before changing it.
    if (GEO_DIM < CELL_DIM)
    {
        std::cout << "Desired topological dimension of a cell is " << CELL_DIM << "." << std::endl;
        std::cout << "Desired geometric dimension is " << GEO_DIM << "." << std::endl;
        std::cout << "Geometric dimension must be >= topological dimension!" << std::endl;
        std::exit(1);
    }
    //std::cout << "Mesh constructor..." << std::endl;
}
/***************************************************************************************/
/* DE-structor */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
MMC<CELL_DIM, GEO_DIM>::~MMC()
{
    // release cell connectivity and vertex coordinate data
    Clear();
    //std::cout << "Mesh destructor..." << std::endl;
}
/***************************************************************************************/
/* set pointer for the vertex coordinate data */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Set_Vertex_Data(BasePtCoord<GEO_DIM>* Input_Vtx)
{
    // store the pointer only: the coordinates are shared, not owned or copied
    Vtx = Input_Vtx;
}
/***************************************************************************************/
/* convert reference element coordinates to cartesian coordinates, where the reference
element is the "standard" reference simplex.
Inputs: number of cells, and the cell indices (an array),
reference coordinates (an array of consecutive groups of length CELL_DIM
Outputs: cartesian coordinates (an array of consecutive groups of length GEO_DIM
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Reference_To_Cartesian(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 const PointType* PR,
                 PointType* PC)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    // one ref-coordinate group in, one cartesian group out, per cell
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
    {
        const VtxIndType* cell_vtx = mesh.Get_Cell_vtx(CI[cc]);
        Simplex_Reference_To_Cartesian<CELL_DIM, GEO_DIM>(coords, cell_vtx, PR + cc*CELL_DIM, PC + cc*GEO_DIM);
    }
}
/***************************************************************************************/
/* convert cartesian coordinates to reference element coordinates, where the reference
element is the "standard" reference simplex.
Inputs: number of cells, and the cell indices (an array),
cartesian coordinates (an array of consecutive groups of length GEO_DIM
Outputs: reference coordinates (an array of consecutive groups of length CELL_DIM
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Cartesian_To_Reference(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 const PointType* PC,
                 PointType* PR)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    // one cartesian group in, one ref-coordinate group out, per cell
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
    {
        const VtxIndType* cell_vtx = mesh.Get_Cell_vtx(CI[cc]);
        Simplex_Cartesian_To_Reference<CELL_DIM, GEO_DIM>(coords, cell_vtx, PC + cc*GEO_DIM, PR + cc*CELL_DIM);
    }
}
/***************************************************************************************/
/* convert barycentric coordinates to reference element coordinates.
Inputs: number of cells,
barycentric coordinates (an array of consecutive groups of length (CELL_DIM+1)
Outputs: reference coordinates (an array of consecutive groups of length CELL_DIM
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Barycentric_To_Reference(
                 const CellIndType& Num_Cell, const PointType* PB,
                 PointType* PR)
{
    // per point: copy barycentric coordinates 1..CELL_DIM (the 0th is
    // redundant) into the reference coordinate group
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
        Simplex_Barycentric_To_Reference<CELL_DIM>(PB + cc*(CELL_DIM+1), PR + cc*CELL_DIM);
}
/***************************************************************************************/
/* convert reference element coordinates to barycentric coordinates.
Inputs: number of cells,
reference coordinates (an array of consecutive groups of length CELL_DIM
Outputs: barycentric coordinates (an array of consecutive groups of length (CELL_DIM+1)
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Reference_To_Barycentric(
                 const CellIndType& Num_Cell, const PointType* PR,
                 PointType* PB)
{
    // per point: expand the CELL_DIM reference coordinates into the
    // (CELL_DIM+1) barycentric coordinates
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
        Simplex_Reference_To_Barycentric<CELL_DIM>(PR + cc*CELL_DIM, PB + cc*(CELL_DIM+1));
}
/***************************************************************************************/
/* convert barycentric coordinates to cartesian coordinates.
Inputs: number of cells, and the cell indices (an array),
barycentric coordinates (an array of consecutive groups of length (CELL_DIM+1)
Outputs: cartesian coordinates (an array of consecutive groups of length GEO_DIM
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Barycentric_To_Cartesian(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 const PointType* PB,
                 PointType* PC)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    // one barycentric group in, one cartesian group out, per cell
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
    {
        const VtxIndType* cell_vtx = mesh.Get_Cell_vtx(CI[cc]);
        Simplex_Barycentric_To_Cartesian<CELL_DIM, GEO_DIM>(coords, cell_vtx, PB + cc*(CELL_DIM+1), PC + cc*GEO_DIM);
    }
}
/***************************************************************************************/
/* convert cartesian coordinates to barycentric coordinates.
Inputs: number of cells, and the cell indices (an array),
cartesian coordinates (an array of consecutive groups of length GEO_DIM
Outputs: barycentric coordinates (an array of consecutive groups of length (CELL_DIM+1)
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Cartesian_To_Barycentric(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 const PointType* PC,
                 PointType* PB)
{
    // go through the reference domain: cartesian -> reference -> barycentric
    // BUG FIX: the original allocated the scratch buffer with new[] but freed
    // it with scalar delete (undefined behavior); std::vector manages it safely.
    std::vector<PointType> PR(Num_Cell*CELL_DIM);
    Cartesian_To_Reference(Num_Cell, CI, PC, PR.data());
    Reference_To_Barycentric(Num_Cell, PR.data(), PB);
}
/***************************************************************************************/
/* compute the diameter of (simplex) cells in the mesh.
Inputs: number of cells, and the cell indices (an array)
Outputs: diameter of each simplex (an array), defined to be the maximum edge length
NOTE: the length of the arrays equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Diameter(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 RealType* Diam)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    if (CELL_DIM==0)
    {
        // a single point has zero diameter by definition
        std::fill_n(Diam, Num_Cell, 0.0);
    }
    else
    {
        // diameter = maximum edge length of each requested simplex
        for (CellIndType cc = 0; cc < Num_Cell; ++cc)
            Diam[cc] = Simplex_Diameter<CELL_DIM, GEO_DIM>(coords, mesh.Get_Cell_vtx(CI[cc]));
    }
}
/***************************************************************************************/
/* return the bounding (cartesian) box of the given cells in the mesh.
Inputs: number of cells, and the cell indices (an array)
Output: min and max limits of the coordinates (component-wise).
example: if GEO_DIM==3, then
BB_min[] = {X_min, Y_min, Z_min},
BB_max[] = {X_max, Y_max, Z_max}. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Bounding_Box(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 PointType* BB_min, PointType* BB_max)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    // collect every vertex referenced by the given cells
    std::vector<VtxIndType> vtx_list;
    vtx_list.reserve(Num_Cell*(CELL_DIM+1));
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
    {
        const VtxIndType* cell_vtx = mesh.Get_Cell_vtx(CI[cc]);
        for (SmallIndType kk = 0; kk < (CELL_DIM+1); ++kk)
            vtx_list.push_back(cell_vtx[kk]);
    }

    // remove duplicates via the sort + unique + erase idiom
    std::sort(vtx_list.begin(), vtx_list.end());
    vtx_list.erase(std::unique(vtx_list.begin(), vtx_list.end()), vtx_list.end());

    // delegate the min/max sweep to the vertex coordinate object
    coords->Bounding_Box((VtxIndType) vtx_list.size(), vtx_list.data(), BB_min, BB_max);
}
/* return the bounding (cartesian) box of the *entire* mesh. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Bounding_Box(PointType* BB_min, PointType* BB_max)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    // every vertex in the mesh (already unique) bounds the whole mesh
    std::vector<VtxIndType> vtx_list;
    mesh.Get_Unique_Vertices(vtx_list);
    coords->Bounding_Box((VtxIndType) vtx_list.size(), vtx_list.data(), BB_min, BB_max);
}
/***************************************************************************************/
/* compute the volume of (simplex) cells in the mesh.
Inputs: number of cells, and the cell indices (an array)
Outputs: volume of each simplex (an array)
NOTE: the length of the arrays equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Volume(
                 const CellIndType& Num_Cell, const CellIndType* CI,
                 RealType* Vol)
{
    const MMC& mesh = *this;                          // const view of this mesh
    const BasePtCoord<GEO_DIM>* const& coords = Vtx;  // const view of the vertex data

    // one volume per requested simplex
    for (CellIndType cc = 0; cc < Num_Cell; ++cc)
        Vol[cc] = Simplex_Volume<CELL_DIM, GEO_DIM>(coords, mesh.Get_Cell_vtx(CI[cc]));
}
/***************************************************************************************/
/* compute the barycenter of (simplex) cells in the mesh.
Inputs: number of cells, and the cell indices (an array)
Outputs: cartesian coordinates of center (an array of consecutive groups of length GEO_DIM)
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Barycenter(
                const CellIndType& Num_Cell, const CellIndType* CI,
                PointType* CC)
{
    /* Compute the barycenter of each requested (simplex) cell.
       Num_Cell: number of cells to process.
       CI:       array of cell indices (length Num_Cell).
       CC:       output cartesian coordinates; cell ii's center is written to
                 CC[ii*GEO_DIM .. ii*GEO_DIM + (GEO_DIM-1)]. */
    MMC const& this_mesh = *this; // for const
    const BasePtCoord<GEO_DIM>* const& c_Vtx = Vtx; // for const
    // loop through the given cells
    for (CellIndType ii=0; ii < Num_Cell; ++ii)
    {
        // get the vertex indices of the current cell
        const VtxIndType* VI = this_mesh.Get_Cell_vtx(CI[ii]);
        // get relevant output for current cell
        // BUGFIX: the output parameter is named 'CC'; the previous code
        // referenced an undeclared name 'PC', which fails to compile as soon
        // as this template is instantiated.
        PointType* CC_local = CC + ii*GEO_DIM;
        Simplex_Barycenter<CELL_DIM, GEO_DIM>(c_Vtx, VI, CC_local);
    }
}
/***************************************************************************************/
/* compute the circumcenter and circumradius of (simplex) cells in the mesh.
see: https://westy31.home.xs4all.nl/Circumsphere/ncircumsphere.htm#Coxeter
Inputs: number of cells, and the cell indices (an array)
Outputs: barycentric coordinates of center (an array of consecutive groups of length (CELL_DIM+1))
circumradius (an array with same length as number of cells)
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Circumcenter(
                const CellIndType& Num_Cell, const CellIndType* CI,
                PointType* CB, RealType* CR)
{
    // const views of the mesh and of its vertex coordinates
    const MMC& mesh_ref = *this;
    const BasePtCoord<GEO_DIM>* const& vtx_coord = Vtx;

    // for each requested cell, write its circumcenter (barycentric coordinates,
    // CELL_DIM+1 entries per cell) into CB and its circumradius into CR
    for (CellIndType ci = 0; ci < Num_Cell; ++ci)
    {
        const VtxIndType* cell_vtx = mesh_ref.Get_Cell_vtx(CI[ci]);
        PointType* center_out = CB + ci*(CELL_DIM+1);
        CR[ci] = Simplex_Circumcenter<CELL_DIM, GEO_DIM>(vtx_coord, cell_vtx, center_out);
    }
}
/***************************************************************************************/
/* compute the incenter and inradius of (simplex) cells in the mesh.
see: "Coincidences of simplex centers and related facial structures" by Edmonds, et al.
Inputs: number of cells, and the cell indices (an array)
Outputs: barycentric coordinates of center (an array of consecutive groups of length (CELL_DIM+1))
inradius (an array with same length as number of cells)
NOTE: the number of groups equals the number of cells. */
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Incenter(
                const CellIndType& Num_Cell, const CellIndType* CI,
                PointType* CB, RealType* CR)
{
    // const views of the mesh and of its vertex coordinates
    const MMC& mesh_ref = *this;
    const BasePtCoord<GEO_DIM>* const& vtx_coord = Vtx;

    // for each requested cell, write its incenter (barycentric coordinates,
    // CELL_DIM+1 entries per cell) into CB and its inradius into CR
    for (CellIndType ci = 0; ci < Num_Cell; ++ci)
    {
        const VtxIndType* cell_vtx = mesh_ref.Get_Cell_vtx(CI[ci]);
        PointType* center_out = CB + ci*(CELL_DIM+1);
        CR[ci] = Simplex_Incenter<CELL_DIM, GEO_DIM>(vtx_coord, cell_vtx, center_out);
    }
}
/***************************************************************************************/
/* compute the "shape regularity" of (simplex) cells in the mesh.
Inputs: number of cells, and the cell indices (an array)
Outputs: the "shape regularity" ratio (an array with same length as number of cells)
*/
template <SmallIndType CELL_DIM, SmallIndType GEO_DIM>
void MMC<CELL_DIM, GEO_DIM>::Shape_Regularity(
                const CellIndType& Num_Cell, const CellIndType* CI,
                RealType* SR)
{
    // const views of the mesh and of its vertex coordinates
    const MMC& mesh_ref = *this;
    const BasePtCoord<GEO_DIM>* const& vtx_coord = Vtx;

    // one shape-regularity ratio per requested cell, written to SR[ci]
    for (CellIndType ci = 0; ci < Num_Cell; ++ci)
    {
        const VtxIndType* cell_vtx = mesh_ref.Get_Cell_vtx(CI[ci]);
        SR[ci] = Simplex_Shape_Regularity<CELL_DIM, GEO_DIM>(vtx_coord, cell_vtx);
    }
}
// SWW: the only methods that go here are the ones that require BOTH cell connectivity
// *and* vertex coordinates.
// add these methods:
// is it better to have a generic method that calls the other stuff???
/* easy:
Angles (hard if you want to be general...)
(need a local-to-global mapping sub-routine...)
Remove_Unused_Vertices
*/
/*
function [Global_to_Local_Indexing, Local_to_Global_Indexing] =...
Create_Global_Local_Index_Mapping(Ordered_Indices,Max_Global_Index)
%Create_Global_Local_Index_Mapping
%
% This routine creates two vectors of data that act as mappings between 'local' and
% 'global' indexing.
%
% [Global_to_Local_Indexing, Local_to_Global_Indexing] =
% Create_Global_Local_Index_Mapping(Ordered_Indices,Max_Global_Index)
%
% OUTPUTS
% -------
% Global_to_Local_Indexing:
% A (Max_Global_Index x 1) array that maps from global indices to local
% indices. In other words, to get the local index corresponding to global
%   index 'j', just look at Global_to_Local_Indexing(j). Note: some of the rows
%   are zero, meaning those global indices have no corresponding local index.
%
% Local_to_Global_Indexing:
% An Nx1 array that maps from local indices to global indices. In other words,
% to get the global index corresponding to local index 'i', just look at
% Local_to_Global_Indexing(i).
% Note: 'Local_to_Global_Indexing' = 'Ordered_Indices'.
%
% Examples:
%
% Global_to_Local_Indexing:
%
% (Global Indexing) (Col) (Local Indexing)
% Node #1 (Row #1): 6 <-- Local Node #6
% Node #2 (Row #2): 3 <-- Local Node #3
% Node #3 (Row #3): 0 <-- NOT a Local Node
% Node #4 (Row #4): 0 <-- NOT a Local Node
% Node #5 (Row #5): 2 <-- Local Node #2
% Node #6 (Row #6): 1 <-- Local Node #1
% Node #7 (Row #7): 0 <-- NOT a Local Node
% Node #8 (Row #8): 7 <-- Local Node #7
% ...
%
% Local_to_Global_Indexing:
%
% (Local Indexing) (Col) (Global Indexing)
% Node #1 (Row #1): 6 <-- Global Node #6
% Node #2 (Row #2): 5 <-- Global Node #5
% Node #3 (Row #3): 2 <-- Global Node #2
% Node #4 (Row #4): 10 <-- Global Node #10
% Node #5 (Row #5): 14 <-- Global Node #14
% Node #6 (Row #6): 1 <-- Global Node #1
% Node #7 (Row #7): 8 <-- Global Node #8
% Node #8 (Row #8): 17 <-- Global Node #17
% ...
%
% INPUTS
% ------
% Ordered_Indices:
% An Nx1 array of numbers considered to be indices into a global data
% structure. However, row i of 'Ordered_Indices' corresponds to index 'i' in
% the local data structure.
%
% Max_Global_Index:
% Largest index value in the global data structure (must be greater than N).
% Copyright (c) 06-05-2007, <NAME>
Local_to_Global_Indexing = Ordered_Indices;
Local_Index_Vec = (1:1:length(Ordered_Indices))';
Global_to_Local_Indexing = zeros(Max_Global_Index,1);
Global_to_Local_Indexing(Local_to_Global_Indexing,1) = Local_Index_Vec;
end
*/
// make simple Python interface BEFORE doing mesh refinement...
// make uniform refinement a separate thing
// need adaptive refinement
// don't worry about modifying the cell data structure yet, b/c this is connected to refinement
// need topological/connectivity changes!!!! for this, in 3-D, can think of an edge as the intersection of two half-facets?
/* easy:
faceNormal (simplex normal/normal space)
featureEdges (complicated for general dimension)
vertexNormal (averaged normal space when normal space is 1-D)
tetrahedron:
Order_Cell_Vertices_For_Hcurl (special case for tetrahedra, CELL_DIM==3)
faces?
facetNormal?
Get_Facet_Info?
interval:
edgeTangent (simplex tangent/tangent space)
featurePoints
Get_Arclength
vertexTangent (averaged tangent space when tangent space is 1-D)
*/
/* harder:
Get_Adjacency_Matrix
Reorder
Get_Facet_Info?
Get_Laplacian_Smoothing_Matrix
**Refine
*/
/* move to multi-mesh
nearestNeighbor: Vertex closest to a specified point
pointLocation: simplex containing specified point
*/
// For my mesh generator, need to evaluate cut edges. can do this by first finding all intersected tetrahedra. then make an edge mesh from that?
// Or just store all the unique edges of the mesh? think of the edge mesh as a graph! Store each edge as a pair of vertices (with smallest global index first) and the cell that the edge belongs to.
#undef MMC
/***/
|
quanpands/wflow
|
pcraster/pcraster-4.2.0/pcraster-4.2.0/source/pcraster_model_engine/calc_spatialpacking.cc
|
<filename>pcraster/pcraster-4.2.0/pcraster-4.2.0/source/pcraster_model_engine/calc_spatialpacking.cc
#ifndef INCLUDED_STDDEFX
#include "stddefx.h"
#define INCLUDED_STDDEFX
#endif
#ifndef INCLUDED_CALC_SPATIALPACKING
#include "calc_spatialpacking.h"
#define INCLUDED_CALC_SPATIALPACKING
#endif
// Library headers.
// PCRaster library headers.
// Module headers.
/*!
\file
This file contains the implementation of the SpatialPacking class.
*/
//------------------------------------------------------------------------------
// DEFINITION OF STATIC SPATIALPACKING MEMBERS
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// DEFINITION OF SPATIALPACKING MEMBERS
//------------------------------------------------------------------------------
//! Construct a packing for the given raster dimensions (stored by value).
calc::SpatialPacking::SpatialPacking(const geo::RasterDim& rd):
  d_rd(rd)
{
}

//! Destructor; nothing to release, d_rd is held by value.
calc::SpatialPacking::~SpatialPacking()
{
}

//! Return the raster dimensions this packing was constructed with.
const geo::RasterDim& calc::SpatialPacking::rasterDim() const
{
  return d_rd;
}
//------------------------------------------------------------------------------
// DEFINITION OF FREE OPERATORS
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// DEFINITION OF FREE FUNCTIONS
//------------------------------------------------------------------------------
|
Jaikant/tech47-www
|
src/pages/open-positions.js
|
import React from 'react';
import styled from '@emotion/styled';
import { Link, ArrowButton, Email } from '../components/Common';
import OpenPosition from '../components/OpenPosition';
import openpositions from '../data/positions.js';
import { MainDiv } from '../components/Layout';
const Wrapper = styled.div`
display: grid;
grid-template-rows: 1fr 4fr 1fr;
align-items: center;
`;
const InnerWrapper = styled.div`
display: grid;
justify-items: center;
`;
const OpenPositions = () => (
<MainDiv white>
<Wrapper>
<Link to="/">
<ArrowButton text="Go back" />
</Link>
<InnerWrapper>
{openpositions.map(openposition => (
<OpenPosition
position={openposition.position}
description={openposition.description}
/>
))}
</InnerWrapper>
<InnerWrapper>
<Email text={`Send us an email `} />
</InnerWrapper>
</Wrapper>
</MainDiv>
);
export default OpenPositions;
|
bef178/pd
|
src/pd/json/simplejson/SimpleJsonNumber.java
|
package pd.json.simplejson;
import pd.fenc.TextNumber;
import pd.json.type.IJsonNumber;
/**
 * JSON number node backed by {@link TextNumber}; fulfills {@link IJsonNumber}
 * by delegating to the inherited numeric accessors and mutators.
 */
public class SimpleJsonNumber extends TextNumber implements IJsonNumber {

    /** Serial version UID. */
    private static final long serialVersionUID = 1L;

    public SimpleJsonNumber() {
        super();
    }

    /** Return the stored number as a 64-bit float. */
    @Override
    public double getFloat64() {
        return doubleValue();
    }

    /** Return the stored number as a 64-bit integer. */
    @Override
    public long getInt64() {
        return longValue();
    }

    /** Set from a double value; returns {@code this} for chaining. */
    @Override
    public SimpleJsonNumber set(double value) {
        super.set(value);
        return this;
    }

    /** Set from a long value; returns {@code this} for chaining. */
    @Override
    public SimpleJsonNumber set(long value) {
        super.set(value);
        return this;
    }

    /** Set from a raw textual representation; returns {@code this} for chaining. */
    @Override
    public SimpleJsonNumber set(String raw) {
        super.set(raw);
        return this;
    }
}
|
xfslove/extensible-sheet
|
src/main/java/spreadsheet/mapper/w2o/validation/validator/cell/buildin/NumberScaleRangeValidator.java
|
<reponame>xfslove/extensible-sheet
package spreadsheet.mapper.w2o.validation.validator.cell.buildin;
import org.apache.commons.lang3.math.NumberUtils;
import spreadsheet.mapper.model.core.Cell;
import spreadsheet.mapper.model.meta.FieldMeta;
import spreadsheet.mapper.w2o.param.NumberScaleRangeParam;
import spreadsheet.mapper.w2o.validation.validator.cell.CustomSingleCellValidatorAdapter;
/**
* number scale range validator
* <p>
* Created by hanwen on 2017/1/11.
*/
/**
 * Validates that a cell contains a number whose decimal-place count (scale)
 * lies within the configured inclusive [gte, lte] range.
 * <p>
 * Created by hanwen on 2017/1/11.
 */
public class NumberScaleRangeValidator extends CustomSingleCellValidatorAdapter<NumberScaleRangeValidator> {

  private NumberScaleRangeParam param;

  /** Configure the allowed scale range; returns {@code this} for chaining. */
  public NumberScaleRangeValidator param(NumberScaleRangeParam param) {
    this.param = param;
    return this;
  }

  @Override
  protected NumberScaleRangeValidator getThis() {
    return this;
  }

  @Override
  protected boolean customValid(Cell cell, FieldMeta fieldMeta) {
    final String raw = cell.getValue();
    if (!NumberUtils.isNumber(raw)) {
      return false;
    }
    // scale = number of digits after the decimal point (0 when there is none)
    final String[] pieces = raw.split("\\.");
    final int scale = (pieces.length == 1) ? 0 : pieces[1].length();
    return param.getGte() <= scale && scale <= param.getLte();
  }
}
|
onberry/HowToProgram
|
src/chapter6/Ch631_GuessANumberModified.java
|
<gh_stars>0
package chapter6;
import java.util.Random;
import java.util.Scanner;
/**
* Created by ola on 9/10/17.
* Java. How to program P&H Deitel
* Exercise 6.31
* Number guessing game with counter. (Binary search like)
*/
public class Ch631_GuessANumberModified {

    Random r = new Random();

    /** Pick the secret number, uniformly distributed in [1, 1000]. */
    private int generateNumber() {
        return 1 + r.nextInt(1000);
    }

    /**
     * Run one round: read guesses from stdin, print high/low hints until the
     * secret is found, then rate the player by the number of guesses taken.
     */
    public void play() {
        Scanner s = new Scanner(System.in);
        int num = generateNumber();
        int guess;
        int count = 0;
        System.out.println("Guess a number 1-1000:");
        do {
            guess = s.nextInt();
            // BUGFIX: the original printed "Too low" / "Try again:" even when
            // the guess was correct, because equality fell into the else
            // branch. Hints are now printed only for wrong guesses.
            if (guess > num) {
                System.out.println("Too high");
                System.out.println("Try again:");
            } else if (guess < num) {
                System.out.println("Too low");
                System.out.println("Try again:");
            }
            count++;
        } while (guess != num);
        System.out.println("Bingo! you guessed it");
        if (count < 10) {
            System.out.printf("%d guesses. Either you know a secret or you got lucky!\n", count);
        } else if (count == 10) {
            System.out.printf("%d guesses. Aha! You know the secret!\n", count);
        } else {
            System.out.printf("%d guesses. You should be able to do better!\n", count);
        }
    }

    public static void main(String[] args) {
        Ch631_GuessANumberModified g = new Ch631_GuessANumberModified();
        g.play();
        System.out.println("Goodbye");
    }
}
|
diptu/Teaching
|
CSE 225L Data Structures and Algorithms/Resources/Codes Previous/Spring-2019-CSE225 1/9.Queue(Linked List)/main(12).cpp
|
<reponame>diptu/Teaching<filename>CSE 225L Data Structures and Algorithms/Resources/Codes Previous/Spring-2019-CSE225 1/9.Queue(Linked List)/main(12).cpp
#include <iostream>
#include "quetype.cpp"
using namespace std;
// Exercise driver for the linked-list QueType: fills a queue from stdin,
// echoes its contents via helper queues, and probes IsEmpty/IsFull.
// NOTE(review): Enqueue/Dequeue/IsFull semantics come from "quetype.cpp",
// which is not visible here; comments describe only what this driver does.
int main()
{
    QueType<int> qt;
    int num, temp;
    // a freshly constructed queue should report empty
    if(qt.IsEmpty())
    {
        cout<<"queue is empty"<<endl;
    }else{
        cout<<"queue is not empty"<<endl;
    }
    // read 4 integers from stdin and enqueue them
    for(int i=0; i<4; i++)
    {
        cin>>num;
        qt.Enqueue(num);
    }
    // queue now holds 4 items, so it should report non-empty
    if(qt.IsEmpty())
    {
        cout<<"queue is empty"<<endl;
    }else{
        cout<<"queue is not empty"<<endl;
    }
    // drain qt into qt2, printing each value in FIFO order...
    QueType<int> qt2;
    for(int i = 0 ; i <4 ; i++){
        qt.Dequeue(temp);
        cout<<temp;
        qt2.Enqueue(temp);
    }
    // ...then restore the original contents of qt
    for(int i = 0 ; i <4 ; i++){
        qt2.Dequeue(temp);
        qt.Enqueue(temp);
    }
    if(qt.IsFull()){
        cout<<"queue is full"<<endl;
    }else{
        cout<<"queue is not full"<<endl;
    }
    cout<<endl;
    // NOTE(review): the value read here is immediately overwritten, because
    // Dequeue's parameter appears to be an output parameter -- the cin only
    // consumes one token of input; confirm the intent against quetype.cpp.
    for(int i=0; i<1; i++)
    {
        cin>>num;
        qt.Dequeue(num);
    }
    // NOTE(review): qt holds only 3 items after the dequeue above, yet this
    // loop dequeues 4 -- likely an underflow; verify QueType's behavior on
    // dequeuing from an empty queue.
    QueType<int> qt3;
    for(int i = 0 ; i <4 ; i++){
        qt.Dequeue(temp);
        cout<<temp;
        qt3.Enqueue(temp);
    }
    cout<<endl;
    for(int i = 0 ; i <4 ; i++){
        qt3 .Dequeue(temp);
        qt.Enqueue(temp);
    }
    return 0;
}
|
ckamtsikis/cmssw
|
FastSimulation/TrackingRecHitProducer/interface/TrackingRecHitAlgorithm.h
|
#ifndef FastSimulation_TrackingRecHitProducer_TrackingRecHitAlgorithm_H
#define FastSimulation_TrackingRecHitProducer_TrackingRecHitAlgorithm_H
#include "DataFormats/TrackerCommon/interface/TrackerTopology.h"
#include "Geometry/TrackerGeometryBuilder/interface/TrackerGeometry.h"
#include "FWCore/Framework/interface/ProducerBase.h"
#include "FastSimulation/TrackingRecHitProducer/interface/TrackingRecHitProduct.h"
#include "FastSimulation/Utilities/interface/RandomEngineAndDistribution.h"
// Pixel-related stuff:
#include "CondFormats/SiPixelObjects/interface/SiPixelTemplateDBObject.h"
#include "CondFormats/SiPixelTransient/interface/SiPixelTemplate.h"
#include <string>
#include <memory>
namespace edm {
class Event;
class EventSetup;
class ParameterSet;
class ConsumesCollector;
class Stream;
} // namespace edm
// Base class for FastSim tracking-RecHit algorithms. Concrete algorithms
// override the begin/end lifecycle hooks and process(); this class stores
// the configured name/selection, geometry and topology pointers, and a
// per-stream random engine.
class TrackingRecHitAlgorithm {
private:
  const std::string _name;            // algorithm instance name
  const std::string _selectionString; // detector selection expression
  const TrackerTopology* _trackerTopology;
  const TrackerGeometry* _trackerGeometry;
  const TrackerGeometry* _misalignedTrackerGeometry;
  std::shared_ptr<RandomEngineAndDistribution> _randomEngine; // per-stream RNG

public:
  TrackingRecHitAlgorithm(const std::string& name,
                          const edm::ParameterSet& config,
                          edm::ConsumesCollector& consumesCollector);

  // Accessors for the configured name and selection string.
  inline const std::string& getName() const { return _name; }
  inline const std::string& getSelectionString() const { return _selectionString; }

  // NOTE(review): these dereference the pointers above -- presumably only
  // valid after the corresponding begin hooks ran; confirm in the .cc file.
  const TrackerTopology& getTrackerTopology() const;
  const TrackerGeometry& getTrackerGeometry() const;
  const TrackerGeometry& getMisalignedGeometry() const;
  const RandomEngineAndDistribution& getRandomEngine() const;

  //this function will only be called once per stream
  virtual void beginStream(const edm::StreamID& id);
  //this function will only be called once per run
  virtual void beginRun(edm::Run const& run,
                        const edm::EventSetup& eventSetup,
                        const SiPixelTemplateDBObject* pixelTemplateDBObjectPtr,
                        std::vector<SiPixelTemplateStore>& tempStoreRef);
  //this function will only be called once per event
  virtual void beginEvent(edm::Event& event, const edm::EventSetup& eventSetup);
  //the main action is here: transform one product's hits
  virtual TrackingRecHitProductPtr process(TrackingRecHitProductPtr product) const;
  //this function will only be called once per event
  virtual void endEvent(edm::Event& event, const edm::EventSetup& eventSetup);
  //this function will only be called once per stream
  virtual void endStream();

  virtual ~TrackingRecHitAlgorithm();
};
#endif
|
stackrox/stackrox
|
central/compliance/datastore/singleton.go
|
package datastore
import (
"github.com/stackrox/rox/central/compliance/datastore/internal/store"
"github.com/stackrox/rox/central/compliance/datastore/internal/store/rocksdb"
"github.com/stackrox/rox/central/globaldb"
"github.com/stackrox/rox/pkg/sync"
)
var (
once sync.Once
dsInstance DataStore
)
// Singleton returns the compliance DataStore singleton, constructing it on
// first use; every subsequent call returns the same instance.
func Singleton() DataStore {
	once.Do(func() {
		// Scope dbStore to the closure: it is only needed to construct the
		// datastore, so the previous function-wide declaration merely widened
		// its lifetime. The explicit store.Store type keeps the concrete
		// RocksDB store behind the Store interface.
		var dbStore store.Store = rocksdb.NewRocksdbStore(globaldb.GetRocksDB())
		dsInstance = NewDataStore(dbStore, NewSacFilter())
	})
	return dsInstance
}
|
opensourcegamedev/SpaceChaos
|
game/src/main/java/dev/game/spacechaos/game/entities/component/ai/EnemyShuttleAIComponent.java
|
<gh_stars>10-100
package dev.game.spacechaos.game.entities.component.ai;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.math.Vector2;
import dev.game.spacechaos.engine.entity.BaseComponent;
import dev.game.spacechaos.engine.entity.Entity;
import dev.game.spacechaos.engine.entity.IUpdateComponent;
import dev.game.spacechaos.engine.entity.annotation.InjectComponent;
import dev.game.spacechaos.engine.entity.component.PositionComponent;
import dev.game.spacechaos.engine.entity.component.draw.DrawTextureComponent;
import dev.game.spacechaos.engine.entity.component.draw.MoveDependentDrawRotationComponent;
import dev.game.spacechaos.engine.entity.component.movement.MoveComponent;
import dev.game.spacechaos.engine.entity.priority.ECSPriority;
import dev.game.spacechaos.engine.exception.RequiredComponentNotFoundException;
import dev.game.spacechaos.engine.game.BaseGame;
import dev.game.spacechaos.engine.time.GameTime;
import dev.game.spacechaos.engine.utils.RandomUtils;
import dev.game.spacechaos.game.entities.factory.ProjectileFactory;
/**
 * Adds an AI-component to an entity, so it follows the player in a specific,
 * non-suicidal way: the shuttle steers toward the target entity while it is
 * farther than a minimum distance, and fires a projectile whenever its
 * (per-instance randomized) shoot interval elapses.
 *
 * @author SpaceChaos-Team
 *         (https://github.com/opensourcegamedev/SpaceChaos/blob/master/CONTRIBUTORS.md)
 * @since 1.0.0-PreAlpha
 */
public class EnemyShuttleAIComponent extends BaseComponent implements IUpdateComponent {

    // components of the owning entity, injected by the ECS (must exist)
    @InjectComponent(nullable = false)
    private PositionComponent positionComponent = null;

    @InjectComponent(nullable = false)
    private MoveComponent moveComponent = null;

    @InjectComponent(nullable = false)
    private MoveDependentDrawRotationComponent moveDependentDrawRotationComponent = null;

    // target entity to follow
    private Entity targetEntity = null;
    // position component of the target entity (required, checked in ctor)
    private PositionComponent targetPosition = null;

    // move direction (reused each frame to avoid allocations)
    private Vector2 moveDir = new Vector2(0, 0);

    // time accumulated since the last shot
    private long elapsed = 0;
    // time between shots (randomized per instance in the constructor)
    private long shootInterval = 1000;

    // texture used for spawned projectiles
    private Texture projectileTexture = null;

    // scratch vector for the projectile spawn offset (reused)
    private Vector2 tmpVector = new Vector2(0, 0);

    /**
     * @param targetEntity       entity to follow (must have a PositionComponent)
     * @param projectileTexture  texture for the projectiles this shuttle fires
     * @param shootIntervallMin  lower bound for the randomized shoot interval;
     *                           the interval is drawn from [min, 2 * min]
     * @throws NullPointerException if targetEntity is null
     * @throws RequiredComponentNotFoundException if the target has no PositionComponent
     */
    public EnemyShuttleAIComponent(Entity targetEntity, Texture projectileTexture, int shootIntervallMin) {
        if (targetEntity == null) {
            throw new NullPointerException("target entity cannot be null.");
        }
        this.projectileTexture = projectileTexture;
        this.targetEntity = targetEntity;
        this.targetPosition = targetEntity.getComponent(PositionComponent.class);
        // check if required component exists
        if (this.targetPosition == null) {
            throw new RequiredComponentNotFoundException(
                    "PositionComponent is required on target entity, but cannot be found.");
        }
        // generate shoot interval in [shootIntervallMin, 2 * shootIntervallMin]
        int maxShootInterval = shootIntervallMin * 2;
        this.shootInterval = RandomUtils.getRandomNumber(shootIntervallMin, maxShootInterval);
    }

    @Override
    protected void onInit(BaseGame game, Entity entity) {
        // nothing to initialize here; injected components are resolved by the ECS
    }

    /**
     * Per-frame AI step: advance the shoot timer, fire when due, and steer
     * toward the target unless already within the minimum follow distance.
     */
    @Override
    public void update(BaseGame game, GameTime time) {
        // accumulate elapsed time (getDeltaTime() presumably returns seconds,
        // elapsed/shootInterval are compared in ms -- confirm)
        this.elapsed += time.getDeltaTime() * 1000;
        // calculate target direction and move in this direction
        // NOTE(review): isMovementRequired() recomputes moveDir below, so this
        // computation is redundant (but harmless); confirm before removing.
        moveDir.set(targetPosition.getMiddleX() - positionComponent.getMiddleX(),
                targetPosition.getMiddleY() - positionComponent.getMiddleY());
        // check if the shoot timer is finished, so the shuttle can fire
        if (canShoot()) {
            shootProjectile();
        }
        // check if movement is required, or the shuttle is near enough and can stop
        if (!isMovementRequired()) {
            // don't move entity
            moveComponent.setMoveDirection(0, 0);
            moveComponent.setMoving(false);
            return;
        }
        // normalize direction vector (so length = 1)
        moveDir.nor();
        // set move direction
        moveComponent.setMoveDirection(moveDir.x, moveDir.y);
    }

    @Override
    public ECSPriority getUpdateOrder() {
        return ECSPriority.HIGH;
    }

    /**
     * @return true if the shuttle is farther than the minimum follow distance
     *         (300 units) from the target; also refreshes moveDir as a side effect
     */
    private boolean isMovementRequired() {
        // calculate target direction and move in this direction
        moveDir.set(targetPosition.getMiddleX() - positionComponent.getMiddleX(),
                targetPosition.getMiddleY() - positionComponent.getMiddleY());
        // get length
        float length = moveDir.len();
        float minDistance = 300;
        return length > minDistance;
    }

    /**
     * check if the interval is finished, so the shuttle can shoot a projectile
     *
     * @return true if the shuttle is allowed to shoot a projectile
     */
    private boolean canShoot() {
        return elapsed > shootInterval;
    }

    /**
     * Spawn a new projectile traveling in the direction the shuttle faces;
     * the projectile is added to the ECS on the next game-loop cycle and the
     * shoot timer is reset.
     */
    private void shootProjectile() {
        // get projectile direction from enemy facing direction
        float dirX = moveDependentDrawRotationComponent.getFrontVec().x;
        float dirY = moveDependentDrawRotationComponent.getFrontVec().y;
        // spawn offset: 100 units in front of the shuttle center
        // NOTE(review): the -20 offsets look like half a sprite size -- confirm.
        tmpVector.set(dirX, dirY);
        tmpVector.setLength(100);
        Entity projectile = ProjectileFactory.createProjectile(entity.getEntityComponentSystem(),
                dirX + positionComponent.getMiddleX() + tmpVector.x - 20,
                dirY + positionComponent.getMiddleY() + tmpVector.y - 20, projectileTexture, dirX, dirY, this.entity);
        projectile.getComponent(DrawTextureComponent.class)
                .setRotationAngle(targetEntity.getComponent(DrawTextureComponent.class).getRotationAngle());
        projectile.getComponent(MoveComponent.class).setMoving(true);
        game.runOnUIThread(() -> {
            // add entity on next gameloop cycle
            this.entity.getEntityComponentSystem().addEntity(projectile);
        });
        // reset elapsed time
        this.elapsed = 0;
    }
}
|
lrxcy/opscloud4
|
opscloud-common/src/main/java/com/baiyi/opscloud/common/exception/BusinessErrorCodeEnum.java
|
package com.baiyi.opscloud.common.exception;
import org.apache.commons.lang3.StringUtils;
/**
* @Author baiyi
* @Date 2020/4/19 12:20 下午
* @Version 1.0
*/
public enum BusinessErrorCodeEnum implements ErrorCode {

    UNSPECIFIED("500", "网络异常,请稍后再试");

    /** Error code. */
    private final String code;
    /** Human-readable description. */
    private final String desc;

    BusinessErrorCodeEnum(final String code, final String desc) {
        this.code = code;
        this.desc = desc;
    }

    /**
     * Look up the enum constant for the given code.
     *
     * @param code the error code to search for
     * @return the matching constant, or {@link #UNSPECIFIED} when unknown
     */
    public static BusinessErrorCodeEnum getByCode(String code) {
        for (BusinessErrorCodeEnum candidate : values()) {
            if (StringUtils.equals(code, candidate.getCode())) {
                return candidate;
            }
        }
        return UNSPECIFIED;
    }

    /**
     * Whether any constant carries the given code.
     *
     * @param code the error code to search for
     * @return true when some constant's code equals {@code code}
     */
    public static Boolean contains(String code) {
        for (BusinessErrorCodeEnum candidate : values()) {
            if (StringUtils.equals(code, candidate.getCode())) {
                return true;
            }
        }
        return false;
    }

    @Override
    public String getCode() {
        return code;
    }

    @Override
    public String getDesc() {
        return desc;
    }
}
|
Ahmad138/satRot
|
docs/SatRot-GUI_2GUI-Main_2src_2main_8cpp.js
|
var SatRot_GUI_2GUI_Main_2src_2main_8cpp =
[
[ "main", "SatRot-GUI_2GUI-Main_2src_2main_8cpp.html#a0ddf1224851353fc92bfbff6f499fa97", null ]
];
|
rc-dukes/dukes
|
rc-car/src/test/java/org/rcdukes/car/TestCar.java
|
package org.rcdukes.car;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import org.rcdukes.car.CarVerticle;
import org.rcdukes.common.ClusterStarter;
import org.rcdukes.common.Environment;
import org.rcdukes.common.ServoPosition;
import org.rcdukes.common.DukesVerticle.Status;
import io.vertx.core.json.JsonObject;
/**
* test the car verticle
* @author wf
*
*/
public class TestCar {

    // when true, intermediate output (e.g. the mapped JSON) is printed
    public static boolean debug = true;

    /**
     * Deploy the CarVerticle on a cluster, wait until it reports 'started',
     * and (outside Travis) undeploy it again and wait for 'stopped'.
     */
    @Test
    public void testCar() throws Exception {
        int TIME_OUT = 40000; // ms allowed for each status transition
        Environment.mock();
        ClusterStarter clusterStarter = new ClusterStarter();
        CarVerticle carVerticle = new CarVerticle();
        clusterStarter.deployVerticles(carVerticle);
        carVerticle.waitStatus(Status.started, TIME_OUT, 10);
        // NOTE(review): undeploy is skipped on Travis -- presumably the CI
        // environment cannot shut the verticle down cleanly; confirm.
        if (!TestSuite.isTravis()) {
            clusterStarter.undeployVerticle(carVerticle);
            carVerticle.waitStatus(Status.stopped, TIME_OUT, 10);
        }
    }

    /**
     * Round-trip ServoPosition instances through Vert.x JSON mapping and
     * verify all fields survive (the value compared with 1e-3 tolerance).
     */
    @Test
    public void testServoPositionMapping() {
        ServoPosition[] poss = { new ServoPosition(154, 10, "°", "steering"), new ServoPosition(130, 0, "m/s", "motor") };
        for (ServoPosition pos : poss) {
            JsonObject jo = JsonObject.mapFrom(pos);
            String json = jo.encodePrettily();
            assertNotNull(json);
            if (debug)
                System.out.println(json);
            ServoPosition rpos = jo.mapTo(ServoPosition.class);
            assertEquals(pos.kind, rpos.kind);
            assertEquals(pos.getServoPos(), rpos.getServoPos());
            assertEquals(pos.getValue(), rpos.getValue(), 0.001);
            assertEquals(pos.unit, rpos.unit);
        }
    }
}
|
DennisMerkus/Aether
|
ai/conversation/base.py
|
<filename>ai/conversation/base.py<gh_stars>0
# Greet a character appropriately to roles/politeness/time of day
# Ask a question based on some unknown piece of information
# Give some information based on a question/belief of someone wanting some information
|
chlunde/origin
|
pkg/deploy/registry/test/deployconfig.go
|
package test
import (
"sync"
"github.com/openshift/origin/pkg/deploy/api"
metainternal "k8s.io/apimachinery/pkg/apis/meta/internalversion"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/watch"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
)
type DeploymentConfigRegistry struct {
Err error
DeploymentConfig *api.DeploymentConfig
DeploymentConfigs *api.DeploymentConfigList
sync.Mutex
}
func NewDeploymentConfigRegistry() *DeploymentConfigRegistry {
return &DeploymentConfigRegistry{}
}
func (r *DeploymentConfigRegistry) ListDeploymentConfigs(ctx apirequest.Context, label labels.Selector, field fields.Selector) (*api.DeploymentConfigList, error) {
r.Lock()
defer r.Unlock()
return r.DeploymentConfigs, r.Err
}
func (r *DeploymentConfigRegistry) GetDeploymentConfig(ctx apirequest.Context, id string) (*api.DeploymentConfig, error) {
r.Lock()
defer r.Unlock()
return r.DeploymentConfig, r.Err
}
func (r *DeploymentConfigRegistry) CreateDeploymentConfig(ctx apirequest.Context, image *api.DeploymentConfig) error {
r.Lock()
defer r.Unlock()
r.DeploymentConfig = image
return r.Err
}
func (r *DeploymentConfigRegistry) UpdateDeploymentConfig(ctx apirequest.Context, image *api.DeploymentConfig) error {
r.Lock()
defer r.Unlock()
r.DeploymentConfig = image
return r.Err
}
func (r *DeploymentConfigRegistry) DeleteDeploymentConfig(ctx apirequest.Context, id string) error {
r.Lock()
defer r.Unlock()
return r.Err
}
func (r *DeploymentConfigRegistry) WatchDeploymentConfigs(ctx apirequest.Context, options *metainternal.ListOptions) (watch.Interface, error) {
return nil, r.Err
}
|
felipegutierrez/flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/controller/PreAggregateSignalsListener.java
|
package org.apache.flink.runtime.controller;
import org.fusesource.mqtt.client.BlockingConnection;
import org.fusesource.mqtt.client.MQTT;
import org.fusesource.mqtt.client.Message;
import org.fusesource.mqtt.client.QoS;
import org.fusesource.mqtt.client.Topic;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
 * MQTT listener thread that receives per-subtask signal messages from
 * pre-aggregate physical operators and keeps the latest
 * {@link PreAggregateSignalsState} for each operator, keyed by subtaskIndex.
 */
public class PreAggregateSignalsListener extends Thread {

    /** Latest signal state per pre-agg physical operator, keyed by subtaskIndex. */
    public final Map<Integer, PreAggregateSignalsState> preAggregateState;

    // Properties for the MQTT listen channel
    private final String topic;
    private final String host;
    private final int port;

    private BlockingConnection subscriber;
    private MQTT mqtt;

    // BUGFIX: 'running' is written by cancel() from another thread and read by
    // this thread's receive loop; without 'volatile' the loop is not
    // guaranteed to ever observe the cancellation.
    private volatile boolean running = false;

    public PreAggregateSignalsListener(String host, int port, String topic) {
        this.host = host;
        this.port = port;
        this.topic = topic;
        this.running = true;
        this.preAggregateState = new HashMap<Integer, PreAggregateSignalsState>();
    }

    /** Connect to the MQTT broker and subscribe to the configured topic. */
    private void connect() throws Exception {
        this.mqtt = new MQTT();
        this.mqtt.setHost(host, port);
        this.subscriber = mqtt.blockingConnection();
        this.subscriber.connect();
        Topic[] topics = new Topic[]{new Topic(this.topic, QoS.AT_LEAST_ONCE)};
        this.subscriber.subscribe(topics);
    }

    /** Ask the listener loop to stop after its current receive() returns. */
    public void cancel() {
        this.running = false;
    }

    @Override
    public void run() {
        try {
            if (this.mqtt == null) this.connect();
            while (running) {
                // Block at most 10s so cancellation is observed regularly.
                Message msg = subscriber.receive(10, TimeUnit.SECONDS);
                if (msg != null) {
                    msg.ack();
                    // new String(...) never yields null; the original
                    // 'message == null' branch was unreachable and is removed.
                    String message = new String(msg.getPayload(), UTF_8);
                    // Look at PreAggregateControllerService.computeAverageOfSignals()
                    // that receives the same message format.
                    this.addState(message);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Parse one '|'-separated signal message (12 fields, subtaskIndex first)
     * and create or update the state entry of the corresponding subtask.
     */
    private void addState(String msg) {
        String[] states = msg.split("\\|");
        // String.split never returns null, so only the field count is checked.
        if (states.length == 12) {
            String subtaskIndex = states[0];
            String outPoolUsageMin = states[1];
            String outPoolUsageMax = states[2];
            String outPoolUsageMean = states[3];
            String outPoolUsage05 = states[4];
            String outPoolUsage075 = states[5];
            String outPoolUsage095 = states[6];
            String outPoolUsage099 = states[7];
            String outPoolUsageStdDev = states[8];
            String numRecordsInPerSecond = states[9];
            String numRecordsOutPerSecond = states[10];
            String intervalMs = states[11];
            // Parse the map key once instead of once per get/put.
            int subtask = Integer.parseInt(subtaskIndex);
            PreAggregateSignalsState state = this.preAggregateState.get(subtask);
            if (state == null) {
                state = new PreAggregateSignalsState(
                        subtaskIndex,
                        outPoolUsageMin,
                        outPoolUsageMax,
                        outPoolUsageMean,
                        outPoolUsage05,
                        outPoolUsage075,
                        outPoolUsage095,
                        outPoolUsage099,
                        outPoolUsageStdDev,
                        numRecordsInPerSecond,
                        numRecordsOutPerSecond,
                        intervalMs);
            } else {
                state.update(
                        subtaskIndex,
                        outPoolUsageMin,
                        outPoolUsageMax,
                        outPoolUsageMean,
                        outPoolUsage05,
                        outPoolUsage075,
                        outPoolUsage095,
                        outPoolUsage099,
                        outPoolUsageStdDev,
                        numRecordsInPerSecond,
                        numRecordsOutPerSecond,
                        intervalMs);
            }
            this.preAggregateState.put(subtask, state);
        } else {
            System.out.println(
                    "[PreAggregateSignalsListener.controller] ERROR: wrong number of parameter to update pre-aggregate state.");
        }
    }
}
|
kanwals/core
|
broker/pipeline.go
|
package broker
import (
"context"
"fmt"
"io"
"github.com/pkg/errors"
log "github.com/sirupsen/logrus"
"go.gazette.dev/core/broker/fragment"
pb "go.gazette.dev/core/broker/protocol"
)
// pipeline is an in-flight write replication pipeline of a journal.
// It couples the local primary Spool with the set of Replicate streams
// to peer brokers, and tracks per-peer send and receive state.
type pipeline struct {
	pb.Header                                   // Header of the pipeline.
	spool         fragment.Spool                // Local, primary replication Spool.
	returnCh      chan<- fragment.Spool         // |spool| return channel; releases Spool ownership on closeSend.
	streams       []pb.Journal_ReplicateClient  // Established streams to each replication peer (nil at the primary's index).
	sendErrs      []error                       // First error on send from each peer.
	readBarrierCh chan struct{}                 // Coordinates hand-off of receive-side of the pipeline (see barrier).
	recvResp      []pb.ReplicateResponse        // Most recent response gathered from each peer.
	recvErrs      []error                       // First error on receive from each peer.
}
// newPipeline constructs a pipeline over the Route of |hdr|, using |spool| as
// the local primary Spool and |jc| to dial Replicate streams to each peer.
// The Spool is handed back on |returnCh| when the pipeline is torn down.
func newPipeline(ctx context.Context, hdr pb.Header, spool fragment.Spool, returnCh chan<- fragment.Spool, jc pb.JournalClient) *pipeline {
	if hdr.Route.Primary == -1 {
		panic("dial requires Route with Primary != -1")
	}
	var n = len(hdr.Route.Members)

	var pln = &pipeline{
		Header:        hdr,
		spool:         spool,
		returnCh:      returnCh,
		streams:       make([]pb.Journal_ReplicateClient, n),
		sendErrs:      make([]error, n),
		readBarrierCh: make(chan struct{}),
		recvResp:      make([]pb.ReplicateResponse, n),
		recvErrs:      make([]error, n),
	}
	// The very first read barrier is immediately selectable.
	close(pln.readBarrierCh)

	for m := range pln.Route.Members {
		if m == int(pln.Route.Primary) {
			// The primary is served by the local Spool, not a stream.
			continue
		}
		pln.streams[m], pln.sendErrs[m] = jc.Replicate(
			pb.WithDispatchRoute(ctx, pln.Route, pln.Route.Members[m]))
	}
	return pln
}
// scatter asynchronously sends |r| to every replication peer, and applies it
// synchronously to the local Spool when this process is the primary.
func (pln *pipeline) scatter(r *pb.ReplicateRequest) {
	for ind, stream := range pln.streams {
		if stream == nil || pln.sendErrs[ind] != nil {
			continue
		}
		if r.Header != nil {
			// Rewrite the Header to name the peer's ProcessID.
			r.Header = boxHeaderProcessID(*r.Header, pln.Route.Members[ind])
		}
		// Send may return io.EOF if the remote peer breaks the stream;
		// the true cause is surfaced later, during gather().
		pln.sendErrs[ind] = stream.Send(r)
	}

	if p := pln.Route.Primary; pln.sendErrs[p] == nil {
		var resp pb.ReplicateResponse
		// Fold a local Apply error into |sendErrs|. A !OK status cannot
		// occur here, as every proposal is derived from the Spool itself.
		if resp, pln.sendErrs[p] = pln.spool.Apply(r, true); resp.Status != pb.Status_OK {
			var respHeap = resp // Escapes.
			panic(respHeap.String())
		}
	}
}
// closeSend rolls back any partial local content, releases ownership of the
// Spool, and closes the send side of every peer stream.
func (pln *pipeline) closeSend() {
	// Propose the Spool's own Fragment, which rolls back partial content.
	pln.spool.MustApply(&pb.ReplicateRequest{
		Proposal:  &pln.spool.Fragment.Fragment,
		Registers: &pln.spool.Registers,
	})
	pln.returnCh <- pln.spool // Hand the Spool back to its owner.

	for ind, stream := range pln.streams {
		if stream == nil || pln.sendErrs[ind] != nil {
			continue
		}
		pln.sendErrs[ind] = stream.CloseSend()
	}
}
// sendErr returns the first send-side error observed, or nil if none occurred.
func (pln *pipeline) sendErr() error {
	for ind := range pln.sendErrs {
		if err := pln.sendErrs[ind]; err != nil {
			return errors.WithMessagef(err, "send to %s", &pln.Route.Members[ind])
		}
	}
	return nil
}
// barrier installs a new read barrier in the pipeline. Clients should:
//   - Invoke barrier after issuing all sent writes, and release the
//     pipeline for other clients.
//   - Block until |waitFor| is selectable.
//   - Read expected responses from the pipeline.
//   - Close |closeAfter|.
// Following this convention lets a pipeline be passed safely among multiple
// clients, each performing writes followed by reads, while still allowing
// those writes and reads to proceed concurrently.
func (pln *pipeline) barrier() (waitFor <-chan struct{}, closeAfter chan<- struct{}) {
	var next = make(chan struct{})
	waitFor, closeAfter = pln.readBarrierCh, next
	pln.readBarrierCh = next
	return waitFor, closeAfter
}
// gather synchronously receives one ReplicateResponse from every live peer.
func (pln *pipeline) gather() {
	for ind, stream := range pln.streams {
		if stream == nil || pln.recvErrs[ind] != nil {
			continue
		}
		pln.recvErrs[ind] = stream.RecvMsg(&pln.recvResp[ind])

		// An EOF here is premature: EOFs are expected only by gatherEOF().
		if pln.recvErrs[ind] == io.EOF {
			pln.recvErrs[ind] = io.ErrUnexpectedEOF
		}
	}
}
// gatherOK gathers responses, converting any non-OK status into a receive error.
func (pln *pipeline) gatherOK() {
	pln.gather()

	for ind, stream := range pln.streams {
		if stream == nil || pln.recvErrs[ind] != nil {
			continue // Not gathered, or already failed.
		}
		if pln.recvResp[ind].Status != pb.Status_OK {
			pln.recvErrs[ind] = fmt.Errorf("unexpected !OK response: %s", &pln.recvResp[ind])
		}
	}
}
// gatherSync gathers synchronization responses from all peers, and returns either:
//  * Zero-valued |rollToOffset|, |rollToRegisters|, & |readThroughRev| if the sync succeeded, or
//  * The largest peer |rollToOffset| which is greater than our own, & accompanying |rollToRegisters|, or
//  * The |rollToOffset| equal to our own which must be rolled to in order for a peer to participate, or
//  * An Etcd revision to read through.
// It treats any other non-OK response status as an error.
func (pln *pipeline) gatherSync() (rollToOffset int64, rollToRegisters *pb.LabelSet, readThroughRev int64) {
	pln.gather()

	for i, s := range pln.streams {
		if s == nil || pln.recvErrs[i] != nil {
			// Local spool placeholder, or a stream which has already failed.
			continue
		}
		switch resp := pln.recvResp[i]; resp.Status {
		case pb.Status_OK:
			// Peer synced successfully; nothing to reconcile.
		case pb.Status_WRONG_ROUTE:
			if !resp.Header.Route.Equivalent(&pln.Route) && resp.Header.Etcd.Revision > pln.Etcd.Revision {
				// Peer has a non-equivalent Route at a later etcd revision.
				// Track the largest such revision for the caller to read through.
				if resp.Header.Etcd.Revision > readThroughRev {
					readThroughRev = resp.Header.Etcd.Revision
				}
			} else {
				pln.recvErrs[i] = fmt.Errorf("unexpected WRONG_ROUTE: %s", resp.Header)
			}
		case pb.Status_PROPOSAL_MISMATCH:
			switch {
			case resp.Fragment.End > pln.spool.End:
				// If peer has a fragment at a greater offset, we must roll forward to
				// its End and adopt the peer's registers.
				if resp.Fragment.End > rollToOffset {
					rollToOffset, rollToRegisters = resp.Fragment.End, resp.Registers
				}
			case resp.Fragment.End == pln.spool.End && resp.Fragment.ContentLength() == 0:
				// If peer rolled its fragment to our End, but it does not have and
				// therefore cannot extend fragment content from [Begin, End), we must
				// roll to an empty fragment at End to allow the peer to participate.
				if resp.Fragment.End > rollToOffset {
					rollToOffset = resp.Fragment.End
				}
			default:
				pln.recvErrs[i] = fmt.Errorf("unexpected PROPOSAL_MISMATCH: %v, %v",
					resp.Fragment, resp.Registers)
			}
		default:
			var respHeap = resp // Escapes.
			pln.recvErrs[i] = fmt.Errorf("unexpected Status: %s", &respHeap)
		}
	}
	return
}
// gatherEOF synchronously reads an expected EOF from every live peer stream.
// Any message (or non-EOF error) received instead is recorded as that peer's
// receive error.
func (pln *pipeline) gatherEOF() {
	for ind, stream := range pln.streams {
		if stream == nil || pln.recvErrs[ind] != nil {
			continue // Local spool placeholder, or the stream has already failed.
		}
		var msg, err = stream.Recv()
		switch {
		case err == io.EOF:
			// Graceful stream closure; nothing to record.
		case err != nil:
			pln.recvErrs[ind] = err
		default:
			// Fix: the prior form re-tested |recvErrs[ind] == nil && err == nil|
			// here, which is always true at this point (the loop guard ensured
			// recvErrs[ind] == nil, and the cases above ensured err == nil).
			// Any message received in place of EOF is simply unexpected.
			pln.recvErrs[ind] = fmt.Errorf("unexpected response: %s", msg.String())
		}
	}
}
// recvErr returns the first receive-side error observed, or nil if none occurred.
func (pln *pipeline) recvErr() error {
	for ind := range pln.recvErrs {
		if err := pln.recvErrs[ind]; err != nil {
			return errors.WithMessagef(err, "recv from %s", &pln.Route.Members[ind])
		}
	}
	return nil
}
// shutdown performs a graceful, blocking teardown of the pipeline. When
// |expectErr| is false, any send or receive error encountered while tearing
// down is logged as a warning.
func (pln *pipeline) shutdown(expectErr bool) {
	var waitFor, closeAfter = pln.barrier()

	pln.closeSend()
	if err := pln.sendErr(); !expectErr && err != nil {
		log.WithField("err", err).Warn("tearing down pipeline: failed to closeSend")
	}
	<-waitFor

	pln.gatherEOF()
	if err := pln.recvErr(); !expectErr && err != nil {
		log.WithField("err", err).Warn("tearing down pipeline: failed to gatherEOF")
	}
	close(closeAfter)
}
// String renders the pipeline for debugging output in a request trace.
func (pln *pipeline) String() string {
	switch {
	case pln == nil:
		return "<nil>"
	default:
		return fmt.Sprintf("pipeline<header: %s, spool: %s>", &pln.Header, pln.spool.String())
	}
}
// boxHeaderProcessID copies |hdr|, stamps the copy with ProcessID |id|, and
// returns it boxed on the heap.
func boxHeaderProcessID(hdr pb.Header, id pb.ProcessSpec_ID) *pb.Header {
	var boxed = hdr // Copy.
	boxed.ProcessId = id
	return &boxed
}
|
TUD-KInD/COCTEAU
|
COCTEAU/src/main/java/Application/Domain/Credentials.java
|
<filename>COCTEAU/src/main/java/Application/Domain/Credentials.java<gh_stars>1-10
package Application.Domain;
import javax.persistence.Column;
import javax.persistence.MappedSuperclass;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
/**
 * Credentials maps the shared login-related columns of the database Entity Credentials.
 * It captures the set of attributes used to decide whether a
 * {@link Application.Domain.User} may log in to the system, plus the user's
 * cookie-consent flag.
 *
 * @author <NAME>
 * @author <NAME>
 * @since 1.0
 */
@MappedSuperclass
public class Credentials {

	/** Login name of the {@link Application.Domain.User}; must not be empty. */
	@NotEmpty
	protected String username;

	/** Login password of the {@link Application.Domain.User}; must not be empty. */
	@NotEmpty
	protected String password;

	/** Whether the {@link Application.Domain.User} accepted the cookies. */
	@NotNull
	@Column(name = "cookie_consent")
	protected boolean cookieConsent;

	/** @return the stored username of the {@link Application.Domain.User}. */
	public String getUsername() {
		return this.username;
	}

	/** @param username the username of the {@link Application.Domain.User} to store. */
	public void setUsername(String username) {
		this.username = username;
	}

	/** @return the stored password of the {@link Application.Domain.User}. */
	public String getPassword() {
		return this.password;
	}

	/** @param password the password of the {@link Application.Domain.User} to store. */
	public void setPassword(String password) {
		this.password = password;
	}

	/** @return {@code true} when the {@link Application.Domain.User} accepted the cookies. */
	public boolean isCookieConsent() {
		return this.cookieConsent;
	}

	/** @param cookieConsent the cookie-consent flag of the {@link Application.Domain.User} to store. */
	public void setCookieConsent(boolean cookieConsent) {
		this.cookieConsent = cookieConsent;
	}
}
|
openenergysolutions/modbus-cpp
|
lib/tests/unit/messages/WriteSingleRegisterResponseImplTest.cpp
|
<gh_stars>1-10
/*
* Copyright 2017-2018 Duke Energy Corporation and Open Energy Solutions, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <catch2/catch.hpp>
#include <array>
#include "ser4cpp/container/Buffer.h"
#include "modbus/exceptions/MalformedModbusResponseException.h"
#include "modbus/exceptions/ModbusException.h"
#include "messages/WriteSingleRegisterResponseImpl.h"
using namespace modbus;
// Unit tests for WriteSingleRegisterResponseImpl: message building, cloning,
// and parsing of proper, exception, and malformed responses.
TEST_CASE("WriteSingleRegisterResponse")
{
    const uint16_t address = 0x1234;
    const uint16_t value = 0x6789;
    WriteSingleRegisterResponse response{{address, value}};
    WriteSingleRegisterResponseImpl response_impl{response};

    // Function code (1) + address (2) + register value (2) = 5 bytes.
    REQUIRE(response_impl.is_valid() == true);
    REQUIRE(response_impl.get_message_length() == 5);

    SECTION("When build request, then write appropriate values to the buffer")
    {
        ser4cpp::Buffer buffer{(uint32_t)response_impl.get_message_length()};
        auto slice = buffer.as_wslice();
        response_impl.build_message(slice);

        REQUIRE(buffer.as_wslice()[0] == 0x06); // Function code
        REQUIRE(buffer.as_wslice()[1] == 0x12); // Address MSB
        REQUIRE(buffer.as_wslice()[2] == 0x34); // Address LSB
        REQUIRE(buffer.as_wslice()[3] == 0x67); // Register value MSB
        REQUIRE(buffer.as_wslice()[4] == 0x89); // Register value LSB
    }

    SECTION("When clone, then effectively creates a copy of the response")
    {
        auto copy = response_impl.clone();
        // A clone must be a distinct object, not an alias of the original.
        REQUIRE(&response_impl != copy.get());
    }

    SECTION("Parse")
    {
        // NOTE(review): these shadow the outer |address|/|value| with
        // identical values; harmless, but confirm the duplication is intended.
        const uint16_t address = 0x1234;
        const uint16_t value = 0x6789;
        WriteSingleRegisterRequest request{{address, value}};
        WriteSingleRegisterRequestImpl request_impl{request};

        SECTION("When proper response, then parse it properly")
        {
            std::array<uint8_t, 5> proper_response{{
                0x06, // Function code
                0x12, 0x34, // Address
                0x67, 0x89 // Value
            }};
            ser4cpp::rseq_t buffer{proper_response.data(), static_cast<uint32_t>(proper_response.size())};

            auto result = WriteSingleRegisterResponseImpl::parse(request_impl, buffer);

            REQUIRE(result.is_valid() == true);
            auto response = result.get();
            REQUIRE(response.value.address == address);
            REQUIRE(response.value.value == value);
        }

        SECTION("When exception response, then parse report exception")
        {
            std::array<uint8_t, 2> exception_response{{
                0x86, // Exception function code (0x06 with high bit 0x80 set)
                0x02 // Illegal data address
            }};
            ser4cpp::rseq_t buffer{exception_response.data(), static_cast<uint32_t>(exception_response.size())};

            auto result = WriteSingleRegisterResponseImpl::parse(request_impl, buffer);

            REQUIRE(result.has_exception<ModbusException>() == true);
            REQUIRE(result.get_exception<ModbusException>().get_exception_type() == ExceptionType::IllegalDataAddress);
        }

        SECTION("When too small, then return malformed exception")
        {
            // One byte short of the minimal 5-byte response.
            std::array<uint8_t, 1> too_small_response{ {
                0x06 // Function code
            }};
            ser4cpp::rseq_t buffer{too_small_response.data(), static_cast<uint32_t>(too_small_response.size())};

            auto result = WriteSingleRegisterResponseImpl::parse(request_impl, buffer);

            REQUIRE(result.has_exception<MalformedModbusResponseException>() == true);
        }

        SECTION("When too big, then return malformed exception")
        {
            // Two trailing junk bytes beyond the expected 5-byte response.
            std::array<uint8_t, 7> too_big_response{{
                0x06, // Function code
                0x12, 0x34, // Address
                0x67, 0x89, // Value
                0x42, 0x42 // Junk
            }};
            ser4cpp::rseq_t buffer{too_big_response.data(), static_cast<uint32_t>(too_big_response.size())};

            auto result = WriteSingleRegisterResponseImpl::parse(request_impl, buffer);

            REQUIRE(result.has_exception<MalformedModbusResponseException>() == true);
        }
    }
}
|
mdsarfarazalam840/camunda-bpm-platform
|
engine/src/test/java/org/camunda/bpm/engine/test/api/authorization/history/AuthorizationUserOperationLogTest.java
|
<gh_stars>1000+
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.authorization.history;
import static org.camunda.bpm.engine.authorization.Resources.OPERATION_LOG_CATEGORY;
import static org.camunda.bpm.engine.authorization.Resources.PROCESS_DEFINITION;
import static org.camunda.bpm.engine.authorization.UserOperationLogCategoryPermissions.READ;
import static org.camunda.bpm.engine.history.UserOperationLogEntry.CATEGORY_ADMIN;
import static org.camunda.bpm.engine.history.UserOperationLogEntry.CATEGORY_OPERATOR;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.camunda.bpm.engine.EntityTypes;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.authorization.Authorization;
import org.camunda.bpm.engine.authorization.Permission;
import org.camunda.bpm.engine.authorization.Permissions;
import org.camunda.bpm.engine.authorization.ProcessDefinitionPermissions;
import org.camunda.bpm.engine.authorization.Resource;
import org.camunda.bpm.engine.authorization.Resources;
import org.camunda.bpm.engine.history.UserOperationLogEntry;
import org.camunda.bpm.engine.history.UserOperationLogQuery;
import org.camunda.bpm.engine.impl.cfg.auth.DefaultPermissionProvider;
import org.camunda.bpm.engine.impl.cfg.auth.PermissionProvider;
import org.camunda.bpm.engine.impl.util.StringUtil;
import org.camunda.bpm.engine.test.RequiredHistoryLevel;
import org.camunda.bpm.engine.test.api.authorization.AuthorizationTest;
import org.camunda.bpm.engine.test.api.identity.TestPermissions;
import org.camunda.bpm.engine.test.api.identity.TestResource;
import org.junit.Test;
/**
 * Tests that user operation log entries are written for authorization
 * create/update/delete operations, and that reading those entries is itself
 * governed by READ permission on the operation-log category.
 *
 * @author <NAME>
 */
@RequiredHistoryLevel(ProcessEngineConfiguration.HISTORY_FULL)
public class AuthorizationUserOperationLogTest extends AuthorizationTest {

  @Test
  public void testLogCreatedOnAuthorizationCreation() {
    // given: the test user may READ admin-category operation log entries
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_ADMIN, userId, READ);
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when
    createGrantAuthorizationGroup(PROCESS_DEFINITION, Authorization.ANY, "testGroupId", ProcessDefinitionPermissions.DELETE);

    // then: one CREATE entry is logged per changed authorization property
    assertEquals(6, query.count());

    UserOperationLogEntry entry = query.property("permissionBits").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(String.valueOf(ProcessDefinitionPermissions.DELETE.getValue()), entry.getNewValue());

    entry = query.property("permissions").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(ProcessDefinitionPermissions.DELETE.getName(), entry.getNewValue());

    entry = query.property("type").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(String.valueOf(Authorization.AUTH_TYPE_GRANT), entry.getNewValue());

    entry = query.property("resource").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(Resources.PROCESS_DEFINITION.resourceName(), entry.getNewValue());

    entry = query.property("resourceId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(Authorization.ANY, entry.getNewValue());

    entry = query.property("groupId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals("testGroupId", entry.getNewValue());
  }

  @Test
  public void testLogCreatedOnAuthorizationUpdate() {
    // given: an existing grant authorization, and READ on the admin category
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    Authorization authorization = createGrantAuthorizationWithoutAuthentication(Resources.PROCESS_DEFINITION, Authorization.ANY, "testUserId",
        Permissions.DELETE);
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_ADMIN, userId, READ);
    assertEquals(0, query.count());

    // when: every mutable property of the authorization is changed
    authorization.addPermission(Permissions.READ);
    authorization.setResource(Resources.PROCESS_INSTANCE);
    authorization.setResourceId("abc123");
    authorization.setGroupId("testGroupId");
    authorization.setUserId(null);
    saveAuthorization(authorization);

    // then: one UPDATE entry per property, each with old and new value
    assertEquals(7, query.count());

    UserOperationLogEntry entry = query.property("permissionBits").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(String.valueOf(Permissions.DELETE.getValue() | Permissions.READ.getValue()), entry.getNewValue());
    assertEquals(String.valueOf(Permissions.DELETE.getValue()), entry.getOrgValue());

    entry = query.property("permissions").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(Permissions.READ.getName() + ", " + Permissions.DELETE.getName(), entry.getNewValue());
    assertEquals(Permissions.DELETE.getName(), entry.getOrgValue());

    entry = query.property("type").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(String.valueOf(Authorization.AUTH_TYPE_GRANT), entry.getNewValue());
    assertEquals(String.valueOf(Authorization.AUTH_TYPE_GRANT), entry.getOrgValue());

    entry = query.property("resource").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(Resources.PROCESS_INSTANCE.resourceName(), entry.getNewValue());
    assertEquals(Resources.PROCESS_DEFINITION.resourceName(), entry.getOrgValue());

    entry = query.property("resourceId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals("abc123", entry.getNewValue());
    assertEquals(Authorization.ANY, entry.getOrgValue());

    entry = query.property("userId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertNull(entry.getNewValue());
    assertEquals("testUserId", entry.getOrgValue());

    entry = query.property("groupId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_UPDATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals("testGroupId", entry.getNewValue());
    assertNull(entry.getOrgValue());
  }

  @Test
  public void testLogCreatedOnAuthorizationDeletion() {
    // given: an existing grant authorization, and READ on the admin category
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    Authorization authorization = createGrantAuthorizationWithoutAuthentication(Resources.PROCESS_DEFINITION, Authorization.ANY, "testUserId",
        ProcessDefinitionPermissions.DELETE);
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_ADMIN, userId, READ);
    assertEquals(0, query.count());

    // when
    authorizationService.deleteAuthorization(authorization.getId());

    // then: one DELETE entry is logged per authorization property
    assertEquals(6, query.count());

    UserOperationLogEntry entry = query.property("permissionBits").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_DELETE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(String.valueOf(ProcessDefinitionPermissions.DELETE.getValue()), entry.getNewValue());

    entry = query.property("permissions").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_DELETE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(ProcessDefinitionPermissions.DELETE.getName(), entry.getNewValue());

    entry = query.property("type").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_DELETE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(String.valueOf(Authorization.AUTH_TYPE_GRANT), entry.getNewValue());

    entry = query.property("resource").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_DELETE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(Resources.PROCESS_DEFINITION.resourceName(), entry.getNewValue());

    entry = query.property("resourceId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_DELETE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(Authorization.ANY, entry.getNewValue());

    entry = query.property("userId").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_DELETE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals("testUserId", entry.getNewValue());
  }

  @Test
  public void testLogCreatedOnAuthorizationCreationWithExceedingPermissionStringList() {
    // given
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_ADMIN, userId, READ);
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when: a permission whose name exceeds the DB column length is granted
    PermissionProvider permissionProvider = processEngineConfiguration.getPermissionProvider();
    processEngineConfiguration.setPermissionProvider(new TestPermissionProvider());
    createGrantAuthorizationGroup(TestResource.RESOURCE1, Authorization.ANY, "testGroupId", TestPermissions.LONG_NAME);
    processEngineConfiguration.setPermissionProvider(permissionProvider);

    // then: the logged permission name is truncated to the DB limit
    assertEquals(6, query.count());

    UserOperationLogEntry entry = query.property("permissions").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(TestPermissions.LONG_NAME.getName().substring(0, StringUtil.DB_MAX_STRING_LENGTH), entry.getNewValue());
  }

  @Test
  public void testLogCreatedOnAuthorizationCreationWithAllPermission() {
    // given
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_ADMIN, userId, READ);
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when: the ALL permission is granted under the test permission provider
    PermissionProvider permissionProvider = processEngineConfiguration.getPermissionProvider();
    processEngineConfiguration.setPermissionProvider(new TestPermissionProvider());
    createGrantAuthorizationGroup(TestResource.RESOURCE1, Authorization.ANY, "testGroupId", TestPermissions.ALL);
    processEngineConfiguration.setPermissionProvider(permissionProvider);

    // then
    assertEquals(6, query.count());

    UserOperationLogEntry entry = query.property("permissions").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(TestPermissions.ALL.getName(), entry.getNewValue());
  }

  @Test
  public void testLogCreatedOnAuthorizationCreationWithNonePermission() {
    // given
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_ADMIN, userId, READ);
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when: the NONE permission is granted under the test permission provider
    PermissionProvider permissionProvider = processEngineConfiguration.getPermissionProvider();
    processEngineConfiguration.setPermissionProvider(new TestPermissionProvider());
    createGrantAuthorizationGroup(TestResource.RESOURCE1, Authorization.ANY, "testGroupId", TestPermissions.NONE);
    processEngineConfiguration.setPermissionProvider(permissionProvider);

    // then
    assertEquals(6, query.count());

    UserOperationLogEntry entry = query.property("permissions").singleResult();
    assertEquals(UserOperationLogEntry.OPERATION_TYPE_CREATE, entry.getOperationType());
    assertEquals(UserOperationLogEntry.CATEGORY_ADMIN, entry.getCategory());
    assertEquals(EntityTypes.AUTHORIZATION, entry.getEntityType());
    assertEquals(TestPermissions.NONE.getName(), entry.getNewValue());
  }

  @Test
  public void testLogCreatedOnAuthorizationCreationWithoutAuthorization() {
    // given: no READ permission on any operation-log category
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when
    createGrantAuthorizationGroup(PROCESS_DEFINITION, Authorization.ANY, "testGroupId", ProcessDefinitionPermissions.DELETE);

    // then the user is not authorised
    assertEquals(0, query.count());
  }

  @Test
  public void testLogCreatedOnAuthorizationCreationWithReadPermissionOnAnyCategoryPermission() {
    // given: READ on ANY operation-log category
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, Authorization.ANY, userId, READ);
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when
    createGrantAuthorizationGroup(PROCESS_DEFINITION, Authorization.ANY, "testGroupId", ProcessDefinitionPermissions.DELETE);

    // then the user is authorised
    assertEquals(6, query.count());
  }

  @Test
  public void testLogCreatedOnAuthorizationCreationWithReadPermissionOnWrongCategory() {
    // given: READ on the OPERATOR category only (entries are logged as ADMIN)
    createGrantAuthorizationWithoutAuthentication(OPERATION_LOG_CATEGORY, CATEGORY_OPERATOR, userId, READ);
    UserOperationLogQuery query = historyService.createUserOperationLogQuery();
    assertEquals(0, query.count());

    // when
    createGrantAuthorizationGroup(PROCESS_DEFINITION, Authorization.ANY, "testGroupId", ProcessDefinitionPermissions.DELETE);

    // then the user is not authorised
    assertEquals(0, query.count());
  }

  /**
   * Permission provider backed by the {@code TestResource}/{@code TestPermissions}
   * fixtures, used to exercise ALL/NONE and over-long permission names.
   */
  public static class TestPermissionProvider extends DefaultPermissionProvider {
    @Override
    public String getNameForResource(int resourceType) {
      for (Resource resource : TestResource.values()) {
        if (resourceType == resource.resourceType()) {
          return resource.resourceName();
        }
      }
      return null;
    }

    @Override
    public Permission[] getPermissionsForResource(int resourceType) {
      return TestPermissions.values();
    }
  }
}
|
luis-vera-pomez/effective_orders
|
app/models/effective/access_denied.rb
|
unless defined?(Effective::AccessDenied)
  module Effective
    # Raised when an action is attempted without sufficient authorization.
    class AccessDenied < StandardError
      attr_reader :action, :subject

      # All arguments are optional; a nil message falls back to the
      # localized unauthorized string in #to_s.
      def initialize(message = nil, action = nil, subject = nil)
        @message = message
        @action = action
        @subject = subject
      end

      def to_s
        @message || I18n.t(:'unauthorized.default', :default => 'Access Denied')
      end
    end
  end
end
|
rocksolidwebdesign/eparc
|
token/include/myc/toks.h
|
#ifndef MYC_LEXER_TOKEN_LIST_H
#define MYC_LEXER_TOKEN_LIST_H

#include "myc/toks_t.h"
#include "myc/token_t.h"

#include <stdlib.h>

/*
 * Declarations for the lexer token list (myc_Toks): construction, resizing,
 * element access, push/put/pop mutation, and destruction. Semantics are
 * defined in the corresponding implementation file.
 */

myc_Toks_p myc_toks_create(const size_t len);
myc_Toks_p myc_toks_new();
myc_Toks_p myc_toks_alloc();

myc_Toks_p
myc_toks_resize(
    myc_Toks_p A,
    const size_t sz);

myc_Toks_p
myc_toks_grow(
    myc_Toks_p A);

myc_Token_p
myc_toks_at(
    myc_Toks_p A,
    const size_t index);

myc_Toks_p
myc_toks_push(
    myc_Toks_p A_arg,
    const myc_Token_p tok);

myc_Toks_p
myc_toks_put(
    myc_Toks_p A_arg,
    const int tok_type,
    const int tok_ignore,
    const char * tok_str);

void
myc_toks_pop(
    myc_Toks_p A);

void myc_toks_init(myc_Toks_p A);
void myc_toks_clean(myc_Toks_p A);
/* Fix: the parameter type was misspelled `myc_Toks_P` (capital P), an
 * undeclared identifier that breaks compilation of every translation unit
 * including this header; the list handle type is `myc_Toks_p`. */
void myc_toks_del(myc_Toks_p A);

#endif
|
damongreen123/fpl-ccd-configuration
|
service/src/main/java/uk/gov/hmcts/reform/fpl/model/children/ChildFinalDecisionDetails.java
|
<reponame>damongreen123/fpl-ccd-configuration
package uk.gov.hmcts.reform.fpl.model.children;

import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import uk.gov.hmcts.reform.fpl.enums.ccd.fixedlists.ChildFinalDecisionReason;

/**
 * Immutable value holding the final-decision details captured for one child.
 * Lombok generates the getters, equals/hashCode, builder and (via
 * {@code @Jacksonized}) Jackson deserialization support.
 */
@Value
@Builder
@Jacksonized
public class ChildFinalDecisionDetails {
    // Label text displaying the child's name next to the decision fields
    String childNameLabel;
    // Reason selected for this child's final decision
    ChildFinalDecisionReason finalDecisionReason;
}
|
Sciss/pickling
|
core/src/test/scala/pickling/run/vector-int.scala
|
package scala.pickling.vector.int

import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, binary._

/** Round-trip pickling test for Vector[Int] using the binary format. */
class VectorIntTest extends FunSuite {
  test("main") {
    val v = Vector(1, 2, 3)
    val pickle = v.pickle
    // Expected binary form: length-prefixed type tag
    // "scala.collection.immutable.Vector[scala.Int]", the element count 3,
    // the elements 1, 2, 3, then the zero padding emitted by the format.
    // (A previously unused `expected` local without the padding was removed.)
    val expected0 = "BinaryPickle([0,0,0,44,115,99,97,108,97,46,99,111,108,108,101,99,116,105,111,110,46,105,109,109,117,116,97,98,108,101,46,86,101,99,116,111,114,91,115,99,97,108,97,46,73,110,116,93,0,0,0,3,0,0,0,1,0,0,0,2,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])"
    assert(pickle.toString === expected0)
    assert(pickle.unpickle[Vector[Int]] === v)
  }
}
|
ez-deploy/ezdeploy
|
handle/handle.go
|
package handle
import (
"log"
"github.com/ez-deploy/ezdeploy/handle/db"
"github.com/ez-deploy/ezdeploy/handle/k8s"
"github.com/ez-deploy/ezdeploy/handle/project"
"github.com/ez-deploy/ezdeploy/handle/rbac"
"github.com/ez-deploy/ezdeploy/handle/service"
"github.com/ez-deploy/ezdeploy/handle/ticket"
"github.com/ez-deploy/ezdeploy/handle/user"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/clientcmd"
_ "github.com/go-sql-driver/mysql"
)
// handlerImpl implements the restapi.Handler interface by embedding one
// implementation struct per operation group (user, project, RBAC, service,
// ticket) plus the configurable endpoints.
type handlerImpl struct {
	*ConfigurableImpl
	*user.UserOperationImpl
	*project.ProjectOperationImpl
	*rbac.RBACOperationImpl
	*service.ServiceOperationImpl
	*ticket.TicketOperationsImpl
}

// NOTE(review): database credentials and a user-specific kubeconfig path are
// hard-coded below; they should come from configuration or the environment —
// TODO confirm and move out of source control.
const dsn = "kratos:123456@tcp(localhost:3306)/ezdeploy?charset=utf8mb4&parseTime=True"
const kubeconfigPath = "/home/wangsaiyu/.kube/config"

// New builds the handler: it opens the database tables, creates a Kubernetes
// clientset from kubeconfigPath, and wires both into every embedded
// implementation. Any setup failure terminates the process via log.Fatal,
// so New never returns a partially-initialised handler.
func New() *handlerImpl {
	tables, err := db.NewTables(dsn)
	if err != nil {
		log.Fatal(err)
	}
	// create kubernetes client.
	config, err := clientcmd.BuildConfigFromFlags("", kubeconfigPath)
	if err != nil {
		log.Fatal(err)
	}
	clientset, err := kubernetes.NewForConfig(config)
	if err != nil {
		log.Fatal(err)
	}
	k8sClientSet := k8s.New(clientset)
	return &handlerImpl{
		ConfigurableImpl:     &ConfigurableImpl{},
		UserOperationImpl:    &user.UserOperationImpl{Tables: tables},
		ProjectOperationImpl: &project.ProjectOperationImpl{Tables: tables, K8SManager: k8sClientSet},
		RBACOperationImpl:    &rbac.RBACOperationImpl{RBACManager: rbac.RBACManager{Tables: tables}},
		ServiceOperationImpl: &service.ServiceOperationImpl{
			Tables: tables,
			ServiceVersionManager: &service.ServiceVersionManager{
				Tables: tables,
			},
			K8SManager: k8sClientSet,
		},
		TicketOperationsImpl: &ticket.TicketOperationsImpl{Tables: tables},
	}
}
|
lucky7323/backprop
|
tests/test_tasks.py
|
from backprop import Emotion, ImageClassification, QA, Summarisation, TextClassification, TextGeneration, TextVectorisation
import numpy as np
import torch
import os
# Conversational-QA fixture: plain and follow-up ("_conv") question forms,
# a shared context paragraph "c", and the expected answers a1..a3.
qa_example = {
    "q1": "Where does Sally live?",
    "q2": "How long has Sally lived in London?",
    "q2_conv": "How long has she lived there?",
    "q3": "Where did Sally live prior to London?",
    "q3_conv": "Where did she live before?",
    "c": "Sally has been living in London for 3 years. Previously, Sally lived in Liverpool.",
    "a1": "London",
    "a2": "3 years",
    "a3": "Liverpool"
}

# Long source paragraph used by the summarisation tests (only its length matters).
summary_context = """<NAME> FRS (/ˈiːlɒn/ EE-lon; born June 28, 1971) is a business magnate, industrial designer and engineer.[6] He is the founder, CEO, CTO and chief designer of SpaceX; early investor,[b] CEO and product architect of Tesla, Inc.; founder of The Boring Company; co-founder of Neuralink; and co-founder and initial co-chairman of OpenAI. He was elected a Fellow of the Royal Society (FRS) in 2018.[9][10] Also that year, he was ranked 25th on the Forbes list of The World's Most Powerful People,[11] and was ranked joint-first on the Forbes list of the Most Innovative Leaders of 2019.[12] As of December 19, 2020, Musk’s net worth was estimated by Forbes to US$153.5 billion,[1][13] making him the second-richest person in the world, behind <NAME>.[14]"""

# Zero-shot text-classification fixture: input, candidate labels, expected winner.
classify_context = """I am mad because my product broke the first time I used it"""
classify_labels = ["product issue", "nature"]
classify_correct = "product issue"

# Image-classification fixture: a dog photo shipped next to this test file.
main_path = os.path.dirname(__file__)
image_classification_image = os.path.join(main_path, "data/dog.png")
image_classification_labels = ["cat", "dog"]
image_classification_correct = "dog"

device = "cpu"

# Instantiating each task loads its model weights — slow and (on first run)
# network-dependent, so the objects are shared by all tests in this module.
text_vectorisation = TextVectorisation(device=device)
qa = QA(device=device)
emotion = Emotion(device=device)
summarisation = Summarisation(device=device)
text_classification = TextClassification(device=device)
image_classification = ImageClassification(device=device)
text_generation = TextGeneration(device=device)
def test_vectorisation_single():
    """A single text must vectorise to a list of floats."""
    vec = text_vectorisation("This is a sample thing.")
    assert isinstance(vec, list), "Not an array"
    assert type(vec[0]) == float, "Vector dimension not float"


def test_vectorisation_batch():
    """Two different texts must give two distinct float vectors."""
    vecs = text_vectorisation(["This is a sample thing.",
                               "This is another sample thing."])
    assert isinstance(vecs, list), "Not an array"
    assert len(vecs) == 2, "Not the right size"
    assert type(vecs[0][0]) == float, "Vector dimension not float"
    assert vecs[0] != vecs[1], "Two different texts have same vector"
def test_qa_single():
    """A single question against the fixture context gives the exact answer."""
    answer = qa(qa_example["q1"], qa_example["c"])
    assert type(answer) == str, "Qa answer not a string"
    assert answer == qa_example["a1"], "Wrong answer to simple qa"


def test_qa_batch():
    """Two questions against the same context give both exact answers."""
    answers = qa([qa_example["q1"], qa_example["q2"]],
                 [qa_example["c"], qa_example["c"]])
    assert isinstance(answers, list), "Output is not a list"
    assert len(answers) == 2, "Incorrect number of answers"
    assert answers[0] == qa_example["a1"], "Wrong answer to simple qa"
    assert answers[1] == qa_example["a2"], "Wrong answer to simple qa"
def test_qa_conv_single():
    """A follow-up question resolves its pronouns via the prev_qa history."""
    answer = qa(qa_example["q2_conv"], qa_example["c"],
                prev_qa=[(qa_example["q1"], qa_example["a1"])])
    assert type(answer) == str, "Qa answer not a string"
    assert answer == qa_example["a2"], "Wrong answer to simple conversational qa"


def test_qa_conv_batch():
    """Batched follow-up questions each carry their own prev_qa history."""
    questions = [qa_example["q2_conv"], qa_example["q3_conv"]]
    contexts = [qa_example["c"], qa_example["c"]]
    histories = [[(qa_example["q1"], qa_example["a1"])],
                 [(qa_example["q2"], qa_example["a2"])]]
    answers = qa(questions, contexts, prev_qa=histories)
    assert isinstance(answers, list), "Output not a list"
    assert len(answers) == 2, "Incorrect number of answers"
    assert answers[0] == qa_example["a2"], "Wrong answer to simple conversational qa"
    assert answers[1] == qa_example["a3"], "Wrong answer to simple conversational qa"
def test_summary_single():
    """The summary must be a string strictly shorter than the source text."""
    summary = summarisation(summary_context)
    assert type(summary) == str, "Summary not a string"
    assert len(summary) < len(summary_context), "Summary not shorter than input"


def test_summary_bulk():
    """Summarising the same text twice must give two shorter strings."""
    summaries = summarisation([summary_context, summary_context])
    assert isinstance(summaries, list), "Output not a list"
    assert len(summaries) == 2, "Incorrect number of outputs"
    assert len(summaries[0]) < len(summary_context), "Summary not shorter than input"
    assert len(summaries[1]) < len(summary_context), "Summary not shorter than input"
def test_classify_single():
    """Zero-shot classification scores every label and the right one wins."""
    scores = text_classification(classify_context, classify_labels)
    assert isinstance(scores, dict), "Output is not a dict"
    assert len(scores.keys()) == len(classify_labels), "Incorrect number of labels"
    assert classify_correct == max(scores, key=scores.get), "Classification is severely wrong"
def test_classify_batch():
    """Batch classification returns one score dict per input text."""
    outputs = text_classification([classify_context, classify_context],
                                  [classify_labels, classify_labels])
    assert isinstance(outputs, list), "Output is not a list"
    # fix: iterate with a distinct name — the original `for out in out`
    # rebound the list variable to its own elements while iterating
    for scores in outputs:
        assert isinstance(scores, dict), "List item is not a dict"
        assert len(scores.keys()) == len(
            classify_labels), "Incorrect number of labels"
        assert classify_correct == max(
            scores, key=scores.get), "Classification is severely wrong"
def test_emotion_single():
    """Emotion detection on one sentence yields a string label."""
    label = emotion("I am really angry.")
    assert type(label) == str, "Output is not a string"
def test_emotion_batch():
    """Emotion detection on a batch yields one string label per input."""
    labels = emotion(["I am really angry.", "You are really angry."])
    assert isinstance(labels, list), "Output is not a list"
    # fix: iterate with a distinct name — the original `for out in out`
    # rebound the list variable to its own elements while iterating
    for label in labels:
        assert type(label) == str, "List not made of strings"
def test_image_classification_single():
    """The dog image is scored per candidate label and 'dog' wins."""
    scores = image_classification(image_classification_image, image_classification_labels)
    assert isinstance(scores, dict), "Output is not a dict"
    assert len(scores.keys()) == len(image_classification_labels), "Incorrect number of labels"
    assert image_classification_correct == max(scores, key=scores.get), "Classification is severely wrong"
def test_image_classification_batch():
    """Batch image classification returns one score dict per image."""
    outputs = image_classification([image_classification_image]*2, [image_classification_labels]*2)
    assert isinstance(outputs, list), "Output is not a list"
    # fix: loop variable renamed (was `for out in out`, shadowing the list),
    # and the per-item dict check added for consistency with test_classify_batch
    for scores in outputs:
        assert isinstance(scores, dict), "List item is not a dict"
        assert len(scores.keys()) == len(
            image_classification_labels), "Incorrect number of labels"
        assert image_classification_correct == max(
            scores, key=scores.get), "Classification is severely wrong"
def test_text_generation_single():
    """Generation from one prompt yields a string."""
    generated = text_generation("This is something")
    assert type(generated) == str, "Text Generation not a string"


def test_text_generation_bulk():
    """Generation from two prompts yields a list of two outputs."""
    generated = text_generation(["This is something", "This is something too"])
    assert isinstance(generated, list), "Output not a list"
    assert len(generated) == 2, "Incorrect number of outputs"
|
zhangxiaoyi090416/ccf
|
noi0105/35.cpp
|
<filename>noi0105/35.cpp<gh_stars>0
// 求出e的值【1.5编程基础之循环控制32】
// 利用公式e = 1 + 1/1! + 1/2! + 1/3! + ... + 1/n! ,求e的值,要求保留小数点后10位。
// 输入:
// 输入只有一行,该行包含一个整数n(2<=n<=15),表示计算e时累加到1/n!。
// 输出:
// 输出只有一行,该行包含计算出来的e的值,要求打印小数点后10位。
// 样例输入:
// 10
// 样例输出:
// 2.7182818011
#include <iostream>
#include <iomanip>
using namespace std;

// Computes e = 1 + 1/1! + 1/2! + ... + 1/n! (2 <= n <= 15) and prints it
// with 10 digits after the decimal point.
int main()
{
    int n;
    cin >> n;

    double e = 1;          // running sum; starts with the leading 1
    double factorial = 1;  // z!, built incrementally instead of being
                           // recomputed from scratch every iteration (was O(n^2));
                           // the multiplication order 1*2*...*z is unchanged,
                           // so the floating-point result is bit-identical
    for (int z = 1; z <= n; z++)
    {
        factorial *= z;
        e += 1 / factorial;
    }

    cout << fixed << setprecision(10) << e << endl;
    return 0;
}
|
geekyfox90/SolARFramework
|
interfaces/api/solver/pose/I2DTransformFinder.h
|
<gh_stars>1-10
/**
* @copyright Copyright (c) 2017 B-com http://www.b-com.com/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SOLAR_I2DTRANSFORMFINDER_H
#define SOLAR_I2DTRANSFORMFINDER_H

#include "xpcf/api/IComponentIntrospect.h"
#include "xpcf/core/helpers.h"
#include "datastructure/GeometryDefinitions.h"
#include "datastructure/MathDefinitions.h"

namespace SolAR {
namespace api {
namespace solver {
namespace pose {

// Holder for the return codes shared by 2D transform estimators.
class Transform2DFinder {
public:
    enum RetCode {
        TRANSFORM2D_ESTIMATION_OK = 0, /**< the default OK code*/
        TRANSFORM2D_EMPTY, /**< Homography matrix is empty*/
    };
};

/**
 * @class I2DTransformFinder
 * @brief <B>Finds the 2D transform from 2D-2D points correspondences.</B>
 * <TT>UUID: 45dd370a-0eab-4a7f-93d0-43453b4c7517</TT>
 */
class I2DTransformFinder : virtual public org::bcom::xpcf::IComponentIntrospect {
public:
    ///@brief I2DTransformFinder default constructor.
    I2DTransformFinder() = default;

    ///@brief I2DTransformFinder default destructor.
    virtual ~I2DTransformFinder() = default;

    /// @brief Find 2D transform matrix from 2 sets of 2d_points.
    /// @param[in] srcPoints set of 2d_points seen in view_1.
    /// @param[in] dstPoints set of 2d_points seen in view_2.
    /// @param[out] fundamental estimated 2D transform matrix.
    XPCF_GRPC_REQUEST("findTransform2DRequest") XPCF_GRPC_RESPONSE("findTransform2DResponse") /// to remove ambiguity with I3D3DCorrespondencesFinder find()
    virtual Transform2DFinder::RetCode find(const std::vector<datastructure::Point2Df> & srcPoints,
                                            const std::vector<datastructure::Point2Df> & dstPoints,
                                            datastructure::Transform2Df & fundamental) = 0;
};

}
}
}
}

XPCF_DEFINE_INTERFACE_TRAITS(SolAR::api::solver::pose::I2DTransformFinder,
                             "45dd370a-0eab-4a7f-93d0-43453b4c7517",
                             "I2DTransformFinder",
                             "SolAR::api::solver::pose::I2DTransformFinder");

#endif // SOLAR_I2DTRANSFORMFINDER_H (comment fixed: previously named the wrong guard)
|
uuk0/mcpython-4
|
chat/command/CommandEntry.py
|
"""mcpython - a minecraft clone written in python licenced under MIT-licence
authors: uuk, xkcdjerry
original game by forgleman licenced under MIT-licence
minecraft by Mojang
blocks based on 1.14.4.jar of minecraft, downloaded on 20th of July, 2019"""
import globals as G
from chat.command.Command import ParseType
class CommandEntry:
    """Base class for a parseable command entry (one argument slot).

    Subclasses are registered via ``@G.registry`` and override both
    static methods for their particular ``ParseType``.
    """

    ENTRY_NAME = None  # identifier of this entry type (a ParseType member)

    @staticmethod
    def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
        """Parse the entry beginning at ``start``.

        :param entrylist: the raw command tokens
        :param start: index of the first token belonging to this entry
        :param info: the command info to use
        :param arguments: creation arguments given at registration
        :param kwargs: optional creation arguments
        :return: a ``(new start, parsed value)`` tuple
        """
        return start + 1, None

    @staticmethod
    def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
        """Return whether the tokens at ``start`` form a valid entry.

        :param entrylist: the raw command tokens
        :param start: index of the first token to check
        :param arguments: creation arguments given at registration
        :param kwargs: optional creation arguments
        :return: True when this entry type can parse the tokens
        """
        raise NotImplementedError()
def load():
    """Register every built-in command-entry parser on the global registry.

    Each nested class handles one ParseType; registration happens through
    the ``@G.registry`` decorator as a side effect of calling this function.
    """
    @G.registry
    class DefiniteString(CommandEntry):
        """
        Entry for definite string: matches exactly the literal arguments[0].
        """
        ENTRY_NAME = ParseType.DEFINIED_STRING

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple: return start + 1, entrylist[start]

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool: return entrylist[start] == arguments[0]

    @G.registry
    class IntEntry(CommandEntry):
        """
        entry for int: parses one token with int()
        """
        ENTRY_NAME = ParseType.INT

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple: return start + 1, int(entrylist[start])

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            try:
                int(entrylist[start])  # try to convert to int
                return True
            except:
                return False

    @G.registry
    class StringEntry(CommandEntry):
        """
        string entry: a quoted string possibly spanning several tokens
        """
        ENTRY_NAME = ParseType.STRING

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            startc = entrylist[start][0]  # with what does it start?
            if startc in "'\"":
                entrys = [entrylist[start]]
                i = 0
                # NOTE(review): `start` is advanced while `i` stays 0, so the
                # scan walks forward one token at a time; unlike is_valid below
                # there is NO bounds check here — an unterminated quote will
                # raise IndexError. TODO confirm and guard.
                while not entrylist[start + i].endswith(startc):
                    start += 1
                    entrys.append(entrylist[start + i])
                data = " ".join(entrys)
                if data[0] in "'\"":
                    data = data[1:-1]
                return start + i + 1, data
            # NOTE(review): falls through and implicitly returns None when the
            # token is not quoted — callers presumably call is_valid first.

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            startc = entrylist[start][0]  # with what does it start?
            if startc in "'\"":
                entrys = [entrylist[start]]
                i = 0
                while not entrylist[start + i].endswith(startc):
                    start += 1
                    entrys.append(entrylist[start + i])
                    if start >= len(entrylist):
                        return False  # it does NOT close
                return True  # it does close
            return False  # it does NOT start

    @G.registry
    class StringWithoutQuotesEntry(CommandEntry):
        """
        string entry without quotes: any single token
        """
        ENTRY_NAME = ParseType.STRING_WITHOUT_QUOTES

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            return start + 1, entrylist[start]

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool: return True

    @G.registry
    class FloatEntry(CommandEntry):
        """
        float entry: parses one token with float()
        """
        ENTRY_NAME = ParseType.FLOAT

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple: return start + 1, float(entrylist[start])

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            try:
                float(entrylist[start])  # try to convert to float
                return True
            except:
                return False

    @G.registry
    class BlockNameEntry(CommandEntry):
        """
        blockname entry: token must name a registered block
        """
        ENTRY_NAME = ParseType.BLOCKNAME

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            return start + 1, entrylist[start]

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            return entrylist[start] in G.registry.get_by_name("block").get_attribute("blocks")  # is this block arrival?

    @G.registry
    class ItemNameEntry(CommandEntry):
        """
        itemname entry: token must name a registered item
        """
        ENTRY_NAME = ParseType.ITEMNAME

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            return start + 1, entrylist[start]

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            return entrylist[start] in G.registry.get_by_name("item").get_attribute("items")  # is this item arrival?

    @G.registry
    class SelectorEntry(CommandEntry):
        """
        Selector entry: e.g. @p-style target selectors, resolved via registry
        """
        ENTRY_NAME = ParseType.SELECTOR

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            entry = entrylist[start]
            for selector in G.registry.get_by_name("command").get_attribute("selectors"):
                if selector.is_valid(entry):  # is this the selector we are searching for?
                    return start + 1, selector.parse(entry, info)
            # NOTE(review): implicitly returns None when no selector matches.

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            entry = entrylist[start]
            # have we any valid selector?
            return any([x.is_valid(entry) for x in G.registry.get_by_name("command").get_attribute("selectors")])

    @G.registry
    class PositionEntry(CommandEntry):
        """
        position entry: either a selector or three (possibly ~relative) coordinates
        """
        ENTRY_NAME = ParseType.POSITION

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            if SelectorEntry.is_valid(entrylist, start, arguments, kwargs):
                # NOTE(review): SelectorEntry.parse returns (new_start, value), so
                # index [0] is an int and `.position` on it will raise — presumably
                # [1] (the parsed entity) was intended. TODO confirm.
                return start + 1, SelectorEntry.parse(entrylist, start, info, arguments, kwargs)[0].position
            x, y, z = tuple(entrylist[start:start+3])
            x = PositionEntry._parse_coordinate_to_real(x, 0, info)
            y = PositionEntry._parse_coordinate_to_real(y, 1, info)
            z = PositionEntry._parse_coordinate_to_real(z, 2, info)
            return start + 3, (x, y, z)

        @staticmethod
        def _parse_coordinate_to_real(r: str, index: int, info) -> float:
            """
            parse an coordinate (could be relative, "~" or "~N") to an valid coordinate
            :param r: the coordinate to use
            :param index: the index in the info position
            :param info: the info to use
            :return: an float value representing this
            """
            if r.startswith("~"):
                v = info.position[index]
                if len(r) > 1:
                    v += int(r[1:])
                return v
            return float(r)

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            if SelectorEntry.is_valid(entrylist, start, arguments, kwargs):
                return True
            try:
                [float(x) if not x.startswith("~") else None for x in entrylist[start:start + 3]]
                return True
            except ValueError:
                return False

    @G.registry
    class SelectDefinitedStringEntry(CommandEntry):
        """
        select definited string entry: token must be one of `arguments`
        """
        ENTRY_NAME = ParseType.SELECT_DEFINITED_STRING

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            return start + 1, entrylist[start]

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            return entrylist[start] in arguments  # check if should be used

    @G.registry
    class OpenEndUndefinitedStringEntry(CommandEntry):
        """
        open end undefinited string entry: consumes the rest of the tokens,
        optionally capped by kwargs["max"] / floored by kwargs["min"]
        """
        ENTRY_NAME = ParseType.OPEN_END_UNDEFINITED_STRING

        @staticmethod
        def parse(entrylist: list, start: int, info, arguments, kwargs) -> tuple:
            end = start + (kwargs["max"] if "max" in kwargs else len(entrylist))
            # NOTE(review): returns len(entrylist) - 1 as the new index even when
            # `max` truncates the slice — looks like an off-by-one/position
            # inconsistency; verify against the command parser's expectations.
            return len(entrylist) - 1, (entrylist[start:] if len(entrylist) < end else entrylist[start:end])

        @staticmethod
        def is_valid(entrylist: list, start: int, arguments, kwargs) -> bool:
            return (kwargs["min"] if "min" in kwargs else 0) <= len(entrylist) - start + 1  # if lenght is in range
|
RahatIbnRafiq/CyberbullyingAndroidAppOngoing
|
app/src/main/java/com/example/cybersafetyapp/HelperClassesPackage/ServerWorks.java
|
package com.example.cybersafetyapp.HelperClassesPackage;
import android.util.Log;
import com.example.cybersafetyapp.UtilityPackage.IntentSwitchVariables;
import com.example.cybersafetyapp.UtilityPackage.UtilityVariables;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
/**
* Created by RahatIbnRafiq on 2/20/2017.
*/
public class ServerWorks {
private static ServerWorks instance = null;
public static ServerWorks getInstance()
{
if(instance == null) {
instance = new ServerWorks();
}
return instance;
}
private String streamToString(InputStream is) throws IOException {
String str = "";
if (is != null) {
StringBuilder sb = new StringBuilder();
String line;
try {
BufferedReader reader = new BufferedReader(
new InputStreamReader(is));
while ((line = reader.readLine()) != null) {
sb.append(line);
}
reader.close();
}
finally {
try{
is.close();
}catch (IOException ex)
{
Log.i(UtilityVariables.tag,"IO Exception in "+this.getClass().getSimpleName());
}
}
str = sb.toString();
}
return str;
}
public ArrayList<String> getMonitoringUsers(String email,String OSNname) throws Exception
{
ArrayList<String> userids = new ArrayList<>();
if(OSNname == IntentSwitchVariables.INSTAGRAM)
{
InputStream inputStream ;
HttpURLConnection urlConnection ;
String urlString = UtilityVariables.INSTAGRAM_GET_MONITORING_USERS+"?email="+email;
URL url = new URL(urlString);
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
inputStream = new BufferedInputStream(urlConnection.getInputStream());
String response = streamToString(inputStream);
//Log.i(UtilityVariables.tag,this.getClass().getSimpleName()+" this is the getMonitoringUsers Response: "+response);
JSONObject resultjson = new JSONObject(response);
JSONArray jsonArray = new JSONArray(resultjson.optString("users"));
for(int i=0;i<jsonArray.length();i++)
{
JSONObject jsonObject = jsonArray.getJSONObject(i);
userids.add(jsonObject.optString("userid"));
//Log.i(UtilityVariables.tag,this.getClass().getSimpleName()+" userid: "+jsonObject.optString("userid"));
}
}
return userids;
}
}
|
rmnick/job4j
|
chapter_009/src/main/java/ru/job4j/foodstorage/food/Food.java
|
<reponame>rmnick/job4j
package ru.job4j.foodstorage.food;
import java.sql.Date;
import java.time.LocalDateTime;
public class Food {
public final double price;
public final LocalDateTime createDate;
public final LocalDateTime expireDate;
protected int discount;
public final String name;
public final boolean canReproduct;
public Food(final double price, final LocalDateTime createDate, final LocalDateTime expireDate, final int discount, final String name, final boolean canReproduct) {
this.price = price;
this.createDate = createDate;
this.expireDate = expireDate;
this.discount = discount;
this.name = name;
this.canReproduct = canReproduct;
}
public double getPrice() {
return price;
}
public LocalDateTime getCreateDate() {
return createDate;
}
public LocalDateTime getExpireDate() {
return expireDate;
}
public int getDiscount() {
return discount;
}
public void setDiscount(int discount) {
this.discount = discount;
}
public boolean isCanReproduct() {
return this.canReproduct;
}
@Override
public String toString() {
return String.format("(%s; price: %.2f; expireDate: %s; discount: %d%s)", name, price, Date.valueOf(expireDate.toLocalDate()), discount, "%");
}
}
|
ComputerSystemsLab/OptimizationCache
|
benchmarks/Angha_original/extr_sign_xmain/extr_sign_xmain.c
|
/*
 * Auto-extracted scaffolding (AnghaBench-style): stand-in typedefs and
 * forward declarations that let the extracted test main() compile in
 * isolation. The types are deliberately approximate (e.g. scalar_t__ for
 * any arithmetic/pointer value) — do not treat these as the real libsodium
 * crypto_sign API signatures.
 */
#define NULL ((void*)0)
typedef unsigned long size_t;  // Customize by platform.
typedef long intptr_t; typedef unsigned long uintptr_t;
typedef long scalar_t__;  // Either arithmetic or pointer type.
/* By default, we understand bool (as a convenience). */
typedef int bool;
#define false 0
#define true 1

/* Forward declarations */
typedef struct TYPE_2__ TYPE_1__ ;

/* Type definitions */
typedef int /*<<< orphan*/ crypto_sign_state ;
/* One test vector: secret key, public key, message, expected signature. */
struct TYPE_2__ {unsigned char* sk; unsigned char* pk; unsigned char* m; unsigned char* sig; } ;

/* Variables and functions */
int /*<<< orphan*/ add_l (unsigned char*) ;
int /*<<< orphan*/ assert (int) ;
scalar_t__ crypto_sign (unsigned char*,unsigned long long*,unsigned char const*,unsigned int,unsigned char*) ;
int crypto_sign_BYTES ;
int crypto_sign_PUBLICKEYBYTES ;
int crypto_sign_SECRETKEYBYTES ;
int crypto_sign_SEEDBYTES ;
unsigned int crypto_sign_bytes () ;
scalar_t__ crypto_sign_detached (unsigned char*,unsigned long long*,unsigned char const*,unsigned int,unsigned char*) ;
int crypto_sign_ed25519_PUBLICKEYBYTES ;
int crypto_sign_ed25519_SEEDBYTES ;
unsigned int crypto_sign_ed25519_bytes () ;
unsigned int crypto_sign_ed25519_messagebytes_max () ;
unsigned int crypto_sign_ed25519_publickeybytes () ;
unsigned int crypto_sign_ed25519_secretkeybytes () ;
unsigned int crypto_sign_ed25519_seedbytes () ;
int /*<<< orphan*/ crypto_sign_ed25519_sk_to_pk (unsigned char*,unsigned char*) ;
int /*<<< orphan*/ crypto_sign_ed25519_sk_to_seed (unsigned char*,unsigned char*) ;
scalar_t__ crypto_sign_ed25519ph_statebytes () ;
int /*<<< orphan*/ crypto_sign_final_create (int /*<<< orphan*/ *,unsigned char*,unsigned long long*,unsigned char*) ;
int crypto_sign_final_verify (int /*<<< orphan*/ *,unsigned char*,unsigned char*) ;
int /*<<< orphan*/ crypto_sign_init (int /*<<< orphan*/ *) ;
scalar_t__ crypto_sign_keypair (unsigned char*,unsigned char*) ;
unsigned int crypto_sign_messagebytes_max () ;
int crypto_sign_open (unsigned char*,unsigned long long*,unsigned char*,unsigned long long,unsigned char*) ;
int /*<<< orphan*/ crypto_sign_primitive () ;
unsigned int crypto_sign_publickeybytes () ;
unsigned int crypto_sign_secretkeybytes () ;
scalar_t__ crypto_sign_seed_keypair (unsigned char*,unsigned char*,unsigned char*) ;
unsigned int crypto_sign_seedbytes () ;
scalar_t__ crypto_sign_statebytes () ;
int /*<<< orphan*/ crypto_sign_update (int /*<<< orphan*/ *,unsigned char const*,int) ;
int crypto_sign_verify_detached (unsigned char*,unsigned char const*,unsigned int,unsigned char*) ;
int /*<<< orphan*/ exit (int /*<<< orphan*/ ) ;
unsigned char* keypair_seed ;
scalar_t__ memcmp (unsigned char*,unsigned char*,int) ;
int /*<<< orphan*/ memcpy (unsigned char*,unsigned char*,int) ;
int /*<<< orphan*/ memset (unsigned char*,int,int) ;
unsigned char* non_canonical_p ;
int /*<<< orphan*/ printf (char*,...) ;
int /*<<< orphan*/ sodium_bin2hex (char*,int,unsigned char*,int) ;
int /*<<< orphan*/ sodium_hex2bin (unsigned char*,int,char*,int,int /*<<< orphan*/ *,int /*<<< orphan*/ *,int /*<<< orphan*/ *) ;
scalar_t__ strcmp (int /*<<< orphan*/ ,char*) ;
TYPE_1__* test_data ;
int main(void)
{
crypto_sign_state st;
unsigned char extracted_seed[crypto_sign_ed25519_SEEDBYTES];
unsigned char extracted_pk[crypto_sign_ed25519_PUBLICKEYBYTES];
unsigned char sig[crypto_sign_BYTES];
unsigned char sm[1024 + crypto_sign_BYTES];
unsigned char m[1024];
unsigned char skpk[crypto_sign_SECRETKEYBYTES];
unsigned char pk[crypto_sign_PUBLICKEYBYTES];
unsigned char sk[crypto_sign_SECRETKEYBYTES];
char sig_hex[crypto_sign_BYTES * 2 + 1];
char pk_hex[crypto_sign_PUBLICKEYBYTES * 2 + 1];
char sk_hex[crypto_sign_SECRETKEYBYTES * 2 + 1];
unsigned long long siglen;
unsigned long long smlen;
unsigned long long mlen;
unsigned int i;
unsigned int j;
memset(sig, 0, sizeof sig);
for (i = 0U; i < (sizeof test_data) / (sizeof test_data[0]); i++) {
#ifdef BROWSER_TESTS
if (i % 128U != 127U) {
continue;
}
#endif
memcpy(skpk, test_data[i].sk, crypto_sign_SEEDBYTES);
memcpy(skpk + crypto_sign_SEEDBYTES, test_data[i].pk,
crypto_sign_PUBLICKEYBYTES);
if (crypto_sign(sm, &smlen, (const unsigned char *)test_data[i].m, i,
skpk) != 0) {
printf("crypto_sign() failure: [%u]\n", i);
continue;
}
if (memcmp(test_data[i].sig, sm, crypto_sign_BYTES) != 0) {
printf("signature failure: [%u]\n", i);
continue;
}
if (crypto_sign_open(m, NULL, sm, smlen, test_data[i].pk) != 0) {
printf("crypto_sign_open() failure: [%u]\n", i);
continue;
}
add_l(sm + 32);
#ifndef ED25519_COMPAT
if (crypto_sign_open(m, &mlen, sm, smlen, test_data[i].pk) != -1) {
printf("crypto_sign_open(): signature [%u] is malleable\n", i);
continue;
}
#else
if (crypto_sign_open(m, &mlen, sm, smlen, test_data[i].pk) != 0) {
printf("crypto_sign_open(): signature [%u] is not malleable\n", i);
continue;
}
#endif
if (memcmp(test_data[i].m, m, (size_t)mlen) != 0) {
printf("message verification failure: [%u]\n", i);
continue;
}
sm[i + crypto_sign_BYTES - 1U]++;
if (crypto_sign_open(m, &mlen, sm, smlen, test_data[i].pk) == 0) {
printf("message can be forged: [%u]\n", i);
continue;
}
if (crypto_sign_open(m, &mlen, sm, i % crypto_sign_BYTES,
test_data[i].pk) == 0) {
printf("short signed message verifies: [%u]\n",
i % crypto_sign_BYTES);
continue;
}
if (crypto_sign_detached(sig, &siglen,
(const unsigned char *)test_data[i].m, i, skpk)
!= 0) {
printf("detached signature failed: [%u]\n", i);
continue;
}
if (siglen == 0U || siglen > crypto_sign_BYTES) {
printf("detached signature has an unexpected length: [%u]\n", i);
continue;
}
if (memcmp(test_data[i].sig, sig, crypto_sign_BYTES) != 0) {
printf("detached signature failure: [%u]\n", i);
continue;
}
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, test_data[i].pk) != 0) {
printf("detached signature verification failed: [%u]\n", i);
continue;
}
}
printf("%u tests\n", i);
i--;
memcpy(sm, test_data[i].m, i);
if (crypto_sign(sm, &smlen, sm, i, skpk) != 0) {
printf("crypto_sign() with overlap failed\n");
}
if (crypto_sign_open(sm, &mlen, sm, smlen, test_data[i].pk) != 0) {
printf("crypto_sign_open() with overlap failed\n");
}
if (memcmp(test_data[i].m, sm, (size_t)mlen) != 0) {
printf("crypto_sign_open() with overlap failed (content)\n");
}
for (j = 1U; j < 8U; j++) {
sig[63] ^= (j << 5);
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, test_data[i].pk) != -1) {
printf("detached signature verification should have failed\n");
continue;
}
sig[63] ^= (j << 5);
}
#ifndef ED25519_COMPAT
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, non_canonical_p) != -1) {
printf("detached signature verification with non-canonical key should have failed\n");
}
#endif
memset(pk, 0, sizeof pk);
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, pk) != -1) {
printf("detached signature verification should have failed\n");
}
memset(sig, 0xff, 32);
sig[0] = 0xdb;
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, pk) != -1) {
printf("detached signature verification should have failed\n");
}
assert(crypto_sign_detached(sig, NULL,
(const unsigned char *)test_data[i].m, i, skpk) == 0);
sodium_hex2bin(pk, crypto_sign_PUBLICKEYBYTES,
"3eee494fb9eac773144e34b0c755affaf33ea782c0722e5ea8b150e61209ab36",
crypto_sign_PUBLICKEYBYTES * 2, NULL, NULL, NULL);
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, pk) != -1) {
printf("signature with an invalid public key should have failed\n");
}
sodium_hex2bin(pk, crypto_sign_PUBLICKEYBYTES,
"0200000000000000000000000000000000000000000000000000000000000000",
crypto_sign_PUBLICKEYBYTES * 2, NULL, NULL, NULL);
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, pk) != -1) {
printf("signature with an invalid public key should have failed\n");
}
sodium_hex2bin(pk, crypto_sign_PUBLICKEYBYTES,
"0500000000000000000000000000000000000000000000000000000000000000",
crypto_sign_PUBLICKEYBYTES * 2, NULL, NULL, NULL);
if (crypto_sign_verify_detached(sig,
(const unsigned char *)test_data[i].m,
i, pk) != -1) {
printf("signature with an invalid public key should have failed\n");
}
if (crypto_sign_seed_keypair(pk, sk, keypair_seed) != 0) {
printf("crypto_sign_seed_keypair() failure\n");
return -1;
}
crypto_sign_init(&st);
crypto_sign_update(&st, (const unsigned char *)test_data[i].m, i);
crypto_sign_final_create(&st, sig, NULL, sk);
sodium_bin2hex(sig_hex, sizeof sig_hex, sig, sizeof sig);
printf("ed25519ph sig: [%s]\n", sig_hex);
crypto_sign_init(&st);
crypto_sign_update(&st, (const unsigned char *)test_data[i].m, i);
if (crypto_sign_final_verify(&st, sig, pk) != 0) {
printf("ed5519ph verification failed\n");
}
crypto_sign_init(&st);
crypto_sign_update(&st, (const unsigned char *)test_data[i].m, 0);
crypto_sign_update(&st, (const unsigned char *)test_data[i].m, i / 2);
crypto_sign_update(&st, ((const unsigned char *)test_data[i].m) + i / 2,
i - i / 2);
if (crypto_sign_final_verify(&st, sig, pk) != 0) {
printf("ed5519ph verification failed\n");
}
sig[0]++;
if (crypto_sign_final_verify(&st, sig, pk) != -1) {
printf("ed5519ph verification could be forged\n");
}
sig[0]--;
pk[0]++;
if (crypto_sign_final_verify(&st, sig, pk) != -1) {
printf("ed5519ph verification could be forged\n");
}
sodium_hex2bin(sk, crypto_sign_SECRETKEYBYTES,
"833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42",
2 * crypto_sign_SECRETKEYBYTES , NULL, NULL, NULL);
sodium_hex2bin(pk, crypto_sign_PUBLICKEYBYTES,
"ec172b93ad5e563bf4932c70e1245034c35467ef2efd4d64ebf819683467e2bf",
2 * crypto_sign_PUBLICKEYBYTES, NULL, NULL, NULL);
memcpy(sk + crypto_sign_SECRETKEYBYTES - crypto_sign_PUBLICKEYBYTES,
pk, crypto_sign_PUBLICKEYBYTES);
crypto_sign_init(&st);
crypto_sign_update(&st, (const unsigned char *) "abc", 3);
crypto_sign_final_create(&st, sig, &siglen, sk);
if (siglen == 0U || siglen > crypto_sign_BYTES) {
printf("ed25519ph signature has an unexpected length\n");
}
sodium_bin2hex(sig_hex, sizeof sig_hex, sig, sizeof sig);
printf("ed25519ph tv sig: [%s]\n", sig_hex);
crypto_sign_init(&st);
crypto_sign_update(&st, (const unsigned char *) "abc", 3);
if (crypto_sign_final_verify(&st, sig, pk) != 0) {
printf("ed25519ph verification failed\n");
}
if (crypto_sign_keypair(pk, sk) != 0) {
printf("crypto_sign_keypair() failure\n");
}
if (crypto_sign_seed_keypair(pk, sk, keypair_seed) != 0) {
printf("crypto_sign_seed_keypair() failure\n");
return -1;
}
crypto_sign_ed25519_sk_to_seed(extracted_seed, sk);
if (memcmp(extracted_seed, keypair_seed, crypto_sign_ed25519_SEEDBYTES)
!= 0) {
printf("crypto_sign_ed25519_sk_to_seed() failure\n");
}
crypto_sign_ed25519_sk_to_pk(extracted_pk, sk);
if (memcmp(extracted_pk, pk, crypto_sign_ed25519_PUBLICKEYBYTES) != 0) {
printf("crypto_sign_ed25519_sk_to_pk() failure\n");
}
sodium_bin2hex(pk_hex, sizeof pk_hex, pk, sizeof pk);
sodium_bin2hex(sk_hex, sizeof sk_hex, sk, sizeof sk);
printf("pk: [%s]\n", pk_hex);
printf("sk: [%s]\n", sk_hex);
assert(crypto_sign_bytes() > 0U);
assert(crypto_sign_seedbytes() > 0U);
assert(crypto_sign_publickeybytes() > 0U);
assert(crypto_sign_secretkeybytes() > 0U);
assert(crypto_sign_messagebytes_max() > 0U);
assert(strcmp(crypto_sign_primitive(), "ed25519") == 0);
assert(crypto_sign_bytes() == crypto_sign_ed25519_bytes());
assert(crypto_sign_seedbytes() == crypto_sign_ed25519_seedbytes());
assert(crypto_sign_messagebytes_max() == crypto_sign_ed25519_messagebytes_max());
assert(crypto_sign_publickeybytes()
== crypto_sign_ed25519_publickeybytes());
assert(crypto_sign_secretkeybytes()
== crypto_sign_ed25519_secretkeybytes());
assert(crypto_sign_statebytes() == crypto_sign_ed25519ph_statebytes());
#ifdef ED25519_NONDETERMINISTIC
exit(0);
#endif
return 0;
}
|
youngzhu/golab
|
dave/fund/channel/c5/main.go
|
<filename>dave/fund/channel/c5/main.go
package main
// main demonstrates fan-in over a buffered channel: ten producer
// goroutines each send ten products into c, and the main goroutine
// drains c until it is closed.
//
// Fixes over the original:
//  1. every producer called close(c), so the second close panicked
//     with "close of closed channel" — c is now closed exactly once;
//  2. the goroutines captured the loop variable i by reference, so
//     producers could all observe the final value of i — i is now
//     passed by value.
func main() {
	var c = make(chan int, 100)
	done := make(chan struct{})

	for i := 0; i < 10; i++ {
		go func(i int) { // pass i by value to avoid loop-variable capture
			for j := 0; j < 10; j++ {
				c <- i * j
			}
			done <- struct{}{}
		}(i)
	}

	// Close c exactly once, after every producer has signalled completion.
	go func() {
		for k := 0; k < 10; k++ {
			<-done
		}
		close(c)
	}()

	for v := range c {
		println(v)
	}
}
|
SapphireSuite/Engine
|
Tests/UnitTests/Maths/Space/Vector3Tests.cpp
|
<gh_stars>1-10
// Copyright (c) 2021 Sapphire Development Team. All Rights Reserved.
#include <UnitTestHelper>
#include <SA/Maths/Space/Vector2.hpp>
#include <SA/Maths/Space/Vector3.hpp>
#include <SA/Maths/Space/Vector4.hpp>
using namespace Sa;
namespace Sa::Vector3_UT
{
	// Produces a random test vector; each component is drawn from [-100, 100].
	Vec3f Generate()
	{
		return Vec3f(UTH::Rand(-100.0f, 100.0f), UTH::Rand(-100.0f, 100.0f), UTH::Rand(-100.0f, 100.0f));
	}

	// Verifies the predefined Vec3f constants: Zero, One and the six axis directions.
	void Constants()
	{
		SA_UTH_EQ(Vec3f::Zero.x, 0.0f);
		SA_UTH_EQ(Vec3f::Zero.y, 0.0f);
		SA_UTH_EQ(Vec3f::Zero.z, 0.0f);

		SA_UTH_EQ(Vec3f::One.x, 1.0f);
		SA_UTH_EQ(Vec3f::One.y, 1.0f);
		SA_UTH_EQ(Vec3f::One.z, 1.0f);

		// X Axis.
		SA_UTH_EQ(Vec3f::Right.x, 1.0f);
		SA_UTH_EQ(Vec3f::Right.y, 0.0f);
		SA_UTH_EQ(Vec3f::Right.z, 0.0f);

		SA_UTH_EQ(Vec3f::Left.x, -1.0f);
		SA_UTH_EQ(Vec3f::Left.y, 0.0f);
		SA_UTH_EQ(Vec3f::Left.z, 0.0f);

		// Y Axis.
		SA_UTH_EQ(Vec3f::Up.x, 0.0f);
		SA_UTH_EQ(Vec3f::Up.y, 1.0f);
		SA_UTH_EQ(Vec3f::Up.z, 0.0f);

		SA_UTH_EQ(Vec3f::Down.x, 0.0f);
		SA_UTH_EQ(Vec3f::Down.y, -1.0f);
		SA_UTH_EQ(Vec3f::Down.z, 0.0f);

		// Z Axis.
		SA_UTH_EQ(Vec3f::Forward.x, 0.0f);
		SA_UTH_EQ(Vec3f::Forward.y, 0.0f);
		SA_UTH_EQ(Vec3f::Forward.z, 1.0f);

		SA_UTH_EQ(Vec3f::Backward.x, 0.0f);
		SA_UTH_EQ(Vec3f::Backward.y, 0.0f);
		SA_UTH_EQ(Vec3f::Backward.z, -1.0f);
	}

	// Covers every Vec3f constructor: default, per-component, uniform scale,
	// component-type cast, copy, Vec2 + z, and truncation from Vec4.
	void Constructors()
	{
		// Default constructor.
		const Vec3f v0;
		SA_UTH_EQ(v0.x, 0.0f);
		SA_UTH_EQ(v0.y, 0.0f);
		SA_UTH_EQ(v0.z, 0.0f);

		// Value constructor.
		const float v1X = UTH::Rand(-100.0f, 100.0f);
		const float v1Y = UTH::Rand(-100.0f, 100.0f);
		const float v1Z = UTH::Rand(-100.0f, 100.0f);
		const Vec3f v1(v1X, v1Y, v1Z);
		SA_UTH_EQ(v1.x, v1X);
		SA_UTH_EQ(v1.y, v1Y);
		SA_UTH_EQ(v1.z, v1Z);

		// Scale constructor: one scalar replicated into all components.
		const float v2S = UTH::Rand(-100.0f, 100.0f);
		const Vec3f v2_scale(v2S);
		SA_UTH_EQ(v2_scale.x, v2S);
		SA_UTH_EQ(v2_scale.y, v2S);
		SA_UTH_EQ(v2_scale.z, v2S);

		// Value cast constructor (Vec3i -> Vec3f).
		const int32 v3X = UTH::Rand<int32>(-100, 100);
		const int32 v3Y = UTH::Rand<int32>(-100, 100);
		const int32 v3Z = UTH::Rand<int32>(-100, 100);
		const Vec3f v3(Vec3i(v3X, v3Y, v3Z));
		SA_UTH_EQ(v3.x, static_cast<float>(v3X));
		SA_UTH_EQ(v3.y, static_cast<float>(v3Y));
		SA_UTH_EQ(v3.z, static_cast<float>(v3Z));

		// Copy constructor.
		const Vec3f v4(v1);
		SA_UTH_EQ(v4.x, v1.x);
		SA_UTH_EQ(v4.y, v1.y);
		SA_UTH_EQ(v4.z, v1.z);

		// From Vec2 plus explicit z.
		const Vec2f v5(UTH::Rand(-100.0f, 100.0f), UTH::Rand(-100.0f, 100.0f));
		const float v6Z = UTH::Rand(-100.0f, 100.0f);
		const Vec3f v6(v5, v6Z);
		SA_UTH_EQ(v6.x, v5.x);
		SA_UTH_EQ(v6.y, v5.y);
		SA_UTH_EQ(v6.z, v6Z);

		// From Vec4 (w component dropped).
		const Vec4f v7(UTH::Rand(-100.0f, 100.0f), UTH::Rand(-100.0f, 100.0f), UTH::Rand(-100.0f, 100.0f), UTH::Rand(-100.0f, 100.0f));
		const Vec3f v8(v7);
		SA_UTH_EQ(v8.x, v7.x);
		SA_UTH_EQ(v8.y, v7.y);
		SA_UTH_EQ(v8.z, v7.z);
	}

	// IsZero / Equals and the == / != operators.
	void Equals()
	{
		const Vec3f v1 = Generate();
		const Vec3f v2 = Generate();

		SA_UTH_RMF(false, v1, IsZero);
		SA_UTH_MF(Vec3f::Zero, IsZero);

		SA_UTH_MF(v1, Equals, v1);
		SA_UTH_RMF(false, v1, Equals, v2);

		SA_UTH_OP(v1, ==, v1);
		SA_UTH_OP(v1, !=, v2);
	}

	// Length / SqrLength / GetNormalized / IsNormalized / Normalize.
	// (The name "Lenght" is a historical typo; renaming would touch the
	// suite entry point, so it is kept as-is.)
	void Lenght()
	{
		Vec3f v1 = Generate();

		const float vLenSqr = v1.x * v1.x + v1.y * v1.y + v1.z * v1.z;
		const float vLen = Maths::Sqrt(vLenSqr);

		SA_UTH_RMF(vLen, v1, Length);
		SA_UTH_RMF(vLenSqr, v1, SqrLength);

		const Vec3f nV1 = v1.GetNormalized();
		SA_UTH_EQ(nV1.x, v1.x / vLen);
		SA_UTH_EQ(nV1.y, v1.y / vLen);
		SA_UTH_EQ(nV1.z, v1.z / vLen);

		SA_UTH_MF(nV1, IsNormalized);
		SA_UTH_EQ(nV1.Length(), 1.0f, std::numeric_limits<float>::epsilon());
		SA_UTH_RMF(false, v1, IsNormalized);

		// In-place normalization must match GetNormalized.
		v1.Normalize();
		SA_UTH_OP(v1, == , nV1);
	}

	// Reflection about a plane normal; projection tests still TODO.
	void Projection()
	{
		// Reflect
		const Vec3f v1(1.0f, 1.0f, 1.0f);
		const Vec3f norm(-1.0f, 0.0f, 0.0f);
		const Vec3f refl = Vec3f(-1.0f, 1.0f, 1.0f);
		SA_UTH_RMF(refl, v1, Reflect, norm);

		// TODO: Projections.
		//SA_UTH_EQ(true, false);
	}

	// Dot product (also via operator|) and cross product (also via operator^),
	// checked against component-wise reference formulas.
	void DotCross()
	{
		const Vec3f v1 = Generate();
		const Vec3f v2 = Generate();

		float dot = v1.x * v2.x + v1.y * v2.y + v1.z * v2.z;
		SA_UTH_RSF(dot, Vec3f::Dot, v1, v2);
		SA_UTH_ROP(dot, v1, | , v2);

		Vec3f cross = Vec3f(
			v1.y * v2.z - v1.z * v2.y,
			v1.z * v2.x - v1.x * v2.z,
			v1.x * v2.y - v1.y * v2.x
		);
		SA_UTH_RSF(cross, Vec3f::Cross, v1, v2);
		SA_UTH_ROP(cross, v1, ^, v2);
	}

	// Signed angle (around a reference axis) and unsigned angle between vectors.
	void Angle()
	{
		const Vec3f v1(-2.0, 1.0, 0.0f);
		const Vec3f v2(1.0, 2.0, 0.0f);

		SA_UTH_RSF(-90.0_deg, Vec3f::Angle, v1, v2, Vec3f::Forward);
		SA_UTH_RSF(90.0_deg, Vec3f::AngleUnsigned, v1, v2);
	}

	// Dist / SqrDist checked against the difference vector's length.
	void Dist()
	{
		const Vec3f v1 = Generate();
		const Vec3f v2 = Generate();

		SA_UTH_RSF((v1 - v2).Length(), Vec3f::Dist, v1, v2);
		SA_UTH_RSF((v1 - v2).SqrLength(), Vec3f::SqrDist, v1, v2);
	}

	// Dir (raw direction) and DirN (normalized direction) from v1 to v2.
	void Dir()
	{
		const Vec3f v1 = Generate();
		const Vec3f v2 = Generate();

		const Vec3f vDir = v2 - v1;
		SA_UTH_RSF(vDir, Vec3f::Dir, v1, v2);
		SA_UTH_RSF(vDir.GetNormalized(), Vec3f::DirN, v1, v2);
	}

	// Lerp (clamped, with an intended out-of-range warning), LerpUnclamped
	// and spherical SLerp.
	void Lerp()
	{
		const Vec3f v1(2.0f, 2.0f, 0.0f);
		const Vec3f v2(-2.0f, 4.0f, 8.0f);

		const Vec3f lerp_res05 = Vec3f(0.0f, 3.0f, 4.0f);
		SA_UTH_RSF(lerp_res05, Vec3f::Lerp, v1, v2, 0.5f);

		// Alpha > 1 clamps to v2 and logs a warning, which is suppressed here.
		SA_LOGLVL_DIS_SECTB(Warning)
		// Intended warning.
		SA_UTH_RSF(v2, Vec3f::Lerp, v1, v2, 2.0f);
		SA_LOGLVL_DIS_SECTE()

		const Vec3f ulerp_res1 = Vec3f(6.0f, 0.0f, -8.0f);
		SA_UTH_RSF(ulerp_res1, Vec3f::LerpUnclamped, v1, v2, -1.0f);

		const Vec3f slerp_v1(2.0f, 2.0f, 0.0f);
		const Vec3f slerp_v2(-2.0f, 2.0f, 0.0f);
		const Vec3f slerp_res05 = Vec3f(0.0f, slerp_v1.Length(), 0.0f);
		SA_UTH_RSF(slerp_res05, Vec3f::SLerp, slerp_v1, slerp_v2, 0.5f);
	}

	// Unary minus, scalar scale/divide (both orders), component-wise
	// arithmetic, and the compound-assignment forms of each.
	void Operators()
	{
		const Vec3f v1 = Generate();

		const Vec3f mv1 = Vec3f(-v1.x, -v1.y, -v1.z);
		SA_UTH_EQ(-v1, mv1);

		// Scalar Scale.
		float scale = UTH::Rand(-100.0f, 100.0f);
		const Vec3f sv1 = Vec3f(v1.x * scale, v1.y * scale, v1.z * scale);
		SA_UTH_EQ(v1 * scale, sv1);
		SA_UTH_EQ(scale * v1, sv1);

		const Vec3f usv1 = Vec3f(v1.x / scale, v1.y / scale, v1.z / scale);
		const Vec3f susv1 = Vec3f(scale / v1.x, scale / v1.y, scale / v1.z);
		SA_UTH_EQ(v1 / scale, usv1);
		SA_UTH_EQ(scale / v1, susv1);

		// Vec2 operators.
		const Vec3f v2 = Generate();

		const Vec3f v1pv2 = Vec3f(v1.x + v2.x, v1.y + v2.y, v1.z + v2.z);
		SA_UTH_EQ(v1 + v2, v1pv2);

		const Vec3f v1mv2 = Vec3f(v1.x - v2.x, v1.y - v2.y, v1.z - v2.z);
		SA_UTH_EQ(v1 - v2, v1mv2);

		const Vec3f v1mltv2 = Vec3f(v1.x * v2.x, v1.y * v2.y, v1.z * v2.z);
		SA_UTH_EQ(v1 * v2, v1mltv2);

		const Vec3f v1dv2 = Vec3f(v1.x / v2.x, v1.y / v2.y, v1.z / v2.z);
		SA_UTH_EQ(v1 / v2, v1dv2);

		// op *= scalar.
		Vec3f v3 = v1;
		v3 *= scale;
		SA_UTH_EQ(v3, sv1);

		// op /= scalar.
		Vec3f v4 = v1;
		v4 /= scale;
		SA_UTH_EQ(v4, usv1);

		// op += Vec2.
		Vec3f v5 = v1;
		v5 += v2;
		SA_UTH_EQ(v5, v1pv2);

		// op -= Vec2.
		Vec3f v6 = v1;
		v6 -= v2;
		SA_UTH_EQ(v6, v1mv2);

		// op *= Vec2.
		Vec3f v7 = v1;
		v7 *= v2;
		SA_UTH_EQ(v7, v1mltv2);

		// op /= Vec2.
		Vec3f v8 = v1;
		v8 /= v2;
		SA_UTH_EQ(v8, v1dv2);
	}

	// operator[] indexing and raw Data() pointer access.
	void Accessors()
	{
		const Vec3f v1 = Generate();

		SA_UTH_EQ(v1[0], v1.x);
		SA_UTH_EQ(v1[1], v1.y);
		SA_UTH_EQ(v1[2], v1.z);

		SA_UTH_EQ(v1.Data(), &v1.x);
		SA_UTH_EQ(const_cast<Vec3f&>(v1).Data(), &const_cast<Vec3f&>(v1).x);
	}
}
// Runs the whole Vec3f test suite, one SA_UTH_GP group per feature area.
void Vector3Tests()
{
	using namespace Vector3_UT;

	SA_UTH_GP(Constants());
	SA_UTH_GP(Constructors());
	SA_UTH_GP(Equals());
	SA_UTH_GP(Lenght());
	SA_UTH_GP(Projection());
	SA_UTH_GP(DotCross());
	SA_UTH_GP(Angle());
	SA_UTH_GP(Dist());
	SA_UTH_GP(Dir());
	SA_UTH_GP(Lerp());
	SA_UTH_GP(Operators());
	SA_UTH_GP(Accessors());
}
|
vespa-mrs/vespa
|
vespa/interfaces/cli_batch/analysis_cli_fructose.py
|
# Python modules
import os
import sys
# 3rd party modules
# Our modules
import vespa.analysis.mrs_dataset as mrs_dataset
import vespa.analysis.block_prep_timeseries as block_prep_timeseries
import vespa.analysis.util_import as util_import
import vespa.analysis.fileio.dicom_siemens_timeseries as fileio_dicom_siemens_timeseries
import vespa.analysis.fileio.util_exceptions as util_exceptions
import vespa.common.util.export as util_export
import vespa.common.mrs_data_raw_timeseries as mrs_data_raw_timeseries
DESC = \
"""Command line interface to process MRS data in Vespa-Analysis.
Data filename, preset file name, data type string and CSV output
file name values are all required for this command to function
properly.
Note. You may have to enclose data/preset/output strings in double
quotation marks for them to process properly if they have
spaces or other special characters embedded in them.
"""
def do_analysis(dataset, preset, verbose=False, debug=False):
    """
    Apply `preset` to `dataset`, run the processing/fit chain for every
    block, then save the fitted dataset as VIFF XML.

    The output filename is derived from the grandparent directory of the
    raw data source: <grandparent-dir>/<grandparent-name>_all_files.xml.
    On a write failure a message is printed and the process exits with -1.
    """
    # Update dataset with preset ------------------------------------
    dataset.apply_preset(preset, voxel=(0, 0, 0))

    # Process and fit data ------------------------------------------
    if verbose: print("  Running dataset chain objects")
    _process_all_blocks(dataset)

    # Save Dataset with results to VIFF XML -------------------------
    dirpath, _ = os.path.split(dataset.blocks["raw"].data_source)
    dirpath, _ = os.path.split(dirpath)
    dirpath, fbase = os.path.split(dirpath)
    filename = fbase + "_all_files.xml"
    filename = os.path.join(dirpath, filename)

    if verbose: print(""" Saving dataset to XML file "%s". """ % filename)
    dataset.dataset_filename = filename

    try:
        util_export.export(filename, [dataset], None, None, False)
    except Exception:
        # BUG FIX: the original formatted this message with the undefined
        # name `outxml` (NameError masked the real failure) and left a stray
        # `bob = 10` debug line inside the try block.
        msg = """I can't write the file "%s".""" % filename
        print(msg, file=sys.stderr)
        print(msg, file=sys.stdout)
        sys.exit(-1)
def _process_all_blocks(dataset):
    """ for all voxels, run chain in all blocks to update """
    # Run the 'prep' block chain for the single voxel (0,0,0), then let the
    # dataset batch-fit everything else.  The chain result is not needed here.
    tmp = dataset.blocks['prep'].chain.run([0,0,0])
    dataset.batch_fit_all()
def _import_preset(presetfile):
    """
    Read a Vespa preset (VIFF XML) file and return the first preset object.

    Prints an error message to stdout/stderr and exits the process with
    status -1 on any failure.
    """
    try:
        msg = ""
        try:
            importer = util_import.DatasetImporter(presetfile)
        except IOError:
            msg = """I can't read the preset file "%s".""" % presetfile
        except SyntaxError:
            msg = """The preset file "%s" isn't valid Vespa Interchange File Format.""" % presetfile

        if msg:
            print(msg, file=sys.stderr)
            print(msg, file=sys.stdout)
            sys.exit(-1)
        else:
            # Time to rock and roll!
            presets = importer.go()
            preset = presets[0]
            return preset
    except Exception:
        # BUG FIX: the original formatted this message with the undefined
        # name `filename`, and its bare `except:` also caught the SystemExit
        # raised by sys.exit() above, re-reporting it as an unknown error.
        msg = """Unknown exception reading Preset file "%s".""" % presetfile
        print(msg, file=sys.stderr)
        print(msg, file=sys.stdout)
        sys.exit(-1)
def _import_siemens_dicom_timeseries(filenames, open_dataset=None):
    """
    Read Siemens DICOM timeseries files and return a tuple of
    (new_dataset_or_None, open_dataset).

    Stolen from Analysis main.py module - trimmed for CLI usage.

    Assumption here is that we are opening one file in the reader. If there is
    an 'open_dataset' sent in, then the current reader is for an associated
    file. We will associate the current file with the open one at the end of
    the code.

    On any read error a message is printed to stdout/stderr and the process
    exits with status -1.
    """
    try:
        reader = fileio_dicom_siemens_timeseries.RawReaderDicomSiemensTimeseries()
        reader.filenames = filenames

        datasets = [ ]
        msg = ""
        try:
            # Step 1
            #
            # Return one or more DataRawXxxx objects that indicate what
            # sort of data was read in by the reader
            raws = reader.read_raws(open_dataset=open_dataset)
        except IOError:
            msg = "One or more of the files couldn't be read due to a disk error."
        # BUG FIX: the handlers below originally assigned undefined module
        # constants (_MSG_MULTIFILE_ATTRIBUTE_MISMATCH etc., copied from
        # Analysis main.py but never defined in this file), which raised
        # NameError instead of reporting the problem.  Literal messages are
        # used instead; wording approximates the GUI originals.
        except util_exceptions.MultifileAttributeMismatchError:
            msg = "The selected files have mismatched attributes and can't be opened together."
        except util_exceptions.MultifileTypeMismatchError:
            msg = "The selected files have mismatched types and can't be opened together."
        except util_exceptions.UnsupportedDimensionalityError:
            # Note that this also catches SIDataError which is a
            # subclass of UnsupportedDimensionalityError
            msg = "The data has a dimensionality that is not supported."
        except util_exceptions.OpenFileAttributeMismatchError:
            msg = "The file attributes don't match the currently open dataset."
        except util_exceptions.OpenFileTypeMismatchError:
            msg = "The file type doesn't match the currently open dataset."
        except util_exceptions.FileNotFoundError as error_instance:
            msg = str(error_instance)
        except util_exceptions.OpenFileUserReadRawError as error_instance:
            if not error_instance:
                error_instance = "User read_raw raised OpenFileUserReadRawError"
            msg = str(error_instance)

        if msg:
            print(msg, file=sys.stdout)
            print(msg, file=sys.stderr)
            sys.exit(-1)
        else:
            # All is well. Convert these raw objects into fully-fledged
            # dataset objects.
            if open_dataset:
                zero_fill_multiplier = open_dataset.zero_fill_multiplier
            else:
                zero_fill_multiplier = 0

            # Step 2
            #
            # See if any data types need special classes. We usually only
            # look for raw fidsum classes which trigger a prep fidsum block.
            block_class_specs = [ ]
            for raw in raws:
                d = { }
                if isinstance(raw, mrs_data_raw_timeseries.DataRawTimeseries):
                    d["prep"] = block_prep_timeseries.BlockPrepTimeseries
                block_class_specs.append(d)

            f = lambda raw, block_classes: mrs_dataset.dataset_from_raw(raw,
                                                                        block_classes,
                                                                        zero_fill_multiplier)
            datasets = list(map(f, raws, block_class_specs))

            if datasets:
                if open_dataset is not None:
                    open_dataset.blocks['raw'].set_associated_datasets([datasets[0], ])
                return datasets[0], open_dataset
            else:
                return None, open_dataset
    except Exception:
        # BUG FIX: was a bare `except:` which also caught the SystemExit
        # raised by sys.exit() above and re-reported it as unknown.
        msg = """Unknown exception reading Data file "%s".""" % filenames[0]
        print(msg, file=sys.stderr)
        print(msg, file=sys.stdout)
        sys.exit(-1)
def main():
    """
    Batch-process every subject directory under STARTDIR: read the Siemens
    DICOM timeseries files in <subject>/series_sum, apply the shared preset
    and run the full analysis/fit for each subject.

    Paths are Windows-specific by design (local processing script).
    """
    verbose = True

    # Processing of SVS_EDIT_DIFF files
    STARTDIR = 'D:\\Users\\bsoher\\temp\\current\\data'
    presetfile = STARTDIR + '\\_preset_raw_fruct_v4.xml'

    # Every immediate subdirectory of STARTDIR is one subject.
    paths = list(filter(os.path.isdir, [os.path.join(STARTDIR, f) for f in os.listdir(STARTDIR)]))
    paths = paths[::-1]          # process in reverse listing order

    for i, path in enumerate(paths):
        path = path + '\\series_sum'
        filenames = list(filter(os.path.isfile, [os.path.join(path, f) for f in os.listdir(path)]))

        print('Processing Subject = ' + path + ' with ' + str(len(filenames)) + ' files')

        # Load Main Dataset --------------------------
        # The importer returns (dataset, open_dataset); we only need the first.
        dataset = _import_siemens_dicom_timeseries(filenames, open_dataset=None)
        dataset = dataset[0]

        # Load Preset data for Main Dataset ----------------------------------------------
        if verbose: print("  Read Preset object")
        preset = _import_preset(presetfile)

        do_analysis(dataset, preset, verbose=verbose, debug=False)

        # if i >= 2: break  # debug statement to exit after three subjects
|
MewX/contendo-viewer-v1.6.3
|
org/apache/pdfbox/pdmodel/interactive/form/PlainText.java
|
/* */ package org.apache.pdfbox.pdmodel.interactive.form;
/* */
/* */ import java.io.IOException;
/* */ import java.text.AttributedCharacterIterator;
/* */ import java.text.AttributedString;
/* */ import java.text.BreakIterator;
/* */ import java.util.ArrayList;
/* */ import java.util.Arrays;
/* */ import java.util.List;
/* */ import org.apache.pdfbox.pdmodel.font.PDFont;
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/* */
/**
 * A paragraph-structured plain-text value used for text-field layout.
 * The input is split into {@link Paragraph}s on any line separator; each
 * paragraph can then be broken into {@link Line}s that fit a given width
 * for a given font and size.
 */
class PlainText
{
    private static final float FONTSCALE = 1000.0F;

    private final List<Paragraph> paragraphs;

    /**
     * Splits the text value into paragraphs on CR, LF, CRLF and the Unicode
     * line/paragraph separators.  Tabs become a single space; an empty
     * paragraph is kept as one space so it still occupies a line.
     */
    PlainText(String textValue)
    {
        List<String> parts = Arrays.asList(textValue.replaceAll("\t", " ").split("\\r\\n|\\n|\\r|\\u2028|\\u2029"));
        paragraphs = new ArrayList<Paragraph>();
        for (String part : parts)
        {
            if (part.length() == 0)
            {
                part = " ";
            }
            paragraphs.add(new Paragraph(part));
        }
    }

    /**
     * Builds one paragraph per entry of the list value.
     */
    PlainText(List<String> listValue)
    {
        paragraphs = new ArrayList<Paragraph>();
        for (String part : listValue)
        {
            paragraphs.add(new Paragraph(part));
        }
    }

    /**
     * Returns the paragraphs this text consists of.
     */
    List<Paragraph> getParagraphs()
    {
        return paragraphs;
    }

    /**
     * Attribute key used to cache the computed advance width of a word.
     */
    static class TextAttribute extends AttributedCharacterIterator.Attribute
    {
        private static final long serialVersionUID = -3138885145941283005L;

        /** Pre-computed advance width of the word, stored as a Float. */
        public static final AttributedCharacterIterator.Attribute WIDTH = new TextAttribute("width");

        protected TextAttribute(String name)
        {
            super(name);
        }
    }

    /**
     * One paragraph of the text, with line-breaking support.
     */
    static class Paragraph
    {
        private final String textContent;

        Paragraph(String text)
        {
            textContent = text;
        }

        /** Returns the paragraph's raw text. */
        String getText()
        {
            return textContent;
        }

        /**
         * Breaks the paragraph into lines no wider than {@code width},
         * splitting at positions chosen by {@link BreakIterator}.  Trailing
         * whitespace of a word is ignored when deciding whether the word
         * still fits on the current line.
         */
        List<PlainText.Line> getLines(PDFont font, float fontSize, float width) throws IOException
        {
            BreakIterator iterator = BreakIterator.getLineInstance();
            iterator.setText(textContent);

            float scale = fontSize / FONTSCALE;

            int start = iterator.first();
            int end = iterator.next();
            float lineWidth = 0.0F;

            List<PlainText.Line> textLines = new ArrayList<PlainText.Line>();
            PlainText.Line textLine = new PlainText.Line();

            while (end != BreakIterator.DONE)
            {
                String word = textContent.substring(start, end);
                float wordWidth = font.getStringWidth(word) * scale;

                lineWidth += wordWidth;

                // A trailing whitespace character doesn't count against the width.
                if (lineWidth >= width && Character.isWhitespace(word.charAt(word.length() - 1)))
                {
                    float whitespaceWidth = font.getStringWidth(word.substring(word.length() - 1)) * scale;
                    lineWidth -= whitespaceWidth;
                }

                // Word doesn't fit: close the current line and start a new one.
                if (lineWidth >= width)
                {
                    textLine.setWidth(textLine.calculateWidth(font, fontSize));
                    textLines.add(textLine);
                    textLine = new PlainText.Line();
                    lineWidth = font.getStringWidth(word) * scale;
                }

                AttributedString as = new AttributedString(word);
                as.addAttribute(PlainText.TextAttribute.WIDTH, Float.valueOf(wordWidth));
                PlainText.Word wordInstance = new PlainText.Word(word);
                wordInstance.setAttributes(as);
                textLine.addWord(wordInstance);

                start = end;
                end = iterator.next();
            }

            // Flush the final (possibly partial) line.
            textLine.setWidth(textLine.calculateWidth(font, fontSize));
            textLines.add(textLine);
            return textLines;
        }
    }

    /**
     * A sequence of words laid out on one line.
     */
    static class Line
    {
        private final List<PlainText.Word> words = new ArrayList<PlainText.Word>();
        private float lineWidth;

        float getWidth()
        {
            return lineWidth;
        }

        void setWidth(float width)
        {
            lineWidth = width;
        }

        /**
         * Sums the cached widths of all words, ignoring the trailing
         * whitespace of the last word on the line.
         */
        float calculateWidth(PDFont font, float fontSize) throws IOException
        {
            float scale = fontSize / FONTSCALE;
            float calculatedWidth = 0.0F;
            for (PlainText.Word word : words)
            {
                calculatedWidth += ((Float) word
                        .getAttributes().getIterator().getAttribute(PlainText.TextAttribute.WIDTH)).floatValue();
                String text = word.getText();
                if (words.indexOf(word) == words.size() - 1 && Character.isWhitespace(text.charAt(text.length() - 1)))
                {
                    float whitespaceWidth = font.getStringWidth(text.substring(text.length() - 1)) * scale;
                    calculatedWidth -= whitespaceWidth;
                }
            }
            return calculatedWidth;
        }

        List<PlainText.Word> getWords()
        {
            return words;
        }

        /**
         * Extra spacing to insert between words so the line fills
         * {@code width} (used for justified alignment).
         */
        float getInterWordSpacing(float width)
        {
            return (width - lineWidth) / (words.size() - 1);
        }

        void addWord(PlainText.Word word)
        {
            words.add(word);
        }
    }

    /**
     * A single word together with its attributed representation, which
     * caches the computed width under {@link TextAttribute#WIDTH}.
     */
    static class Word
    {
        private AttributedString attributedString;
        private final String textContent;

        Word(String text)
        {
            textContent = text;
        }

        String getText()
        {
            return textContent;
        }

        AttributedString getAttributes()
        {
            return attributedString;
        }

        void setAttributes(AttributedString as)
        {
            attributedString = as;
        }
    }
}
/* Location: /mnt/r/ConTenDoViewer.jar!/org/apache/pdfbox/pdmodel/interactive/form/PlainText.class
* Java compiler version: 6 (50.0)
* JD-Core Version: 1.1.3
*/
|
subramp-prep/leetcode
|
problems/024.Swap_Nodes_in_Pairs/AC_simulation_n.cpp
|
<reponame>subramp-prep/leetcode<filename>problems/024.Swap_Nodes_in_Pairs/AC_simulation_n.cpp<gh_stars>0
/*
* Author: illuz <<EMAIL>[at]<EMAIL>>
* File: AC_simulation_n.cpp
* Create Date: 2014-12-23 14:22:07
* Descripton: simulation: add a new node in the head first
*/
#include <bits/stdc++.h>
using namespace std;
const int N = 0;
// Definition for singly-linked list.
// Minimal singly-linked list node: an int payload plus a NULL-terminated
// next pointer.  The constructor leaves next unset (NULL) on purpose.
struct ListNode {
	int val;
	ListNode *next;
	ListNode(int x) : val(x), next(NULL) {}
};
// In-place pairwise swap of adjacent nodes in a singly-linked list
// (LeetCode 24).  A temporary dummy head removes the special case for the
// first pair.  O(n) time, O(1) extra space.
// Fix over the original: removed the unused local `int cnt = 1;`.
class Solution {
public:
	// Returns the (possibly new) head after swapping every adjacent pair.
	ListNode *swapPairs(ListNode *head) {
		ListNode *newHead = new ListNode(0);
		newHead->next = head;
		ListNode *preNode = newHead, *curNode = head;
		while (curNode != NULL && curNode->next != NULL) {
			// swap curNode and curNode->next
			preNode->next = curNode->next;
			curNode->next = preNode->next->next;
			preNode->next->next = curNode;
			// go over two nodes
			preNode = curNode;
			curNode = curNode->next;
		}
		head = newHead->next;
		delete newHead;
		return head;
	}
};
int main() {
ListNode *h = new ListNode(1);
h->next = new ListNode(2);
Solution s;
cout << s.swapPairs(h) << endl;
return 0;
}
|
daoshengtech/rongcloud-uniapp-imlib
|
iOS/RCUniIM/RCUniIM/frameworks/RongIMLibCore.xcframework/ios-i386_x86_64-simulator/RongIMLibCore.framework/Headers/RCGroupMessageReaderV2.h
|
<filename>iOS/RCUniIM/RCUniIM/frameworks/RongIMLibCore.xcframework/ios-i386_x86_64-simulator/RongIMLibCore.framework/Headers/RCGroupMessageReaderV2.h
//
// RCMessageReadUser.h
// RongIMLibCore
//
// Created by RongCloud on 2021/2/22.
// Copyright © 2021 RongCloud. All rights reserved.
//
#import <Foundation/Foundation.h>
/**
 * \~chinese
 已读用户对象

 * \~english
 Read user object
 */
@interface RCGroupMessageReaderV2 : NSObject

/**
 * \~chinese
 已读用户 id

 * \~english
 Read user id
 */
@property (nonatomic, copy) NSString *userId;

/**
 * \~chinese
 已读时间

 * \~english
 Read time
 */
// NOTE(review): presumably an epoch timestamp (the SDK's other timestamps
// use milliseconds) — confirm against the RongCloud documentation.
@property (nonatomic, assign) long long readTime;

@end
|
michelle714/paprika
|
packages/Popover/stories/examples/StateValue.js
|
<gh_stars>10-100
import React from "react";
import Button from "@paprika/button";
import { Gap } from "storybook/assets/styles/common.styles";
import Popover from "../../src";
// Storybook example: an uncontrolled Popover whose Trigger uses a render
// function.  The render function receives (toggleHandler, a11yAttributes,
// isOpen); isOpen reflects the Popover's internal state.  Also demonstrates
// the defaultIsOpen prop.
export default function StateValue() {
  return (
    <>
      <p>
        For an uncontrolled <code><Popover></code>, if you use a render function with the{" "}
        <code><Popover.Trigger></code> component, then the state value, called <code>isOpen</code>, is provided as
        the third parameter.
      </p>
      <p>
        Additionally, this example shows the <code><Popover></code> with the <code>defaultIsOpen</code> prop.
      </p>
      <Gap />
      <Popover defaultIsOpen>
        <Popover.Trigger>
          {/* Render prop: spread the a11y attributes onto the trigger element. */}
          {(handler, a11yAttributes, isOpen) => (
            <Button onClick={handler} {...a11yAttributes}>
              {isOpen ? "Click to close" : "Click to open"}
            </Button>
          )}
        </Popover.Trigger>
        <Popover.Content>
          <Popover.Card>Lorem hipsum single-origin kombucha.</Popover.Card>
        </Popover.Content>
        <Popover.Tip />
      </Popover>
    </>
  );
}
|
pavlitsky/vscode-yard
|
src/test/project/constant/constant_expected.rb
|
<reponame>pavlitsky/vscode-yard
# @return [<Type>] <description>
FOO_BAR='baz'.freeze
|
Rc-Cookie/Engine2D
|
src/main/java/com/github/rccookie/engine2d/online/MessageType.java
|
<filename>src/main/java/com/github/rccookie/engine2d/online/MessageType.java
package com.github.rccookie.engine2d.online;
/**
 * Type of message exchanged between server and client, used to route a
 * received message to the right handler.
 */
public enum MessageType {
    /**
     * Message from the server to all clients.
     */
    SERVER_TO_CLIENT,
    /**
     * Message from one client, relayed to all other clients.
     */
    CLIENT_TO_CLIENT,
    /**
     * Message from one client to the server only.
     */
    CLIENT_TO_SERVER,
    /**
     * Error message.
     */
    ERROR,
    /**
     * Other / unknown message type.
     */
    OTHER
}
|
jeyavelnkl/Jel_Apps
|
tracks/test/helpers/rendering_helper_test.rb
|
require "test_helper"
class RenderingHelperTest < ActionView::TestCase
  include RenderingHelper

  # message:// URLs should be auto-linked once; markup that is already a
  # link must be left untouched (no double wrapping).
  test "auto_link_message" do
    html = "This is a sample with a message - message://<123456789>. There we go."
    rendered_html = auto_link_message(html)
    assert(
      rendered_html.include?(%|<a href="message://<123456789>">message://<123456789></a>|),
      "Message was not correctly rendered. Rendered message:\n#{rendered_html}"
    )

    html = %|This message is already tagged: <a href="message://<12345>">Call bob</a>."|
    rendered_html = auto_link_message(html)
    assert_equal(html, rendered_html)
  end

  # Basic Textile markup (*strong*) is converted to HTML.
  test "textile" do
    raw_textile = "This should end up *strong*."
    rendered_textile = textile(raw_textile)
    assert_equal("<p>This should end up <strong>strong</strong>.</p>", rendered_textile)
  end

  # render_text combines Textile rendering with message:// auto-linking and
  # passes onenote:// links through unchanged.
  test "render_text" do
    simple_textile = render_text("This is *strong*.")
    assert_equal("<p>This is <strong>strong</strong>.</p>", simple_textile)

    autolink_message = render_text("Call message://<123>.")
    assert_equal(%|<p>Call <a href="message://<123>">message://<123></a>.</p>|, autolink_message)

    onenote_links = render_text(%|Link to onenote <a href="onenote://foobar">here</a>.|)
    assert_equal(%|<p>Link to onenote <a href="onenote://foobar">here</a>.</p>|, onenote_links)
  end
end
|
pemontto/liquidpy
|
liquid/python/tags/tag_while.py
|
<reponame>pemontto/liquidpy<filename>liquid/python/tags/tag_while.py
"""Tag while"""
import copy
from .inherited import tag_manager
from .tag_if import TagIf
from ...tags.transformer import render_segment
@tag_manager.register
class TagWhile(TagIf):
    """The while tag

    Re-evaluates its condition before each pass and renders its children
    until the condition is falsy, honoring break/continue flags set by
    child tags.  Supports a trailing ``else`` clause (rendered via
    ``_render_next``) in the Python ``while ... else`` sense.

    Attributes:
        flag_break: The flag for break statement
        flag_continue: The flag for continue statement
    """
    __slots__ = TagIf.__slots__ + ('flag_break', 'flag_continue')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.flag_continue = False # type: bool
        self.flag_break = False # type: bool

    def _render(self, local_vars, global_vars):
        # type: (dict, dict) -> str
        """Render the while loop and return the accumulated output."""
        rendered = ''

        value = render_segment(self.parsed, local_vars, global_vars)
        # Truthiness of the first evaluation: the `else` clause runs when
        # the body never executed, or when the loop ended without `break`.
        value0 = copy.copy(value)
        # Children mutate a copy of the scope; it is created lazily on the
        # first iteration and then shared by all subsequent iterations.
        local_vars_copy = None
        while value:
            local_vars_copy = local_vars_copy or local_vars.copy()
            for child in self.children:
                child_rendered, _ = child.render(local_vars_copy, global_vars)
                rendered += child_rendered
                if self.flag_break or self.flag_continue:
                    # `continue` is consumed here; `break` stays set so the
                    # enclosing while can observe it below.
                    self.flag_continue = False
                    break
            if self.flag_break:
                break
            # Re-evaluate the condition against the mutated scope.
            value = render_segment(self.parsed, local_vars_copy, global_vars)

        if not value0 or not self.flag_break: # while ... else
            rendered += self._render_next(local_vars, global_vars, True)
        return rendered
|
n-zer/HUDTweaks
|
src/main/java/com/github/burgerguy/hudtweaks/hud/element/DefaultJumpBarElement.java
|
<reponame>n-zer/HUDTweaks<filename>src/main/java/com/github/burgerguy/hudtweaks/hud/element/DefaultJumpBarElement.java
package com.github.burgerguy.hudtweaks.hud.element;
import com.github.burgerguy.hudtweaks.hud.HTIdentifier;
import com.github.burgerguy.hudtweaks.util.Util;
import net.minecraft.client.MinecraftClient;
/**
 * HUD element wrapping the vanilla jump (mount charge) bar, exposing its
 * size and default position to the HUDTweaks layout system.
 */
public class DefaultJumpBarElement extends HudElement {
	public static final HTIdentifier IDENTIFIER = new HTIdentifier(Util.MINECRAFT_MODID, new HTIdentifier.ElementId("jumpbar", "hudtweaks.element.jumpbar"));

	public DefaultJumpBarElement() {
		super(IDENTIFIER);
	}

	@Override
	protected float calculateWidth(MinecraftClient client) {
		// 182 presumably matches the vanilla bar sprite width — TODO confirm.
		return 182;
	}

	@Override
	protected float calculateHeight(MinecraftClient client) {
		return 5;
	}

	@Override
	protected float calculateDefaultX(MinecraftClient client) {
		// Horizontally centered on the scaled window.
		return client.getWindow().getScaledWidth() / 2.0f - 91;
	}

	@Override
	protected float calculateDefaultY(MinecraftClient client) {
		// Anchored near the bottom of the scaled window.
		return client.getWindow().getScaledHeight() - 29;
	}
}
|
clemensgg/comdex
|
x/vault/keeper/msg_server.go
|
<filename>x/vault/keeper/msg_server.go
package keeper
import (
"context"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/comdex-official/comdex/x/vault/types"
)
var (
	// Compile-time check that msgServer satisfies the service interface.
	_ types.MsgServiceServer = (*msgServer)(nil)
)

// msgServer wraps the vault Keeper to expose it as the module's
// message service implementation.
type msgServer struct {
	Keeper
}

// NewMsgServiceServer returns a MsgServiceServer backed by the given Keeper.
func NewMsgServiceServer(keeper Keeper) types.MsgServiceServer {
	return &msgServer{
		Keeper: keeper,
	}
}
// MsgCreate opens a new vault for the sender on the given asset pair: it
// locks msg.AmountIn of the pair's in-asset as collateral, mints
// msg.AmountOut of the out-asset, and transfers the minted coins to the
// sender. A sender may hold at most one vault per pair, and the position
// must satisfy the pair's liquidation ratio.
func (k *msgServer) MsgCreate(c context.Context, msg *types.MsgCreateRequest) (*types.MsgCreateResponse, error) {
	ctx := sdk.UnwrapSDKContext(c)

	from, err := sdk.AccAddressFromBech32(msg.From)
	if err != nil {
		return nil, err
	}

	// Enforce one vault per (owner, pair).
	if k.HasVaultForAddressByPair(ctx, from, msg.PairID) {
		return nil, types.ErrorDuplicateVault
	}

	// Resolve the pair and both of its assets.
	pair, found := k.GetPair(ctx, msg.PairID)
	if !found {
		return nil, types.ErrorPairDoesNotExist
	}
	assetIn, found := k.GetAsset(ctx, pair.AssetIn)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}
	assetOut, found := k.GetAsset(ctx, pair.AssetOut)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}

	// Reject positions below the pair's minimum collateralization.
	if err := k.VerifyCollaterlizationRatio(ctx, msg.AmountIn, assetIn, msg.AmountOut, assetOut, pair.LiquidationRatio); err != nil {
		return nil, err
	}

	// Lock the collateral, then mint and pay out the requested debt.
	if err := k.SendCoinFromAccountToModule(ctx, from, types.ModuleName, sdk.NewCoin(assetIn.Denom, msg.AmountIn)); err != nil {
		return nil, err
	}
	if err := k.MintCoin(ctx, types.ModuleName, sdk.NewCoin(assetOut.Denom, msg.AmountOut)); err != nil {
		return nil, err
	}
	if err := k.SendCoinFromModuleToAccount(ctx, types.ModuleName, from, sdk.NewCoin(assetOut.Denom, msg.AmountOut)); err != nil {
		return nil, err
	}

	// Persist the vault under the next sequence ID and index it by owner+pair.
	var (
		id    = k.GetID(ctx)
		vault = types.Vault{
			ID:        id + 1,
			PairID:    msg.PairID,
			Owner:     msg.From,
			AmountIn:  msg.AmountIn,
			AmountOut: msg.AmountOut,
		}
	)

	k.SetID(ctx, id+1)
	k.SetVault(ctx, vault)
	k.SetVaultForAddressByPair(ctx, from, vault.PairID, vault.ID)

	return &types.MsgCreateResponse{}, nil
}
// MsgDeposit adds msg.Amount of collateral to the sender's existing vault.
// Only the vault owner may deposit; the coins move from the sender to the
// module account. No ratio check is needed because adding collateral can
// only improve the position.
func (k *msgServer) MsgDeposit(c context.Context, msg *types.MsgDepositRequest) (*types.MsgDepositResponse, error) {
	ctx := sdk.UnwrapSDKContext(c)

	from, err := sdk.AccAddressFromBech32(msg.From)
	if err != nil {
		return nil, err
	}

	vault, found := k.GetVault(ctx, msg.ID)
	if !found {
		return nil, types.ErrorVaultDoesNotExist
	}
	if msg.From != vault.Owner {
		return nil, types.ErrorUnauthorized
	}

	pair, found := k.GetPair(ctx, vault.PairID)
	if !found {
		return nil, types.ErrorPairDoesNotExist
	}
	assetIn, found := k.GetAsset(ctx, pair.AssetIn)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}

	// NOTE(review): the post-add IsPositive check suggests msg.Amount may be
	// negative here — confirm against the request's ValidateBasic.
	vault.AmountIn = vault.AmountIn.Add(msg.Amount)
	if !vault.AmountIn.IsPositive() {
		return nil, types.ErrorInvalidAmount
	}

	if err := k.SendCoinFromAccountToModule(ctx, from, types.ModuleName, sdk.NewCoin(assetIn.Denom, msg.Amount)); err != nil {
		return nil, err
	}

	k.SetVault(ctx, vault)
	return &types.MsgDepositResponse{}, nil
}
// MsgWithdraw removes msg.Amount of collateral from the sender's vault and
// returns it to the sender. The withdrawal is rejected if the remaining
// collateral would not stay positive or would drop the vault below the
// pair's liquidation ratio.
func (k *msgServer) MsgWithdraw(c context.Context, msg *types.MsgWithdrawRequest) (*types.MsgWithdrawResponse, error) {
	ctx := sdk.UnwrapSDKContext(c)

	from, err := sdk.AccAddressFromBech32(msg.From)
	if err != nil {
		return nil, err
	}

	vault, found := k.GetVault(ctx, msg.ID)
	if !found {
		return nil, types.ErrorVaultDoesNotExist
	}
	// Only the vault owner may withdraw.
	if msg.From != vault.Owner {
		return nil, types.ErrorUnauthorized
	}

	// Resolve the pair and both assets for the ratio check below.
	pair, found := k.GetPair(ctx, vault.PairID)
	if !found {
		return nil, types.ErrorPairDoesNotExist
	}
	assetIn, found := k.GetAsset(ctx, pair.AssetIn)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}
	assetOut, found := k.GetAsset(ctx, pair.AssetOut)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}

	// Note: withdrawing the entire balance is rejected (must stay positive).
	vault.AmountIn = vault.AmountIn.Sub(msg.Amount)
	if !vault.AmountIn.IsPositive() {
		return nil, types.ErrorInvalidAmount
	}

	if err := k.VerifyCollaterlizationRatio(ctx, vault.AmountIn, assetIn, vault.AmountOut, assetOut, pair.LiquidationRatio); err != nil {
		return nil, err
	}
	if err := k.SendCoinFromModuleToAccount(ctx, types.ModuleName, from, sdk.NewCoin(assetIn.Denom, msg.Amount)); err != nil {
		return nil, err
	}

	k.SetVault(ctx, vault)
	return &types.MsgWithdrawResponse{}, nil
}
// MsgDraw mints msg.Amount of additional debt (out-asset) against the
// sender's vault and sends it to the sender, provided the enlarged debt
// still satisfies the pair's liquidation ratio.
func (k *msgServer) MsgDraw(c context.Context, msg *types.MsgDrawRequest) (*types.MsgDrawResponse, error) {
	ctx := sdk.UnwrapSDKContext(c)

	from, err := sdk.AccAddressFromBech32(msg.From)
	if err != nil {
		return nil, err
	}

	vault, found := k.GetVault(ctx, msg.ID)
	if !found {
		return nil, types.ErrorVaultDoesNotExist
	}
	// Only the vault owner may draw more debt.
	if msg.From != vault.Owner {
		return nil, types.ErrorUnauthorized
	}

	pair, found := k.GetPair(ctx, vault.PairID)
	if !found {
		return nil, types.ErrorPairDoesNotExist
	}
	assetIn, found := k.GetAsset(ctx, pair.AssetIn)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}
	assetOut, found := k.GetAsset(ctx, pair.AssetOut)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}

	vault.AmountOut = vault.AmountOut.Add(msg.Amount)
	if !vault.AmountOut.IsPositive() {
		return nil, types.ErrorInvalidAmount
	}

	// The ratio is checked against the new, larger debt before minting.
	if err := k.VerifyCollaterlizationRatio(ctx, vault.AmountIn, assetIn, vault.AmountOut, assetOut, pair.LiquidationRatio); err != nil {
		return nil, err
	}
	if err := k.MintCoin(ctx, types.ModuleName, sdk.NewCoin(assetOut.Denom, msg.Amount)); err != nil {
		return nil, err
	}
	if err := k.SendCoinFromModuleToAccount(ctx, types.ModuleName, from, sdk.NewCoin(assetOut.Denom, msg.Amount)); err != nil {
		return nil, err
	}

	k.SetVault(ctx, vault)
	return &types.MsgDrawResponse{}, nil
}
// MsgRepay closes out the sender's vault in full: msg.Amount must equal the
// vault's entire outstanding debt. The debt coins are collected from the
// sender and burned, all collateral is returned, and the vault record and
// its owner+pair index are deleted. Partial repayment is not supported.
func (k *msgServer) MsgRepay(c context.Context, msg *types.MsgRepayRequest) (*types.MsgRepayResponse, error) {
	ctx := sdk.UnwrapSDKContext(c)

	from, err := sdk.AccAddressFromBech32(msg.From)
	if err != nil {
		return nil, err
	}

	vault, found := k.GetVault(ctx, msg.ID)
	if !found {
		return nil, types.ErrorVaultDoesNotExist
	}
	if msg.From != vault.Owner {
		return nil, types.ErrorUnauthorized
	}

	// Full repayment only: the amount must match the debt exactly.
	if !msg.Amount.Equal(vault.AmountOut) {
		return nil, types.ErrorInvalidAmount
	}

	pair, found := k.GetPair(ctx, vault.PairID)
	if !found {
		return nil, types.ErrorPairDoesNotExist
	}
	assetIn, found := k.GetAsset(ctx, pair.AssetIn)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}
	assetOut, found := k.GetAsset(ctx, pair.AssetOut)
	if !found {
		return nil, types.ErrorAssetDoesNotExist
	}

	// Collect and burn the debt, then release the collateral.
	if err := k.SendCoinFromAccountToModule(ctx, from, types.ModuleName, sdk.NewCoin(assetOut.Denom, vault.AmountOut)); err != nil {
		return nil, err
	}
	if err := k.BurnCoin(ctx, types.ModuleName, sdk.NewCoin(assetOut.Denom, vault.AmountOut)); err != nil {
		return nil, err
	}
	if err := k.SendCoinFromModuleToAccount(ctx, types.ModuleName, from, sdk.NewCoin(assetIn.Denom, vault.AmountIn)); err != nil {
		return nil, err
	}

	k.DeleteVault(ctx, vault.ID)
	k.DeleteVaultForAddressByPair(ctx, from, vault.PairID)

	return &types.MsgRepayResponse{}, nil
}
// MsgClose is not implemented yet; calling it panics.
// NOTE(review): consider returning an "unimplemented" error instead of
// panicking so a stray client call cannot crash the node.
func (k *msgServer) MsgClose(c context.Context, msg *types.MsgCloseRequest) (*types.MsgCloseResponse, error) {
	panic("implement me")
}
|
Nickel-Angel/ACM-and-OI
|
AtCoder/ABC215A.cpp
|
/*
* @author Nickel_Angel (<EMAIL>)
* @copyright Copyright (c) 2022
*/
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <vector>
using std::min;
using std::max;
using std::sort;
using std::swap;
using std::vector;
using std::pair;
typedef long long ll;
// Input buffer; the "%19s" width below caps the read at 19 chars + NUL,
// so the 20-byte buffer can never overflow (the original unbounded "%s"
// could). Expected answer string for comparison.
char s[20], t[] = "Hello,World!";

int main()
{
    // Width-limited read: at most 19 characters are stored.
    scanf("%19s", s);
    // Accept only the exact string "Hello,World!" (strcmp checks both
    // length and content, replacing the manual length + per-char loop).
    if (strcmp(s, t) == 0)
        puts("AC");
    else
        puts("WA");
    return 0;
}
|
XXL6/resticweb
|
resticweb/blueprints/repositories/forms.py
|
<reponame>XXL6/resticweb
from flask_wtf import FlaskForm
from wtforms import BooleanField, HiddenField, IntegerField, PasswordField, \
    SelectField, StringField, SubmitField, TextAreaField, ValidationError
from wtforms.validators import DataRequired

from resticweb.dictionary.resticweb_constants import RepositoryTypeBindings
from resticweb.models.general import Repository, RepositoryType
from resticweb.tools.local_session import LocalSession
class AddRepositoryTypeForm(FlaskForm):
    """Form for registering a new repository type."""

    name = StringField('Name', validators=[DataRequired()])
    type = StringField('Type', validators=[DataRequired()])
    description = TextAreaField('Description')
    internal_binding = SelectField('Internal Binding')
    submit = SubmitField('Submit')

    def __init__(self):
        super().__init__()
        # Populate the select choices from the known internal bindings.
        bindings = []
        for item in RepositoryTypeBindings.binding_list:
            bindings.append((item, item))
        self.internal_binding.choices = bindings

    def validate_name(self, name):
        # WTForms inline validator, called automatically for `name`:
        # reject names already present in the database.
        with LocalSession() as session:
            repository_type = session.query(RepositoryType).filter_by(name=name.data).first()
            if repository_type:
                raise ValidationError(f"Repository type with name {name.data} already exists. Please pick a different name.")
class RWCredentialField(StringField):
    # Marker subclass: fields of this type carry credentials so other code
    # can recognize and treat them specially. Behavior is inherited
    # unchanged from StringField.
    # NOTE(review): no special handling is visible in this file — confirm
    # where RWCredentialField is detected.
    pass
class EditRepositoryTypeForm(FlaskForm):
    """Form for editing an existing repository type."""

    # Carries the id of the record being edited so validation can
    # distinguish "same record" from "name collision".
    repository_type_id = HiddenField('Id')
    name = StringField('Name', validators=[DataRequired()])
    type = StringField('Type', validators=[DataRequired()])
    description = TextAreaField('Description')
    internal_binding = SelectField('Internal Binding')
    submit = SubmitField('Submit')

    def __init__(self):
        super().__init__()
        # Populate the select choices from the known internal bindings.
        bindings = []
        for item in RepositoryTypeBindings.binding_list:
            bindings.append((item, item))
        self.internal_binding.choices = bindings

    def validate_name(self, name):
        # Allow the record to keep its own name; reject only collisions
        # with a *different* repository type.
        with LocalSession() as session:
            repository_type = session.query(RepositoryType).filter_by(name=name.data).first()
            if repository_type and repository_type.id != int(self.repository_type_id.data):
                raise ValidationError(f"Repository type with name {name.data} already exists. Please pick a different name.")
class AddRepositoryFormBase(FlaskForm):
    """Shared fields for all "add repository" forms.

    Type-specific forms (local, amazons3, rclone) subclass this and add
    their own connection fields.
    """

    name = StringField("Name", validators=[DataRequired()])
    # Fix: the field type here was corrupted (invalid syntax); repository
    # passwords use a masked password input.
    repo_password = PasswordField("Repo Password", validators=[DataRequired()])
    description = TextAreaField("Description")
    cache_repo = BooleanField("Cache repository objects")
    concurrent_uses = IntegerField("Concurrent job uses", default=2)
    timeout = IntegerField("Timeout (minutes)", default=60)
    submit = SubmitField("Submit")

    def validate_name(self, name):
        # Reject duplicate repository names (WTForms inline validator).
        with LocalSession() as session:
            repository = session.query(Repository).filter_by(name=name.data).first()
            if repository:
                raise ValidationError(f"Repository with name {name.data} already exists. Please pick a different name.")
class EditRepositoryFormBase(FlaskForm):
    """Shared fields for all "edit repository" forms."""

    # Id of the record being edited, used by validate_name below.
    repository_id = HiddenField('Id')
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description")
    cache_repo = BooleanField("Cache repository objects")
    concurrent_uses = IntegerField("Concurrent job uses")
    timeout = IntegerField("Timeout (minutes)")
    submit = SubmitField("Submit")

    def validate_name(self, name):
        # Allow the record to keep its own name; reject only collisions
        # with a *different* repository.
        with LocalSession() as session:
            repository = session.query(Repository).filter_by(name=name.data).first()
            if repository and repository.id != int(self.repository_id.data):
                raise ValidationError(f"Repository with name {name.data} already exists. Please pick a different name.")
# 1 == local: repository stored on the local filesystem.
class AddRepositoryForm1(AddRepositoryFormBase):
    # Filesystem path of the repository.
    address = StringField("Address", validators=[DataRequired()])


class EditRepositoryForm1(EditRepositoryFormBase):
    address = StringField("Address", validators=[DataRequired()])

    def set_current_data(self, current_data):
        # Pre-fill the form from the stored repository settings.
        self.address.data = current_data['address']
# 2 == amazons3: repository stored in an Amazon S3 bucket.
class AddRepositoryForm2(AddRepositoryFormBase):
    bucket_name = StringField("Bucket Name", validators=[DataRequired()])
    # AWS credentials use the credential marker field type.
    AWS_ACCESS_KEY_ID = RWCredentialField("AWS Access Key Id", validators=[DataRequired()])
    AWS_SECRET_ACCESS_KEY = RWCredentialField("AWS Secret Access Key", validators=[DataRequired()])


class EditRepositoryForm2(EditRepositoryFormBase):
    # Credentials are intentionally absent from the edit form.
    bucket_name = StringField("Bucket Name", validators=[DataRequired()])

    def set_current_data(self, current_data):
        # Pre-fill the form from the stored repository settings.
        self.bucket_name.data = current_data['bucket_name']
# 3 == rclone: repository accessed through an rclone remote.
class AddRepositoryForm3(AddRepositoryFormBase):
    rclone_address = StringField("Rclone Address", validators=[DataRequired()])


class EditRepositoryForm3(EditRepositoryFormBase):
    rclone_address = StringField("Rclone Address", validators=[DataRequired()])

    def set_current_data(self, current_data):
        # Pre-fill the form from the stored repository settings.
        self.rclone_address.data = current_data['rclone_address']
def get_add_repository_form(repository_type):
    """Return a fresh "add repository" form for the given type name.

    Args:
        repository_type: one of 'local', 'amazons3' or 'rclone'.

    Raises:
        ValueError: if the type is not supported. (ValueError subclasses
            Exception, so existing ``except Exception`` callers still work;
            the message now includes the offending value.)
    """
    form_classes = {
        'local': AddRepositoryForm1,
        'amazons3': AddRepositoryForm2,
        'rclone': AddRepositoryForm3,
    }
    try:
        return form_classes[repository_type]()
    except KeyError:
        raise ValueError(f"Unsupported repository type: {repository_type!r}")
def get_edit_repository_form(repository_type):
    """Return a fresh "edit repository" form for the given type name.

    Args:
        repository_type: one of 'local', 'amazons3' or 'rclone'.

    Raises:
        ValueError: if the type is not supported. (ValueError subclasses
            Exception, so existing ``except Exception`` callers still work;
            the message now includes the offending value.)
    """
    form_classes = {
        'local': EditRepositoryForm1,
        'amazons3': EditRepositoryForm2,
        'rclone': EditRepositoryForm3,
    }
    try:
        return form_classes[repository_type]()
    except KeyError:
        raise ValueError(f"Unsupported repository type: {repository_type!r}")
|
AllRoundeer/Keil
|
C51/Examples/ST uPSD/upsd3300/DK3300-ELCD/EEPROM_emul/eeprom_emul_demo.c
|
/*------------------------------------------------------------------------------
eeprom_emul_DEMO.c
Version:
9/08/2004 - Ver 1.0 - Initial release.
Description:
Simple EEPROM Emulation Demo code for uPSD33xx. This demo loops forever
writing an incrementing value to the emulated EEPROM contents. Upon reset, the
flash is checked to see if it was previously set up for emulating an EEPROM. If
it was not, the flash is initialized and formated for EEPROM emulation. If
it was, the current value is read from the record in the emulated EEPROM and is
displayed on the LCD. This value plus 1 becomes the starting value that is
incrementally written to the emulated EEPROM.
Hardware Platform:
DK3300-ELCD
Note:
The first time this demo code is executed, it will initialize the flash for
EEPROM emulation. While the demo is running, take note of the value that was
just written to the emulated EEPROM. Press the reset switch or cycle the power
and then observe that the demo detects that the flash was previously initialized
for EEPROM emulation. It will then retrieve and display the value that was last
written to the EEPROM.
********************************************************************************
Important Notes:
(1) EEPROM record size is set in eeprom.h.
(2) This demo requires XDATA to be initialized to 0 for the EEPROM Emulation
driver to work properly. This is currently handled in the startup file.
(3) This demo doesn't handle error processing. In most cases if an error
occurs, the demo stops.
********************************************************************************
Copyright (c) 2005 STMicroelectronics
This example demo code is provided as is and has no warranty,
implied or otherwise. You are free to use/modify any of the provided
code at your own risk in your applications with the expressed limitation
of liability (see below) so long as your product using the code contains
at least one uPSD products (device).
LIMITATION OF LIABILITY: NEITHER STMicroelectronics NOR ITS VENDORS OR
AGENTS SHALL BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, LOSS OF DATA,
INTERRUPTION OF BUSINESS, NOR FOR INDIRECT, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES OF ANY KIND WHETHER UNDER THIS AGREEMENT OR
OTHERWISE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
------------------------------------------------------------------------------*/
#include "upsd3300_hardware.h" // environment hardware specific defines
#include "upsd3300.h" // special function register declarations for UPSD
#include "upsd3300_lcd.h" // prototype declarations and defines for uPSD IP drivers
#include "upsd3300_timer.h"
#include "eeprom.h"
xdata PSD_REGS PSD_reg _at_ PSD_REG_ADDR;   // Define PSD registers at address "csiop" space

void main (void)
{
   unsigned char EEPROM_write_value;        // Counter - value to write to record
   unsigned char status;
   unsigned char flash_previously_init;     // Flag to indicate if the flash was
                                            // previously (before reset) initialized
                                            // for EEPROM emulation.

   BYTE xdata buf[1];                       // Buffer that holds data to write
   BYTE xdata tmpbuf[1];                    // Buffer which holds data read back from the flash

   PSD_reg.VM |= 0x80;                      // enable peripheral I/O mode for LCD display

   timer0_init();                           // initialize timer0 interrupt
   lcd_init();                              // initialize LCD. 8 bits, 2 lines, 5x7 font,
                                            // no blink, cursor off, clear

   printfLCD("EEPROM Emulation\n");         // display on LCD
   printfLCD("demo on uPSD3300\n");
   delay_2sec();

   flash_previously_init = TRUE;            // Flag set to previously initialized.

   status = Eeprom_Init();                  // Determines if the flash was previously
                                            // initialized for EEPROM emulation. Also does
                                            // some recovery if contents corrupted due to
                                            // power failure.

   if (status != 0)                         // Flash was not previously initialized.
   {
      lcd_clear();
      printfLCD("Flash not init\n");
      printfLCD("for EEPROM Emul.\n");
      delay_2sec();

      lcd_clear();
      printfLCD("Formatting flash\n");
      printfLCD("for EEPROM Emul.\n");
      delay_2sec();

      flash_previously_init = FALSE;        // Flag to indicate the flash was not
                                            // previously initialized for EEPROM
                                            // emulation.

      status = EEPROM_Format(0x0001);       // Initializes flash for EEPROM
                                            // emulation. Format the flash
                                            // for one record.

      switch (status)                       // Check for format errors and process.
      {                                     // (each case halts, so no break needed)
         case ILLEGAL_RECORD_NUMBER:
            lcd_clear();
            printfLCD("Illegal records\n");
            printfLCD("reduce number\n");
            delay_2sec();
            while(1);                       // Process the error.

         case FORMAT_FAILED:
            lcd_clear();
            printfLCD("Format Failed\n");
            printfLCD("Reset\n");
            delay_1sec();
            while(1);                       // Process the error.

         case SECTOR_ERASE_ERROR:
            lcd_clear();
            printfLCD("Sector Erase\n");
            printfLCD("Error-Reset\n");
            delay_1sec();
            while(1);                       // Process the error.
      }
   }

   if (flash_previously_init == TRUE)       // Flash was init for EEPROM Emul prior
                                            // to reset.
   {
      lcd_clear();                          // Indicate on display it was init.
      printfLCD("Previously init\n");
      printfLCD("for EEPROM Emul.\n");
      delay_2sec();

      lcd_clear();
      printfLCD("Reading Rec# 0\n");        // Indicate that Rec# 0 will be read
      printfLCD("contents...\n");           // and displayed.
      delay_2sec();

      status = Read_Record(0,&tmpbuf[0]);   // Read the previously stored record.
      if (status)                           // Check for read errors.
      {
         lcd_clear();
         printfLCD("RD Error =%x\n",status);
         while(1);                          // Process the error.
      }

      lcd_clear();
      printfLCD("Rec# 0 contains\n");       // Display contents of record.
      printfLCD("the value: 0x%x\n",tmpbuf[0]);
      delay_2sec();
      delay_2sec();

      EEPROM_write_value = ++tmpbuf[0];     // Set next value to write to EEPROM
   }
   else
   {
      EEPROM_write_value = 0;               // EEPROM was not previously initialized so
                                            // use this value as the starting value to
                                            // write to EEPROM.
   }

   lcd_clear();                             // Describe operation of demo with
   printfLCD("Now going to \n");            // messages on LCD display.
   printfLCD("update Rec# 0 w/\n");
   delay_2sec();
   lcd_clear();
   printfLCD("incrementing val\n");
   printfLCD("every 2 seconds.\n");
   delay_2sec();

   while (1)
   {
      buf[0] = EEPROM_write_value++;

      // FIX: pass a BYTE* (&buf[0]) rather than &buf, whose type is
      // BYTE (*)[1]. The address is the same, but the mismatched pointer
      // type drew a compiler warning and was inconsistent with the
      // Read_Record calls elsewhere in this function.
      status = Update_Record(0, &buf[0]);   // Write record with the content in buf[0]
      if (status)
      {
         lcd_clear();
         printfLCD("WR Error =%x\n",status);
         printfLCD("writing: 0x%x\n",buf[0]);
         while(1);                          // Process the error.
      }

      status = Read_Record(0,&tmpbuf[0]);   // read the record
      if (status)
      {
         lcd_clear();
         printfLCD("RD Error =%x\n",status);
         while(1);                          // Process the error.
      }

      if (tmpbuf[0] == buf[0])              // Verify the record was written
                                            // correctly.
      {
         lcd_clear();                       // Display the written value.
         printfLCD("Wrote Rec# 0\n");
         printfLCD("with value: 0x%x\n",tmpbuf[0]);
         delay_2sec();
      }
      else
      {
         lcd_clear();
         printfLCD("Read & Compare.\n");
         printfLCD("Miscompared!\n");
         while(1);                          // Process the error.
      }
   }
}
|
RunsFor/cartridge-cli
|
cli/commands/status.go
|
package commands
import (
"fmt"
"github.com/apex/log"
"github.com/spf13/cobra"
"github.com/tarantool/cartridge-cli/cli/running"
)
// init registers the `status` subcommand and its flags on the root command.
func init() {
	var statusCmd = &cobra.Command{
		Use:   "status [INSTANCE_NAME...]",
		Short: "Get instance(s) status",
		Long:  fmt.Sprintf("Get instance(s) status\n\n%s", runningCommonUsage),
		Run: func(cmd *cobra.Command, args []string) {
			err := runStatusCmd(cmd, args)
			if err != nil {
				// Fix: use an explicit verb instead of passing err.Error()
				// as the format string, where any '%' in the message would
				// be misinterpreted (go vet printf check).
				log.Fatalf("%s", err)
			}
		},
		ValidArgsFunction: ShellCompRunningInstances,
	}

	rootCmd.AddCommand(statusCmd)

	// FLAGS
	configureFlags(statusCmd)
	// application name flag
	addNameFlag(statusCmd)
	// stateboard flags
	addStateboardRunningFlags(statusCmd)
	// common running paths
	addCommonRunningPathsFlags(statusCmd)
}
// runStatusCmd resolves the requested instances from args and reports
// their status.
func runStatusCmd(cmd *cobra.Command, args []string) error {
	setStateboardFlagIsSet(cmd)

	if err := running.FillCtx(&ctx, args); err != nil {
		return err
	}
	return running.Status(&ctx)
}
|
porames25/xray-oxygen
|
code/engine.vc2008/xrRender/xrRender/xrD3DDefs.h
|
#pragma once

#ifdef USE_DX11
// DX10/DX11 builds pull the real type definitions from the shared header.
# include "..\xrRenderDX10\DXCommonTypes.h"
#else
// DX9 build: map the API-neutral ID3D* / D3D_* names used by shared
// renderer code onto their Direct3D 9 counterparts.
typedef IDirect3DVertexShader9 ID3DVertexShader;
typedef IDirect3DPixelShader9 ID3DPixelShader;
typedef ID3DXBuffer ID3DBlob;
typedef D3DXMACRO D3D_SHADER_MACRO;
typedef IDirect3DQuery9 ID3DQuery;
typedef D3DVIEWPORT9 D3D_VIEWPORT;
typedef ID3DXInclude ID3DInclude;
typedef IDirect3DTexture9 ID3DTexture2D;
// D3D9 has no separate view objects; plain surfaces stand in for both
// render-target and depth-stencil views.
typedef IDirect3DSurface9 ID3DRenderTargetView;
typedef IDirect3DSurface9 ID3DDepthStencilView;
typedef IDirect3DBaseTexture9 ID3DBaseTexture;
typedef D3DSURFACE_DESC D3D_TEXTURE2D_DESC;
typedef IDirect3DVertexBuffer9 ID3DVertexBuffer;
typedef IDirect3DIndexBuffer9 ID3DIndexBuffer;
typedef IDirect3DVolumeTexture9 ID3DTexture3D;
typedef IDirect3DStateBlock9 ID3DState;
// No-op placeholder so DX10-only statements compile away cleanly on DX9.
#define DX10_ONLY(expr) do {} while (0)
#endif
|
xillio/xill-platform
|
plugin-document/src/main/java/nl/xillio/xill/plugins/mongodb/services/serializers/MongoRegexSerializer.java
|
<reponame>xillio/xill-platform<gh_stars>1-10
/**
* Copyright (C) 2014 Xillio (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.xillio.xill.plugins.mongodb.services.serializers;
import nl.xillio.xill.api.components.MetaExpression;
import nl.xillio.xill.api.components.MetaExpressionDeserializer;
import nl.xillio.xill.api.components.MetaExpressionSerializer;
import nl.xillio.xill.plugins.mongodb.data.MongoRegex;
import java.util.regex.Pattern;
import static nl.xillio.xill.api.components.ExpressionBuilderHelper.fromValue;
/**
* Provides a deserializer for a MongoRegex.
*/
/**
 * Converts Mongo regular expressions to and from MetaExpressions.
 */
public class MongoRegexSerializer implements MetaExpressionSerializer, MetaExpressionDeserializer {
    @Override
    public MetaExpression parseObject(Object object) {
        // Only java.util.regex.Pattern values represent Mongo regexes.
        if (!(object instanceof Pattern)) {
            return null;
        }
        String pattern = object.toString();
        MetaExpression expression = fromValue(pattern);
        expression.storeMeta(new MongoRegex(pattern));
        return expression;
    }

    @Override
    public Object extractValue(MetaExpression metaExpression) {
        MongoRegex regex = metaExpression.getMeta(MongoRegex.class);
        // Expressions without MongoRegex metadata are not regexes.
        return regex == null ? null : regex.getPattern();
    }
}
|
MacHu-GWU/Dev-Exp-Share
|
docs/source/01-AWS/07-Analytics/07-ElasticSearch-Service/python-api/s3_search_by_filter.py
|
<reponame>MacHu-GWU/Dev-Exp-Share
# -*- coding: utf-8 -*-
"""
**中文文档**
- ES 数据类型文档: https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html
"""
import connect
from datetime import datetime
from elasticsearch_dsl import (
Index, Document, Integer, Float, Date, Text,
)
from superjson import json
def jprint(data):
    """Pretty-print `data` as JSON: 4-space indent, keys sorted."""
    rendered = json.dumps(data, indent=4, sort_keys=True)
    print(rendered)
class Laptop(Document):
    """elasticsearch-dsl document type describing a laptop product."""

    price = Integer()
    weight = Float()
    release_date = Date()
    # Text fields are analyzed for full-text search.
    product_name = Text()
    product_model = Text()

    class Index:
        # Name of the Elasticsearch index backing this document type.
        name = "laptop"
def document_crud():
    """Demo: create the mapping, then run a filtered search on `laptop`.

    The bulk-save section is commented out, so the search assumes the
    three documents (ids 1-3) were indexed on a previous run.
    """
    # create the mappings in Elasticsearch
    Laptop.init()
    # instantiate the document
    laptops = [
        Laptop(
            price=1199,
            weight=2.75,
            release_date=datetime(2017, 1, 1),
            product_name="MacBook",
            product_model="2017",
            meta={"id": 1},
        ),
        Laptop(
            price=999,
            weight=2.1,
            release_date=datetime(2017, 1, 1),
            product_name="MacBookAir",
            product_model="2017",
            meta={"id": 2},
        ),
        Laptop(
            price=2399,
            weight=5.4,
            release_date=datetime(2017, 1, 1),
            product_name="MacBookPro",
            product_model="2017",
            meta={"id": 3},
        ),
    ]
    # for laptop in laptops:
    #     laptop.save()

    # retrieve the document
    # Both range filters must match (AND); of the sample data only the
    # MacBookPro (id 3) has price > 1100 and weight > 3.
    res = Laptop.search() \
        .filter("range", price={"gt": 1100}) \
        .filter("range", weight={"gt": 3}) \
        .execute()
    assert len(list(res)) == 1
    assert list(res)[0].meta.id == str(3)


document_crud()
def index_crud():
    """Demo: delete the `laptop` index, ignoring 404 when it is absent."""
    blogs = Index("laptop")
    # delete the index, ignore if it doesn't exist
    blogs.delete(ignore=404)

# index_crud()
|
a8uhnf/pack
|
vendor/github.com/kubepack/onessl/cmds/get_kube_ca.go
|
<filename>vendor/github.com/kubepack/onessl/cmds/get_kube_ca.go
package cmds
import (
"fmt"
"io/ioutil"
"os"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"k8s.io/client-go/tools/clientcmd"
)
// kubectl config view --minify=true --flatten -o json | onessl jsonpath '{.clusters[0].cluster.certificate-authority-data}'
// NewCmdGetKubeCA builds the `kube-ca` command, which prints the cluster CA
// certificate from the active kubeconfig (inline CA data takes precedence
// over a CA file path) and then exits the process.
func NewCmdGetKubeCA(clientConfig clientcmd.ClientConfig) *cobra.Command {
	return &cobra.Command{
		Use:               "kube-ca",
		Short:             "Prints CA certificate for Kubernetes cluster from Kubeconfig",
		DisableAutoGenTag: true,
		Run: func(cmd *cobra.Command, args []string) {
			cfg, err := clientConfig.ClientConfig()
			if err != nil {
				Fatal(errors.Wrap(err, "failed to read kubeconfig"))
			}
			switch {
			case len(cfg.CAData) > 0:
				// CA bundle embedded directly in the kubeconfig.
				fmt.Println(string(cfg.CAData))
			case len(cfg.CAFile) > 0:
				// CA referenced by path; load it from disk.
				data, err := ioutil.ReadFile(cfg.CAFile)
				if err != nil {
					Fatal(errors.Wrapf(err, "failed to load ca file %s", cfg.CAFile))
				}
				fmt.Println(string(data))
			}
			os.Exit(0)
		},
	}
}
|
wkoszek/book-programming-ruby
|
src/ex0227.rb
|
# Sample code from Programing Ruby, page 92
#
# Demonstrates a statement modifier on a multi-line `if`: the trailing
# `unless` guards the WHOLE if/end statement, so the nickname substitution
# runs only when use_nicknames is not "no".
artist = "<NAME>"
use_nicknames = "yes"

if artist == "<NAME>"
  artist = "'Trane"
end unless use_nicknames == "no"
|
LeoWshington/Exercicios_CursoEmVideo_Python
|
ex056.py
|
<reponame>LeoWshington/Exercicios_CursoEmVideo_Python
# Lê os dados de 4 pessoas e mostra: a média de idade do grupo, o nome do
# homem mais velho e quantas mulheres têm menos de 20 anos.
sidade = 0   # soma das idades (para a média)
hidade = 0   # idade do homem mais velho visto até agora
conth = 0    # quantidade de homens lidos
contm = 0    # quantidade de mulheres com menos de 20 anos
nomeh = ''   # nome do homem mais velho

for c in range(1, 5):
    # Fix: '\n' added so the header and the prompt don't run together on
    # one line (the two f-strings were concatenated without a separator).
    nome = str(input(f'=-=-=-=-=- DADOS {c}ª PESSOA -=-=-=-=-=\n'
                     f'Digite o nome da pessoa: ')).lower().strip()
    idade = int(input('Digite a idade da pessoa: '))
    sexo = str(input('Digite o sexo da pessoa: ')).lower().strip()
    if sexo == "m":
        conth += 1
        if conth == 1:
            # primeiro homem lido vira o mais velho por padrão
            hidade = idade
            nomeh = nome
        elif hidade < idade:
            hidade = idade
            nomeh = nome
    if sexo == "f" and idade < 20:
        contm += 1
    sidade += idade

print(f'A média de idade do grupo é {(sidade / 4) :.0f}.\n'
      f'O homem mais velho do grupo é {nomeh} com {hidade} anos.\n'
      f'O numero de mulheres com menos de 20 anos é {contm}.')
|
jeffrey-io/farmcron
|
src/main/java/farm/bsg/wake/sources/Source.java
|
/*
* Copyright 2014 <NAME>; see LICENSE for more details
*/
package farm.bsg.wake.sources;
import java.util.Set;
import java.util.function.BiConsumer;
/**
* A source is basically a very special key value pair map where we lazily enable redefinition of the map.
* <p>
* The idea is that you can ask a source for any key, and it will return the value if it exists (null otherwise).
* <p>
* The kicker (and the real novel idea here) is that the the source has no standard domain/keySet. Instead, you ask for the domain and it will tell you what keys it provides. The nice thing about this is a wrapping layer may intercept that key and use it.
*/
/**
 * A source is basically a very special key value pair map where we lazily enable redefinition of the map.
 * <p>
 * The idea is that you can ask a source for any key, and it will return the value if it exists (null otherwise).
 * <p>
 * The kicker (and the real novel idea here) is that the the source has no standard domain/keySet. Instead, you ask for the domain and it will tell you what keys it provides. The nice thing about this is a wrapping layer may intercept that key and use it.
 */
public abstract class Source implements Comparable<Source> {

    public static enum SourceType {
        Snippet, Template, Page
    }

    /** Orders sources by their declared {@code order} key, ascending. */
    @Override
    public int compareTo(final Source o) {
        return Long.compare(this.order(), o.order());
    }

    /**
     * get the value from the source
     *
     * @param key
     *            the key (found in the set that is populated via populateDomain; "body" is a fairly common key)
     * @return the value, or null when the source does not provide the key
     */
    public abstract String get(String key);

    /**
     * Classifies the source from its metadata: an explicit "snippet" type
     * wins, then the presence of a template name, otherwise a page.
     */
    public SourceType getType() {
        if ("snippet".equalsIgnoreCase(get("type"))) {
            return SourceType.Snippet;
        }
        if (null != get("template-name")) {
            return SourceType.Template;
        }
        return SourceType.Page;
    }

    /**
     * Get the order from the object; sources without an {@code order} key
     * sort last.
     *
     * @return the declared order, or {@link Integer#MAX_VALUE} when absent
     */
    public long order() {
        final String ord = get("order");
        if (ord == null) {
            return Integer.MAX_VALUE;
        }
        // Parse as long to match the return type and the Long.compare in
        // compareTo; Integer.parseInt needlessly rejected values outside
        // the int range. Behavior is unchanged for previously valid input.
        return Long.parseLong(ord);
    }

    /**
     * Populate the given set with all the possible strings that can be acquired from the get() method
     *
     * @param domain
     *            the set to fill with this source's keys
     */
    public abstract void populateDomain(Set<String> domain);

    /**
     * Test whether or not the key has a boolean value like yes or true.
     * Accepts "1" and any value starting with 't' or 'y' (case-insensitive).
     */
    public boolean testBoolean(final String key) {
        String value = get(key);
        if (value == null) {
            return false;
        }
        value = value.toLowerCase();
        if (value.length() == 0) {
            return false;
        }
        if ("1".equals(value)) {
            return true;
        }
        return value.startsWith("t") || value.startsWith("y");
    }

    /**
     * A source may define things too complicated to return as a string, so instead, a source may provide things in the form of an Object that the top level can decide what to do with
     *
     * @param injectComplex
     *            receives each (key, complex value) pair
     */
    public abstract void walkComplex(BiConsumer<String, Object> injectComplex);
}
|
reels-research/iOS-Private-Frameworks
|
Celestial.framework/BWInferenceVideoRequirement.h
|
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/Celestial.framework/Celestial
*/
// Media requirement specialized for video: pairs an attached-media key with
// the BWInferenceVideoFormat the inference expects.
@interface BWInferenceVideoRequirement : BWInferenceMediaRequirement <NSCopying> {
    BWInferenceVideoFormat * _videoFormat;   // backing ivar for `videoFormat`
}

// The required video format (readonly).
@property (nonatomic, readonly) BWInferenceVideoFormat *videoFormat;

- (id)copyWithZone:(struct _NSZone { }*)arg1;   // NSCopying conformance
- (void)dealloc;
- (id)description;
- (id)init;
- (id)initWithAttachedMediaKey:(id)arg1;
- (id)initWithAttachedMediaKey:(id)arg1 videoFormat:(id)arg2;
- (id)initWithVideoRequirement:(id)arg1;
// Requirement-compatibility check against another requirement.
- (bool)isSatisfiedByRequirement:(id)arg1;
- (unsigned long long)satisfactionHash;
- (id)videoFormat;

@end
|
michaelbeaumont/goformation
|
cloudformation/aws-s3-bucket_accesscontroltranslation.go
|
package cloudformation
import (
"encoding/json"
)
// AWSS3Bucket_AccessControlTranslation AWS CloudFormation Resource (AWS::S3::Bucket.AccessControlTranslation)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-accesscontroltranslation.html
type AWSS3Bucket_AccessControlTranslation struct {

	// Owner AWS CloudFormation Property
	// Required: true
	// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-accesscontroltranslation.html#cfn-s3-bucket-accesscontroltranslation-owner
	Owner *Value `json:"Owner,omitempty"`
}

// AWSCloudFormationType returns the AWS CloudFormation resource type
func (r *AWSS3Bucket_AccessControlTranslation) AWSCloudFormationType() string {
	return "AWS::S3::Bucket.AccessControlTranslation"
}

// MarshalJSON serializes the resource. Marshaling the dereferenced value is
// safe from recursion because MarshalJSON is declared on the pointer
// receiver only, so the value copy does not satisfy json.Marshaler.
func (r *AWSS3Bucket_AccessControlTranslation) MarshalJSON() ([]byte, error) {
	return json.Marshal(*r)
}
|
DanielFran/jhipster-bot
|
lib/export/app_description_file_writer.js
|
'use strict';
const fs = require('fs'),
exec = require('child_process').execSync;
module.exports = {
write: write,
remove: remove
};
/**
* args: {
* directory: string,
* applicationDescription: Object
* }
*/
function write(args) {
var fileName = '.yo-rc.json';
if(args.directory && !fs.existsSync(args.directory)){
fs.mkdirSync(args.directory);
fileName = args.directory + '/' + fileName;
}
fs.writeFileSync(fileName, JSON.stringify(args.applicationDescription), null, ' ');
}
/**
* args: {
* directory: string
* }
*/
function remove(args) {
exec('rm -rf ' + args.directory);
}
|
twmicro/BedrockLanguage
|
build/tmp/expandedArchives/forge-1.16.2-33.0.5_mapped_snapshot_20200514-1.16-sources.jar_9d07f867eb9352dbb54585a794282e64/net/minecraft/world/gen/feature/LakesFeature.java
|
<gh_stars>0
package net.minecraft.world.gen.feature;
import com.mojang.serialization.Codec;
import java.util.Random;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.block.material.Material;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.SectionPos;
import net.minecraft.world.ISeedReader;
import net.minecraft.world.LightType;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.gen.ChunkGenerator;
import net.minecraft.world.gen.feature.structure.Structure;
/**
 * World-generation feature that carves a small lake of the configured fluid
 * into the terrain. Decompiled/mapped Mojang code: the obfuscated names
 * (p_241855_*, func_*) come from the official mappings, so the logic below is
 * kept byte-for-byte and only annotated.
 */
public class LakesFeature extends Feature<BlockStateFeatureConfig> {
   private static final BlockState AIR = Blocks.CAVE_AIR.getDefaultState();

   public LakesFeature(Codec<BlockStateFeatureConfig> p_i231968_1_) {
      super(p_i231968_1_);
   }

   /**
    * Attempts to place the lake.
    *
    * @param p_241855_1_ world being generated into
    * @param p_241855_2_ chunk generator (unused here)
    * @param p_241855_3_ feature RNG
    * @param p_241855_4_ candidate origin position
    * @param p_241855_5_ config carrying the lake fluid block state
    * @return true when a lake was placed
    */
   public boolean func_241855_a(ISeedReader p_241855_1_, ChunkGenerator p_241855_2_, Random p_241855_3_, BlockPos p_241855_4_, BlockStateFeatureConfig p_241855_5_) {
      // Drop the origin down through air to the first solid surface.
      while(p_241855_4_.getY() > 5 && p_241855_1_.isAirBlock(p_241855_4_)) {
         p_241855_4_ = p_241855_4_.down();
      }

      if (p_241855_4_.getY() <= 4) {
         return false;
      } else {
         p_241855_4_ = p_241855_4_.down(4);
         // Skip generation inside the structure referenced by
         // Structure.field_236381_q_ (presumably villages — confirm
         // against the mappings).
         if (p_241855_1_.func_241827_a(SectionPos.from(p_241855_4_), Structure.field_236381_q_).findAny().isPresent()) {
            return false;
         } else {
            // 16x16x8 occupancy mask of the lake volume, indexed (x*16 + z)*8 + y.
            boolean[] aboolean = new boolean[2048];
            int i = p_241855_3_.nextInt(4) + 4;
            // The lake shape is the union of 4-7 random ellipsoids.
            for(int j = 0; j < i; ++j) {
               double d0 = p_241855_3_.nextDouble() * 6.0D + 3.0D;
               double d1 = p_241855_3_.nextDouble() * 4.0D + 2.0D;
               double d2 = p_241855_3_.nextDouble() * 6.0D + 3.0D;
               double d3 = p_241855_3_.nextDouble() * (16.0D - d0 - 2.0D) + 1.0D + d0 / 2.0D;
               double d4 = p_241855_3_.nextDouble() * (8.0D - d1 - 4.0D) + 2.0D + d1 / 2.0D;
               double d5 = p_241855_3_.nextDouble() * (16.0D - d2 - 2.0D) + 1.0D + d2 / 2.0D;

               for(int l = 1; l < 15; ++l) {
                  for(int i1 = 1; i1 < 15; ++i1) {
                     for(int j1 = 1; j1 < 7; ++j1) {
                        double d6 = ((double)l - d3) / (d0 / 2.0D);
                        double d7 = ((double)j1 - d4) / (d1 / 2.0D);
                        double d8 = ((double)i1 - d5) / (d2 / 2.0D);
                        double d9 = d6 * d6 + d7 * d7 + d8 * d8;
                        if (d9 < 1.0D) {
                           aboolean[(l * 16 + i1) * 8 + j1] = true;
                        }
                     }
                  }
               }
            }

            // Validate the border of the carved volume: abort when the lake
            // would touch liquid above the waterline, or non-solid blocks
            // other than the lake fluid below it.
            for(int k1 = 0; k1 < 16; ++k1) {
               for(int l2 = 0; l2 < 16; ++l2) {
                  for(int k = 0; k < 8; ++k) {
                     boolean flag = !aboolean[(k1 * 16 + l2) * 8 + k] && (k1 < 15 && aboolean[((k1 + 1) * 16 + l2) * 8 + k] || k1 > 0 && aboolean[((k1 - 1) * 16 + l2) * 8 + k] || l2 < 15 && aboolean[(k1 * 16 + l2 + 1) * 8 + k] || l2 > 0 && aboolean[(k1 * 16 + (l2 - 1)) * 8 + k] || k < 7 && aboolean[(k1 * 16 + l2) * 8 + k + 1] || k > 0 && aboolean[(k1 * 16 + l2) * 8 + (k - 1)]);
                     if (flag) {
                        Material material = p_241855_1_.getBlockState(p_241855_4_.add(k1, k, l2)).getMaterial();
                        if (k >= 4 && material.isLiquid()) {
                           return false;
                        }

                        if (k < 4 && !material.isSolid() && p_241855_1_.getBlockState(p_241855_4_.add(k1, k, l2)) != p_241855_5_.state) {
                           return false;
                        }
                     }
                  }
               }
            }

            // Place the lake: fluid below y=4 (relative), air above.
            for(int l1 = 0; l1 < 16; ++l1) {
               for(int i3 = 0; i3 < 16; ++i3) {
                  for(int i4 = 0; i4 < 8; ++i4) {
                     if (aboolean[(l1 * 16 + i3) * 8 + i4]) {
                        p_241855_1_.setBlockState(p_241855_4_.add(l1, i4, i3), i4 >= 4 ? AIR : p_241855_5_.state, 2);
                     }
                  }
               }
            }

            // Re-surface exposed dirt around the lake rim with grass (or
            // mycelium in biomes whose surface top block is mycelium).
            for(int i2 = 0; i2 < 16; ++i2) {
               for(int j3 = 0; j3 < 16; ++j3) {
                  for(int j4 = 4; j4 < 8; ++j4) {
                     if (aboolean[(i2 * 16 + j3) * 8 + j4]) {
                        BlockPos blockpos = p_241855_4_.add(i2, j4 - 1, j3);
                        if (isDirt(p_241855_1_.getBlockState(blockpos).getBlock()) && p_241855_1_.getLightFor(LightType.SKY, p_241855_4_.add(i2, j4, j3)) > 0) {
                           Biome biome = p_241855_1_.getBiome(blockpos);
                           if (biome.func_242440_e().func_242502_e().getTop().isIn(Blocks.MYCELIUM)) {
                              p_241855_1_.setBlockState(blockpos, Blocks.MYCELIUM.getDefaultState(), 2);
                           } else {
                              p_241855_1_.setBlockState(blockpos, Blocks.GRASS_BLOCK.getDefaultState(), 2);
                           }
                        }
                     }
                  }
               }
            }

            // Lava lakes: harden most solid border blocks into stone.
            if (p_241855_5_.state.getMaterial() == Material.LAVA) {
               for(int j2 = 0; j2 < 16; ++j2) {
                  for(int k3 = 0; k3 < 16; ++k3) {
                     for(int k4 = 0; k4 < 8; ++k4) {
                        boolean flag1 = !aboolean[(j2 * 16 + k3) * 8 + k4] && (j2 < 15 && aboolean[((j2 + 1) * 16 + k3) * 8 + k4] || j2 > 0 && aboolean[((j2 - 1) * 16 + k3) * 8 + k4] || k3 < 15 && aboolean[(j2 * 16 + k3 + 1) * 8 + k4] || k3 > 0 && aboolean[(j2 * 16 + (k3 - 1)) * 8 + k4] || k4 < 7 && aboolean[(j2 * 16 + k3) * 8 + k4 + 1] || k4 > 0 && aboolean[(j2 * 16 + k3) * 8 + (k4 - 1)]);
                        if (flag1 && (k4 < 4 || p_241855_3_.nextInt(2) != 0) && p_241855_1_.getBlockState(p_241855_4_.add(j2, k4, k3)).getMaterial().isSolid()) {
                           p_241855_1_.setBlockState(p_241855_4_.add(j2, k4, k3), Blocks.STONE.getDefaultState(), 2);
                        }
                     }
                  }
               }
            }

            // Water lakes: freeze the surface where the biome freezes water.
            if (p_241855_5_.state.getMaterial() == Material.WATER) {
               for(int k2 = 0; k2 < 16; ++k2) {
                  for(int l3 = 0; l3 < 16; ++l3) {
                     int l4 = 4; // NOTE(review): unused local kept from the decompile
                     BlockPos blockpos1 = p_241855_4_.add(k2, 4, l3);
                     if (p_241855_1_.getBiome(blockpos1).doesWaterFreeze(p_241855_1_, blockpos1, false)) {
                        p_241855_1_.setBlockState(blockpos1, Blocks.ICE.getDefaultState(), 2);
                     }
                  }
               }
            }

            return true;
         }
      }
   }
}
}
|
dominathan/node-fhir-server-core
|
src/server/profiles/sequence/sequence.arguments.js
|
/**
* @name exports
* @description All the possible arguments defined in one place
*/
module.exports = {
CHROMOSOME: {
name: 'chromosome',
type: 'token',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'Chromosome number of the reference sequence',
},
COORDINATE: {
name: 'coordinate',
type: 'composite',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'Search parameter for region of the reference DNA sequence string. This will refer to part of a locus or part of a gene where search region will be represented in 1-based system. Since the coordinateSystem can either be 0-based or 1-based, this search query will include the result of both coordinateSystem that contains the equivalent segment of the gene or whole genome sequence. For example, a search for sequence can be represented as coordinate=1$lt345$gt123, this means it will search for the Sequence resource on chromosome 1 and with position >123 and <345, where in 1-based system resource, all strings within region 1:124-344 will be revealed, while in 0-based system resource, all strings within region 1:123-344 will be revealed. You may want to check detail about 0-based v.s. 1-based above.',
},
END: {
name: 'end',
type: 'number',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'End position (0-based exclusive, which menas the acid at this position will not be included, 1-based inclusive, which means the acid at this position will be included) of the reference sequence.',
},
IDENTIFIER: {
name: 'identifier',
type: 'token',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'The unique identity for a particular sequence',
},
PATIENT: {
name: 'patient',
type: 'reference',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'The subject that the observation is about',
},
START: {
name: 'start',
type: 'number',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'Start position (0-based inclusive, 1-based inclusive, that means the nucleic acid or amino acid at this position will be included) of the reference sequence.',
},
TYPE: {
name: 'type',
type: 'token',
definition: 'https://www.hl7.org/fhir/searchparameter-registry.html#sequence',
documentation: 'Amino Acid Sequence/ DNA Sequence / RNA Sequence',
},
};
|
dymmeh/mil-sym-android
|
buildSrc/src/main/java/armyc2/c2sd/xml/parser/SymbolDefParser.java
|
package armyc2.c2sd.xml.parser;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * Parses the symbol-constant XML documents for the two symbology standards
 * into {@code SymbolDef} lookup tables and serializes them to a binary
 * stream. Index 0 of the input array feeds the *B tables, index 1 the *C
 * tables.
 */
public class SymbolDefParser implements BinaryWriter {

    private static Map<String, SymbolDef> _SymbolDefinitionsB = null;
    private static ArrayList<SymbolDef> _SymbolDefDupsB = null;
    private static Map<String, SymbolDef> _SymbolDefinitionsC = null;
    private static ArrayList<SymbolDef> _SymbolDefDupsC = null;

    /**
     * Initializes (and resets) the lookup tables from the two XML documents.
     *
     * @param symbolConstantsXML [0] = XML for the *B tables, [1] = XML for the *C tables
     */
    public final synchronized void init(String[] symbolConstantsXML)
    {
        _SymbolDefinitionsB = new HashMap<>();
        _SymbolDefDupsB = new ArrayList<>();
        _SymbolDefinitionsC = new HashMap<>();
        _SymbolDefDupsC = new ArrayList<>();

        String lookupXmlB = symbolConstantsXML[0];
        String lookupXmlC = symbolConstantsXML[1]; // stray ';' statement removed

        populateLookup(lookupXmlB, 0);
        populateLookup(lookupXmlC, 1);
    }

    /**
     * Parses every SYMBOL element in the XML and files the resulting
     * SymbolDef into the definition map, or into the duplicate list when the
     * symbol ID was already present. MCS-specific tactical graphics are
     * excluded entirely.
     *
     * @param xml    symbol-constants XML document
     * @param symStd 0 = *B tables, otherwise *C tables
     */
    private void populateLookup(String xml, int symStd)
    {
        Document document = XMLParser.getDomElement(xml);
        NodeList symbols = XMLUtil.getItemList(document, "SYMBOL");
        for (int i = 0; i < symbols.getLength(); i++) {
            Node node = symbols.item(i);
            String symbolID = XMLUtil.parseTagValue(node, "SYMBOLID");
            String geometry = XMLUtil.parseTagValue(node, "GEOMETRY");
            String drawCategory = XMLUtil.parseTagValue(node, "DRAWCATEGORY");
            String maxpoints = XMLUtil.parseTagValue(node, "MAXPOINTS");
            String minpoints = XMLUtil.parseTagValue(node, "MINPOINTS");
            String modifiers = XMLUtil.parseTagValue(node, "MODIFIERS");
            String description = XMLUtil.parseTagValue(node, "DESCRIPTION");
            // Bug fix: the previous call was replaceAll("&", "&"), a no-op.
            // Unescape XML-escaped ampersands in the description text.
            description = description.replace("&amp;", "&");
            String hierarchy = XMLUtil.parseTagValue(node, "HIERARCHY");
            String path = XMLUtil.parseTagValue(node, "PATH");

            SymbolDef sd = new SymbolDef(symbolID, description, Integer.parseInt(drawCategory), hierarchy,
                    Integer.parseInt(minpoints), Integer.parseInt(maxpoints), modifiers, path);

            if (SymbolUtilities.isMCSSpecificTacticalGraphic(sd)) {
                continue;
            }
            Map<String, SymbolDef> definitions = (symStd == 0) ? _SymbolDefinitionsB : _SymbolDefinitionsC;
            ArrayList<SymbolDef> duplicates = (symStd == 0) ? _SymbolDefDupsB : _SymbolDefDupsC;
            if (!definitions.containsKey(symbolID)) {
                definitions.put(symbolID, sd);
            } else {
                duplicates.add(sd);
            }
        }
    }

    /**
     * Writes the parsed tables to the stream: for each standard (B then C),
     * the definition-map size and entries followed by the duplicate-list
     * size and entries. {@link #init(String[])} must have been called first.
     */
    public void writeToBinary(DataOutputStream dos) throws IOException
    {
        dos.writeInt(_SymbolDefinitionsB.size());
        for (Map.Entry<String, SymbolDef> entry : _SymbolDefinitionsB.entrySet()) {
            entry.getValue().writeBinary(dos);
        }
        dos.writeInt(_SymbolDefDupsB.size());
        for (SymbolDef entry : _SymbolDefDupsB) {
            entry.writeBinary(dos);
        }
        dos.writeInt(_SymbolDefinitionsC.size());
        for (Map.Entry<String, SymbolDef> entry : _SymbolDefinitionsC.entrySet()) {
            entry.getValue().writeBinary(dos);
        }
        dos.writeInt(_SymbolDefDupsC.size());
        for (SymbolDef entry : _SymbolDefDupsC) {
            entry.writeBinary(dos);
        }
    }
}
|
Megapop/Norad-Eduapp4syria
|
Sima/Kukua/Classes/WatchmanScene/WatchmanSceneStateCalculator.cpp
|
#include "WatchmanSceneStateCalculator.h"
#include "cocos2d.h"
// Binds the calculator to the scene it drives and caches the shared
// DataProvider singleton. The scene pointer is non-owning.
WatchmanSceneStateCalculator::WatchmanSceneStateCalculator(WatchmanScene &watchmanScene)
{
    watchman = &watchmanScene;
    dataProvider = DataProvider::getInstance();
    // watchman->state = dataProvider->getWatchmanState();
}

// Releases the DBGame objects still held in vectorGamesState.
WatchmanSceneStateCalculator::~WatchmanSceneStateCalculator() {
    clearVectorGamesState();
}
void WatchmanSceneStateCalculator::clearVectorGamesState()
{
for (vector<DBGame*>::iterator iterator = vectorGamesState.begin(); iterator != vectorGamesState.end(); ++iterator) {
delete (*iterator);
}
vectorGamesState.clear();
}
// Decides which portals the watchman scene must show for the current
// phoneme. When every game of the current phoneme is already completed it
// advances to the next phoneme and recurses, so on return `watchman->state`
// reflects the first phoneme that still has pending games.
void WatchmanSceneStateCalculator::calculateState()
{
    string currentPhoneme = dataProvider->getCurrentDBKnowledgeBubble()->getPhoneme();
    CCLOG("--calculateState START! currentPhoneme = %s", currentPhoneme.c_str());
    // Load the games attached to the current knowledge bubble.
    vectorGamesState = dataProvider->getDBGames( *dataProvider->getCurrentDBKnowledgeBubble() );
    CCLOG("--vectorGamesState size = %lu", vectorGamesState.size() );
    int numberOfGames = getNumberOfGamesForPhoneme(currentPhoneme);
    // Intensifier turns always present a single game.
    if (dataProvider->isIntensifierGameTurn) numberOfGames = 1;
    if ( areAllGamesCompleted() ) {
        /*
        if ( isEpisodeCompleted() ) {
            watchman->mustShowNightRunnerScene = true;
            return;
        }
        */
        dataProvider->updateCurrentStatusToNext(); // get new phoneme (and set it as new current phoneme)
        if ( (dataProvider->isSimplePhoneme( dataProvider->getCurrentDBKnowledgeBubble()->getPhoneme() ) ) &&
            (dataProvider->isIntensifierGameTurn == false) ) { // TODO: improve this if
            if (dataProvider->getCurrentEpisode() == 1) {
                watchman->newPhonemeDiscovered = true; // mark/save that the watchman must show the banner for the new letter
            }
        }
        clearVectorGamesState();
        // Re-evaluate with the newly selected phoneme.
        calculateState();
    } else {
        if (numberOfGames == 1) {
            CCLOG("----------------------------------------------------------------mostra UN portale");
            watchman->state = WatchmanScene::SHOW_1_PORTAL;
        } else if (numberOfGames == 2) {
            CCLOG("----------------------------------------------------------------mostra DUE portali");
            watchman->state = WatchmanScene::SHOW_2_PORTALS;
        } else if (numberOfGames == 3) {
            CCLOG("----------------------------------------------------------------mostra TRE portali");
            watchman->state = WatchmanScene::SHOW_3_PORTALS;
        }
        // An intensifier turn overrides the count computed above.
        if (dataProvider->isIntensifierGameTurn) {
            watchman->state = WatchmanScene::SHOW_1_PORTAL;
            CCLOG("----------------------------------------------------------------no, correzione, mostra UN solo portale");
        }
    }
    /*
    state = dataProvider->getWatchmanState();
    int currentEpisode = dataProvider->getCurrentEpisode();
    if (currentEpisode == 1) calculateStateEpisode1();
    else if (currentEpisode == 2) calculateStateEpisode2();
    else CCLOG("Error in WatchmanSceneStateCalculator! currentEpisode = %d", currentEpisode);
    watchman->state = state;
    */
}
bool WatchmanSceneStateCalculator::areAllGamesCompleted() {
if ( (dataProvider->isIntensifierGameTurn) && (dataProvider->intensifierGameCompleted) ) {
return true;
}
int completedGames = 0;
int numbersOfGames = 0;
cocos2d::UserDefault *userDef=cocos2d::UserDefault::getInstance();
for (auto iterator = vectorGamesState.begin(); iterator != vectorGamesState.end(); ++iterator) {
DBGame *dbgame = *iterator;
string gameName = dbgame->getName();
numbersOfGames++;
if (userDef->getBoolForKey(gameName.append("_completed").c_str(), false)) {
completedGames++;
}
}
if (dataProvider->isIntensifierGameTurn) numbersOfGames = 1;
if ( (dataProvider->getCurrentEpisode() == 2) && ( (numbersOfGames==3) || (dataProvider->isIntensifierGameTurn) ) ) {
completedGames = 0;
cocos2d::UserDefault *userDef=cocos2d::UserDefault::getInstance();
if (userDef->getBoolForKey("WritingLetters_completedPortal1", false)) {
completedGames++;
}
if (userDef->getBoolForKey("WritingLetters_completedPortal2", false)) {
completedGames++;
}
if (userDef->getBoolForKey("WritingLetters_completedPortal3", false)) {
completedGames++;
}
}
CCLOG("numbersOfGames = %d", numbersOfGames);
CCLOG("completedGames = %d", completedGames);
if (numbersOfGames == completedGames) return true;
return false;
}
// Number of games available for the current phoneme.
// NOTE(review): the `phoneme` parameter is currently unused — the count
// comes from the already-loaded vectorGamesState; confirm callers expect this.
int WatchmanSceneStateCalculator::getNumberOfGamesForPhoneme(string phoneme) {
    return (int)vectorGamesState.size();
}
// True when the current phoneme is the final one of an episode.
// TODO: check the phoneme/episode table below.
bool WatchmanSceneStateCalculator::isEpisodeCompleted() {
    // Phonemes that mark the end of an episode. A non-zero requiredEpisode
    // additionally requires the current episode to match ("y" only ends
    // episode 2).
    struct EpisodeEnd { const char *phoneme; int requiredEpisode; int episode; };
    static const EpisodeEnd FINAL_PHONEMES[] = {
        { "Blending_ep1_w5_2",            0, 1 },
        { "y",                            2, 2 },
        { "Blending_ep3_w5_6",            0, 3 },
        { "IntensifierGame56",            0, 4 },
        { "extraBlending_ep5_w4_6",       0, 5 },
        { "bubble12_ep6",                 0, 6 },
        { "extraOralPassagebubble20_ep7", 0, 7 },
        { "extraOralPassagebubble20_ep8", 0, 8 },
        { "extraOralPassagebubble20_ep9", 0, 9 }
    };

    int currEp = dataProvider->getCurrentEpisode();
    string currentPhoneme = dataProvider->getCurrentDBKnowledgeBubble()->getPhoneme();

    for (const EpisodeEnd &entry : FINAL_PHONEMES) {
        if (currentPhoneme == entry.phoneme &&
            (entry.requiredEpisode == 0 || currEp == entry.requiredEpisode)) {
            CCLOG("--Episode num %d completed!", entry.episode);
            return true;
        }
    }
    return false;
}
|
ci-fuzz/cppcms
|
tests/secure_post_test.cpp
|
<gh_stars>100-1000
///////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) 2008-2012 <NAME> (Tonkikh) <<EMAIL>>
//
// See accompanying file COPYING.TXT file for licensing details.
//
///////////////////////////////////////////////////////////////////////////////
#include <cppcms/service.h>
#include <cppcms/application.h>
#include <cppcms/applications_pool.h>
#include <cppcms/http_request.h>
#include <cppcms/http_response.h>
#include <cppcms/http_context.h>
#include <cppcms/session_interface.h>
#include <cppcms/serialization.h>
#include <cppcms/json.h>
#include <cppcms/form.h>
#include <iostream>
#include "client.h"
#include "test.h"
// Application under test: /gettoken returns the session's CSRF token;
// /post loads a form from the request, which triggers cppcms's CSRF
// validation and reports "ok" or "fail".
class unit_test : public cppcms::application {
public:
    unit_test(cppcms::service &s) : cppcms::application(s)
    {
    }
    virtual void main(std::string u)
    {
        if(u=="/gettoken") {
            session().set("x",1); // touch the session so a CSRF token exists
            // NOTE(review): this bare out() call looks redundant — the
            // stream is fetched again on the next line; confirm whether it
            // is needed to commit the response first.
            response().out();
            response().out() << session().get_csrf_token();
        }
        else if(u=="/post") {
            // A minimal form (one text field, one file field); load()
            // throws request_forgery_error for requests without a valid
            // CSRF token.
            cppcms::form frm;
            cppcms::widgets::text txt;
            cppcms::widgets::file fl;
            txt.name("test");
            fl.name("file");
            frm.add(txt);
            frm.add(fl);
            try {
                frm.load(context());
                response().out() << "ok";
            }
            catch(cppcms::request_forgery_error const &e) {
                response().out() << "fail";
            }
        }
        else {
            response().out() << "not there";
        }
    }
};
int main(int argc,char **argv)
{
try {
cppcms::service srv(argc,argv);
srv.applications_pool().mount( cppcms::create_pool<unit_test>());
srv.after_fork(submitter(srv));
srv.run();
}
catch(std::exception const &e) {
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
return run_ok ? EXIT_SUCCESS : EXIT_FAILURE;
}
|
roadnarrows-robotics/rnr-sdk
|
Eudoxus/sw/gst/rntemplate/gstrnnamefilter.h
|
////////////////////////////////////////////////////////////////////////////////
//
// Package: Eudoxus
//
// SubPackage: GStreamer
//
// Plug-In: libgstrnnamefilter
//
// File: gstrnnamefilter.h
//
/*! \file
*
* $LastChangedDate: 2015-11-20 15:52:58 -0700 (Fri, 20 Nov 2015) $
* $Rev: 4213 $
*
* \brief RoadNarrows GStreamer Example Template Plug-In.
*
* The filter simple copies input received on sink pad to output source pad.
*
* \author <NAME> (<EMAIL>)
*
* \todo Replace rnnamefilter with the name of the plug-in and the suffix
* 'filter' component by function: src demux mux filter sink
*
* \todo Replace RNNAMEFILTER with the name of the plug-in and the suffix
* 'filter' component by function: SRC DEMUX MUX FILTER SINK
*
* \copyright
* \h_copy 2012-2017. RoadNarrows LLC.\n
* http://www.roadnarrows.com\n
* All Rights Reserved
*/
/*
* @EulaBegin@
// Unless otherwise noted, all materials contained are copyrighted and may not
// be used except as provided in these terms and conditions or in the copyright
// notice (documents and software ) or other proprietary notice provided with
// the relevant materials.
//
//
// IN NO EVENT SHALL THE AUTHOR, ROADNARROWS, OR ANY MEMBERS/EMPLOYEES/
// CONTRACTORS OF ROADNARROWS OR DISTRIBUTORS OF THIS SOFTWARE BE LIABLE TO ANY
// PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL
// DAMAGES ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
// EVEN IF THE AUTHORS OR ANY OF THE ABOVE PARTIES HAVE BEEN ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
// THE AUTHORS AND ROADNARROWS SPECIFICALLY DISCLAIM ANY WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN
// "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE NO OBLIGATION TO
// PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
//
* @EulaEnd@
*/
////////////////////////////////////////////////////////////////////////////////
/*
* GStreamer
* Copyright (C) 2005 <NAME> <<EMAIL>>
* Copyright (C) 2005 <NAME> <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_RNNAMEFILTER_H__
#define __GST_RNNAMEFILTER_H__

#include <gst/gst.h>

G_BEGIN_DECLS

/* Standard GObject type-system boilerplate for the rnnamefilter element:
 * type lookup plus checked instance/class cast and type-check macros. */
/* #defines don't like whitespacey bits */
#define GST_TYPE_RNNAMEFILTER \
  (gst_rnnamefilter_get_type())
#define GST_RNNAMEFILTER(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RNNAMEFILTER,Gstrnnamefilter))
#define GST_RNNAMEFILTER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RNNAMEFILTER,GstrnnamefilterClass))
#define GST_IS_RNNAMEFILTER(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RNNAMEFILTER))
#define GST_IS_RNNAMEFILTER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RNNAMEFILTER))

typedef struct _Gstrnnamefilter Gstrnnamefilter;
typedef struct _GstrnnamefilterClass GstrnnamefilterClass;

/* Instance structure: an element with one sink and one source pad. */
struct _Gstrnnamefilter
{
  GstElement element;

  GstPad *sinkpad, *srcpad;  /* the element's two pads */

  gboolean silent;           /* "silent" flag — presumably gates logging;
                                confirm in the .c implementation */
};

/* Class structure: no members beyond the parent class. */
struct _GstrnnamefilterClass
{
  GstElementClass parent_class;
};

/* Returns the registered GType for this element. */
GType gst_rnnamefilter_get_type (void);

G_END_DECLS

#endif /* __GST_RNNAMEFILTER_H__ */
|
dcabezas98/FP
|
Sesion15/TablaRectangularEnteros.cpp
|
<filename>Sesion15/TablaRectangularEnteros.cpp
/*
Programa que lee los datos de una matriz, indica si es simétrica o
no, calcula su traspuesta y la multiplica por su traspuesta.
*/
#include<iostream>
using namespace std;
const int MAX_FILAS = 50;
const int MAX_COLUMNAS = 50;
/*
 * Fixed-capacity sequence of long integers (at most TAMANIO = 50 elements).
 * Out-of-range operations are silently ignored.
 */
class SecuenciaEnteros{
private:
    static const int TAMANIO = 50;  // fixed capacity
    long datos[TAMANIO];            // element storage
    int usados;                     // number of valid slots in `datos`

    // Swaps the elements stored at the two given positions.
    void IntercambiaComponentes_en_Posiciones(int pos_izda, int pos_dcha){
        const long temporal = datos[pos_izda];
        datos[pos_izda] = datos[pos_dcha];
        datos[pos_dcha] = temporal;
    }
public:
    // The sequence starts empty.
    SecuenciaEnteros()
        :usados(0) {
    }

    // Number of elements currently stored.
    int TotalUtilizados(){
        return usados;
    }

    // Maximum number of elements the sequence can hold.
    int Capacidad(){
        return TAMANIO;
    }

    // Empties the sequence (capacity is kept).
    void EliminaTodos(){
        usados = 0;
    }

    // Appends `nuevo` at the end; ignored when the sequence is full.
    void Aniade(long nuevo){
        if (usados < TAMANIO)
            datos[usados++] = nuevo;
    }

    // Overwrites the element at `posicion`; invalid positions are ignored.
    void Modifica(int posicion, long nuevo){
        if (posicion >= 0 && posicion < usados)
            datos[posicion] = nuevo;
    }

    // Element at `indice` (no bounds checking, as in the original).
    long Elemento(int indice){
        return datos[indice];
    }
};
/*
 * Reads sequences of integers from standard input. Input stops at a
 * caller-supplied terminator value or when the sequence is full.
 */
class LectorSecuenciaEnteros{
private:
    long terminador;  // sentinel value that ends the input
public:
    LectorSecuenciaEnteros(long entero_terminador)
        :terminador(entero_terminador)
    {
    }

    // Reads values from cin until the terminator (or the sequence's
    // capacity) is reached and returns them as a SecuenciaEnteros.
    SecuenciaEnteros Lee(){
        SecuenciaEnteros secuencia;
        const int capacidad = secuencia.Capacidad();
        int leidos = 0;
        long valor;

        cin >> valor;
        while (valor != terminador && leidos < capacidad){
            secuencia.Aniade(valor);
            ++leidos;
            cin >> valor;
        }
        return secuencia;
    }
};
/*
 * Fixed-capacity rectangular matrix of ints (MAX_FILAS x MAX_COLUMNAS).
 * `filas`/`columnas` track the portion of the backing array in use.
 */
class TablaRectangularEnteros {
private:
    int matriz_privada[MAX_FILAS][MAX_COLUMNAS];  // backing storage
    int filas, columnas;                          // dimensions in use
public:
    /* Creates an empty table (0 rows) with `j` columns. */
    TablaRectangularEnteros(int j) {
        columnas = j;
        filas = 0;
    }
    /* Creates an i x j table (cell contents uninitialized). */
    TablaRectangularEnteros(int i, int j) {
        columnas = j;
        filas = i;
    }
    /* Métodos get */
    /* Number of rows in use. */
    int FilasUtilizadas() {
        return filas;
    }
    /* Number of columns in use. */
    int ColumnasUtilizadas() {
        return columnas;
    }
    /* Value stored at (i, j); no bounds checking. */
    int Elemento(int i, int j) {
        return matriz_privada[i][j];
    }
    /* Returns row i as a SecuenciaEnteros. */
    SecuenciaEnteros Fila(int i) {
        SecuenciaEnteros fila;
        for(int j = 0; j < columnas; j++) {
            fila.Aniade(matriz_privada[i][j]);
        }
        return fila;
    }
    /* Métodos set */
    /* Appends a full row. Robustness fix: rows beyond MAX_FILAS are now
       ignored instead of writing past the end of the array. */
    void AniadeFila(SecuenciaEnteros fila) {
        if (filas < MAX_FILAS) {
            for(int j = 0; j < columnas; j++) {
                matriz_privada[filas][j] = fila.Elemento(j);
            }
            filas++;
        }
    }
    /* Overwrites cell (i, j). Robustness fix: the bounds check that was
       commented out is enabled, so out-of-range writes are ignored. */
    void Modifica(int i, int j, int nuevo) {
        if(i >= 0 && i < MAX_FILAS && j >= 0 && j < MAX_COLUMNAS)
            matriz_privada[i][j] = nuevo;
    }
    /* Otros métodos */
    /* Returns the transpose of this matrix. */
    TablaRectangularEnteros Traspuesta() {
        TablaRectangularEnteros traspuesta(columnas, filas);
        for(int i = 0; i < filas; i++) {
            for(int j = 0; j < columnas; j++) {
                traspuesta.Modifica(j, i, matriz_privada[i][j]);
            }
        }
        return traspuesta;
    }
    /* True when both matrices have the same dimensions and contents. */
    bool Igual(TablaRectangularEnteros matriz) {
        bool iguales = true;
        if(filas != matriz.FilasUtilizadas() || columnas != matriz.ColumnasUtilizadas())
            iguales = false;
        for(int i = 0; i < filas && iguales; i++) {
            for(int j = 0; j < columnas && iguales; j++) {
                iguales = matriz_privada[i][j] == matriz.Elemento(i,j);
            }
        }
        return iguales;
    }
    /* Symmetry test via Traspuesta + Igual. */
    bool Simetrica() {
        TablaRectangularEnteros traspuesta(Traspuesta());
        return Igual(traspuesta);
    }
    /* Symmetry test by direct comparison of (i,j) with (j,i). */
    bool Simetrica2() {
        bool simetrica = true;
        for(int i = 0; i < filas && simetrica; i++) {
            for(int j = i; j < columnas && simetrica; j++) {
                simetrica = matriz_privada[i][j] == matriz_privada[j][i];
            }
        }
        return simetrica;
    }
    /* Matrix product this * matriz. Bug fix: the original had no return
       statement when the dimensions were incompatible (falling off the end
       of a non-void function is undefined behavior); an empty 0x0 table is
       returned instead. */
    TablaRectangularEnteros Multiplica(TablaRectangularEnteros matriz) {
        if(columnas == matriz.FilasUtilizadas()) {
            /* Bug fix: use the two-argument constructor so the product
               reports its real row count; the one-argument form left
               FilasUtilizadas() at 0 even after the cells were written. */
            TablaRectangularEnteros producto(filas, matriz.ColumnasUtilizadas());
            int i, j, k, suma;
            for(i = 0; i < filas; i++) {
                for(j = 0; j < matriz.ColumnasUtilizadas(); j++) {
                    suma = 0;
                    for(k = 0; k < columnas; k++) {
                        suma = suma + matriz_privada[i][k] * matriz.Elemento(k,j);
                    }
                    producto.Modifica(i, j, suma);
                }
            }
            return producto;
        }
        return TablaRectangularEnteros(0, 0);
    }
};
/*
 * Driver: reads an integer matrix, reports whether it is symmetric
 * (printing the transpose when it is not) and prints the product of the
 * matrix with its transpose.
 */
int main() {
    int fil, col, i, j;
    const long TERMINADOR = -1;  // sentinel ending each row's input
    LectorSecuenciaEnteros lector_secuencias(TERMINADOR);

    // Read dimensions, insisting on values within the tables' capacity.
    do {
        cout << "Filas: ";
        cin >> fil;
    } while(fil <= 0 || fil > MAX_FILAS);
    do {
        cout << "Columnas: ";
        cin >> col;
    } while(col <= 0 || col > MAX_COLUMNAS);

    TablaRectangularEnteros matriz(col);

    // Read the matrix row by row.
    for(i = 0; i < fil; i++) {
        SecuenciaEnteros fila;
        cout << "Fila " << i+1 << ":\n";
        fila = lector_secuencias.Lee();
        matriz.AniadeFila(fila);
    }

    TablaRectangularEnteros traspuesta(matriz.Traspuesta());

    if(matriz.Simetrica())
        cout << "Simétrica\n";
    else {
        cout << "No simétrica\n\n";
        cout << "Traspuesta\n\n";
        // The transpose is col x fil.
        for(i = 0; i < col; i++) {
            for(j = 0; j < fil; j++) {
                cout << " " << traspuesta.Elemento(i,j) << " ";
            }
            cout << "\n";
        }
    }

    // (fil x col) * (col x fil) -> fil x fil product.
    TablaRectangularEnteros producto(matriz.Multiplica(traspuesta));
    cout << "Producto por su traspuesta \n\n";
    for(i = 0; i < fil; i++) {
        for(j = 0; j < fil; j++) {
            cout << " " << producto.Elemento(i,j) << " ";
        }
        cout << "\n";
    }
}
|
ravewillow6383/data-structures-and-algorithms-python
|
challenges/array_shift/test_array_shift.py
|
from array_shift import insert_shift_array
# def test_insert_to_middle():
# expected = [1, 2, 3]
# actual = insert_shift_array([1, 3], 2)
# assert expected == actual
def test_insert_to_middle():
    # Inserting 4 into the middle shifts the later elements right.
    result = insert_shift_array([1, 2, 3, 5, 6], 4)
    assert result == [1, 2, 3, 4, 5, 6]
|
lakehui/Vim_config
|
.vim/sourceCode/glibc-2.16.0/sysdeps/ieee754/dbl-64/s_fmaf.c
|
<reponame>lakehui/Vim_config
/* Compute x * y + z as ternary operation.
Copyright (C) 2010-2012 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by <NAME> <<EMAIL>>, 2010.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, see
<http://www.gnu.org/licenses/>. */
#include <math.h>
#include <fenv.h>
#include <ieee754.h>
#include <math_private.h>
/* This implementation relies on double being more than twice as
precise as float and uses rounding to odd in order to avoid problems
with double rounding.
See a paper by Boldo and Melquiond:
http://www.lri.fr/~melquion/doc/08-tc.pdf */
/* Single-rounding fused multiply-add for float, built on double
   arithmetic: the product is exact in double, the addition is done in
   round-toward-zero, and the result is patched to "round to odd" so the
   final conversion back to float cannot double-round (see the Boldo &
   Melquiond paper cited above).  */
float
__fmaf (float x, float y, float z)
{
  fenv_t env;
  /* Multiplication is always exact.  */
  double temp = (double) x * (double) y;
  union ieee754_double u;
  libc_feholdexcept_setround (&env, FE_TOWARDZERO);
  /* Perform addition with round to odd.  */
  u.d = temp + (double) z;
  /* Ensure the addition is not scheduled after fetestexcept call.  */
  math_force_eval (u.d);
  /* Reset rounding mode and test for inexact simultaneously.  */
  int j = libc_feupdateenv_test (&env, FE_INEXACT) != 0;
  /* Round to odd: when the truncated sum is even (low mantissa bit clear)
     and finite (exponent != 0x7ff), set the low bit to the inexact flag so
     the sticky information survives the final rounding.  */
  if ((u.ieee.mantissa1 & 1) == 0 && u.ieee.exponent != 0x7ff)
    u.ieee.mantissa1 |= j;
  /* And finally truncation with round to nearest.  */
  return (float) u.d;
}
#ifndef __fmaf
weak_alias (__fmaf, fmaf)
#endif
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.