repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
qiwx2012/flutter_admin_backend
|
src/main/java/com/cry/flutter/admin/service/IMessageReplayService.java
|
package com.cry.flutter.admin.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.cry.flutter.admin.common.RequestBodyApi;
import com.cry.flutter.admin.entity.Message;
import com.cry.flutter.admin.entity.MessageReplay;
import com.baomidou.mybatisplus.extension.service.IService;
import com.cry.flutter.admin.vo.MessageReplayVO;
/**
* <p>
* 服务类
* </p>
*
* @author cairuoyu
* @homepage: http://cairuoyu.com
* @github: https://github.com/cairuoyu/flutter_admin_backend
* @since 2021-03-30
*/
public interface IMessageReplayService extends IService<MessageReplay> {
    /**
     * Returns one page of message-replay view objects for the query carried
     * in the request body.
     *
     * @param requestBodyApi paging/filter wrapper around a {@link Message} query
     * @return a page of {@link MessageReplayVO} results
     */
    IPage<MessageReplayVO> queryPage(RequestBodyApi<Message> requestBodyApi);
}
|
shadowlansec/project-fantastic
|
packages/fantastic-default_auth/files/public/account.js
|
<gh_stars>10-100
// Load the current user's account details and render them.
// Uses textContent rather than innerHTML so server-supplied values
// (username, error text) cannot inject markup or script into the page.
fetch('/auth/myaccount')
  .then(res => res.json())
  .then(res => {
    if (res.error) return document.getElementById('content').textContent = res.error
    document.getElementById('username').textContent = res.username
    document.getElementById('role').textContent = `Role: ${res.role}`
  })
// Treat Enter anywhere on the page as a click on the "continue" button.
document.onkeyup = event => {
  if (event.key === 'Enter') {
    document.getElementById('continue').click()
  }
}
// Replace the delete button with an inline yes/no confirmation prompt.
// "Yes" posts the account deletion and follows the redirect; "No" removes
// the prompt and restores the original button.
function deleteButton(button) {
  const container = button.parentElement
  container.removeChild(button)

  // Small helper: append a <div> with the given class and text.
  const appendDiv = (className, text) => {
    const el = container.appendChild(document.createElement('div'))
    el.className = className
    el.appendChild(document.createTextNode(text))
    return el
  }

  const prompt = appendDiv('confirm', 'Really delete your account?')
  const yesButton = appendDiv('button', 'Yes')
  const noButton = appendDiv('button', 'No')

  yesButton.onclick = () => {
    fetch('/auth/deleteaccount', {method: 'post', redirect: 'follow'})
      .then(res => window.open(res.url, '_self'))
  }
  noButton.onclick = () => {
    container.removeChild(prompt)
    container.removeChild(yesButton)
    container.removeChild(noButton)
    container.appendChild(button)
  }
}
|
swanhong/CiFEr
|
test/abe/fame.c
|
/*
* Copyright (c) 2018 XLAB d.o.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cifer/internal/common.h>
#include <amcl/pair_BN254.h>
#include "cifer/test.h"
#include "cifer/abe/fame.h"
#include "cifer/serialization/fame_ser.h"
#include "cifer/serialization/data_ser.h"
/* End-to-end test of the FAME ABE scheme: master keygen -> encrypt under a
 * boolean policy -> derive attribute keys for a satisfying attribute set ->
 * decrypt -> compare with the original message. */
MunitResult test_fame_end_to_end(const MunitParameter *params, void *data) {
    // create a new FAME struct
    cfe_fame fame;
    cfe_fame_init(&fame);

    // initialize and generate a public key and a secret key for the scheme
    cfe_fame_pub_key pk;
    cfe_fame_sec_key sk;
    cfe_fame_sec_key_init(&sk);
    cfe_fame_generate_master_keys(&pk, &sk, &fame);

    // create a message to be encrypted (the identity of the target group)
    FP12_BN254 msg;
    FP12_BN254_one(&msg);

    // create a msp structure out of a boolean expression representing the
    // policy specifying which attributes are needed to decrypt the ciphertext
    char bool_exp[] = "(5 OR 3) AND ((2 OR 4) OR (1 AND 6))";
    // Derive the length from the array itself rather than a hand-counted
    // magic number (sizeof includes the trailing '\0', hence the -1).
    size_t bool_exp_len = sizeof(bool_exp) - 1;
    cfe_msp msp;
    cfe_error err = cfe_boolean_to_msp(&msp, bool_exp, bool_exp_len, false);
    munit_assert(err == CFE_ERR_NONE);

    // initialize a ciphertext and encrypt the message based on the msp
    // structure describing the policy
    cfe_fame_cipher cipher;
    cfe_fame_cipher_init(&cipher, &msp);
    cfe_fame_encrypt(&cipher, &msg, &msp, &pk, &fame);

    // produce keys that are given to an entity with a set of attributes in
    // owned_attrib; {1, 3, 6} satisfies the policy above
    int owned_attrib[] = {1, 3, 6};
    cfe_fame_attrib_keys keys;
    cfe_fame_attrib_keys_init(&keys, 3); // the number of attributes needs to be specified
    cfe_fame_generate_attrib_keys(&keys, owned_attrib, 3, &sk, &fame);

    // decrypt the message with owned keys
    FP12_BN254 decryption;
    err = cfe_fame_decrypt(&decryption, &cipher, &keys, &fame);
    munit_assert(err == CFE_ERR_NONE);

    // check if the decryption equals the starting message
    munit_assert(FP12_BN254_equals(&msg, &decryption) == 1);

    // clean up
    cfe_fame_free(&fame);
    cfe_fame_sec_key_free(&sk);
    cfe_fame_cipher_free(&cipher);
    cfe_fame_attrib_keys_free(&keys);
    cfe_msp_free(&msp);

    return MUNIT_OK;
}
// Registration table for the FAME tests; NULL-terminated per munit convention.
MunitTest fame_tests[] = {
    {(char *) "/end-to-end", test_fame_end_to_end, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL},
    {NULL, NULL, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL}
};
// Suite descriptor: runs each registered test once under the "/abe/fame" prefix.
MunitSuite fame_suite = {
    (char *) "/abe/fame", fame_tests, NULL, 1, MUNIT_SUITE_OPTION_NONE
};
|
quis/pay-selfservice
|
app/controllers/payment-links/get-review-controller.js
|
'use strict'
// NPM dependencies
const lodash = require('lodash')
// Local dependencies
const {response} = require('../../utils/response.js')
const paths = require('../../paths')
module.exports = (req, res) => {
const pageData = lodash.get(req, 'session.pageData.createPaymentLink', {})
return response(req, res, 'payment-links/review', {
pageData,
paymentLinkTitle: pageData.paymentLinkTitle,
paymentLinkDescription: pageData.paymentLinkDescription,
paymentLinkAmount: pageData.paymentLinkAmount,
nextPage: paths.paymentLinks.review,
changeInformation: paths.paymentLinks.information,
changeAmount: paths.paymentLinks.amount,
returnToStart: paths.paymentLinks.start,
manage: paths.paymentLinks.manage
})
}
|
zcemycl/algoTest
|
py/tests/testMaxDepthBT/test_MaxDepthBT.py
|
import unittest
from parameterized import parameterized as p
from solns.maxDepthBT.maxDepthBT import *
class UnitTest_MaxDepthBT(unittest.TestCase):
    """Unit tests for Solution.naive (maximum depth of a binary tree)."""

    @p.expand([
        # Each case: [level-order list with None gaps, expected max depth].
        [[3, 9, 20, None, None, 15, 7], 3], [[1, None, 2], 2]
    ])
    def test_naive(self, nums, expected):
        # Build the tree from its level-order representation, then check depth.
        root = TreeNode.treeNode_fromList2(nums)
        self.assertEqual(Solution.naive(root), expected)
|
danstowell/signalflow
|
source/include/signalflow/node/stochastic/random-exponential-dist.h
|
<reponame>danstowell/signalflow
#pragma once
#include "signalflow/node/stochastic/stochastic-node.h"
namespace signalflow
{
/**--------------------------------------------------------------------------------*
 * RandomExponentialDist: a stochastic node producing random values from an
 * exponential distribution with the given scale.
 * NOTE(review): the semantics of clock/reset and of `value` live in the .cpp
 * implementation — confirm there before relying on them.
 *--------------------------------------------------------------------------------*/
class RandomExponentialDist : public StochasticNode
{
public:
    RandomExponentialDist(NodeRef scale = 0.0, NodeRef clock = nullptr, NodeRef reset = nullptr);

    virtual void alloc() override;
    virtual void process(Buffer &out, int num_frames) override;
    virtual void trigger(std::string = SIGNALFLOW_DEFAULT_TRIGGER, float value = 0.0) override;

private:
    NodeRef scale;              // distribution scale input
    NodeRef clock;              // optional trigger input — presumably regenerates on trigger; verify in .cpp
    std::vector<sample> value;  // per-channel state buffer, sized in alloc()
};

REGISTER(RandomExponentialDist, "random-exponential-dist")
}
|
Noddy76/data-highway
|
monitoring/highway-patrol/src/main/java/com/hotels/road/highwaypatrol/Sender.java
|
/**
* Copyright (C) 2016-2019 Expedia, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hotels.road.highwaypatrol;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import lombok.extern.slf4j.Slf4j;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.hotels.road.client.AsyncRoadClient;
import com.hotels.road.rest.model.StandardResponse;
/**
 * Periodically creates and sends test messages to the onramp at a fixed rate.
 *
 * <p>The configured rate ({@code messageHz}) must be a factor of 1000 so that
 * the scheduling period ({@code 1000 / messageHz} ms) is a whole number.
 */
@Slf4j
@Component
public class Sender implements AutoCloseable {
  private final ScheduledExecutorService service;
  private final AsyncRoadClient<TestMessage> onrampClient;
  private final TestMessageContextManager contextManager;
  /** Messages per second. */
  private final long messageRate;

  /** Spring entry point; uses a 4-thread scheduler with named worker threads. */
  @Autowired
  public Sender(
      AsyncRoadClient<TestMessage> onrampClient,
      TestMessageContextManager contextManager,
      @Value("${messageHz}") long messageRate) {
    this(onrampClient, contextManager, messageRate,
        Executors.newScheduledThreadPool(4, new ThreadFactoryBuilder().setNameFormat("sender-%d").build()));
  }

  /** Package-private constructor allowing tests to inject the scheduler. */
  Sender(
      AsyncRoadClient<TestMessage> onrampClient,
      TestMessageContextManager contextManager,
      long messageRate,
      ScheduledExecutorService service) {
    // Validate the rate before assigning any state so an invalid value can
    // never yield a partially initialised instance.
    checkArgument(messageRate > 0, "Message frequency (messageHz) must be greater than 0Hz");
    checkArgument(messageRate <= 1000, "Message frequency (messageHz) must be 1000Hz or below");
    checkArgument(1000 % messageRate == 0, "Message frequency value (messageHz) must be a factor of 1000");
    this.messageRate = messageRate;
    this.onrampClient = onrampClient;
    this.contextManager = contextManager;
    this.service = service;
  }

  /** Starts the fixed-rate sending schedule. */
  public void start() {
    log.info("Starting message sender at {}Hz", messageRate);
    service.scheduleAtFixedRate(this::worker, 0, 1000 / messageRate, MILLISECONDS);
  }

  /** Creates and sends a single message; never lets an error escape. */
  public void worker() {
    try {
      TestMessageContext context = contextManager.nextContext();
      // Send the message
      CompletableFuture<StandardResponse> response = onrampClient.sendMessage(context.getMessage());
      context.messageSent(response);
    } catch (Throwable t) {
      // Catch Throwable (not just Exception): any error escaping a scheduled
      // task silently cancels the whole schedule.
      log.warn("Problem creating and sending message", t);
    }
  }

  /** Stops the schedule and waits up to 5 minutes for in-flight work. */
  @Override
  public void close() throws Exception {
    service.shutdown();
    if (!service.awaitTermination(5, TimeUnit.MINUTES)) {
      service.shutdownNow(); // avoid leaking the worker threads on timeout
      throw new Exception("Timed out waiting for sender service to terminate");
    }
    log.info("Shutdown sender");
  }
}
|
Munyola/sentry
|
tests/sentry/api/serializers/test_grouptagkey.py
|
from __future__ import absolute_import
import six
from sentry import tagstore
from sentry.api.serializers import serialize
from sentry.testutils import TestCase
class GroupTagKeySerializerTest(TestCase):
    """Checks that a GroupTagKey serializes with the expected id/key fields."""

    def test(self):
        user = self.create_user()
        project = self.create_project()
        # Create a tag key on the project, then attach it to a fresh group.
        tagkey = tagstore.create_tag_key(
            project_id=project.id,
            key='key'
        )
        grouptagkey = tagstore.create_group_tag_key(
            project_id=project.id,
            group_id=self.create_group(project=project).id,
            key=tagkey.key
        )
        result = serialize(grouptagkey, user)
        # The serialized id is stringified; the key passes through unchanged.
        assert result['id'] == six.text_type(grouptagkey.id)
        assert result['key'] == 'key'
NanoCode012/ITStep
|
C++ Programming/Unicode/Tchar.cpp
|
<gh_stars>1-10
#include "Tchar.h"
// Converts an ANSI (multibyte, current locale) string to a newly allocated,
// null-terminated wide-character string. The caller owns the result and must
// release it with delete[]. Returns nullptr if the input contains an invalid
// multibyte sequence.
wchar_t* ConvertANSIToUnicode(const char * c)
{
    // Query the required length in wide chars (excluding the terminator).
    size_t len = mbstowcs(NULL, c, 0);
    if (len == (size_t) -1) {
        return nullptr; // invalid multibyte sequence in input
    }
    // BUG FIX: the original used `new wchar_t(len)`, which allocates a SINGLE
    // wchar_t initialised to `len`, causing a heap overflow on conversion.
    // Allocate an array with room for the terminating L'\0'.
    wchar_t* ch = new wchar_t[len + 1];
    // Passing len + 1 lets mbstowcs write the terminator as well.
    mbstowcs(ch, c, len + 1);
    return ch;
}
// Identity pass-through for narrow strings: returns its argument unchanged.
// Presumably the narrow-build counterpart of ConvertANSIToUnicode in a
// TCHAR-style abstraction — confirm against Tchar.h.
const char* GetChar(const char * c)
{
    return c;
}
|
cmdallas/aws-app-mesh-controller-for-k8s
|
test/e2e/framework/helm/release.go
|
package helm
import (
"fmt"
"github.com/aws/aws-app-mesh-controller-for-k8s/test/e2e/framework/utils"
"github.com/pkg/errors"
"go.uber.org/zap"
"helm.sh/helm/v3/pkg/action"
"helm.sh/helm/v3/pkg/chart/loader"
"helm.sh/helm/v3/pkg/cli"
"helm.sh/helm/v3/pkg/release"
"k8s.io/cli-runtime/pkg/genericclioptions"
"strings"
)
// Manager abstracts the helm operations the e2e framework uses to swap the
// appMesh controller/injector between released charts and images under test.
type Manager interface {
	// ResetAppMeshController resets the appMesh controller to the default one installed by helm charts.
	ResetAppMeshController() error
	// UpgradeAppMeshController upgrades the appMesh controller to a new one with the image overridden.
	UpgradeAppMeshController(controllerImage string) error
	// ResetAppMeshInjector resets the appMesh injector to the default one installed by helm charts.
	ResetAppMeshInjector() error
	// UpgradeAppMeshInjector upgrades the appMesh injector to a new one with the image overridden.
	UpgradeAppMeshInjector(injectorImage string) error
	// UpgradeHelmRelease upgrades a helm release with the given chart and values.
	UpgradeHelmRelease(chartRepo string, chartName string, namespace string, releaseName string, vals map[string]interface{}) (*release.Release, error)
}
// NewManager returns the default Manager bound to the given kubeconfig,
// logging through a Ginkgo-aware logger.
func NewManager(kubeConfig string) Manager {
	return &defaultManager{
		kubeConfig: kubeConfig,
		logger:     utils.NewGinkgoLogger(),
	}
}
// defaultManager is the Manager implementation backed by the helm v3 action API.
type defaultManager struct {
	kubeConfig string // path to the kubeconfig used for all helm actions
	logger     *zap.Logger
}
// ResetAppMeshController upgrades the controller release with empty values
// (ResetValues is set by UpgradeHelmRelease), restoring the chart defaults.
func (m *defaultManager) ResetAppMeshController() error {
	vals := make(map[string]interface{})
	_, err := m.UpgradeHelmRelease(eksHelmChartsRepo, appMeshControllerHelmChart, appMeshSystemNamespace, appMeshControllerHelmReleaseName, vals)
	return err
}
// UpgradeAppMeshController upgrades the controller release, overriding the
// image with the given "<repo>:<tag>" reference.
func (m *defaultManager) UpgradeAppMeshController(controllerImage string) error {
	vals := make(map[string]interface{})
	imageRepo, imageTag, err := splitImageRepoAndTag(controllerImage)
	if err != nil {
		return err
	}
	// Helm-values equivalent of --set image.repository=...,image.tag=...
	vals["image"] = map[string]interface{}{
		"repository": imageRepo,
		"tag":        imageTag,
	}
	_, err = m.UpgradeHelmRelease(eksHelmChartsRepo, appMeshControllerHelmChart, appMeshSystemNamespace, appMeshControllerHelmReleaseName, vals)
	return err
}
// ResetAppMeshInjector upgrades the injector release with empty values,
// restoring the chart defaults.
func (m *defaultManager) ResetAppMeshInjector() error {
	vals := make(map[string]interface{})
	_, err := m.UpgradeHelmRelease(eksHelmChartsRepo, appMeshInjectorHelmChart, appMeshSystemNamespace, appMeshInjectorHelmReleaseName, vals)
	return err
}
// UpgradeAppMeshInjector upgrades the injector release, overriding the image
// with the given "<repo>:<tag>" reference.
func (m *defaultManager) UpgradeAppMeshInjector(injectorImage string) error {
	vals := make(map[string]interface{})
	imageRepo, imageTag, err := splitImageRepoAndTag(injectorImage)
	if err != nil {
		return err
	}
	// Helm-values equivalent of --set image.repository=...,image.tag=...
	vals["image"] = map[string]interface{}{
		"repository": imageRepo,
		"tag":        imageTag,
	}
	_, err = m.UpgradeHelmRelease(eksHelmChartsRepo, appMeshInjectorHelmChart, appMeshSystemNamespace, appMeshInjectorHelmReleaseName, vals)
	return err
}
// UpgradeHelmRelease upgrades the named release in the given namespace using
// chartName from chartRepo, replacing the release values with vals
// (ResetValues) and waiting for the rollout to complete.
func (m *defaultManager) UpgradeHelmRelease(chartRepo string, chartName string,
	namespace string, releaseName string, vals map[string]interface{}) (*release.Release, error) {
	cfgFlags := genericclioptions.NewConfigFlags(false)
	cfgFlags.KubeConfig = &m.kubeConfig
	cfgFlags.Namespace = &namespace

	actionConfig := new(action.Configuration)
	// Route helm's log output through our structured logger.
	// BUG FIX: the Init error was previously ignored.
	if err := actionConfig.Init(cfgFlags, namespace, "secrets", func(format string, v ...interface{}) {
		message := fmt.Sprintf(format, v...)
		m.logger.Info(message)
	}); err != nil {
		return nil, err
	}

	upgradeAction := action.NewUpgrade(actionConfig)
	upgradeAction.ChartPathOptions.RepoURL = chartRepo
	upgradeAction.Namespace = namespace
	upgradeAction.ResetValues = true
	upgradeAction.Wait = true

	cp, err := upgradeAction.ChartPathOptions.LocateChart(chartName, cli.New())
	// BUG FIX: this error was previously ignored, so a failed chart lookup
	// proceeded to load an empty path.
	if err != nil {
		return nil, err
	}
	chartRequested, err := loader.Load(cp)
	if err != nil {
		return nil, err
	}
	return upgradeAction.Run(releaseName, chartRequested, vals)
}
// splitImageRepoAndTag parses a docker image in format <imageRepo>:<imageTag> into `imageRepo` and `imageTag`
func splitImageRepoAndTag(dockerImage string) (string, string, error) {
parts := strings.Split(dockerImage, ":")
if len(parts) != 2 {
return "", "", errors.Errorf("dockerImage expects <imageRepo>:<imageTag>, got: %s", dockerImage)
}
return parts[0], parts[1], nil
}
|
IvanPashchenko/intellij-plugin-verifier
|
intellij-plugin-verifier/verifier-test/before-idea/src/main/java/com/intellij/util/xmlb/annotations/Attribute.java
|
<reponame>IvanPashchenko/intellij-plugin-verifier<filename>intellij-plugin-verifier/verifier-test/before-idea/src/main/java/com/intellij/util/xmlb/annotations/Attribute.java
package com.intellij.util.xmlb.annotations;
import org.jetbrains.annotations.NonNls;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Runtime-retained marker for fields, types, and methods.
 * NOTE(review): semantics live in the xmlb serializer that reads this
 * annotation; presumably {@code value} names an XML attribute, with the
 * empty default meaning "derive from the member name" — confirm there.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.TYPE, ElementType.METHOD})
public @interface Attribute {
    /** Optional name override; empty by default. */
    @NonNls String value() default "";
}
|
dashevo/tendermint
|
crypto/crypto_test.go
|
package crypto
import (
"encoding/hex"
"testing"
"github.com/stretchr/testify/require"
)
// TestChecksum pins the digest of a fixed input.
// Since the sha256 hash algorithm is critical for tenderdash, this test is
// needed to inform us if for any reason the hash algorithm is changed.
func TestChecksum(t *testing.T) {
	actual := Checksum([]byte("dash is the best cryptocurrency in the world"))
	// Expected digest of the input above (32 bytes, hex-encoded).
	want, err := hex.DecodeString("FFE75CFE38997723E7C33D0457521B0BA75AB48B39BC467413BDC853ACC7476F")
	require.NoError(t, err)
	require.Equal(t, want, actual)
}
|
windmaomao/adventofcode
|
utils/js/duplicate.js
|
// Groups the elements of `arr` by `keyBy` and returns only the groups that
// contain more than one element. Grouping uses a plain object on purpose:
// integer-like keys iterate in ascending numeric order, which callers may
// rely on, and a Map would change that ordering.
const duplicates = (arr, keyBy = v => `${v}`) => {
  if (arr.length < 2) return []
  const groups = arr.reduce((acc, item) => {
    const key = keyBy(item)
    ;(acc[key] = acc[key] || []).push(item)
    return acc
  }, {})
  return Object.values(groups).filter(group => group.length > 1)
}
export default duplicates
|
Nikita-tech-writer/ignite-4
|
modules/sql-engine/src/main/java/org/apache/ignite/internal/sql/engine/RootQuery.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.sql.engine;
import static org.apache.ignite.internal.sql.engine.util.Commons.FRAMEWORK_CONFIG;
import static org.apache.ignite.internal.util.CollectionUtils.nullOrEmpty;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.util.CancelFlag;
import org.apache.ignite.internal.sql.engine.exec.ExchangeService;
import org.apache.ignite.internal.sql.engine.exec.ExecutionContext;
import org.apache.ignite.internal.sql.engine.exec.rel.Node;
import org.apache.ignite.internal.sql.engine.exec.rel.RootNode;
import org.apache.ignite.internal.sql.engine.prepare.Fragment;
import org.apache.ignite.internal.sql.engine.prepare.MultiStepPlan;
import org.apache.ignite.internal.sql.engine.prepare.PlanningContext;
import org.apache.ignite.internal.sql.engine.util.BaseQueryContext;
import org.apache.ignite.internal.tostring.S;
import org.apache.ignite.lang.IgniteInternalCheckedException;
import org.apache.ignite.lang.IgniteInternalException;
import org.apache.ignite.lang.IgniteLogger;
/**
* The RootQuery is created on the query initiator (originator) node as the first step of a query run; It contains the information about
* query state, contexts, remote fragments; It provides 'cancel' functionality for running query like a base query class.
*/
public class RootQuery<RowT> extends Query<RowT> {
    /** SQL query. */
    private final String sql;

    /** Parameters. */
    private final Object[] params;

    /** Ids of the remote nodes executing fragments of this query (populated in {@link #run}). */
    private final Set<String> remotes;

    /** Remote fragments that have not yet responded (drained in {@link #onResponse}). */
    private final Set<RemoteFragmentKey> waiting;

    /** Root of the local execution tree; the source of result rows for {@link #iterator()}. */
    private volatile RootNode<RowT> root;

    /** Planning context, lazily created by {@link #planningContext()}. */
    private volatile PlanningContext pctx;

    /** Base query context shared with child queries and the planner. */
    private final BaseQueryContext ctx;

    /** Creates the object. */
    public RootQuery(
            String sql,
            SchemaPlus schema,
            Object[] params,
            ExchangeService exch,
            Consumer<Query<RowT>> unregister,
            IgniteLogger log
    ) {
        this(sql, schema, params, new QueryCancel(), exch, unregister, log);
    }

    /** Creates the object. */
    public RootQuery(
            String sql,
            SchemaPlus schema,
            Object[] params,
            QueryCancel cancel,
            ExchangeService exch,
            Consumer<Query<RowT>> unregister,
            IgniteLogger log
    ) {
        super(
                UUID.randomUUID(),
                null,
                cancel,
                exch,
                unregister,
                log
        );

        this.sql = sql;
        this.params = params;

        remotes = new HashSet<>();
        waiting = new HashSet<>();

        ctx = BaseQueryContext.builder()
                .cancel(cancel)
                .frameworkConfig(
                        Frameworks.newConfigBuilder(FRAMEWORK_CONFIG)
                                .defaultSchema(schema)
                                .build()
                )
                .logger(log)
                .build();
    }

    /**
     * Creates the new root that inherits the query parameters from {@code this} query. Is used to execute DML query immediately after
     * (inside) DDL. e.g.: CREATE TABLE MY_TABLE AS SELECT ... FROM ...;
     *
     * @param schema new schema.
     */
    public RootQuery<RowT> childQuery(SchemaPlus schema) {
        return new RootQuery<>(sql, schema, params, cancel, exchangeService, unregister, log);
    }

    /** Returns the base query context. */
    public BaseQueryContext context() {
        return ctx;
    }

    /** Returns the SQL string of this query. */
    public String sql() {
        return sql;
    }

    /** Returns the dynamic parameters of this query. */
    public Object[] parameters() {
        return params;
    }

    /**
     * Starts mapping phase for the query.
     */
    public void mapping() {
        synchronized (mux) {
            if (state == QueryState.CLOSED) {
                throw new IgniteInternalException("The query was cancelled while executing.");
            }

            state = QueryState.MAPPING;
        }
    }

    /**
     * Starts execution phase for the query and setup remote fragments.
     */
    public void run(ExecutionContext<RowT> ctx, MultiStepPlan plan, Node<RowT> root) {
        synchronized (mux) {
            if (state == QueryState.CLOSED) {
                throw new IgniteInternalException("The query was cancelled while executing.");
            }

            RootNode<RowT> rootNode = new RootNode<>(ctx, plan.metadata().rowType(), this::tryClose);
            rootNode.register(root);

            addFragment(new RunningFragment<>(rootNode, ctx));

            this.root = rootNode;

            // Fragment 0 is executed locally; register each remote fragment as
            // pending until the owning node responds.
            for (int i = 1; i < plan.fragments().size(); i++) {
                Fragment fragment = plan.fragments().get(i);
                List<String> nodes = plan.mapping(fragment).nodeIds();

                remotes.addAll(nodes);

                for (String node : nodes) {
                    waiting.add(new RemoteFragmentKey(node, fragment.fragmentId()));
                }
            }

            state = QueryState.EXECUTING;
        }
    }

    /**
     * Can be called multiple times after receive each error at {@link #onResponse(RemoteFragmentKey, Throwable)}.
     */
    @Override
    protected void tryClose() {
        QueryState state0 = null;

        synchronized (mux) {
            if (state == QueryState.CLOSED) {
                return;
            }

            if (state == QueryState.INITED || state == QueryState.PLANNING || state == QueryState.MAPPING) {
                state = QueryState.CLOSED;

                return;
            }

            if (state == QueryState.EXECUTING) {
                state0 = state = QueryState.CLOSING;

                root.closeInternal();
            }

            if (state == QueryState.CLOSING && waiting.isEmpty()) {
                state0 = state = QueryState.CLOSED;
            }
        }

        if (state0 == QueryState.CLOSED) {
            try {
                // Ask every remote node (except ourselves) to close its part of
                // the query; collect failures into a single suppressed chain.
                IgniteInternalException wrpEx = null;

                for (String nodeId : remotes) {
                    try {
                        if (!nodeId.equals(root.context().localNodeId())) {
                            exchangeService.closeQuery(nodeId, id());
                        }
                    } catch (IgniteInternalCheckedException e) {
                        if (wrpEx == null) {
                            wrpEx = new IgniteInternalException("Failed to send cancel message. [nodeId=" + nodeId + ']', e);
                        } else {
                            wrpEx.addSuppressed(e);
                        }
                    }
                }

                if (wrpEx != null) {
                    log.warn("An exception occurred during the query cancel", wrpEx);
                }
            } finally {
                super.tryClose();
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void cancel() {
        cancel.cancel();

        tryClose();
    }

    /** Returns a planning context to prepare this query. */
    public PlanningContext planningContext() {
        synchronized (mux) {
            if (state == QueryState.CLOSED || state == QueryState.CLOSING) {
                throw new IgniteInternalException("The query was cancelled while executing.");
            }

            if (state == QueryState.EXECUTING || state == QueryState.MAPPING) {
                throw new IgniteInternalException("Invalid query flow");
            }

            if (pctx == null) {
                state = QueryState.PLANNING;

                pctx = PlanningContext.builder()
                        .parentContext(ctx)
                        .query(sql)
                        .parameters(params)
                        .build();

                try {
                    cancel.add(() -> pctx.unwrap(CancelFlag.class).requestCancel());
                } catch (QueryCancelledException e) {
                    throw new IgniteInternalException(e.getMessage(), e);
                }
            }

            return pctx;
        }
    }

    /** Returns an iterator over the query results, backed by the root node. */
    public Iterator<RowT> iterator() {
        return root;
    }

    /** {@inheritDoc} */
    @Override
    public void onNodeLeft(String nodeId) {
        List<RemoteFragmentKey> fragments;

        synchronized (mux) {
            fragments = waiting.stream().filter(f -> f.nodeId().equals(nodeId)).collect(Collectors.toList());
        }

        if (!nullOrEmpty(fragments)) {
            IgniteInternalException ex = new IgniteInternalException(
                    "Failed to start query, node left. nodeId=" + nodeId);

            for (RemoteFragmentKey fragment : fragments) {
                onResponse(fragment, ex);
            }
        }
    }

    /** Handles a remote fragment response identified by node and fragment id. */
    public void onResponse(String nodeId, long fragmentId, Throwable error) {
        onResponse(new RemoteFragmentKey(nodeId, fragmentId), error);
    }

    /** Removes the fragment from the waiting set and reacts to its outcome. */
    private void onResponse(RemoteFragmentKey fragment, Throwable error) {
        QueryState state;
        synchronized (mux) {
            waiting.remove(fragment);

            state = this.state;
        }

        if (error != null) {
            onError(error);
        } else if (state == QueryState.CLOSING) {
            tryClose();
        }
    }

    /**
     * The handler that should be called if any error occurs during the query execution.
     *
     * @param error The occurred error.
     */
    public void onError(Throwable error) {
        root.onError(error);

        tryClose();
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return S.toString(RootQuery.class, this);
    }
}
|
Metalhead33-Foundation/WoDDem2
|
ComboBoxIdentifierDelegate.cpp
|
#include "ComboBoxIdentifierDelegate.hpp"
#include <QComboBox>
#include "PointerRole.hpp"
// Delegate that edits cells via a combobox populated from `model`, displaying
// the given `column` of that model.
ComboBoxIdentifierDelegate::ComboBoxIdentifierDelegate(int column, QAbstractTableModel *model, QObject *parent) : QStyledItemDelegate(parent),
    column(column), model(model)
{
}
// Creates the combobox editor. `option` and `index` are unused because the
// editor is configured identically for every cell.
QWidget *ComboBoxIdentifierDelegate::createEditor(QWidget *parent, const QStyleOptionViewItem &option, const QModelIndex &index) const
{
    (void)option;
    (void)index;
    // Create the combobox and populate it from the shared model/column.
    QComboBox *cb = new QComboBox(parent);
    cb->setModel(model);
    cb->setModelColumn(column);
    return cb;
}
// Initialises the combobox editor so its current entry is the one whose
// PointerRole payload matches the current value of the edited index.
void ComboBoxIdentifierDelegate::setEditorData(QWidget *editor, const QModelIndex &index) const
{
    QComboBox *cb = qobject_cast<QComboBox *>(editor);
    Q_ASSERT(cb);
    // findData performs the lookup over the combobox's model; it returns -1
    // (no selection) when the value is not present.
    cb->setCurrentIndex(cb->findData(index.data(MH::PointerRole), MH::PointerRole));
}
// Commits the editor's selection back into the edited model, transferring the
// PointerRole payload of the chosen entry as the cell's edit value.
void ComboBoxIdentifierDelegate::setModelData(QWidget *editor, QAbstractItemModel *model, const QModelIndex &index) const
{
    QComboBox *cb = qobject_cast<QComboBox *>(editor);
    Q_ASSERT(cb);
    model->setData(index, cb->currentData(MH::PointerRole), Qt::EditRole);
}
|
cathal-killeen/ucd-modules
|
static/src/components/Cards/WordCloud.js
|
import React from 'react';
import TagCloud from 'react-tag-cloud';
import randomColor from 'randomcolor';
import { Link } from 'react-router';
export default class WordCloud extends React.Component {
constructor(props) {
super(props);
}
render() {
let word_dict = this.props.wordDict || {};
return (
<TagCloud
style={{
fontFamily: 'sans-serif',
fontSize: 30,
fontWeight: 'bold',
fontStyle: 'italic',
color: () => randomColor({hue:'blue'}),
padding: 5,
width: '100%',
height: '50%'
}}>
{
Object.keys(word_dict).map(word => {
let weight = parseFloat(word_dict[word]);
let fontSize = Math.floor(weight * 80);
return <Link key={word} style={{fontSize: fontSize}} to={`/search?q=${word}`}>{word}</Link>
})
}
</TagCloud>
);
}
}
|
npatel37/dmrgornl
|
src/Models/FeAsBasedScExtended/FeAsBasedScExtended.h
|
/*
Copyright (c) 2009, UT-Battelle, LLC
All rights reserved
[DMRG++, Version 2.0.0]
[by G.A., Oak Ridge National Laboratory]
UT Battelle Open Source Software License 11242008
OPEN SOURCE LICENSE
Subject to the conditions of this License, each
contributor to this software hereby grants, free of
charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), a
perpetual, worldwide, non-exclusive, no-charge,
royalty-free, irrevocable copyright license to use, copy,
modify, merge, publish, distribute, and/or sublicense
copies of the Software.
1. Redistributions of Software must retain the above
copyright and license notices, this list of conditions,
and the following disclaimer. Changes or modifications
to, or derivative works of, the Software should be noted
with comments and the contributor and organization's
name.
2. Neither the names of UT-Battelle, LLC or the
Department of Energy nor the names of the Software
contributors may be used to endorse or promote products
derived from this software without specific prior written
permission of UT-Battelle.
3. The software and the end-user documentation included
with the redistribution, with or without modification,
must include the following acknowledgment:
"This product includes software produced by UT-Battelle,
LLC under Contract No. DE-AC05-00OR22725 with the
Department of Energy."
*********************************************************
DISCLAIMER
THE SOFTWARE IS SUPPLIED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER, CONTRIBUTORS, UNITED STATES GOVERNMENT,
OR THE UNITED STATES DEPARTMENT OF ENERGY BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED
STATES DEPARTMENT OF ENERGY, NOR THE COPYRIGHT OWNER, NOR
ANY OF THEIR EMPLOYEES, REPRESENTS THAT THE USE OF ANY
INFORMATION, DATA, APPARATUS, PRODUCT, OR PROCESS
DISCLOSED WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
*********************************************************
*/
/** \ingroup DMRG */
/*@{*/
/*! \file FeBasedScExtedned.h
*
* An implementation of a Hubbard model for Fe-based superconductors
* to use with the DmrgSolver
* This extends the FeAsBasedSc model to include JNN and JNNN couplings
* FIXME: Merge into FeAsBasedSc
*
*/
#ifndef FEAS_BASED_SC_EX
#define FEAS_BASED_SC_EX
#include "../Models/FeAsModel/ModelFeBasedSc.h"
#include "LinkProductFeAsExtended.h"
#include "ModelCommon.h"
namespace Dmrg {
template<typename ModelBaseType>
class FeAsBasedScExtended : public ModelBaseType {

public:

	// Convenience typedefs lifted from the base model, the wrapped
	// ModelFeBasedSc, and the model-helper machinery.
	typedef typename ModelBaseType::VectorSizeType VectorSizeType;
	typedef ModelFeBasedSc<ModelBaseType> ModelFeAsType;
	typedef typename ModelFeAsType::HilbertState HilbertState;
	typedef typename ModelFeAsType::HilbertBasisType HilbertBasisType;
	typedef typename ModelBaseType::ModelHelperType ModelHelperType;
	typedef typename ModelBaseType::GeometryType GeometryType;
	typedef typename ModelBaseType::LeftRightSuperType LeftRightSuperType;
	typedef typename ModelBaseType::LinkProductStructType LinkProductStructType;
	typedef typename ModelBaseType::LinkType LinkType;
	typedef typename ModelHelperType::OperatorsType OperatorsType;
	typedef typename OperatorsType::OperatorType OperatorType;
	typedef typename PsimagLite::Vector<OperatorType>::Type VectorOperatorType;
	typedef typename ModelHelperType::RealType RealType;
	typedef TargetQuantumElectrons<RealType> TargetQuantumElectronsType;
	typedef typename ModelHelperType::SparseMatrixType SparseMatrixType;
	typedef typename SparseMatrixType::value_type SparseElementType;
	typedef typename OperatorType::Su2RelatedType Su2RelatedType;
	typedef LinkProductFeAsExtended<ModelHelperType> LinkProductType;
	typedef ModelCommon<ModelBaseType,LinkProductType> ModelCommonType;
	typedef typename ModelBaseType::MyBasis MyBasis;
	typedef typename ModelBaseType::BasisWithOperatorsType MyBasisWithOperators;
	typedef typename MyBasis::SymmetryElectronsSzType SymmetryElectronsSzType;
	typedef typename MyBasis::BlockType BlockType;
	typedef typename ModelBaseType::SolverParamsType SolverParamsType;
	typedef typename ModelBaseType::VectorType VectorType;
	typedef typename ModelBaseType::InputValidatorType InputValidatorType;

	static const SizeType SPIN_UP = ModelFeAsType::SPIN_UP;
	static const SizeType SPIN_DOWN = ModelFeAsType::SPIN_DOWN;

	// Decorator around ModelFeBasedSc that additionally exposes the
	// spin operators S^+_i and S^z_i per site (the "extended" part).
	FeAsBasedScExtended(const SolverParamsType& solverParams,
	                    InputValidatorType& io,
	                    GeometryType const &geometry)
	    : ModelBaseType(io,new ModelCommonType(solverParams,geometry)),
	      modelParameters_(io),
	      geometry_(geometry),
	      modelFeAs_(solverParams,io,geometry),
	      orbitals_(modelParameters_.orbitals)
	{}

	// Report this object's memory layout to MemResolv (serialization aid).
	// Walks the members in declaration order: vptr region, modelParameters_,
	// reference to geometry_, modelFeAs_, and finally orbitals_.
	SizeType memResolv(PsimagLite::MemResolv& mres,
	                   SizeType,
	                   PsimagLite::String msg = "") const
	{
		PsimagLite::String str = msg;
		str += "FeAsBasedScExtended";

		// Everything between `this` and the first data member is the vtable
		// pointer area.
		const char* start = reinterpret_cast<const char *>(this);
		const char* end = reinterpret_cast<const char *>(&modelParameters_);
		SizeType total = end - start;
		mres.push(PsimagLite::MemResolv::MEMORY_TEXTPTR,
		          total,
		          start,
		          msg + " FeAsBasedScExtended vptr");

		start = end;
		end = start + PsimagLite::MemResolv::SIZEOF_HEAPPTR;
		total += mres.memResolv(&modelParameters_, end-start, str + " modelParameters");

		// geometry_ is a reference; record it as a heap reference and let
		// MemResolv resolve the referred-to object separately.
		start = end;
		end = reinterpret_cast<const char *>(&modelFeAs_);
		total += (end - start);
		mres.push(PsimagLite::MemResolv::MEMORY_HEAPPTR,
		          PsimagLite::MemResolv::SIZEOF_HEAPREF,
		          start,
		          str + " ref to geometry");
		mres.memResolv(&geometry_, 0, str + " geometry");

		start = end;
		end = reinterpret_cast<const char *>(&orbitals_);
		total += mres.memResolv(&modelFeAs_, end-start, str + " modelFeAs");

		total += mres.memResolv(&orbitals_,
		                        sizeof(*this) - total,
		                        str + " orbitals");

		return total;
	}

	// Delegate plain queries to the wrapped FeAs model.
	SizeType hilbertSize(SizeType site) const { return modelFeAs_.hilbertSize(site); }

	void print(std::ostream& os) const { modelFeAs_.print(os); }

	//! find creation operator matrices for (i,sigma) in the natural basis,
	//! find quantum numbers and number of electrons
	//! for each state in the basis
	void setNaturalBasis(typename PsimagLite::Vector<OperatorType> ::Type&creationMatrix,
	                     SparseMatrixType &hamiltonian,
	                     SymmetryElectronsSzType &q,
	                     BlockType const &block,
	                     const RealType& time) const
	{
		blockIsSize1OrThrow(block);

		modelFeAs_.setNaturalBasis(creationMatrix,hamiltonian,q,block,time);

		// add S^+_i to creationMatrix
		setSplus(creationMatrix,block);

		// add S^z_i to creationMatrix
		setSz(creationMatrix,block);

		// add J_{ij} S^+_i S^-_j + S^-_i S^+_j to Hamiltonia
		addSplusSminus(hamiltonian,creationMatrix,block);

		// add J_{ij} S^z_i S^z_j to Hamiltonian
		addSzSz(hamiltonian,creationMatrix,block);
	}

	//! set creation matrices for sites in block
	// Appends S^+ and S^z after the base model's operators, so the spin
	// operators live at indices 2*orbitals_ and 2*orbitals_+1 respectively
	// (assumes modelFeAs_ fills the first 2*orbitals_ slots — see
	// naturalOperator below, which relies on this layout).
	void setOperatorMatrices(
	        typename PsimagLite::Vector<OperatorType> ::Type&creationMatrix,
	        BlockType const &block) const
	{
		blockIsSize1OrThrow(block);
		modelFeAs_.setOperatorMatrices(creationMatrix,block);

		// add S^+_i to creationMatrix
		setSplus(creationMatrix,block);

		// add S^z_i to creationMatrix
		setSz(creationMatrix,block);
	}

	// Return a named operator for observables: the spin operators handled
	// here, anything else forwarded to the wrapped FeAs model.
	OperatorType naturalOperator(const PsimagLite::String& what,
	                             SizeType site,
	                             SizeType dof) const
	{
		BlockType block;
		block.resize(1);
		block[0]=site;
		typename PsimagLite::Vector<OperatorType>::Type creationMatrix;
		setOperatorMatrices(creationMatrix,block);

		if (what=="naturalSz") {
			VectorSizeType allowed(1,0);
			ModelBaseType::checkNaturalOperatorDof(dof,what,allowed);
			// S^z sits right after S^+ in the operator list.
			SizeType x = 2*orbitals_+1;
			return creationMatrix[x];
		}

		if (what=="naturalSplus") {
			VectorSizeType allowed(1,0);
			ModelBaseType::checkNaturalOperatorDof(dof,what,allowed);
			SizeType x = 2*orbitals_;
			return creationMatrix[x];
		}

		// S^- is obtained as the conjugate of S^+.
		if (what=="naturalSminus") { // delta = c^\dagger * c^dagger
			VectorSizeType allowed(1,0);
			ModelBaseType::checkNaturalOperatorDof(dof,what,allowed);
			SizeType x = 2*orbitals_;
			creationMatrix[x].conjugate();
			return creationMatrix[x];
		}

		return modelFeAs_.naturalOperator(what,site,dof);
	}

	//! find all states in the natural basis for a block of n sites
	//! N.B.: HAS BEEN CHANGED TO ACCOMODATE FOR MULTIPLE BANDS
	void setNaturalBasis(typename PsimagLite::Vector<HilbertState> ::Type&basis,
	                     typename PsimagLite::Vector<SizeType>::Type& q,
	                     const typename PsimagLite::Vector<SizeType>::Type& block) const
	{
		modelFeAs_.setNaturalBasis(basis,q,block);
	}

	// Electron counts per basis state; delegated to the wrapped model.
	void findElectrons(typename PsimagLite::Vector<SizeType>::Type& electrons,
	                   const typename PsimagLite::Vector<HilbertState>::Type& basis,
	                   SizeType site) const
	{
		modelFeAs_.findElectrons(electrons,basis,site);
	}

	virtual void addDiagonalsInNaturalBasis(SparseMatrixType &hmatrix,
	                                        const VectorOperatorType& cm,
	                                        const BlockType& block,
	                                        RealType time,
	                                        RealType factorForDiagonals=1.0) const
	{
		modelFeAs_.addDiagonalsInNaturalBasis(hmatrix,cm,block,time,factorForDiagonals);
	}

	virtual const TargetQuantumElectronsType& targetQuantum() const
	{
		return modelFeAs_.targetQuantum();
	}

private:

	// add S^+_i to creationMatrix
	// S^+ = sum_orb c^dagger_{orb,up} c_{orb,down}; the Su2Related data
	// records how this operator transforms under SU(2).
	void setSplus(
	        typename PsimagLite::Vector<OperatorType> ::Type&creationMatrix,
	        const BlockType& block) const
	{
		SparseMatrixType m;
		cDaggerC(m,creationMatrix,block,1.0,SPIN_UP,SPIN_DOWN);
		Su2RelatedType su2related;
		SizeType offset = 2*orbitals_;
		su2related.source.push_back(offset);
		su2related.source.push_back(offset+1);
		su2related.source.push_back(offset);
		su2related.transpose.push_back(-1);
		su2related.transpose.push_back(-1);
		su2related.transpose.push_back(1);
		su2related.offset = 1;
		OperatorType sPlus(m,1,typename OperatorType::PairType(2,2),-1,
		                   su2related);
		creationMatrix.push_back(sPlus);
	}

	// add S^z_i to creationMatrix
	// S^z = (n_up - n_down)/2, summed over orbitals.
	void setSz(
	        typename PsimagLite::Vector<OperatorType> ::Type&creationMatrix,
	        const BlockType& block) const
	{
		SparseMatrixType m1,m2;
		cDaggerC(m1,creationMatrix,block,0.5,SPIN_UP,SPIN_UP);
		cDaggerC(m2,creationMatrix,block,-0.5,SPIN_DOWN,SPIN_DOWN);
		Su2RelatedType su2related2;
		SparseMatrixType m = m1;
		m += m2;
		OperatorType sz(m,1,typename OperatorType::PairType(2,1),
		                1.0/sqrt(2.0),su2related2);
		creationMatrix.push_back(sz);
	}

	// add S^+_i to creationMatrix
	// Accumulates sum = value * sum_orb c^dagger_{orb,spin1} c_{orb,spin2},
	// using the per-orbital creation operators already in creationMatrix.
	void cDaggerC(
	        SparseMatrixType& sum,
	        const typename PsimagLite::Vector<OperatorType> ::Type&creationMatrix,
	        const BlockType&,
	        RealType value,
	        SizeType spin1,
	        SizeType spin2) const
	{
		SparseMatrixType tmpMatrix,tmpMatrix2;
		for (SizeType orbital=0;orbital<orbitals_;orbital++) {
			// tmpMatrix2 = c_{orbital,spin2} (annihilation via conj. transpose)
			transposeConjugate(tmpMatrix2,
			                   creationMatrix[orbital+spin2*orbitals_].data);
			multiply(tmpMatrix,
			         creationMatrix[orbital+spin1*orbitals_].data,
			         tmpMatrix2);
			if (orbital == 0) sum = value*tmpMatrix;
			else sum += value*tmpMatrix;
		}
	}

	// add J_{ij} S^+_i S^-_j + S^-_i S^+_j to Hamiltonia
	// Intentionally a no-op: inter-site exchange terms are handled by the
	// connection machinery (LinkProductFeAsExtended), not here.
	void addSplusSminus(
	        SparseMatrixType &,
	        const typename PsimagLite::Vector<OperatorType> ::Type&,
	        const BlockType&) const
	{
		// nothing if block.size == 1
	}

	// add J_{ij} S^z_i S^z_j to Hamiltonian
	// Intentionally a no-op, same reason as addSplusSminus.
	void addSzSz(
	        SparseMatrixType&,
	        const typename PsimagLite::Vector<OperatorType> ::Type&,
	        const BlockType&) const
	{
		// nothing if block.size == 1
	}

	// This model only supports single-site blocks; enforce that invariant.
	void blockIsSize1OrThrow(const BlockType& block) const
	{
		if (block.size()==1) return;
		throw PsimagLite::RuntimeError("FeAsBasedExtended:: blocks must be of size 1\n");
	}

	//serializr start class FeAsBasedScExtended
	//serializr vptr
	//serializr normal modelParameters_
	ParametersModelFeAs<RealType>  modelParameters_;
	//serializr ref geometry_ start
	const GeometryType& geometry_;
	//serializr normal modelFeAs_
	ModelFeAsType modelFeAs_;
	//serializr normal orbitals_
	SizeType orbitals_;
}; //class FeAsBasedScExtended
} // namespace Dmrg
/*@}*/
#endif // FEAS_BASED_SC_EX
|
venkyhegde/ITIS-5180-mobile-application-development
|
In-Class/In-Class-01/IC01/src/edu/uncc/cci/mobileapps/MainPart4.java
|
<gh_stars>0
package edu.uncc.cci.mobileapps;
//import java.util.HashMap;
import java.util.HashMap;
public class MainPart4{
    /*
     Question 4:
     Data.items holds records formatted "Name,ID,Price"; Data.shoppingCart holds
     records formatted "ID,quantity".  Print a receipt of the form:

         ID   Name      Quantity   Price * Quantity
         123  Tomatoes  10         30.0
         ...
         Total Bill: $400

     A HashMap keyed by item ID is used so each item's quantity is found in O(1)
     while scanning the item list once.
    */
    public static void main(String[] args) {
        // Parse the cart once into: item ID -> quantity.
        HashMap<Integer, Integer> shoppingCart = new HashMap<>();
        for (String record : Data.shoppingCart) {
            String[] fields = record.split(",");
            shoppingCart.put(Integer.parseInt(fields[0].trim()),
                             Integer.parseInt(fields[1].trim()));
        }
        // Print the header exactly once (the previous version re-printed it for
        // every matching item) and accumulate the total bill, which the previous
        // version never computed even though the required output ends with it.
        System.out.println("ID \t Name \t Quantity \t Price * Quantity");
        double totalBill = 0.0;
        for (String record : Data.items) {
            String[] fields = record.split(",");
            StoreItem item = new StoreItem();
            item.setName(fields[0]);
            item.setId(Integer.parseInt(fields[1].trim()));
            item.setPrice(Double.parseDouble(fields[2]));
            Integer quantity = shoppingCart.get(item.getId());
            if (quantity != null) { // item is in the cart
                double lineTotal = quantity * item.getPrice();
                totalBill += lineTotal;
                System.out.println(item.getId() + "\t" + item.getName() + "\t"
                        + quantity + "\t" + lineTotal);
            }
        }
        System.out.println("Total Bill: $" + totalBill);
    }
}
|
mousedogpig/solr5.5.4
|
lucene/spatial/src/java/org/apache/lucene/spatial/util/GeoUtils.java
|
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.spatial.util;
import java.util.ArrayList;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.PI;
import static java.lang.Math.abs;
import static org.apache.lucene.util.SloppyMath.asin;
import static org.apache.lucene.util.SloppyMath.cos;
import static org.apache.lucene.util.SloppyMath.sin;
import static org.apache.lucene.util.SloppyMath.TO_DEGREES;
import static org.apache.lucene.util.SloppyMath.TO_RADIANS;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MAX_LAT_RADIANS;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MAX_LON_RADIANS;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MIN_LAT_RADIANS;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MIN_LON_RADIANS;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.pointFromLonLatBearingGreatCircle;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.SEMIMAJOR_AXIS;
/**
* Basic reusable geo-spatial utility methods
*
* @lucene.experimental
*/
public final class GeoUtils {
  /** Minimum longitude value. */
  public static final double MIN_LON_INCL = -180.0D;

  /** Maximum longitude value. */
  public static final double MAX_LON_INCL = 180.0D;

  /** Minimum latitude value. */
  public static final double MIN_LAT_INCL = -90.0D;

  /** Maximum latitude value. */
  public static final double MAX_LAT_INCL = 90.0D;

  // No instance:
  private GeoUtils() {
  }

  /** validates latitude value is within standard +/-90 coordinate bounds */
  public static boolean isValidLat(double lat) {
    return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
  }

  /** validates longitude value is within standard +/-180 coordinate bounds */
  public static boolean isValidLon(double lon) {
    return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
  }

  /** Puts longitude in range of -180 to +180. */
  public static double normalizeLon(double lon_deg) {
    if (lon_deg >= -180 && lon_deg <= 180) {
      return lon_deg; //common case, and avoids slight double precision shifting
    }
    double off = (lon_deg + 180) % 360;
    if (off < 0) {
      return 180 + off;
    } else if (off == 0 && lon_deg > 0) {
      // e.g. 540 normalizes to +180, not -180
      return 180;
    } else {
      return -180 + off;
    }
  }

  /** Puts latitude in range of -90 to 90. */
  public static double normalizeLat(double lat_deg) {
    if (lat_deg >= -90 && lat_deg <= 90) {
      return lat_deg; //common case, and avoids slight double precision shifting
    }
    double off = abs((lat_deg + 90) % 360);
    return (off <= 180 ? off : 360-off) - 90;
  }

  /**
   * Converts a given circle (defined as a point/radius) to an approximated line-segment polygon
   *
   * @param lon longitudinal center of circle (in degrees)
   * @param lat latitudinal center of circle (in degrees)
   * @param radiusMeters distance radius of circle (in meters)
   * @return a list of lon/lat points representing the circle
   */
  public static ArrayList<double[]> circleToPoly(final double lon, final double lat, final double radiusMeters) {
    double angle;
    // a little under-sampling (to limit the number of polygonal points): using archimedes estimation of pi
    final int sides = 25;
    ArrayList<double[]> geometry = new ArrayList<>();
    double[] lons = new double[sides];
    double[] lats = new double[sides];

    double[] pt = new double[2];
    final int sidesLen = sides - 1;
    for (int i = 0; i < sidesLen; ++i) {
      // Use floating-point division here: the previous (i * 360 / sides)
      // truncated to an int, spacing the polygon vertices unevenly.
      angle = (i * 360.0) / sides;
      pt = pointFromLonLatBearingGreatCircle(lon, lat, angle, radiusMeters, pt);
      lons[i] = pt[0];
      lats[i] = pt[1];
    }
    // close the poly by repeating the first vertex as the last one
    lons[sidesLen] = lons[0];
    lats[sidesLen] = lats[0];
    geometry.add(lons);
    geometry.add(lats);

    return geometry;
  }

  /** Compute Bounding Box for a circle using WGS-84 parameters */
  public static GeoRect circleToBBox(final double centerLon, final double centerLat, final double radiusMeters) {
    final double radLat = TO_RADIANS * centerLat;
    final double radLon = TO_RADIANS * centerLon;
    // angular distance subtended by the radius on a sphere of SEMIMAJOR_AXIS
    double radDistance = radiusMeters / SEMIMAJOR_AXIS;
    double minLat = radLat - radDistance;
    double maxLat = radLat + radDistance;
    double minLon;
    double maxLon;

    if (minLat > MIN_LAT_RADIANS && maxLat < MAX_LAT_RADIANS) {
      double deltaLon = asin(sin(radDistance) / cos(radLat));
      minLon = radLon - deltaLon;
      if (minLon < MIN_LON_RADIANS) {
        minLon += 2d * PI; // wrap across the date line
      }
      maxLon = radLon + deltaLon;
      if (maxLon > MAX_LON_RADIANS) {
        maxLon -= 2d * PI; // wrap across the date line
      }
    } else {
      // a pole is within the distance: latitude clamps, longitude spans the globe
      minLat = max(minLat, MIN_LAT_RADIANS);
      maxLat = min(maxLat, MAX_LAT_RADIANS);
      minLon = MIN_LON_RADIANS;
      maxLon = MAX_LON_RADIANS;
    }

    return new GeoRect(TO_DEGREES * minLon, TO_DEGREES * maxLon, TO_DEGREES * minLat, TO_DEGREES * maxLat);
  }

  /** Compute Bounding Box for a polygon using WGS-84 parameters */
  public static GeoRect polyToBBox(double[] polyLons, double[] polyLats) {
    if (polyLons.length != polyLats.length) {
      throw new IllegalArgumentException("polyLons and polyLats must be equal length");
    }

    double minLon = Double.POSITIVE_INFINITY;
    double maxLon = Double.NEGATIVE_INFINITY;
    double minLat = Double.POSITIVE_INFINITY;
    double maxLat = Double.NEGATIVE_INFINITY;

    for (int i=0;i<polyLats.length;i++) {
      if (GeoUtils.isValidLon(polyLons[i]) == false) {
        throw new IllegalArgumentException("invalid polyLons[" + i + "]=" + polyLons[i]);
      }
      if (GeoUtils.isValidLat(polyLats[i]) == false) {
        throw new IllegalArgumentException("invalid polyLats[" + i + "]=" + polyLats[i]);
      }
      minLon = min(polyLons[i], minLon);
      maxLon = max(polyLons[i], maxLon);
      minLat = min(polyLats[i], minLat);
      maxLat = max(polyLats[i], maxLat);
    }
    // expand bounding box by TOLERANCE factor to handle round-off error
    return new GeoRect(max(minLon - TOLERANCE, MIN_LON_INCL), min(maxLon + TOLERANCE, MAX_LON_INCL),
                       max(minLat - TOLERANCE, MIN_LAT_INCL), min(maxLat + TOLERANCE, MAX_LAT_INCL));
  }
}
|
HawxChen/barrelfishOS
|
lib/newlib/libgloss/or1k/timer.c
|
<filename>lib/newlib/libgloss/or1k/timer.c
/* timer.c -- tick timer functions for OpenRISC 1000.
*
* Copyright (c) 2011, 2014 Authors
*
* Contributor <NAME> <<EMAIL>>
* Contributor <NAME> <<EMAIL>>
*
* The authors hereby grant permission to use, copy, modify, distribute,
* and license this software and its documentation for any purpose, provided
* that existing copyright notices are retained in all copies and that this
* notice is included verbatim in any distributions. No written agreement,
* license, or royalty fee is required for any of the authorized uses.
* Modifications to this software may be copyrighted by their authors
* and need not follow the licensing terms described here, provided that
* the new terms are clearly indicated on the first page of each file where
* they apply.
*/
#include "include/or1k-support.h"
#include "include/or1k-sprs.h"
#include "or1k-internals.h"
#include "board.h"
/* --------------------------------------------------------------------------*/
/*!Tick timer interrupt handler
Increment timer ticks counter, reload TTMR
*/
/* --------------------------------------------------------------------------*/
void
_or1k_timer_interrupt_handler(void)
{
	/* Count the tick that just fired. */
	OR1K_REENT.or1k_timer_ticks++;

	/* Re-arm TTMR: keep the interrupt enabled, force restart mode, and
	 * clear the pending-interrupt flag (IP) to acknowledge this tick. */
	uint32_t ttmr = or1k_mfspr(OR1K_SPR_TICK_TTMR_ADDR);
	ttmr = OR1K_SPR_TICK_TTMR_IE_SET(ttmr, 1);
	ttmr = OR1K_SPR_TICK_TTMR_MODE_SET(ttmr, OR1K_SPR_TICK_TTMR_MODE_RESTART);
	ttmr = OR1K_SPR_TICK_TTMR_IP_SET(ttmr, 0);
	or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, ttmr);
}
/* --------------------------------------------------------------------------*/
/*!Enable tick timer
Install handler, calculate TTMR period, reset tick counter
@param[in] hz Rate at which to trigger timer ticks */
/* --------------------------------------------------------------------------*/
int
or1k_timer_init(unsigned int hz)
{
	/* Bail out if this CPU does not implement the tick timer (UPR.TTP). */
	uint32_t upr = or1k_mfspr(OR1K_SPR_SYS_UPR_ADDR);
	if (OR1K_SPR_SYS_UPR_TTP_GET(upr) == 0) {
		return -1;
	}

	/* Set this, for easy access when reloading */
	/* Period in clock cycles for the requested rate, clipped to the
	 * width of the TTMR.TP field. */
	uint32_t period = (_or1k_board_clk_freq/hz) & OR1K_SPR_TICK_TTMR_TP_MASK;
	OR1K_REENT.or1k_timer_period = period;
	/* Only the period is written here; IE and mode are set later by
	 * or1k_timer_enable(), so the timer stays off until then. */
	or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, period);

	/* Reset timer tick counter */
	OR1K_REENT.or1k_timer_ticks = 0;

	/* Install handler (0x5 is the tick-timer exception vector) */
	or1k_exception_handler_add(0x5, _or1k_timer_interrupt_handler);

	/* Default mode: auto-restart the counter on every tick. */
	OR1K_REENT.or1k_timer_mode = OR1K_SPR_TICK_TTMR_MODE_RESTART;

	/* Reset counter register */
	or1k_mtspr(OR1K_SPR_TICK_TTCR_ADDR, 0);

	return 0;
}
/* Change the tick period to match a new rate (in Hz) while preserving the
 * rest of the TTMR configuration (mode, interrupt-enable). */
void
or1k_timer_set_period(uint32_t hz)
{
	/* Translate the requested rate into clock cycles, clipped to the
	 * width of the TTMR.TP field. */
	uint32_t cycles = (_or1k_board_clk_freq / hz) & OR1K_SPR_TICK_TTMR_TP_MASK;

	/* Read-modify-write TTMR so only the period field changes. */
	uint32_t mr = or1k_mfspr(OR1K_SPR_TICK_TTMR_ADDR);
	or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, OR1K_SPR_TICK_TTMR_TP_SET(mr, cycles));

	/* Remember the period for reloads by the interrupt handler path. */
	OR1K_REENT.or1k_timer_period = cycles;
}
/* Replace the tick-timer exception handler (vector 0x5) with a user handler.
 * The handler is responsible for re-arming TTMR itself; compare
 * _or1k_timer_interrupt_handler above. */
void
or1k_timer_set_handler(void (*handler)(void))
{
	or1k_exception_handler_add(0x5, handler);
}
/* Select the timer mode used when the timer runs (e.g. restart or disabled).
 * The hardware register is only updated immediately if the timer is
 * currently active; otherwise the stored mode is applied on the next
 * or1k_timer_enable(). */
void
or1k_timer_set_mode(uint32_t mode)
{
	// Store mode in variable
	OR1K_REENT.or1k_timer_mode = mode;

	uint32_t ttmr = or1k_mfspr(OR1K_SPR_TICK_TTMR_ADDR);

	// If the timer is currently running, we also change the mode
	if (OR1K_SPR_TICK_TTMR_MODE_GET(ttmr) != 0) {
		ttmr = OR1K_SPR_TICK_TTMR_MODE_SET(ttmr, mode);
		or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, ttmr);
	}
}
/* --------------------------------------------------------------------------*/
/*!Enable tick timer
Enable timer interrupt, install handler, load TTMR
*/
/* --------------------------------------------------------------------------*/
void
or1k_timer_enable(void)
{
	/* Turn the timer on: enable its interrupt and apply the stored mode. */
	uint32_t ttmr = or1k_mfspr(OR1K_SPR_TICK_TTMR_ADDR);
	ttmr = OR1K_SPR_TICK_TTMR_IE_SET(ttmr, 1);
	ttmr = OR1K_SPR_TICK_TTMR_MODE_SET(ttmr, OR1K_REENT.or1k_timer_mode);
	or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, ttmr);

	/* Unmask the tick-timer exception in the supervision register (SR.TEE). */
	uint32_t sr = or1k_mfspr(OR1K_SPR_SYS_SR_ADDR);
	sr = OR1K_SPR_SYS_SR_TEE_SET(sr, 1);
	or1k_mtspr(OR1K_SPR_SYS_SR_ADDR, sr);
}
/* --------------------------------------------------------------------------*/
/*!Disable tick timer
Disable timer interrupt in SR
*/
/* --------------------------------------------------------------------------*/
uint32_t
or1k_timer_disable(void)
{
	/* Mask the tick-timer exception (SR.TEE = 0). */
	uint32_t oldsr = or1k_mfspr(OR1K_SPR_SYS_SR_ADDR);
	uint32_t sr = OR1K_SPR_SYS_SR_TEE_SET(oldsr, 0);
	or1k_mtspr(OR1K_SPR_SYS_SR_ADDR, sr);

	/* Return the previous TEE value so the caller can pass it to
	 * or1k_timer_restore() later. */
	return OR1K_SPR_SYS_SR_TEE_GET(oldsr);
}
/* Restore the tick-timer exception enable bit (SR.TEE) to the value that a
 * matching or1k_timer_disable() call returned.
 *
 * Bug fix: the previous implementation ignored its sr_tee argument and
 * unconditionally wrote 1, re-enabling the timer interrupt even when it had
 * been disabled before the disable/restore pair. */
void
or1k_timer_restore(uint32_t sr_tee)
{
	uint32_t sr = or1k_mfspr(OR1K_SPR_SYS_SR_ADDR);
	sr = OR1K_SPR_SYS_SR_TEE_SET(sr, sr_tee);
	or1k_mtspr(OR1K_SPR_SYS_SR_ADDR, sr);
}
/* Stop the timer by switching TTMR into disabled mode.  The period, tick
 * count, and TTCR are left untouched so counting can resume later via
 * or1k_timer_enable(). */
void
or1k_timer_pause(void)
{
	uint32_t ttmr = or1k_mfspr(OR1K_SPR_TICK_TTMR_ADDR);
	ttmr = OR1K_SPR_TICK_TTMR_MODE_SET(ttmr, OR1K_SPR_TICK_TTMR_MODE_DISABLE);
	or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, ttmr);
}
/* Clear any pending tick interrupt (TTMR.IP) and zero the counter register
 * (TTCR) so the current period starts over. */
void
or1k_timer_reset(void)
{
	uint32_t ttmr = or1k_mfspr(OR1K_SPR_TICK_TTMR_ADDR);
	ttmr = OR1K_SPR_TICK_TTMR_IP_SET(ttmr, 0);
	or1k_mtspr(OR1K_SPR_TICK_TTMR_ADDR, ttmr);
	or1k_mtspr(OR1K_SPR_TICK_TTCR_ADDR, 0);
}
/* --------------------------------------------------------------------------*/
/*!Get tick timer
Return value of tick timer
*/
/* --------------------------------------------------------------------------*/
/* Number of timer interrupts counted since init (or the last reset_ticks). */
unsigned long
or1k_timer_get_ticks(void)
{
	return OR1K_REENT.or1k_timer_ticks;
}
/* --------------------------------------------------------------------------*/
/*!Reset tick timer
Clear value of tick timer
*/
/* --------------------------------------------------------------------------*/
/* Zero the software tick counter; the hardware timer keeps running. */
void
or1k_timer_reset_ticks(void)
{
	OR1K_REENT.or1k_timer_ticks = 0;
}
|
return/BeOSSampleCode
|
open_gl/3Dlife-Voodoo/lifeApp.h
|
<gh_stars>1-10
/*
Copyright 1999, Be Incorporated. All Rights Reserved.
This file may be used under the terms of the Be Sample Code License.
*/
#include <Application.h>
#include "lifeWin.h"
// BeOS application object for the 3D life OpenGL demo.
class lifeApp : public BApplication
{
	public:
		lifeApp();
		// Invoked by the system when the user requests the About box.
		void AboutRequested();
		// App-level message dispatch hook. //voodoo
		void MessageReceived(BMessage *msg); //voodoo

	private:
		lifeWin *mw; // presumably the main window -- TODO confirm in lifeWin.h
};
|
ArjunAtlast/js-datastructure
|
dist/ds/abstract/abstract-list.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const abstract_collection_1 = require("./abstract-collection");
/**
An abstract implementation of List interface
*/
/**
 * An abstract implementation of the List interface, backed by the `_store`
 * array inherited from AbstractCollection.
 */
class AbstractList extends abstract_collection_1.AbstractCollection {
    constructor(...args) {
        super(...args);
    }
    /**
     * Add an item; if an index is given, insert it there, otherwise
     * delegate to the superclass (append).
     * @returns true if the list changed
     */
    add(item, index) {
        if (index !== undefined) {
            // splice returns an array (always truthy), so this is always true
            return !!this._store.splice(index, 0, item);
        }
        else {
            return super.add(item);
        }
    }
    /**
     * Add several items; if an index is given, insert them there, otherwise
     * delegate to the superclass.
     * NOTE(review): uses loose `!=` (so null behaves like undefined) while
     * add() uses strict `!==` — confirm which comparison is intended.
     * @returns true if the list changed
     */
    addAll(items, index) {
        if (index != undefined) {
            return !!this._store.splice(index, 0, ...items);
        }
        else {
            return super.addAll(items);
        }
    }
    /**
     * Read an item from the list
     * @example
     * //list contains [1,2,3]
     * console.log(list.get(2));
     * //Output: 3
     */
    get(index) {
        return this._store[index];
    }
    /**
     * Returns the index of the first occurrence of the specified element in this list, or -1 if this list does not contain the element
     * @example
     * //list contains [1,2,3,2,4]
     * console.log(list.indexOf(2));
     * //Output: 1
     */
    indexOf(item) {
        return this._store.indexOf(item);
    }
    /**
     * Returns the index of last occurrence of the element, or -1 if absent.
     * @example
     * //list contains [1,2,3,2,4]
     * console.log(list.lastIndexOf(2));
     * //Output: 3
     */
    lastIndexOf(item) {
        return this._store.lastIndexOf(item);
    }
    /**
     * Returns the median of this list. If the list is empty returns undefined.
     * In case of two medians the later one is preferred.
     * @example
     * //list contains [1, 2, 3, 4]
     * list.median(); //returns 3
     */
    median() {
        return this.get(Math.floor(this.size() / 2));
    }
    /**
     * Removes the element at the specified position in this list.
     * @example
     * //list contains [1,2,3]
     * console.log(list.removeAt(1)); //[1,3]
     * //Output: 2
     */
    removeAt(index) {
        return this._store.splice(index, 1)[0];
    }
    /**
     * Removes from this list all of the elements whose index is between fromIndex, inclusive, and toIndex, exclusive.
     * @example
     * //list contains [1,2,3,4,5,6]
     * console.log(list.removeRange(1,4)); //[1,5,6]
     * //Output: [2,3,4]
     */
    removeRange(fromIndex, toIndex) {
        return this._store.splice(fromIndex, (toIndex - fromIndex));
    }
    /**
     * Replaces the element at the specified position in this list with the specified element
     * @returns the element previously at that position
     * @example
     * //list contains [1,2,3]
     * console.log(list.set(1,2.5)); //[1,2.5,3]
     * //Output: 2
     */
    set(index, item) {
        return this._store.splice(index, 1, item)[0];
    }
    /**
     * Sorts this list in place according to the compareFn.
     * compareFn is a function which should return a number value (-ve, zero, or +ve).
     * x is considered smaller than y if the compareFn(x,y) is negetive
     * @returns this list, for chaining
     * @example
     * //list contains [1,8,6,2,7,3,9,11]
     * list.sort(function(x,y) {
     *   return x-y;
     * }); // [1, 2, 3, 6, 7, 8, 9, 11]
     *
     */
    sort(compareFn) {
        this._store.sort(compareFn);
        return this;
    }
    /**
     * Returns a view of the portion of this list between the specified fromIndex, inclusive, and toIndex, exclusive.
     * The view is a new instance of the same concrete class as this list.
     * @example
     * //list contains [1,8,6,2,7,3,9,11]
     * list.subList(2,7); // returns a list containing [6, 2, 7, 3, 9]
     */
    subList(fromIndex, toIndex) {
        return new this.constructor(...this._store.slice(fromIndex, toIndex));
    }
    /**
     * Reverse the list in place.
     * @returns this list, for chaining
     * @example
     * //list contains [1,2,3,4,5]
     * list.reverse() //now list contains [5,4,3,2,1]
     */
    reverse() {
        this._store.reverse();
        return this;
    }
}
exports.AbstractList = AbstractList;
|
shaojiankui/iOS10-Runtime-Headers
|
PrivateFrameworks/PhotoLibraryServices.framework/PLSidecarFile.h
|
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/PhotoLibraryServices.framework/PhotoLibraryServices
*/
// RuntimeBrowser-generated declaration: a sidecar-file object that extends
// the private _PLSidecarFile base class.
@interface PLSidecarFile : _PLSidecarFile

// NOTE(review): presumably the on-disk location of the sidecar file --
// behavior not visible from this generated header.
@property (nonatomic, readonly, retain) NSURL *fileURL;

- (id)fileURL;
// Sets the capture date using values from the given EXIF dictionary.
- (void)setCaptureDateFromExifDictionary:(id)arg1;

@end
|
javagl/CommonUI
|
src/main/java/de/javagl/common/ui/utils/desktop/JDesktopPaneLayout.java
|
<filename>src/main/java/de/javagl/common/ui/utils/desktop/JDesktopPaneLayout.java
/*
* www.javagl.de - Common - UI
*
* Copyright (c) 2013-2015 <NAME> - http://www.javagl.de
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package de.javagl.common.ui.utils.desktop;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.LayoutManager;
import java.awt.Rectangle;
import java.util.HashMap;
import java.util.Map;
import javax.swing.JDesktopPane;
import javax.swing.JInternalFrame;
import javax.swing.JInternalFrame.JDesktopIcon;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.event.InternalFrameAdapter;
import javax.swing.event.InternalFrameEvent;
/**
* A layout manager for {@link JDesktopPane}s.<br>
* <br>
* <b>This class is even more preliminary than all other classes in
* this library!</b>
*/
public class JDesktopPaneLayout
{
/**
* A container that is associated with a desktop pane layout.
* It is used internally for computing the actual layout.
*/
private static class LayoutContainer extends Container
{
/**
* Serial UID
*/
private static final long serialVersionUID = 6455895807345631762L;
/**
* The owner of this container
*/
private final JDesktopPaneLayout owner;
/**
* Creates a new layout container with the given owner
*
* @param owner The owner
*/
LayoutContainer(JDesktopPaneLayout owner)
{
this.owner = owner;
}
}
/**
* The class for the dummy components that are added to the container
* to determine the size of the internal frames
*/
private class FrameComponent extends JPanel
{
/**
* Serial UID
*/
private static final long serialVersionUID = 7397992807282457681L;
/**
* The internal frame for which this component was created
*/
private final JInternalFrame internalFrame;
/**
* Creates a new dummy component for the given internal frame,
* which was added with the given layout information
*
* @param internalFrame The internal frame
* @param constraints The constraints
* @param index The index
*/
FrameComponent(
JInternalFrame internalFrame,
final Object constraints, final int index)
{
this.internalFrame = internalFrame;
internalFrame.addInternalFrameListener(new InternalFrameAdapter()
{
@Override
public void internalFrameIconified(InternalFrameEvent e)
{
container.remove(FrameComponent.this);
}
@Override
public void internalFrameDeiconified(InternalFrameEvent e)
{
container.add(FrameComponent.this, constraints, index);
}
@Override
public void internalFrameClosed(InternalFrameEvent e)
{
container.remove(FrameComponent.this);
}
});
}
/**
* Returns the internal frame for which this component was created
*
* @return The internal frame
*/
JInternalFrame getInternalFrame()
{
return internalFrame;
}
@Override
public Dimension getPreferredSize()
{
return internalFrame.getPreferredSize();
}
@Override
public Dimension getMaximumSize()
{
return internalFrame.getMaximumSize();
}
@Override
public Dimension getMinimumSize()
{
return internalFrame.getMinimumSize();
}
}
/**
* The parent layout
*/
private final JDesktopPaneLayout parent;
/**
* The desktop pane that this layout manager is applied to
*/
private final JDesktopPane desktopPane;
/**
* The dummy container that will be used for computing the layout
*/
private final LayoutContainer container;
/**
* The root container. This is the layout container of the root
* of the hierarchy of desktop pane layouts.
*/
private final LayoutContainer rootContainer;
/**
* A map from internal frames to {@link FrameComponent}s
* that are placed into the dummy container
*/
private final Map<JInternalFrame, FrameComponent> frameToComponent;
/**
* Creates a new desktop pane layout for the given desktop pane
*
* @param desktopPane The desktop pane
*/
public JDesktopPaneLayout(JDesktopPane desktopPane)
{
this(null, desktopPane, null);
}
/**
* Constructor for a child layout that was created with
* {@link #createChild()}
*
* @param parent The parent layout
* @param desktopPane The desktop pane of the parent
* @param rootContainer The root container of the parent
*/
private JDesktopPaneLayout(
JDesktopPaneLayout parent,
JDesktopPane desktopPane,
LayoutContainer rootContainer)
{
this.parent = parent;
this.desktopPane = desktopPane;
this.container = new LayoutContainer(this);
if (rootContainer == null)
{
this.rootContainer = container;
}
else
{
this.rootContainer = rootContainer;
}
this.frameToComponent = new HashMap<JInternalFrame, FrameComponent>();
}
/**
* Create a new desktop pane layout that may be added as a child
* to this layout
*
* @return The new child layout
*/
public JDesktopPaneLayout createChild()
{
return new JDesktopPaneLayout(this, desktopPane, rootContainer);
}
/**
 * Add the given desktop pane layout as a child to this one,
 * without any layout constraints.
 *
 * @param child The child to add
 * @throws IllegalArgumentException If the given child was not
 * created by calling {@link #createChild()} on this layout
 */
public void add(JDesktopPaneLayout child)
{
    add(child, null);
}
/**
 * Add the given desktop pane layout as a child to this one,
 * appending it at the end of the component list.
 *
 * @param child The child to add
 * @param constraints The constraints.
 * See {@link Container#add(Component, Object)}
 * @throws IllegalArgumentException If the given child was not
 * created by calling {@link #createChild()} on this layout
 */
public void add(JDesktopPaneLayout child, Object constraints)
{
    add(child, constraints, -1);
}
/**
 * Add the given desktop pane layout as a child to this one
 *
 * @param child The child to add
 * @param constraints The constraints.
 * See {@link Container#add(Component, Object)}
 * @param index The index.
 * See {@link Container#add(Component, Object, int)}
 * @throws IllegalArgumentException If the given child was not
 * created by calling {@link #createChild()} on this layout
 */
public void add(JDesktopPaneLayout child, Object constraints, int index)
{
    // Only layouts created via createChild() share this layout's
    // desktop pane and root container, so reject everything else
    if (child.parent != this)
    {
        throw new IllegalArgumentException(
            "Layout is not a child of this layout");
    }
    container.add(child.container, constraints, index);
}
/**
 * Remove the given child layout's container from this layout.
 *
 * @param child The child to remove
 * @throws IllegalArgumentException If the given child was not
 * created by calling {@link #createChild()} on this layout
 */
public void remove(JDesktopPaneLayout child)
{
    // Reject layouts that do not belong to this hierarchy
    if (child.parent != this)
    {
        throw new IllegalArgumentException(
            "Layout is not a child of this layout");
    }
    container.remove(child.container);
}
/**
 * Set the delegate layout manager for this layout. The delegate
 * computes the layout of the dummy container, which is then copied
 * onto the real internal frames.
 *
 * @param layoutManager The delegate layout manager
 */
public void setLayout(LayoutManager layoutManager)
{
    container.setLayout(layoutManager);
}
/**
 * Add the given internal frame to this layout manager.<br>
 * <br>
 * Note that this will <b>not</b> add the internal frame to the
 * desktop pane that this layout belongs to!
 *
 * @param internalFrame The internal frame to add
 */
public void add(JInternalFrame internalFrame)
{
    add(internalFrame, null);
}
/**
 * Add the given internal frame to this layout manager, appending
 * it at the end of the component list.<br>
 * <br>
 * Note that this will <b>not</b> add the internal frame to the
 * desktop pane that this layout belongs to!
 *
 * @param internalFrame The internal frame to add
 * @param constraints The constraints.
 * See {@link Container#add(Component, Object)}
 */
public void add(
    JInternalFrame internalFrame, Object constraints)
{
    add(internalFrame, constraints, -1);
}
/**
 * Add the given internal frame to this layout manager.<br>
 * <br>
 * Note that this will <b>not</b> add the internal frame to the
 * desktop pane that this layout belongs to!
 *
 * @param internalFrame The internal frame to add
 * @param constraints The constraints.
 * See {@link Container#add(Component, Object)}
 * @param index The index.
 * See {@link Container#add(Component, Object, int)}
 */
public void add(
    JInternalFrame internalFrame, Object constraints, int index)
{
    FrameComponent frameComponent =
        new FrameComponent(internalFrame, constraints, index);
    frameToComponent.put(internalFrame, frameComponent);
    // Iconified frames are tracked in the map but take part in the
    // computed layout only while they are not iconified
    if (!internalFrame.isIcon())
    {
        container.add(frameComponent, constraints, index);
    }
}
/**
 * Remove the given internal frame from this layout.<br>
 * <br>
 * Note that this will <b>not</b> remove the internal frame from the
 * desktop pane that this layout belongs to!
 *
 * @param internalFrame The internal frame to remove
 */
void remove(JInternalFrame internalFrame)
{
    // Remove the map entry as well: the original implementation only
    // removed the dummy component from the container, so the stale
    // FrameComponent kept the frame reachable and the frame was still
    // considered by computeDesktopIconsSpace().
    FrameComponent component = frameToComponent.remove(internalFrame);
    // Guard against frames that were never added: Container.remove
    // dereferences its argument, so passing null would throw an NPE.
    if (component != null)
    {
        container.remove(component);
    }
}
/**
 * Validate the layout after internal frames have been added or
 * removed: recompute all frame bounds for the desktop pane's current
 * size, minus the strip reserved for iconified desktop icons.
 */
public void validate()
{
    Dimension size = desktopPane.getSize();
    size.height -= computeDesktopIconsSpace();
    layoutInternalFrames(size);
}
/**
 * Compute the vertical space to reserve for iconified desktop icons.
 * Returns the preferred icon height of the first iconified frame
 * found, or 0 when no frame is iconified (all desktop icons are
 * assumed to occupy a single row of equal height).
 *
 * @return The space
 */
private int computeDesktopIconsSpace()
{
    for (JInternalFrame f : frameToComponent.keySet())
    {
        if (f.isIcon())
        {
            JDesktopIcon desktopIcon = f.getDesktopIcon();
            return desktopIcon.getPreferredSize().height;
        }
    }
    return 0;
}
/**
 * Layout the internal frames for the given size: resize the dummy
 * container, let the delegate layout managers run, and copy the
 * resulting bounds back onto the real internal frames.
 *
 * @param size The size
 */
private void layoutInternalFrames(Dimension size)
{
    container.setSize(size);
    callDoLayout(container);
    applyLayout();
}
/**
 * Recursively lay out the given container and every container found
 * in its component tree.
 *
 * @param container The root of the tree to lay out
 */
private void callDoLayout(Container container)
{
    container.doLayout();
    for (Component component : container.getComponents())
    {
        if (component instanceof Container)
        {
            callDoLayout((Container) component);
        }
    }
}
/**
 * Apply the current layout to the internal frames: copy the bounds
 * that the dummy layout containers computed onto the real
 * JInternalFrames, converted into root-container coordinates.
 */
private void applyLayout()
{
    int n = container.getComponentCount();
    for (int i=0; i<n; i++)
    {
        Component component = container.getComponent(i);
        if (component instanceof FrameComponent)
        {
            FrameComponent frameComponent = (FrameComponent)component;
            JInternalFrame internalFrame =
                frameComponent.getInternalFrame();
            // The dummy containers may be nested, so translate the
            // computed bounds into the root container's coordinate
            // space before applying them to the frame
            Rectangle bounds = SwingUtilities.convertRectangle(
                container, component.getBounds(), rootContainer);
            //System.out.println(
            //    "Set bounds of "+internalFrame.getTitle()+" to "+bounds);
            internalFrame.setBounds(bounds);
        }
        else
        {
            // Anything that is not a FrameComponent is the container
            // of a child layout; recurse so nested layouts are applied
            LayoutContainer childLayoutContainer =
                (LayoutContainer)component;
            //System.out.println(
            //    "Child with "+childLayoutContainer.getLayout());
            childLayoutContainer.owner.applyLayout();
        }
    }
}
}
|
MitkoDG/mySoftUniJorney
|
Fundamentals/textProcessing/excercise/02.modernTimes.js
|
/**
 * Extracts hashtag words from the given text and prints them, one per
 * line. A token counts as a hashtag when it starts with '#', has at
 * least one character after it, and the remainder consists of ASCII
 * letters only.
 */
function modernTask(text) {
  const tags = text
    .split(" ")
    .filter((token) => token.startsWith("#") && token.length > 1)
    .map((token) => token.slice(1))
    .filter((word) => /^[A-Za-z]+$/.test(word));
  console.log(tags.join("\n"));
}

modernTask("Nowadays everyone uses # to tag a #special word in #socialMedia");
// function solve(string) {
// let array = string.split(' ');
// for (let word of array) {
// if (word.startsWith('#') && word.length > 1 && checkForDigit(word)) {
// console.log(word.substr(1));
// }
// }
// function checkForDigit(word) {
// let isWord = true;
// for (let ch of word) {
// if (!isNaN(ch)) {
// return false;
// }
// }
// if (isWord) {
// return true;
// }
// }
// }
|
Sirokujira/MicroFrameworkPK_v4_3
|
Solutions/SAM7X_EK/DeviceCode/EMAC/SAM7X_EMAC_Config.cpp
|
<gh_stars>1-10
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#include <tinyhal.h>
#include <DeviceCode\AT91_EMAC\AT91_EMAC_adapter.h>
#define AT91_EMAC_PHY_POWERDOWN_PIO AT91_GPIO_Driver::PB18
#if defined(ADS_LINKER_BUG__NOT_ALL_UNUSED_VARIABLES_ARE_REMOVED)
#pragma arm section rwdata = "g_AT91_EMAC_Config"
#endif
// Board-level EMAC driver configuration for the SAM7X-EK: the only
// configurable item is the GPIO pin that powers down the Ethernet PHY.
AT91_EMAC_DRIVER_CONFIG g_AT91_EMAC_Config =
{
    AT91_EMAC_PHY_POWERDOWN_PIO,
};
#if defined(ADS_LINKER_BUG__NOT_ALL_UNUSED_VARIABLES_ARE_REMOVED)
#pragma arm section rwdata
#endif
|
ostseegloeckchen/basics
|
Laden_mithilfe_pickle.py
|
<reponame>ostseegloeckchen/basics<gh_stars>0
# Load a pickled object from disk.
#
# The original version failed (the old comment said "this script does
# not work"): it opened the file with mode "w", which truncates the file
# and yields a write-only text stream, while pickle.load() requires a
# *readable binary* stream -- mode "rb".
import pickle

with open("Daten\\test6.txt", "rb") as wissen:
    buch = pickle.load(wissen)
|
starlingx-staging/stx-ha
|
service-mgmt/sm-1.0.0/src/sm_heartbeat_msg.h
|
//
// Copyright (c) 2014-2017 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
#ifndef __SM_HEARTBEAT_MSG_H__
#define __SM_HEARTBEAT_MSG_H__
#include <stdint.h>
#include "sm_limits.h"
#include "sm_types.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Authentication callback invoked for received heartbeat messages;
   presumably returning false rejects the message -- confirm against
   sm_heartbeat_msg.c. */
typedef bool (*SmHeartbeatMsgAuthCallbackT) (char interface_name[],
    SmNetworkAddressT* network_address, int network_port,
    void* msg, int msg_size, uint8_t auth_vector[]);

/* Callback invoked when an alive (heartbeat) message arrives from a
   peer node on the given interface. */
typedef void (*SmHeartbeatMsgAliveCallbackT) (char node_name[],
    SmNetworkAddressT* network_address, int network_port, int version,
    int revision, char interface_name[]);

/* Callback invoked when a node's interface state changes. */
typedef void (*SmHeartbeatMsgIfStateCallbackT) (const char node_name[],
    SmHeartbeatMsgIfStateT if_state);

/* Bundle of callbacks handed to
   sm_heartbeat_msg_register_callbacks(). */
typedef struct
{
    SmHeartbeatMsgAuthCallbackT auth;
    SmHeartbeatMsgAliveCallbackT alive;
    SmHeartbeatMsgIfStateCallbackT if_state;
} SmHeartbeatMsgCallbacksT;
// ****************************************************************************
// Heartbeat Messaging - Register Callbacks
// ========================================
extern SmErrorT sm_heartbeat_msg_register_callbacks(
SmHeartbeatMsgCallbacksT* callbacks );
// ****************************************************************************
// ****************************************************************************
// Heartbeat Messaging - Deregister Callbacks
// ==========================================
extern SmErrorT sm_heartbeat_msg_deregister_callbacks(
SmHeartbeatMsgCallbacksT* callbacks );
// ****************************************************************************
// ****************************************************************************
// Heartbeat Messaging - Send Alive
// ================================
extern SmErrorT sm_heartbeat_msg_send_alive( SmNetworkTypeT network_type,
char node_name[], SmNetworkAddressT* network_address,
SmNetworkAddressT* network_multicast, int network_port,
char interface_name[], SmAuthTypeT auth_type, char auth_key[],
int multicast_socket );
// ****************************************************************************
// ****************************************************************************
// Heartbeat Messaging - Open Sockets
// ==================================
extern SmErrorT sm_heartbeat_msg_open_sockets( SmNetworkTypeT network_type,
SmNetworkAddressT* network_address, SmNetworkAddressT* network_multicast,
int network_port, char interface_name[], int* multicast_socket, int* unicast_socket );
// ****************************************************************************
// ****************************************************************************
// Heartbeat Messaging - Close Sockets
// ===================================
extern SmErrorT sm_heartbeat_msg_close_sockets( int* multicast_socket );
// ****************************************************************************
// ****************************************************************************
// Heartbeat Message - Initialize
// ==============================
extern SmErrorT sm_heartbeat_msg_initialize( void );
// ****************************************************************************
// ****************************************************************************
// Heartbeart Message - Finalize
// =============================
extern SmErrorT sm_heartbeat_msg_finalize( void );
// ****************************************************************************
#ifdef __cplusplus
}
#endif
#endif // __SM_HEARTBEAT_MSG_H__
|
hifi/matrix-media-repo
|
templating/templates.go
|
package templating
import (
"fmt"
"html/template"
"path"
"sync"
"github.com/turt2live/matrix-media-repo/common/config"
)
type templates struct {
cached map[string]*template.Template
}
var instance *templates
var singletonLock = &sync.Once{}
// GetInstance returns the process-wide template cache, creating it on
// first use.
//
// The previous implementation guarded the sync.Once call with an
// unsynchronized `instance == nil` check; that read raced with the
// write performed inside the Once callback. sync.Once already
// guarantees the initializer runs exactly once and that its effects
// are visible to all subsequent callers, so the extra check is
// dropped.
func GetInstance() *templates {
	singletonLock.Do(func() {
		instance = &templates{
			cached: make(map[string]*template.Template),
		}
	})
	return instance
}
// GetTemplate loads the template called name (file "<name>.html" in the
// configured templates directory) and returns it.
//
// NOTE(review): the cache lookup is commented out, so every call
// re-parses the file from disk, yet the parsed template is still stored
// in i.cached. That map write is unsynchronized and would race if
// GetTemplate is called from multiple goroutines -- confirm the caller,
// or guard the map.
func GetTemplate(name string) (*template.Template, error) {
	i := GetInstance()

	//if v, ok := i.cached[name]; ok {
	//	return v, nil
	//}

	fname := fmt.Sprintf("%s.html", name)
	tmplPath := path.Join(config.Runtime.TemplatesPath, fname)

	t, err := template.New(fname).ParseFiles(tmplPath)
	if err != nil {
		return nil, err
	}

	i.cached[name] = t
	return t, nil
}
|
automation-club/pinmame
|
src/unix/dirio.c
|
#include "xmame.h"
#include <stdarg.h>
#include <unistd.h>
#include <dirent.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <zlib.h>
#ifdef BSD43 /* old style directory handling */
#include <sys/types.h>
#include <sys/dir.h>
#define dirent direct
#endif
/* #define FILEIO_DEBUG */
#define MAXPATHC 20 /* at most 20 path entries */
#define MAXPATHL BUF_SIZE /* at most BUF_SIZE-1 character path length */
#ifdef MESS
/* MESS device API: Unix has no drive-letter style devices, so the
   three functions below are stubs. */
int osd_num_devices(void)
{
    return 0; /* unix doesn't have devices ( a device = a: b: etc) */
}
/* Stub: there is nothing to switch to on Unix. */
void osd_change_device(const char *device)
{
}
/* Stub: no devices exist, so every index maps to an empty name. */
const char *osd_get_device_name(int idx)
{
    return "";
}
/* State of one directory enumeration (see osd_dir_open). */
struct osd_dir {
    DIR *dir;                /* open directory stream */
    char dirname[MAXPATHL];  /* directory path, used to stat entries */
    char filemask[MAXPATHL]; /* wildcard mask plain files must match */
};
/*
 * Begin enumerating 'dirname', returning entries that match 'filemask'.
 * Returns an opaque handle for osd_dir_get_entry()/osd_dir_close(),
 * or NULL when the directory cannot be opened or memory is exhausted.
 */
void *osd_dir_open(const char *dirname, const char *filemask)
{
    struct osd_dir *dir = NULL;

    if(!(dir = calloc(1, sizeof(struct osd_dir))))
        return NULL;

    if(!(dir->dir = opendir(dirname)))
    {
        osd_dir_close(dir);
        return NULL;
    }

    /* calloc zeroed the buffers, so strncpy with MAXPATHL-1 always
       leaves a terminating NUL byte */
    strncpy(dir->dirname, dirname, MAXPATHL-1);
    strncpy(dir->filemask, filemask, MAXPATHL-1);

    return dir;
}
/* Release a handle from osd_dir_open(); safe to call on a handle whose
   directory stream failed to open (dir->dir == NULL). */
void osd_dir_close(void *dir)
{
    struct osd_dir *my_dir = dir;

    if(my_dir->dir)
        closedir(my_dir->dir);
    free(my_dir);
}
#ifndef __QNXNTO__
/*
 * Case-insensitive wildcard match of the mask f1 against the file name
 * f2. '*' matches any run of characters (including none), '?' matches
 * exactly one character. Returns 1 on a match, 0 otherwise.
 *
 * The previous implementation expanded '*' with a single greedy scan
 * and could reject valid matches (e.g. mask "a*c*e" against "abcde");
 * this version tries every possible expansion of '*' via recursion.
 */
static int fnmatch(const char *f1, const char *f2)
{
	while (*f1)
	{
		if (*f1 == '*')
		{
			/* collapse runs of asterisks; they are equivalent to one */
			while (*f1 == '*')
				f1++;

			/* a trailing '*' matches whatever is left of the name */
			if (!*f1)
				return 1;

			/* try to match the rest of the mask at every position */
			for (; *f2; f2++)
				if (fnmatch(f1, f2))
					return 1;
			return 0;
		}

		/* name exhausted while the mask still expects characters */
		if (!*f2)
			return 0;

		/* '?' matches any single character; otherwise compare
		   case-insensitively */
		if (*f1 != '?' &&
				toupper((unsigned char)*f1) != toupper((unsigned char)*f2))
			return 0;

		f1++;
		f2++;
	}

	/* full match only if the name is exhausted too */
	return *f2 == 0;
}
#endif
/*
 * Fetch the next directory entry that is either a subdirectory or a
 * plain file matching the mask given to osd_dir_open(). The entry name
 * is copied into 'name' (at most namelength-1 characters, always NUL
 * terminated) and *is_dir distinguishes directories from files.
 * Returns the length of the returned name, or 0 when the directory is
 * exhausted.
 */
int osd_dir_get_entry(void *dir, char *name, int namelength, int *is_dir)
{
	struct osd_dir *my_dir = dir;
	struct dirent *d = NULL;
	struct stat stat_buf;
	char buf[MAXPATHL];

	*is_dir = 0;

	while((d = readdir(my_dir->dir)))
	{
		snprintf(buf, MAXPATHL, "%s/%s", my_dir->dirname, d->d_name);

		/* stat it; silently skip entries that cannot be stat'ed */
		if(stat(buf, &stat_buf))
			continue;

		/* check that it is a dir or matches our filemask */
#ifdef BSD43
		if(S_IFDIR & stat_buf.st_mode)
#else
		if(S_ISDIR(stat_buf.st_mode))
#endif
		{
			*is_dir = 1;
		}
#ifndef __QNXNTO__
		else if (!fnmatch(my_dir->filemask, d->d_name))
#else
		/* The system fnmatch() returns 0 on a match while the local
		   replacement above returns non-zero -- the QNX branch
		   previously inverted this test and skipped every matching
		   file. */
		else if (fnmatch(my_dir->filemask, d->d_name, 0))
#endif
			continue;

		strncpy(name, d->d_name, namelength-1);
		name[namelength-1] = 0;
		return strlen(name);
	}
	return 0;
}
/* Change the process working directory. The chdir() return value is
   ignored -- the void OSD interface offers callers no error path. */
void osd_change_directory(const char *directory)
{
    chdir(directory);
}
/*
 * Return the current working directory with a trailing '/'.
 * The result lives in a static buffer and is overwritten by the next
 * call (not reentrant). Buffer math: getcwd() writes at most MAXPATHL
 * bytes including the NUL, so appending "/" still fits the
 * MAXPATHL + 1 sized buffer.
 */
const char *osd_get_cwd(void)
{
    static char cwd[MAXPATHL + 1];

#ifdef BSD43
    getwd(cwd);
#else
    getcwd(cwd, MAXPATHL);
#endif
    strcat(cwd, "/");
    return cwd;
}
/*============================================================ */
/* osd_dirname */
/*============================================================ */
/*
 * Return a newly malloc'ed copy of the directory part of 'filename',
 * including the trailing separator. When the name contains no
 * separator an empty string is returned; NULL input or allocation
 * failure yields NULL. The caller owns (and must free) the result.
 */
char *osd_dirname(const char *filename)
{
	char *dirname;
	char *last_sep;
	char *p;

	/* NULL begets NULL */
	if (!filename)
		return NULL;

	/* allocate space for a full copy of the name */
	dirname = malloc(strlen(filename) + 1);
	if (!dirname)
	{
		fprintf(stderr_file, "error: malloc failed in osd_dirname\n");
		return NULL;
	}

	strcpy(dirname, filename);

	/* locate the last path separator, accepting both '/' and '\\' */
	last_sep = NULL;
	for (p = dirname; *p; p++)
		if (*p == '\\' || *p == '/')
			last_sep = p;

	if (last_sep)
		*(last_sep + 1) = 0;	/* keep the separator, drop the rest */
	else
		dirname[0] = 0;		/* no separator: empty string */

	return dirname;
}
/*============================================================ */
/* osd_basename */
/*============================================================ */
/*
 * Return a pointer to the file-name part of 'filename', i.e. everything
 * after the last '/' or '\\'. The input is not modified; a name without
 * separators is returned unchanged, and NULL begets NULL.
 */
char *osd_basename(char *filename)
{
	char *slash;
	char *backslash;

	if (!filename)
		return NULL;

	slash = strrchr(filename, '/');
	backslash = strrchr(filename, '\\');

	/* pick whichever separator occurs later in the string */
	if (backslash && (!slash || backslash > slash))
		slash = backslash;

	return slash ? slash + 1 : filename;
}
/*============================================================ */
/* osd_path_separator */
/*============================================================ */
/* Separator string used when composing paths on this platform. */
const char *osd_path_separator(void)
{
    return "/";
}
/*============================================================ */
/* osd_is_path_separator */
/*============================================================ */
/* Accept both '/' and '\\' as path separator characters. */
int osd_is_path_separator(char ch)
{
    return (ch == '\\') || (ch == '/');
}
/*============================================================ */
/* osd_is_absolute_path */
/*============================================================ */
/*
 * Tell whether 'path' is absolute, i.e. starts with a path separator.
 * Accepts both '/' and '\\' to stay consistent with
 * osd_is_path_separator(). (The original tested path[0] == '/' twice;
 * the second comparison was evidently meant to be the backslash.)
 */
int osd_is_absolute_path(const char *path)
{
	return (path[0] == '/') || (path[0] == '\\');
}
|
sc1987910/quick-cocos2d-x
|
template/PROJECT_TEMPLATE_01/proj.win32/ProjectConfigDialog.h
|
#pragma once
#include "resource.h"
// C RunTime Header Files
#include "CCStdC.h"
#include <string>
#include "SimulatorConfig.h"
/*
 * Modal "project configuration" dialog for the Win32 simulator.
 * Only one instance may exist at a time: the constructor registers the
 * instance in s_sharedInstance (asserting it was empty) and the
 * destructor clears it, so the static Win32 callbacks can reach the
 * active dialog. Presumably showModal() constructs the instance --
 * confirm in the .cpp file.
 */
class ProjectConfigDialog
{
public:
    /* Show the dialog modally over 'hwnd' and edit '*project';
       optional caption overrides for the window and confirm button. */
    static bool showModal(HWND hwnd, ProjectConfig *project, const string dialogCaption = string(""), const string buttonCaption = string(""));

    ~ProjectConfigDialog(void) {
        s_sharedInstance = NULL;
    }

private:
    ProjectConfigDialog(HWND hwnd)
        : m_dialogResult(false)
        , m_hwnd(hwnd)
        , m_hwndDialog(NULL)
    {
        /* only one dialog may be alive at a time */
        assert(s_sharedInstance == NULL);
        s_sharedInstance = this;
    }

    /* Active instance, used by the static Win32 callbacks below. */
    static ProjectConfigDialog *sharedInstance(void) {
        return s_sharedInstance;
    }

    static ProjectConfigDialog *s_sharedInstance;

    ProjectConfig m_project;   // project configuration edited by the dialog
    string m_dialogCaption;    // window title override
    string m_buttonCaption;    // confirm-button caption override
    bool m_dialogResult;       // result reported by showModal()
    HWND m_hwnd;               // owner window
    HWND m_hwndDialog;         // dialog window handle

    bool showDialog(ProjectConfig *project, const string dialogCaption, const string buttonCaption);
    bool checkConfig(void);

    /* dialog event handlers */
    void onInitDialog(HWND hwndDialog);
    void onSelectProjectDir(void);
    void onSelectScriptFile(void);
    void onSelectWritablePath(void);
    void onScreenSizeChanged(void);
    void onScreenDirectionChanged(void);
    void onOK(void);

    // windows callback
    static INT_PTR CALLBACK DialogCallback(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam);
    static int CALLBACK BrowseFolderCallback(HWND hwnd, UINT uMsg, LPARAM lParam, LPARAM lpData);

    // update ui
    void updateProjectDir(void);
    void updateScriptFile(void);
    void updateWritablePath(void);

    // helper
    const string browseFolder(const string baseDir);
    static BOOL DirectoryExists(const string path);
    static BOOL FileExists(const string path);
};
|
myles-novick/ml-workflow
|
feature_store/src/test/tests/test_get_features_by_name.py
|
<gh_stars>0
from typing import List
from requests.auth import HTTPBasicAuth
from shared.logger.logging_config import logger
from ...rest_api import crud
from ..fixtures.conftest import APP, get_my_session, override_get_db, test_app
from ..fixtures.feature_set import create_fset_with_features
APP.dependency_overrides[crud.get_db] = override_get_db
basic_auth = HTTPBasicAuth('user','password')
def test_get_feature_no_auth(test_app):
    """Unauthenticated requests to /features must be rejected with 401."""
    response = test_app.get('/features',params={"name": ['name','income']})
    assert response.status_code == 401, 'Should fail because there is no authentication'
    mes = response.json()['message']
    assert mes == 'Not authenticated', mes
def test_get_feature_no_features(test_app, create_fset_with_features):
    """
    Test get_features_by_name and not passing in any feature names:
    the endpoint must reject the request with a 4xx error.
    """
    APP.dependency_overrides[crud.get_db] = lambda: (yield create_fset_with_features) # Give the "server" the same db session
    response = test_app.get('/features',params={"name": []}, auth=basic_auth)
    assert response.status_code in range(400,500), 'Should fail because no names were provided'
    mes = response.json()['message']
    assert mes == 'Please provide at least one name', mes
def test_get_feature_bad_features(test_app, create_fset_with_features):
    """
    Test get_features_by_name and passing in a name that doesn't have a corresponding feature
    """
    APP.dependency_overrides[crud.get_db] = lambda: (yield create_fset_with_features) # Give the "server" the same db session
    response = test_app.get('/features',params={"name": ["name","not_a_feature"]}, auth=basic_auth)
    assert response.status_code in range(400,500), 'Should fail because a feature that does not exist was requested'
    mes = response.json()['message']
    # NOTE(review): the error message is only logged, never asserted --
    # consider asserting its content once the expected text is known.
    logger.info('MESSAGE for bad_features')
    logger.info(mes)
def test_get_features(test_app, create_fset_with_features):
    """
    Test get_features_by_name: requesting two existing features must
    succeed and return exactly those two features.
    """
    APP.dependency_overrides[crud.get_db] = lambda: (yield create_fset_with_features) # Give the "server" the same db session
    response = test_app.get('/features',params={"name": ["name","income"]}, auth=basic_auth)
    assert response.status_code == 200, 'succeed'
    mes = response.json()
    logger.info('MESSAGE for get_features')
    logger.info(mes)
    assert len(mes) == 2, "Should return 2 features because 2 were requested"
|
LeonChen1024/algorithm-4th-exercise
|
alg4/src/edu/princeton/cs/exercise/chapter2_5/E02_05_04.java
|
package edu.princeton.cs.exercise.chapter2_5;
import java.util.ArrayList;
import java.util.Arrays;
import edu.princeton.cs.algs4.StdOut;
/**
 * 2.5.4 Implement a method String[] dedup(String[] a) that returns the objects in a[] in sorted
 * order, with duplicates removed.
 *
 * <p>(Implement a method String[] dedup(String[] a) that returns the contents of a[] in sorted
 * order with duplicates removed.)
 *
 * @author LeonChen
 * @since 12/16/20
 */
class E02_05_04 {

    /** Prints the deduplicated, sorted sample word list, one word per line. */
    public static void main(String[] args) {
        String[] input =
                new String[]{
                    "afterthougnt",
                    "test",
                    "thougnt",
                    "common",
                    "after",
                    "abandon",
                    "cool",
                    "test",
                    "java",
                    "coolguy",
                    "C",
                    "python",
                    "guy"
                };
        String[] res = dedup(input);
        for (String re : res) {
            StdOut.println(re);
        }
    }

    /**
     * Returns the strings of {@code a} in ascending natural order with duplicates removed.
     *
     * <p>Fixes over the original version: an empty (or null) input no longer throws
     * {@code ArrayIndexOutOfBoundsException} via {@code a[0]}, the caller's array is no
     * longer mutated (the sort happens on a copy), and the raw {@code ArrayList} type is
     * properly parameterized.
     *
     * @param a the strings to deduplicate; may be null or empty
     * @return a new sorted array without duplicates (empty for null/empty input)
     */
    private static String[] dedup(String[] a) {
        if (a == null || a.length == 0) {
            return new String[0];
        }
        // Sort a copy so the caller's array is left untouched.
        String[] sorted = a.clone();
        Arrays.sort(sorted);
        ArrayList<String> unique = new ArrayList<>();
        unique.add(sorted[0]);
        for (int i = 1; i < sorted.length; i++) {
            // After sorting, duplicates are adjacent.
            if (!sorted[i].equals(sorted[i - 1])) {
                unique.add(sorted[i]);
            }
        }
        return unique.toArray(new String[0]);
    }
}
|
earthcomputing/Netronome
|
nfp-sdk-6.0.1/p4/components/dcfl/me/lib/dcfl/libdcfl.c
|
<gh_stars>0
/*
* Copyright (C) 2014, Netronome Systems, Inc. All rights reserved.
*
* @file lib/dcfl/libdcfl.c
* @brief DCFL library for MicroC
*/
#ifndef _DCFL__LIBDCFL_C_
#define _DCFL__LIBDCFL_C_
#include <assert.h>
#include <nfp.h>
#include <nfp/me.h>
#include <stdint.h>
#include <types.h>
#ifdef DCFL_PIPELINED
#include <nfp/mem_ring.h>
#include <std/reg_utils.h>
#endif
#include <dcfl_ctrl.h>
#include "_c/field_lkp.c"
#include "_c/an.c"
#ifdef DCFL_DEBUG
#include <nfp/mem_ring.h>
MEM_JOURNAL_DECLARE(DCFL_DBGJRNL_NAME, DCFL_DBGJRNL_SIZE);
#endif
/**
* ME declarations:
*/
__export __imem struct dcfl_me_config dcfl_me_cfg;
#ifdef DCFL_PIPELINED
MEM_WORKQ_DECLARE(DCFL_ANQ_NAME, DCFL_ANQ_ENTRIES);
#endif
/**
 * Look up the active configuration for 'table' inside the exported
 * dcfl_me_cfg symbol. Each table is double-buffered: the first word
 * selects the active buffer (0 = no configuration loaded, otherwise
 * buffer 1 or 2).
 *
 * @param table Table index into dcfl_me_cfg
 * @param custom_cfg With DCFL_CUSTOM_TABLE_LW defined, receives a copy
 * of the custom config that precedes the field configs
 * @return 40-bit imem pointer to the active table config, or 0 when
 * the table has no active buffer
 */
__addr40 __imem uint8_t
*dcfl_get_conf(uint32_t table,
               __xread void *custom_cfg)
{
    __xread uint32_t active_buffer;
    __addr40 __imem uint8_t *cfg_ptr;

    /* read the active buffer */
    cfg_ptr = (__addr40 __imem uint8_t *) &dcfl_me_cfg;
    cfg_ptr += sizeof(struct dcfl_me_table_config_buffers) * table;
    mem_read32(&active_buffer, cfg_ptr, sizeof(uint32_t));

    if (active_buffer == 0)
        return 0;
    __critical_path();

    /* move the pointer beyond the active_buffer word */
    cfg_ptr += sizeof(active_buffer);

    /* move the pointer to the correct buffer instance */
    if (active_buffer == 2) {
        cfg_ptr += sizeof(struct dcfl_me_table_config);
    }

#ifdef DCFL_CUSTOM_TABLE_LW
    /* read the dcfl table config */
    mem_read32((void *)custom_cfg, cfg_ptr,
               sizeof(struct dcfl_me_custom_config));
    cfg_ptr += sizeof(struct dcfl_me_custom_config);
#endif

    return cfg_ptr;
}
/**
 * Bubble sort results.
 *
 * Sorting compares the whole 32-bit value, so results are ordered
 * first by priority (increasing), then by rule_id (increasing).
 * After sorting, the rule ids are copied into the labels array.
 *
 * @param results Label set holding the match results, sorted in place
 * @param nresults Number of valid entries in the set
 * @return nresults, unchanged
 */
int
dcfl_sort_results(__lmem struct dcfl_label_set *results,
                  uint32_t nresults)
{
    __gpr uint32_t sorted;
    __gpr uint32_t i, temp;

    /* Guard nresults < 2: the original computed nresults - 1 on an
     * unsigned zero, which wrapped around and walked far outside the
     * label set. A single result needs no sorting either. */
    if (nresults > 1) {
        sorted = 0;
        while (sorted == 0) {
            sorted = 1;
            for (i = 0; i < nresults - 1; i++) {
                if (results->values[i] > results->values[i + 1]) {
                    temp = results->values[i];
                    results->values[i] = results->values[i + 1];
                    results->values[i + 1] = temp;
                    sorted = 0;
                }
            }
        }
    }

    for (i = 0; i < nresults; i++) {
        results->labels[i] = results->results[i].rule_id;
    }

    return nresults;
}
#ifdef DCFL_PIPELINED
/**
 * Get island, ME nr and context number of this thread, and pack into
 * structure along with the signal that is to be set when DCFL worker
 * completes.
 *
 * @param sig_num Signal number declared as volatile in this ME, to be
 *                set with an interthread_signal call.
 */
struct dcfl_thread_info
dcfl_get_thread_info(int sig_num)
{
    struct dcfl_thread_info info;
    /* decode the ACTIVE_CTX_STS CSR into its island/ME/context parts */
    uint32_t sts = local_csr_read(local_csr_active_ctx_sts);

    info.isl_id = (sts >> 25) & 0x3f;  /* bits [30:25] */
    info.me = (sts >> 3) & 0xf;        /* bits [6:3] */
    info.ctx = sts & 0x7;              /* bits [2:0] */
    info.sig = sig_num;

    return info;
}
/**
 * Write labels to a label buffer symbol: copy the label set out of
 * local memory to 'dest' in DCFL_WRITE_BUF_LW-word chunks.
 *
 * NOTE(review): 'src' is a struct pointer, so 'src += sizeof(write_buf)'
 * advances by sizeof(write_buf) *elements*, i.e.
 * sizeof(write_buf) * sizeof(struct dcfl_label_set) bytes, while
 * 'offset' (applied to 'dest') advances by sizeof(write_buf) bytes
 * only. The two strides disagree; confirm the intended source stride
 * before relying on output beyond the first chunk.
 *
 * @param labels Pointer to label set containing dcfl results
 * @param nrules Number of results in label set
 * @param dest Destination address of results
 */
static void
dcfl_write_labels(__lmem struct dcfl_label_set *labels, uint32_t nrules,
                  __addr40 uint8_t *dest)
{
    __xwrite uint32_t write_buf[DCFL_WRITE_BUF_LW];
    uint32_t offset = 0;
    __lmem struct dcfl_label_set *src;

    src = labels;
    for (; offset < (nrules * sizeof(uint16_t)); offset += sizeof(write_buf)) {
        reg_cp(write_buf, src, sizeof(write_buf));
        mem_write32(write_buf, (__addr40 uint8_t *)dest + offset,
                    sizeof(write_buf));
        src += sizeof(write_buf);
    }
}
/*
 * Copy up to DCFL_MAX_LABELS_PER_LKP 16-bit labels from 'src' into
 * 'labelset' in local memory. Labels travel packed two per 32-bit
 * word; for an odd count the low half of the last word is cleared
 * (the &= 0xFFFF0000 keeps only the upper 16 bits -- presumably the
 * first label of the pair; confirm the packing order).
 */
static void
dcfl_read_labels_to_lmem(__lmem uint32_t *labelset,
                         __addr40 struct dcfl_label_set *src,
                         int nlabels
                         )
{
    __xread uint32_t read_buf[DCFL_LABELSET_LW];
    unsigned int i, lmoff = 0;

    mem_read32(read_buf, (__addr40 uint32_t *)src,
               sizeof(read_buf));

    /* clamp to the per-lookup maximum */
    if (nlabels > DCFL_MAX_LABELS_PER_LKP) {
        nlabels = DCFL_MAX_LABELS_PER_LKP;
    }

    for (i = 0; i < ((nlabels + 1)/2); i++) {
        labelset[lmoff] = read_buf[i];
        lmoff += 1;
    }
    if (nlabels % 2) {
        labelset[lmoff] &= 0xFFFF0000;
    }
}
/**
 * Read child worker thread_info from 'worker_thread_info', spinning
 * until the worker has published a non-zero value, then raise the
 * interthread signal encoded in it so the child continues.
 *
 * NOTE(review): this busy-waits on external memory with no timeout;
 * a worker that never publishes will hang this context.
 */
static void
dcfl_signal_an_worker(__addr40 uint32_t *worker_thread_info) {
    __gpr uint32_t ithread_sig;
    __xread struct dcfl_thread_info read_buf[1];

    do {
        mem_read32(read_buf, worker_thread_info, sizeof(read_buf));
    } while (read_buf[0].value == 0);

    ithread_sig = read_buf[0].value & DCFL_ITHREAD_MASK;
    __asm { ct[interthread_signal, --, ithread_sig, 0, --] }
}
/*
* Public methods of MicroC API:
*/
/**
 * One stage of the pipelined DCFL classification.
 *
 * Reads the table config, queues the next stage onto the AN work queue
 * when more fields remain, performs the field lookup for
 * anq_entry->field_idx, combines the result with the previous stage's
 * label set through the aggregation network (AN), and either publishes
 * labels for the next stage or finishes the lookup and signals the
 * application thread.
 */
void
dcfl_pipeline_stage(struct dcfl_an_workq_entry *anq_entry,
                    mem_ring_addr_t anq_raddr)
{
    __xread struct dcfl_me_table_config_small tblcfg;
    __xread struct dcfl_me_field_config *current_f_cfg;
    __xwrite uint32_t anq_xfer[DCFL_AN_WORKQ_LW];
    __xwrite uint32_t write_buf[1];
    __xread uint32_t read_buf[1];
    struct dcfl_an_workq_entry next_anq_entry;
    __addr40 __imem uint32_t worker_thread_info = 0;
    __gpr unsigned int valid_val = 0;
    __gpr unsigned int labelset_idx, nlabels, prev_nlabels, nmatches = 0;
    __gpr unsigned int next_field_idx;
    __addr40 uint8_t *cfg_ptr;
    __xrw uint32_t field_value[4];
    __lmem struct dcfl_label_set label_set[3];
    volatile SIGNAL parent_sig;
#ifdef DCFL_DEBUG
    __xwrite uint32_t jrnl_buf[DCFL_DBGJRNL_BUFLEN];
    mem_ring_addr_t jrnl_raddr = mem_ring_get_addr(&DCFL_DBGJRNL_NAME);
#endif

    cfg_ptr = anq_entry->cfg_ptr;
    mem_read32(&tblcfg, (__imem void *) cfg_ptr, sizeof(tblcfg));
    if (tblcfg.num_fields == 0)
        goto dcfl_pipeline_complete;
    __critical_path();

    /* Pipeline next stage so long. Have it wait for inputs to be ready */
    if (tblcfg.num_fields > 2) {
        if (anq_entry->field_idx == 0) {
            /* stage 0 handles fields 0 and 1, so the next stage starts at 2 */
            next_field_idx = 2;
        } else {
            next_field_idx = anq_entry->field_idx + 1;
        }
        if ((tblcfg.num_fields - next_field_idx) > 0) {
            /* Setup work for next stage in pipeline */
            next_anq_entry.field_idx = next_field_idx;
            next_anq_entry.key_ptr = anq_entry->key_ptr;
            next_anq_entry.cfg_ptr = anq_entry->cfg_ptr;
            next_anq_entry.labelset_ptr = anq_entry->labelset_ptr;
            next_anq_entry.nlabels_ptr = anq_entry->nlabels_ptr;
            next_anq_entry.app_thread_info = anq_entry->app_thread_info;
            next_anq_entry.thread_info_ptr = &worker_thread_info;
            next_anq_entry.labelset_idx = 1 - anq_entry->labelset_idx;
            reg_cp(anq_xfer, (void *)&next_anq_entry, sizeof(anq_xfer));
            mem_workq_add_work(DCFL_AN_QNUM, anq_raddr, &anq_xfer,
                               sizeof(anq_xfer));
        }
    }

    /* move cfg_ptr past num_fields to the start of the field configs */
    cfg_ptr += sizeof(tblcfg.num_fields);

dcfl_next_field:
    if (anq_entry->field_idx > 0) {
        cfg_ptr += anq_entry->field_idx * sizeof(struct dcfl_me_field_config);
        mem_read32(&tblcfg.f_cfg[0], cfg_ptr,
                   sizeof(struct dcfl_me_field_config));
    }
    current_f_cfg = &tblcfg.f_cfg[0];

    /* 2a) get field value from the key */
    dcfl_get_field_value_from_mem(&valid_val, &field_value[0],
                                  anq_entry->key_ptr, current_f_cfg);
    reg_cp(field_value, field_value, sizeof(field_value));

#ifdef DCFL_P4_STRICT_HDR_VALID
    /* NOTE(review): hdr_valid_val and hdr_valid are not declared in
     * this function (they are declared in dcfl_classify_pipeline), so
     * this branch cannot compile with DCFL_P4_STRICT_HDR_VALID set --
     * confirm. */
    if (current_f_cfg->hdr_valid_bitlen > 0) {
        /* Check header validity */
        dcfl_get_hdr_valid_val_from_mem(&hdr_valid_val, anq_entry->key_ptr,
                                        current_f_cfg);
        hdr_valid = (hdr_valid_val == current_f_cfg->valid_value);
    }
#endif

    /* 2b) field lookup */
    /* for the first field_id we put the data in the intermediate buffer
     * this is because no AN calc happens for the first field
     */
    labelset_idx = 2;
    if ((tblcfg.num_fields > 1) && (anq_entry->field_idx == 0)) {
        labelset_idx = 0;
    }
#ifdef DCFL_P4_STRICT_HDR_VALID
    nlabels =
        dcfl_field_lookup(&(label_set[labelset_idx]), valid_val,
                          field_value, current_f_cfg, hdr_valid);
#else
    nlabels =
        dcfl_field_lookup(&(label_set[labelset_idx]), valid_val,
                          field_value, current_f_cfg, 1);
#endif

#ifdef DCFL_DEBUG
    jrnl_buf[0] = ((valid_val & 0xFFFF) << 16) + nlabels;
    jrnl_buf[1] = label_set[labelset_idx].values[0];
    mem_ring_journal(DCFL_DBGJRNL_QNUM, jrnl_raddr, jrnl_buf,
                     sizeof(jrnl_buf));
#endif

    if (nlabels == 0) {
        /* no label for this field -> no rule can match: publish a zero
         * count, release any waiting child worker and finish */
        write_buf[0] = 0;
        mem_write32(write_buf, anq_entry->nlabels_ptr, sizeof(write_buf));
        if (anq_entry->field_idx > 0) {
            dcfl_signal_an_worker(&worker_thread_info);
        }
        goto dcfl_pipeline_complete;
    }

    /* No AN work on first field */
    if (tblcfg.num_fields > 1) {
        if (anq_entry->field_idx == 0) {
            anq_entry->field_idx++;
            prev_nlabels = nlabels;
            goto dcfl_next_field;
        }
    }

    /* we ping-pong the set id for output set and intermediate set,
     * lookup set is fixed at 2
     */
    labelset_idx = 1 - anq_entry->labelset_idx;

    /* 2c) AN 'crossproduct */
    if (tblcfg.num_fields == 1) {
        /* single field AN: init a label_set with one label of value 0 */
        prev_nlabels = 1;
        (label_set+(anq_entry->labelset_idx))->labels[0] = 0;
    } else {
        /* Read "foreign" labelset from dcfl_label_buffer into LM */
        if (anq_entry->field_idx > 1) {
            /* Child worker, assign signal for parent to assert */
            __assign_relative_register(&parent_sig, DCFL_STAGE_SIG);
            {
                /* Write thread info into thread_info_ptr,
                 * so that parent can signal child when inputs are ready */
                __xwrite struct dcfl_thread_info write_buf[1];
                struct dcfl_thread_info thread_info;
                write_buf[0] = dcfl_get_thread_info(DCFL_STAGE_SIG);
                mem_write32(write_buf, anq_entry->thread_info_ptr,
                            sizeof(write_buf));
            }
            wait_for_all(&parent_sig);

            mem_read32(read_buf, anq_entry->nlabels_ptr, sizeof(read_buf));
            if (read_buf[0] == 0) {
                /* parent stage produced no labels: propagate and stop */
                dcfl_signal_an_worker(&worker_thread_info);
                return;
            }
            prev_nlabels = read_buf[0];

            dcfl_read_labels_to_lmem(
                (__lmem uint32_t *)&label_set[anq_entry->labelset_idx],
                (__addr40 struct dcfl_label_set *)anq_entry->labelset_ptr,
                prev_nlabels);
        }
    }

    nmatches = dcfl_an(&(label_set[anq_entry->labelset_idx]),
                       prev_nlabels,
                       &(label_set[2]),
                       nlabels,
                       &(label_set[labelset_idx]),
                       current_f_cfg->an_stage_base,
                       current_f_cfg->an_stage_base_hi,
                       (tblcfg.num_fields - anq_entry->field_idx - 1));

    /* Store number of matches in DRAM */
    write_buf[0] = nmatches;
    mem_write32(write_buf, anq_entry->nlabels_ptr, sizeof(write_buf));

    if (nmatches == 0) {
        /* signal child worker to complete and be freed */
        dcfl_signal_an_worker(&worker_thread_info);
        goto dcfl_pipeline_complete;
    }

    if ((tblcfg.num_fields - anq_entry->field_idx) == 1) {
        /* If this is the final stage, possibly sort results,
         * then fall through to dcfl_pipeline_complete */
#if (defined(DCFL_PRIORITY_SORT) && !defined(DCFL_SUPPORT_MATCH_FILTERING))
        dcfl_sort_results(&(label_set[labelset_idx]),
                          nmatches);
#endif
        dcfl_write_labels(&label_set[labelset_idx], DCFL_MAX_LABELS_PER_LKP,
                          (__addr40 uint8_t *)anq_entry->labelset_ptr);
    } else {
        dcfl_write_labels(&label_set[labelset_idx], DCFL_MAX_LABELS_PER_LKP,
                          (__addr40 uint8_t *)anq_entry->labelset_ptr);
        dcfl_signal_an_worker(&worker_thread_info);
        return;
    }

dcfl_pipeline_complete:
    local_csr_write(local_csr_mailbox_3, nmatches);
    {
        uint32_t ithread_sig;

        /* Signal app thread of completion */
        ithread_sig = anq_entry->app_thread_info.value & DCFL_ITHREAD_MASK;
        __asm { ct[interthread_signal, --, ithread_sig, 0, --] }
    }
}
/* Run a full DCFL classification of @key against @table by enqueueing the
 * first pipeline stage on the AN worker queue and blocking until the final
 * stage signals this thread.
 *
 * key        packed lookup key (40-bit addressed memory)
 * table      DCFL table index; values >= DCFL_MAX_NUM_TABLES return 0
 * label_set  label-set working/result buffers shared with the workers
 * nlabels    out: match count written by the final stage
 * cfg        table configuration blob
 * anq_raddr  ring address of the AN work queue
 *
 * Returns the number of matching labels (0 on invalid table or miss).
 */
uint32_t
dcfl_classify_pipeline(__addr40 uint8_t *key, uint32_t table,
                       __addr40 struct dcfl_label_set *label_set,
                       __addr40 uint32_t *nlabels,
                       __addr40 __imem uint8_t *cfg,
                       mem_ring_addr_t anq_raddr
                      )
{
    __xread uint32_t read_buf[1];
    struct dcfl_an_workq_entry anq_entry;
    volatile SIGNAL pipeline_sig;
#ifdef DCFL_P4_STRICT_HDR_VALID
    __gpr unsigned int hdr_valid = 1;
    __gpr unsigned int hdr_valid_val;
#endif
    if (table >= DCFL_MAX_NUM_TABLES)
        return 0;
    __critical_path();
#if DCFL_ME_TABLE_CONFIG_SMALL_NUM_FIELDS != 1
    /* The code below expects the field prefetch to be 1 */
    #error DCFL_ME_TABLE_CONFIG_SMALL_NUM_FIELDS must be 1
#endif
    /* Seed the work-queue entry for field 0; later stages are spawned by
     * the workers themselves (see dcfl_pipeline_stage). */
    anq_entry.field_idx = 0;
    anq_entry.key_ptr = key;
    anq_entry.cfg_ptr = cfg;
    anq_entry.labelset_ptr = &label_set[0];
    anq_entry.nlabels_ptr = nlabels;
    anq_entry.labelset_idx = 0;
    /* Record this thread so the final stage can interthread-signal us. */
    anq_entry.app_thread_info = dcfl_get_thread_info(
            __signal_number(&pipeline_sig));
    /* Put first stage in pipeline for lookup and crossproducting.
     * This stage will kick off all further stages. */
    dcfl_pipeline_stage(&anq_entry, anq_raddr);
    wait_for_all(&pipeline_sig);
    /* The final stage stored the match count at *nlabels; read it back. */
    mem_read32(read_buf, nlabels, sizeof(read_buf));
    return read_buf[0];
}
#endif
/* Single-threaded DCFL classification of @key against @table.
 *
 * Iterates over every configured field of the table: extracts the field
 * value from the key, performs the field lookup to obtain a label set, and
 * aggregates ("AN crossproduct") it with the labels accumulated so far.
 * Label-set usage: index 2 always holds the current field-lookup result,
 * while indices 0 and 1 ping-pong between AN input and AN output.
 *
 * key        packed lookup key in local memory
 * table      DCFL table index; >= DCFL_MAX_NUM_TABLES returns 0
 * label_set  array of three label sets used as working/output buffers
 * labels_id  out: index (0..2) of the label set holding the final result
 * cfg        table config: header followed by per-field configurations
 *
 * Returns the number of matching labels (0 on miss or invalid table).
 */
uint32_t
dcfl_classify(void *key, uint32_t table,
              __lmem struct dcfl_label_set *label_set,
              uint32_t *labels_id, __addr40 __imem uint8_t *cfg)
{
    __xread struct dcfl_me_table_config_small tblcfg;
    __xread struct dcfl_me_field_config *current_f_cfg;
    __addr40 __imem uint8_t *cfg_ptr = cfg;
    __gpr uint32_t field_id;
    __gpr uint32_t buffer; /* NOTE(review): appears unused in this function */
    __lmem uint32_t nlabels[3];
    __lmem uint32_t *localkey = key;
    /* which label set to use as output */
    __gpr uint8_t id_out;
#ifdef DCFL_P4_STRICT_HDR_VALID
    __gpr unsigned int hdr_valid = 1;
    __gpr unsigned int hdr_valid_val;
#endif
#ifdef DCFL_DEBUG
    __xwrite uint32_t jrnl_buf[DCFL_DBGJRNL_BUFLEN];
    mem_ring_addr_t jrnl_raddr = mem_ring_get_addr(&DCFL_DBGJRNL_NAME);
#endif
    if (table >= DCFL_MAX_NUM_TABLES)
        return 0;
    __critical_path();
#if DCFL_ME_TABLE_CONFIG_SMALL_NUM_FIELDS != 1
    /* The code below expects the field prefetch to be 1 */
    #error DCFL_ME_TABLE_CONFIG_SMALL_NUM_FIELDS must be 1
#endif
    /* 1) read table header: field count plus the first field config */
    mem_read32(&tblcfg, (__imem void *) cfg_ptr, sizeof(tblcfg));
    if (tblcfg.num_fields == 0)
        return 0;
    __critical_path();
    /* point cfg_ptr to the first field entry */
    cfg_ptr += sizeof(tblcfg.num_fields);
    /* 2) Start the DCFL classification process*/
    for (field_id = 0; field_id < tblcfg.num_fields; field_id++)
    {
        __gpr unsigned int lkup_set;
        __gpr unsigned int valid_val = 0;
        __xrw uint32_t field_value[4];
        /* We grab one field at a time */
        if (field_id >= 1) { /* the first one is already in the buffer */
            mem_read32(&tblcfg.f_cfg[0], cfg_ptr,
                       sizeof(struct dcfl_me_field_config));
        }
        cfg_ptr += sizeof(struct dcfl_me_field_config);
        current_f_cfg = &tblcfg.f_cfg[0];
        /* 2a) get field value from the key */
        dcfl_get_field_value(&valid_val, &field_value[0], localkey,
                             current_f_cfg);
#ifdef DCFL_P4_STRICT_HDR_VALID
        if (current_f_cfg->hdr_valid_bitlen > 0) {
            /* Check header validity */
            dcfl_get_hdr_valid_val(&hdr_valid_val, localkey, current_f_cfg);
            hdr_valid = (hdr_valid_val == current_f_cfg->valid_value);
        }
#endif
        /* 2b) field lookup */
        /* for the first field_id we put the data in the intermediate buffer
         * this is because no AN calc happens for the first field
         */
        lkup_set = 2;
        if ((tblcfg.num_fields > 1) && (field_id == 0))
            lkup_set = 0;
#ifdef DCFL_P4_STRICT_HDR_VALID
        nlabels[lkup_set] =
            dcfl_field_lookup(&(label_set[lkup_set]), valid_val,
                              field_value, current_f_cfg, hdr_valid);
#else
        nlabels[lkup_set] =
            dcfl_field_lookup(&(label_set[lkup_set]), valid_val,
                              field_value, current_f_cfg, 1);
#endif
#ifdef DCFL_DEBUG
        /* Journal the field value and lookup result for offline debug. */
        jrnl_buf[0] = ((valid_val & 0xFFFF) << 16) + nlabels[lkup_set];
        jrnl_buf[1] = label_set[lkup_set].values[0];
        mem_ring_journal(DCFL_DBGJRNL_QNUM, jrnl_raddr, jrnl_buf,
                         sizeof(jrnl_buf));
#endif
        /* set the output buffer, used in field count is 1 and lookup miss */
        id_out = lkup_set;
        /* Miss on any field means no rule can match: stop early. */
        if (nlabels[lkup_set] == 0)
            break;
#ifdef DCFL_NO_AN
        goto dcfl_skip_an;
#endif
        /* No AN work on first field */
        if ((tblcfg.num_fields > 1) && (field_id == 0))
            continue;
        /* 2c) AN 'crossproduct */
        /* we ping-pong the set id for output set and intermediate set,
         * lookup set is fixed at 2
         */
        id_out = field_id & 0x1;
        if (tblcfg.num_fields == 1) {
            /* single field AN: init a label_set with one label of value 0 */
            nlabels[(id_out + 1) & 0x1] = 1;
            (label_set+((id_out + 1) & 0x1))->labels[0] = 0;
        }
        /* Combine previous labels with this field's lookup labels. */
        nlabels[id_out] =
            dcfl_an(&(label_set[(id_out + 1) & 0x1]),
                    nlabels[(id_out + 1) & 0x1],
                    &(label_set[2]),
                    nlabels[2],
                    &(label_set[id_out]),
                    current_f_cfg->an_stage_base,
                    current_f_cfg->an_stage_base_hi,
                    (tblcfg.num_fields - field_id - 1)
                    );
        if (nlabels[id_out] == 0)
            break;
    } /* for (field_id=0 */
#if (defined(DCFL_PRIORITY_SORT) && !defined(DCFL_SUPPORT_MATCH_FILTERING))
    /* Order the surviving labels by priority before returning. */
    dcfl_sort_results(&(label_set[id_out]),
                      nlabels[id_out]
                      );
#endif
dcfl_skip_an:
    *labels_id = id_out;
    return nlabels[id_out];
}
#endif /* !_DCFL__LIBDCFL_C_ */
|
matthewloring/core
|
x/ref/services/identity/internal/server/identityd.go
|
<gh_stars>0
// Copyright 2015 The Vanadium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// HTTP server that uses OAuth to create security.Blessings objects.
package server
import (
"fmt"
mrand "math/rand"
"net"
"net/http"
"strconv"
"strings"
"syscall"
"time"
"v.io/v23"
"v.io/v23/context"
"v.io/v23/naming"
"v.io/v23/rpc"
"v.io/v23/security"
"v.io/v23/verror"
"v.io/x/ref/lib/security/audit"
"v.io/x/ref/lib/signals"
"v.io/x/ref/services/discharger"
"v.io/x/ref/services/identity/internal/auditor"
"v.io/x/ref/services/identity/internal/blesser"
"v.io/x/ref/services/identity/internal/caveats"
"v.io/x/ref/services/identity/internal/dischargerlib"
"v.io/x/ref/services/identity/internal/handlers"
"v.io/x/ref/services/identity/internal/oauth"
"v.io/x/ref/services/identity/internal/revocation"
"v.io/x/ref/services/identity/internal/templates"
)
// Object-name suffixes under which the two Vanadium services are published
// by the dispatcher (see newDispatcher).
const (
	macaroonService   = "macaroon"
	dischargerService = "discharger"
)
// IdentityServer bundles the collaborators and runtime state of the
// OAuth-based HTTP identity service.
type IdentityServer struct {
	oauthProvider     oauth.OAuthProvider          // authenticates users
	auditor           audit.Auditor                // audits the root principal
	blessingLogReader auditor.BlessingLogReader    // exposes audit logs
	revocationManager revocation.RevocationManager // revocation data + discharges
	caveatSelector    caveats.CaveatSelector       // UI for caveat selection
	// rootedObjectAddrs is populated by setupBlessingServices from the RPC
	// server's endpoints.
	rootedObjectAddrs  []naming.Endpoint
	assetsPrefix       string
	mountNamePrefix    string
	dischargerLocation string
	registeredApps     handlers.RegisteredAppMap
}
// NewIdentityServer constructs an IdentityServer wired with the given
// collaborators:
//   - oauthProvider authenticates users
//   - auditor and blessingLogReader audit the root principal and read audit logs
//   - revocationManager stores revocation data and grants discharges
func NewIdentityServer(oauthProvider oauth.OAuthProvider, auditor audit.Auditor, blessingLogReader auditor.BlessingLogReader, revocationManager revocation.RevocationManager, caveatSelector caveats.CaveatSelector, assetsPrefix, mountNamePrefix, dischargerLocation string, registeredApps handlers.RegisteredAppMap) *IdentityServer {
	s := new(IdentityServer)
	s.oauthProvider = oauthProvider
	s.auditor = auditor
	s.blessingLogReader = blessingLogReader
	s.revocationManager = revocationManager
	s.caveatSelector = caveatSelector
	s.assetsPrefix = assetsPrefix
	s.mountNamePrefix = mountNamePrefix
	s.dischargerLocation = dischargerLocation
	s.registeredApps = registeredApps
	return s
}
// findUnusedPort probes up to 1000 random ports in [1024, 65535] and returns
// the first one that can be bound. There is no guarantee the port is still
// free when the caller binds it. If no port is found, (0, nil) is returned;
// a socket-creation failure returns (0, err).
func findUnusedPort() (int, error) {
	rng := mrand.New(mrand.NewSource(time.Now().UnixNano()))
	for attempt := 0; attempt < 1000; attempt++ {
		fd, err := syscall.Socket(syscall.AF_INET, syscall.SOCK_STREAM, syscall.IPPROTO_TCP)
		if err != nil {
			return 0, err
		}
		candidate := int(1024 + rng.Int31n(64512))
		bindErr := syscall.Bind(fd, &syscall.SockaddrInet4{Port: candidate})
		// Close the probe socket regardless of the bind outcome.
		syscall.Close(fd)
		if bindErr == nil {
			return candidate, nil
		}
	}
	return 0, nil
}
// Serve wraps both contexts' principals with the auditor, ensures the HTTP
// address has a usable port, starts the RPC and HTTPS services via Listen,
// and blocks until a shutdown signal arrives.
func (s *IdentityServer) Serve(ctx, oauthCtx *context.T, externalHttpAddr, httpAddr, tlsConfig string) {
	// Route all blessing operations of both principals through the auditor.
	ctx, err := v23.WithPrincipal(ctx, audit.NewPrincipal(ctx, s.auditor))
	if err != nil {
		ctx.Panic(err)
	}
	oauthCtx, err = v23.WithPrincipal(oauthCtx, audit.NewPrincipal(oauthCtx, s.auditor))
	if err != nil {
		ctx.Panic(err)
	}
	// Pick a random free port when none (or "0") was requested.
	httphost, httpport, err := net.SplitHostPort(httpAddr)
	if err != nil || httpport == "0" {
		httpportNum, err := findUnusedPort()
		if err != nil {
			ctx.Panic(err)
		}
		httpAddr = net.JoinHostPort(httphost, strconv.Itoa(httpportNum))
	}
	ctx, cancel := context.WithCancel(ctx)
	rpcServer, _, externalAddr := s.Listen(ctx, oauthCtx, externalHttpAddr, httpAddr, tlsConfig)
	// Advertise the endpoints on stdout for whoever started this process.
	fmt.Printf("HTTP_ADDR=%s\n", externalAddr)
	if len(s.rootedObjectAddrs) > 0 {
		fmt.Printf("NAME=%s\n", s.rootedObjectAddrs[0].Name())
	}
	<-signals.ShutdownOnSignals(ctx)
	ctx.Infof("Received shutdown request.")
	cancel()
	<-rpcServer.Closed()
	ctx.Infof("Successfully stopped the rpc server.")
}
// Listen registers the HTTP handlers (blessing root, health check, OAuth
// flow, landing page), starts the Vanadium blessing/discharging services and
// the HTTPS server, and returns the RPC server, its published names, and
// the externally visible HTTP address.
func (s *IdentityServer) Listen(ctx, oauthCtx *context.T, externalHttpAddr, httpAddr, tlsConfig string) (rpc.Server, []string, string) {
	// json-encoded public key and blessing names of this server
	principal := v23.GetPrincipal(ctx)
	http.Handle("/auth/blessing-root", handlers.BlessingRoot{principal})
	rpcServer, published, err := s.setupBlessingServices(ctx, oauthCtx)
	if err != nil {
		ctx.Fatalf("Failed to setup vanadium services for blessing: %v", err)
	}
	externalHttpAddr = httpAddress(externalHttpAddr, httpAddr)
	// Liveness probe: always responds 204.
	http.HandleFunc("/health", func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusNoContent)
	})
	n := "/auth/google/"
	args := oauth.HandlerArgs{
		Principal:          principal,
		Addr:               fmt.Sprintf("%s%s", externalHttpAddr, n),
		BlessingLogReader:  s.blessingLogReader,
		RevocationManager:  s.revocationManager,
		DischargerLocation: s.dischargerLocation,
		// Resolved lazily so the list reflects the server's endpoints at
		// request time.
		MacaroonBlessingService: func() []string {
			status := rpcServer.Status()
			names := make([]string, len(status.Endpoints))
			for i, e := range status.Endpoints {
				names[i] = naming.JoinAddressName(e.Name(), macaroonService)
			}
			return names
		},
		OAuthProvider:  s.oauthProvider,
		CaveatSelector: s.caveatSelector,
		AssetsPrefix:   s.assetsPrefix,
	}
	if s.revocationManager != nil {
		args.DischargeServers = appendSuffixTo(published, dischargerService)
	}
	http.Handle(n, oauth.NewHandler(ctx, args))
	// Landing page.
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		self, _ := principal.BlessingStore().Default()
		tmplArgs := struct {
			Self               security.Blessings
			DischargeServers   []string
			ListBlessingsRoute string
			AssetsPrefix       string
			Email              string
		}{
			Self:               self,
			DischargeServers:   args.DischargeServers,
			ListBlessingsRoute: oauth.ListBlessingsRoute,
			AssetsPrefix:       s.assetsPrefix,
		}
		if err := templates.Home.Execute(w, tmplArgs); err != nil {
			ctx.Info("Failed to render template:", err)
		}
	})
	ctx.Infof("Running HTTP server at: %v", externalHttpAddr)
	go runHTTPSServer(ctx, httpAddr, tlsConfig)
	return rpcServer, published, externalHttpAddr
}
// appendSuffixTo joins suffix onto every object name and returns the
// resulting list; the input slice is not modified.
func appendSuffixTo(objectname []string, suffix string) []string {
	result := make([]string, 0, len(objectname))
	for _, name := range objectname {
		result = append(result, naming.JoinAddressName(name, suffix))
	}
	return result
}
// setupBlessingServices starts the Vanadium services for blessing and
// discharging (all on the same port) plus the HTTP handler for the OAuth2
// access-token blesser. It returns the RPC server and the rooted object
// name under which the services were published.
func (s *IdentityServer) setupBlessingServices(ctx, oauthCtx *context.T) (rpc.Server, []string, error) {
	disp := newDispatcher()
	p := v23.GetPrincipal(ctx)
	b, _ := p.BlessingStore().Default()
	blessingNames := security.BlessingNames(p, b)
	if len(blessingNames) == 0 {
		// Plain string: the previous fmt.Sprintf call had no format verbs
		// and is flagged by `go vet`.
		return nil, nil, verror.New(verror.ErrInternal, ctx, "identity server has no blessings?")
	}
	if len(blessingNames) > 1 {
		return nil, nil, verror.New(verror.ErrInternal, ctx, fmt.Sprintf("cannot configure identity server with >1 (%d = %v) blessings - not quite sure what names to select for the discharger service etc.", len(blessingNames), blessingNames))
	}
	// Mount under <prefix>/<encoded blessing name>.
	objectAddr := naming.Join(s.mountNamePrefix, naming.EncodeAsNameElement(blessingNames[0]))
	ctx, server, err := v23.WithNewDispatchingServer(ctx, objectAddr, disp)
	if err != nil {
		return nil, nil, err
	}
	s.rootedObjectAddrs = server.Status().Endpoints
	// Derive a rooted name: prefer the mount name, then the namespace root,
	// and finally fall back to the server's own endpoint name.
	var rootedObjectAddr string
	if naming.Rooted(objectAddr) {
		rootedObjectAddr = objectAddr
	} else if nsroots := v23.GetNamespace(ctx).Roots(); len(nsroots) >= 1 {
		rootedObjectAddr = naming.Join(nsroots[0], objectAddr)
	} else {
		rootedObjectAddr = s.rootedObjectAddrs[0].Name()
	}
	if s.dischargerLocation == "" {
		s.dischargerLocation = naming.Join(rootedObjectAddr, dischargerService)
	}
	ctx.Infof("Vanadium Blessing and discharger services will be published at %v", rootedObjectAddr)
	// Start the HTTP Handler for the OAuth2 access token based blesser.
	handlerParams := handlers.OAuthBlesserParams{
		OAuthProvider:      s.oauthProvider,
		BlessingDuration:   365 * 24 * time.Hour,
		RevocationManager:  s.revocationManager,
		DischargerLocation: s.dischargerLocation,
	}
	http.Handle("/auth/google/bless", handlers.NewOAuthBlessingHandler(oauthCtx, handlerParams, s.registeredApps))
	return server, []string{rootedObjectAddr}, nil
}
// newDispatcher returns a dispatcher for both the blessing and the
// discharging service, plus a glob invoker over the registered names.
func newDispatcher() *dispatcher {
	d := &dispatcher{
		m: map[string]interface{}{
			macaroonService:   blesser.NewMacaroonBlesserServer(),
			dischargerService: discharger.DischargerServer(dischargerlib.NewDischarger()),
		},
	}
	// Set up the glob invoker.
	var children []string
	for k := range d.m { // idiomatic: the map value is unused
		children = append(children, k)
	}
	d.m[""] = rpc.ChildrenGlobberInvoker(children...)
	return d
}
// dispatcher routes object-name suffixes to the service objects registered
// in m (see newDispatcher).
type dispatcher struct {
	m map[string]interface{}
}
// Lookup returns the service object registered under suffix, authorizing
// every caller; unknown suffixes yield ErrNoExist.
func (d *dispatcher) Lookup(ctx *context.T, suffix string) (interface{}, security.Authorizer, error) {
	invoker, ok := d.m[suffix]
	if !ok || invoker == nil {
		return nil, nil, verror.New(verror.ErrNoExist, ctx, suffix)
	}
	return invoker, security.AllowEveryone(), nil
}
// runHTTPSServer serves HTTP over TLS on addr using the certificate and key
// paths given in tlsConfig ("cert,key"). It blocks forever on success and
// aborts the context on any configuration or serve error.
func runHTTPSServer(ctx *context.T, addr, tlsConfig string) {
	if tlsConfig == "" {
		ctx.Fatal("Please set the --tls-config flag")
	}
	parts := strings.Split(tlsConfig, ",")
	if len(parts) != 2 {
		ctx.Fatalf("Could not parse --tls-config. Must have exactly two components, separated by a comma")
	}
	cert, key := parts[0], parts[1]
	ctx.Infof("Starting HTTP server with TLS using certificate [%s] and private key [%s] at https://%s", cert, key, addr)
	if err := http.ListenAndServeTLS(addr, cert, key, nil); err != nil {
		ctx.Fatalf("http.ListenAndServeTLS failed: %v", err)
	}
}
// httpAddress returns the https:// URL the service should advertise:
// externalHttpAddr when non-empty, otherwise httpAddr.
func httpAddress(externalHttpAddr, httpAddr string) string {
	addr := httpAddr
	if externalHttpAddr != "" {
		addr = externalHttpAddr
	}
	return fmt.Sprintf("https://%v", addr)
}
|
DanielSpasov/Game-Hub
|
src/components/InfoBox/index.js
|
<reponame>DanielSpasov/Game-Hub
// Barrel file: re-export the InfoBox component so consumers can import it
// from the folder root ('components/InfoBox').
import InfoBox from './InfoBox'
export default InfoBox
|
civet148/gotools
|
mq/test/consumer/main.go
|
package main
import (
"github.com/civet148/gotools/log"
"github.com/civet148/gotools/mq"
_ "github.com/civet148/gotools/mq/rabbit"
)
// ConsumerHandler receives consumed messages via its OnConsume callback
// (passed to rabbitMQ.Consume in TopicConsumer below).
type ConsumerHandler struct {
}
// main connects to a local RabbitMQ broker in topic mode and starts a
// consumer on the news topic queue.
func main() {
	var err error
	//var strTopicRoutingKey = "TOPIC.NEWS.ROUTINGKEY"
	var strTopicBindingKey = "TOPIC.NEWS.#" // '#' matches one or more dot-separated words, '*' matches exactly one
	var strQueueName = "TOPIC.QUEUE"
	strConnUrl := "amqp://127.0.0.1:5672"
	// The adapter-creation error was previously discarded with '_', which
	// could let a nil adapter reach Connect below.
	rabbitMQ, err := mq.NewMQ(mq.Adapter_RabbitMQ)
	if err != nil {
		log.Errorf("create MQ adapter error [%v]", err.Error())
		return
	}
	if err = rabbitMQ.Connect(mq.Mode_Topic, strConnUrl); err != nil {
		log.Errorf("connect to MQ broker error [%v]", err.Error())
		return
	}
	_ = TopicConsumer(rabbitMQ, strTopicBindingKey, strQueueName)
}
// OnConsume is the consumer callback: it logs every message delivered to
// the subscribed queue together with its binding/routing information.
func (c *ConsumerHandler) OnConsume(adapter mq.Adapter, strBindingKey, strQueueName, strKey, strValue string) {
	log.Infof("[%+v] binding key [%v] queue name [%v] message key [%+v] value [%+v]", adapter, strBindingKey, strQueueName, strKey, strValue)
}
// TopicConsumer subscribes ConsumerHandler to queue strQueueName bound with
// strBindingKey and returns any subscription error.
func TopicConsumer(rabbitMQ mq.IReactMQ, strBindingKey, strQueueName string) (err error) {
	err = rabbitMQ.Consume(strBindingKey, strQueueName, &ConsumerHandler{})
	if err != nil {
		// Use the formatting variant for consistency with main(); plain
		// log.Error would not expand the "%v" verb.
		log.Errorf("%v", err.Error())
		return
	}
	return
}
|
BSeppke/vigra
|
include/vigra/random_forest/rf_split.hxx
|
<reponame>BSeppke/vigra<gh_stars>100-1000
/************************************************************************/
/* */
/* Copyright 2008-2009 by <NAME> and <NAME> */
/* */
/* This file is part of the VIGRA computer vision library. */
/* The VIGRA Website is */
/* http://hci.iwr.uni-heidelberg.de/vigra/ */
/* Please direct questions, bug reports, and contributions to */
/* <EMAIL> or */
/* <EMAIL> */
/* */
/* Permission is hereby granted, free of charge, to any person */
/* obtaining a copy of this software and associated documentation */
/* files (the "Software"), to deal in the Software without */
/* restriction, including without limitation the rights to use, */
/* copy, modify, merge, publish, distribute, sublicense, and/or */
/* sell copies of the Software, and to permit persons to whom the */
/* Software is furnished to do so, subject to the following */
/* conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the */
/* Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES */
/* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND */
/* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT */
/* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, */
/* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING */
/* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR */
/* OTHER DEALINGS IN THE SOFTWARE. */
/* */
/************************************************************************/
#ifndef VIGRA_RANDOM_FOREST_SPLIT_HXX
#define VIGRA_RANDOM_FOREST_SPLIT_HXX
#include <algorithm>
#include <cstddef>
#include <map>
#include <numeric>
#include <math.h>
#include "../mathutil.hxx"
#include "../array_vector.hxx"
#include "../sized_int.hxx"
#include "../matrix.hxx"
#include "../random.hxx"
#include "../functorexpression.hxx"
#include "rf_nodeproxy.hxx"
//#include "rf_sampling.hxx"
#include "rf_region.hxx"
//#include "../hokashyap.hxx"
//#include "vigra/rf_helpers.hxx"
namespace vigra
{
// Deliberately incomplete type: declaring a variable of this type inside
// SplitBase::findBestSplit forces a compile-time error whenever a derived
// split functor forgets to implement findBestSplit.
class CompileTimeError;

namespace detail
{

/* Normalisation policy.  The generic version is a no-op (regression trees
 * keep the raw accumulated values). */
template<class Tag>
class Normalise
{
  public:
    template<class Iter>
    static void exec(Iter /*begin*/, Iter /*end*/)
    {}
};

/* Classification specialisation: rescale the histogram in [begin, end)
 * in place so its entries sum to one. */
template<>
class Normalise<ClassificationTag>
{
  public:
    template<class Iter>
    static void exec(Iter begin, Iter end)
    {
        double sum = std::accumulate(begin, end, 0.0);
        for(Iter it = begin; it != end; ++it)
            *it /= sum;
    }
};

} // namespace detail
/** Base Class for all SplitFunctors used with the \ref RandomForest class
    defines the interface used while learning a tree.
**/
template<class Tag>
class SplitBase
{
  public:
    typedef Tag RF_Tag;
    typedef DT_StackEntry<ArrayVectorView<Int32>::iterator>
        StackEntry_t;
    ProblemSpec<> ext_param_;           // problem metadata (column/class counts, class weights)
    NodeBase::T_Container_type t_data;  // topology storage backing node_
    NodeBase::P_Container_type p_data;  // parameter storage backing node_
    NodeBase node_;                     // node produced by findBestSplit()/makeTerminalNode()
    /** stores the external problem parameters and seeds the topology
        data with the column and class counts.
    **/
    template<class T>
    void set_external_parameters(ProblemSpec<T> const & in)
    {
        ext_param_ = in;
        t_data.push_back(in.column_count_);
        t_data.push_back(in.class_count_);
    }
    /** returns the DecisionTree Node created by
        \ref SplitBase::findBestSplit() or \ref SplitBase::makeTerminalNode().
    **/
    NodeBase & createNode()
    {
        return node_;
    }
    int classCount() const
    {
        return int(t_data[1]);
    }
    int featureCount() const
    {
        return int(t_data[0]);
    }
    /** resets internal data. Should always be called before
        calling findBestSplit or makeTerminalNode
    **/
    void reset()
    {
        t_data.resize(2);
        p_data.resize(0);
    }
    /** findBestSplit has to be re-implemented in derived split functor.
        The default implementation only ensures that a compile-time error is
        issued if no such method was defined.
    **/
    template<class T, class C, class T2, class C2, class Region, class Random>
    int findBestSplit(MultiArrayView<2, T, C> /*features*/,
                      MultiArrayView<2, T2, C2> /*labels*/,
                      Region /*region*/,
                      ArrayVector<Region> /*childs*/,
                      Random /*randint*/)
    {
#ifndef __clang__
        // FIXME: This compile-time checking trick does not work for clang.
        CompileTimeError SplitFunctor__findBestSplit_member_was_not_defined;
#endif
        return 0;
    }
    /** Default action for creating a terminal Node.
        sets the Class probability of the remaining region according to
        the class histogram
    **/
    template<class T, class C, class T2,class C2, class Region, class Random>
    int makeTerminalNode(MultiArrayView<2, T, C> /* features */,
                         MultiArrayView<2, T2, C2> /* labels */,
                         Region & region,
                         Random /* randint */)
    {
        Node<e_ConstProbNode> ret(t_data, p_data);
        node_ = ret;
        // Use the raw class counts unless one weight per class was supplied.
        if(ext_param_.class_weights_.size() != region.classCounts().size())
        {
            std::copy(region.classCounts().begin(),
                      region.classCounts().end(),
                      ret.prob_begin());
        }
        else
        {
            std::transform(region.classCounts().begin(),
                           region.classCounts().end(),
                           ext_param_.class_weights_.begin(),
                           ret.prob_begin(), std::multiplies<double>());
        }
        // For classification, rescale the histogram into a probability
        // distribution; the regression specialisation is a no-op.
        detail::Normalise<RF_Tag>::exec(ret.prob_begin(), ret.prob_end());
//        std::copy(ret.prob_begin(), ret.prob_end(), std::ostream_iterator<double>(std::cerr, ", " ));
//        std::cerr << std::endl;
        ret.weights() = region.size();
        return e_ConstProbNode;
    }
};
/** Functor to sort the indices of a feature Matrix by a certain dimension
**/
template<class DataMatrix>
class SortSamplesByDimensions
{
DataMatrix const & data_;
MultiArrayIndex sortColumn_;
double thresVal_;
public:
SortSamplesByDimensions(DataMatrix const & data,
MultiArrayIndex sortColumn,
double thresVal = 0.0)
: data_(data),
sortColumn_(sortColumn),
thresVal_(thresVal)
{}
void setColumn(MultiArrayIndex sortColumn)
{
sortColumn_ = sortColumn;
}
void setThreshold(double value)
{
thresVal_ = value;
}
bool operator()(MultiArrayIndex l, MultiArrayIndex r) const
{
return data_(l, sortColumn_) < data_(r, sortColumn_);
}
bool operator()(MultiArrayIndex l) const
{
return data_(l, sortColumn_) < thresVal_;
}
};
template<class DataMatrix>
class DimensionNotEqual
{
DataMatrix const & data_;
MultiArrayIndex sortColumn_;
public:
DimensionNotEqual(DataMatrix const & data,
MultiArrayIndex sortColumn)
: data_(data),
sortColumn_(sortColumn)
{}
void setColumn(MultiArrayIndex sortColumn)
{
sortColumn_ = sortColumn;
}
bool operator()(MultiArrayIndex l, MultiArrayIndex r) const
{
return data_(l, sortColumn_) != data_(r, sortColumn_);
}
};
template<class DataMatrix>
class SortSamplesByHyperplane
{
DataMatrix const & data_;
Node<i_HyperplaneNode> const & node_;
public:
SortSamplesByHyperplane(DataMatrix const & data,
Node<i_HyperplaneNode> const & node)
:
data_(data),
node_(node)
{}
/** calculate the distance of a sample point to a hyperplane
*/
double operator[](MultiArrayIndex l) const
{
double result_l = -1 * node_.intercept();
for(int ii = 0; ii < node_.columns_size(); ++ii)
{
result_l += rowVector(data_, l)[node_.columns_begin()[ii]]
* node_.weights()[ii];
}
return result_l;
}
bool operator()(MultiArrayIndex l, MultiArrayIndex r) const
{
return (*this)[l] < (*this)[r];
}
};
/** Accumulates a class histogram from indices into a label array.
 *  usage:
 *    MultiArrayView<2, T2, C2> labels = makeSomeLabels()
 *    ArrayVector<int> hist(numberOfLabels(labels), 0);
 *    RandomForestClassCounter<T2, C2, ArrayVector> counter(labels, hist);
 *
 *    Container<int> indices = getSomeIndices()
 *    std::for_each(indices, counter);
 */
template <class DataSource, class CountArray>
class RandomForestClassCounter
{
    DataSource const & source_;     // per-sample class labels
    CountArray       & histogram_;  // bucket per class, shared with caller
  public:
    RandomForestClassCounter(DataSource const & labels,
                             CountArray & counts)
    :
        source_(labels),
        histogram_(counts)
    {
        reset();
    }

    /** zero all histogram buckets */
    void reset()
    {
        histogram_.init(0);
    }

    /** bump the bucket belonging to the class of sample l */
    void operator()(MultiArrayIndex l) const
    {
        histogram_[source_[l]] += 1;
    }
};
/** Functor To Calculate the Best possible Split Based on the Gini Index
given Labels and Features along a given Axis
*/
namespace detail
{

/* Constant pseudo-array: operator[] yields N for every index.  Serves as
 * the uniform default class-weight vector for the impurity criteria. */
template<int N>
class ConstArr
{
  public:
    double operator[](size_t) const
    {
        return (double)N;
    }
};

} // namespace detail

/** Functor computing the (optionally class-weighted) entropy impurity of a
 *  class histogram.
 *
 *  Fix: empty classes now contribute nothing (the usual 0*log(0) := 0
 *  convention); the original expression evaluated 0*log(0) and produced
 *  NaN whenever one class count was zero.
 */
class EntropyCriterion
{
  public:
    /** weighted entropy impurity of hist, scaled by total */
    template<class Array, class Array2>
    double operator() (Array const & hist,
                       Array2 const & weights,
                       double total = 1.0) const
    {
        return impurity(hist, weights, total);
    }
    /** unweighted entropy impurity of hist, scaled by total */
    template<class Array>
    double operator()(Array const & hist, double total = 1.0) const
    {
        return impurity(hist, total);
    }
    /** static unweighted version: every class weight is 1 */
    template<class Array>
    static double impurity(Array const & hist, double total)
    {
        return impurity(hist, detail::ConstArr<1>(), total);
    }
    /** static weighted version:
        returns total * sum_i(-w_i * p_i * log(p_i)) with p_i = hist[i]/total.
        The former unrolled two-class branch was equivalent to this loop. */
    template<class Array, class Array2>
    static double impurity (Array const & hist,
                            Array2 const & weights,
                            double total)
    {
        int class_count = hist.size();
        double entropy = 0.0;
        for(int ii = 0; ii < class_count; ++ii)
        {
            double pii = hist[ii]/total;
            if (pii > 0.0)   // skip empty classes: 0*log(0) := 0
                entropy -= weights[ii]*(pii*std::log(pii));
        }
        return total * entropy;
    }
};
/** Functor computing the (optionally class-weighted) Gini impurity of a
 *  class histogram. */
class GiniCriterion
{
  public:
    /** weighted Gini impurity of hist given per-class weights */
    template<class Array, class Array2>
    double operator() (Array const & hist,
                       Array2 const & weights,
                       double total = 1.0) const
    {
        return impurity(hist, weights, total);
    }

    /** unweighted Gini impurity of hist */
    template<class Array>
    double operator()(Array const & hist, double total = 1.0) const
    {
        return impurity(hist, total);
    }

    /** static unweighted version: every class weight is 1 */
    template<class Array>
    static double impurity(Array const & hist, double total)
    {
        return impurity(hist, detail::ConstArr<1>(), total);
    }

    /** static weighted version */
    template<class Array, class Array2>
    static double impurity (Array const & hist,
                            Array2 const & weights,
                            double total)
    {
        int const n_classes = hist.size();
        double result = 0.0;
        if(n_classes == 2)
        {
            /* two-class shortcut: w0*w1 * h0*h1 / total */
            double w = weights[0] * weights[1];
            result = w * (hist[0] * hist[1] / total);
        }
        else
        {
            for(int k = 0; k < n_classes; ++k)
            {
                double w = weights[k];
                result += w*( hist[k]*( 1.0 - w * hist[k]/total ) );
            }
        }
        return result;
    }
};
/** Loss functor maintaining a class histogram over a sample set and
 *  evaluating an impurity criterion (Gini by default) on it.  During split
 *  search, samples move between the two sides via the increment/decrement
 *  calls; each update returns the impurity of the new histogram.
 */
template <class DataSource, class Impurity= GiniCriterion>
class ImpurityLoss
{
    DataSource const & labels_;                // per-sample class labels
    ArrayVector<double> counts_;               // running class histogram
    ArrayVector<double> const class_weights_;  // per-class weights from the problem spec
    double total_counts_;                      // sum of counts_
    Impurity impurity_;                        // criterion evaluated on the histogram
  public:
    template<class T>
    ImpurityLoss(DataSource const & labels,
                 ProblemSpec<T> const & ext_)
    : labels_(labels),
      counts_(ext_.class_count_, 0.0),
      class_weights_(ext_.class_weights_),
      total_counts_(0.0)
    {}
    /** clear the histogram and the running total */
    void reset()
    {
        counts_.init(0);
        total_counts_ = 0.0;
    }
    /** add a precomputed histogram; returns the impurity afterwards */
    template<class Counts>
    double increment_histogram(Counts const & counts)
    {
        std::transform(counts.begin(), counts.end(),
                       counts_.begin(), counts_.begin(),
                       std::plus<double>());
        total_counts_ = std::accumulate( counts_.begin(),
                                         counts_.end(),
                                         0.0);
        return impurity_(counts_, class_weights_, total_counts_);
    }
    /** subtract a precomputed histogram; returns the impurity afterwards */
    template<class Counts>
    double decrement_histogram(Counts const & counts)
    {
        std::transform(counts.begin(), counts.end(),
                       counts_.begin(), counts_.begin(),
                       std::minus<double>());
        total_counts_ = std::accumulate( counts_.begin(),
                                         counts_.end(),
                                         0.0);
        return impurity_(counts_, class_weights_, total_counts_);
    }
    /** add the samples referenced by indices in [begin, end) */
    template<class Iter>
    double increment(Iter begin, Iter end)
    {
        for(Iter iter = begin; iter != end; ++iter)
        {
            counts_[labels_(*iter, 0)] +=1.0;
            total_counts_ +=1.0;
        }
        return impurity_(counts_, class_weights_, total_counts_);
    }
    /** remove the samples referenced by indices in [begin, end) */
    template<class Iter>
    double decrement(Iter const & begin, Iter const & end)
    {
        for(Iter iter = begin; iter != end; ++iter)
        {
            counts_[labels_(*iter,0)] -=1.0;
            total_counts_ -=1.0;
        }
        return impurity_(counts_, class_weights_, total_counts_);
    }
    /** initialise from an already-computed response histogram; the sample
        range itself is not inspected */
    template<class Iter, class Resp_t>
    double init (Iter /*begin*/, Iter /*end*/, Resp_t resp)
    {
        reset();
        std::copy(resp.begin(), resp.end(), counts_.begin());
        total_counts_ = std::accumulate(counts_.begin(), counts_.end(), 0.0);
        return impurity_(counts_,class_weights_, total_counts_);
    }
    /** current class histogram */
    ArrayVector<double> const & response()
    {
        return counts_;
    }
};
/** Incremental mean/variance accumulator (Welford-style update) used for
 *  regression split search.  increment() returns the per-dimension sum of
 *  squared deviations summed over all response dimensions (NOT divided by
 *  the sample count).
 */
template <class DataSource>
class RegressionForestCounter
{
  public:
    typedef MultiArrayShape<2>::type Shp;
    DataSource const & labels_;      // response matrix (samples x response dims)
    ArrayVector <double> mean_;      // running mean per response dimension
    ArrayVector <double> variance_;  // running sum of squared deviations
    ArrayVector <double> tmp_;       // scratch: deviation of the current sample
    size_t count_;                   // number of accumulated samples
    int* end_;                       // NOTE(review): appears unused in this class
    template<class T>
    RegressionForestCounter(DataSource const & labels,
                            ProblemSpec<T> const & ext_)
    :
        labels_(labels),
        mean_(ext_.response_size_, 0.0),
        variance_(ext_.response_size_, 0.0),
        tmp_(ext_.response_size_),
        count_(0)
    {}
    /** add samples [begin, end) via Welford's update; returns the total
        sum of squared deviations */
    template<class Iter>
    double increment (Iter begin, Iter end)
    {
        for(Iter iter = begin; iter != end; ++iter)
        {
            ++count_;
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                tmp_[ii] = labels_(*iter, ii) - mean_[ii];
            double f = 1.0 / count_,
                   f1 = 1.0 - f;
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                mean_[ii] += f*tmp_[ii];
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                variance_[ii] += f1*sq(tmp_[ii]);
        }
        double res = std::accumulate(variance_.begin(),
                                     variance_.end(),
                                     0.0,
                                     std::plus<double>());
        //std::cerr << res << " ) = ";
        return res;
    }
    /* NOTE(review): marked BROKEN by the original author.  After reducing
     * count_, it recomputes mean/variance from the range [end, end+count_),
     * i.e. it assumes the samples that remain are exactly those directly
     * following the removed range -- only valid for the specific sweep
     * pattern of the split search.  Confirm before reusing elsewhere. */
    template<class Iter> //This is BROKEN
    double decrement (Iter begin, Iter end)
    {
        for(Iter iter = begin; iter != end; ++iter)
        {
            --count_;
        }
        begin = end;
        end = end + count_;
        for(unsigned int ii = 0; ii < mean_.size(); ++ii)
        {
            mean_[ii] = 0;
            for(Iter iter = begin; iter != end; ++iter)
            {
                mean_[ii] += labels_(*iter, ii);
            }
            mean_[ii] /= count_;
            variance_[ii] = 0;
            for(Iter iter = begin; iter != end; ++iter)
            {
                variance_[ii] += (labels_(*iter, ii) - mean_[ii])*(labels_(*iter, ii) - mean_[ii]);
            }
        }
        double res = std::accumulate(variance_.begin(),
                                     variance_.end(),
                                     0.0,
                                     std::plus<double>());
        //std::cerr << res << " ) = ";
        return res;
    }
    /** reset and accumulate [begin, end); the class histogram argument is
        not needed for regression and is ignored */
    template<class Iter, class Resp_t>
    double init (Iter begin, Iter end, Resp_t /*resp*/)
    {
        reset();
        return this->increment(begin, end);
    }
    /** current per-dimension mean of the accumulated samples */
    ArrayVector<double> const & response()
    {
        return mean_;
    }
    void reset()
    {
        mean_.init(0.0);
        variance_.init(0.0);
        count_ = 0;
    }
};
/** Incremental mean/variance accumulator (Welford-style update) used for
 *  regression split search.  Unlike RegressionForestCounter, the returned
 *  value is normalised by (count - 1), i.e. an unbiased variance estimate.
 *
 *  NOTE(review): decrement() was already marked broken by the original
 *  author; the reverse update can drift numerically -- confirm before
 *  relying on it outside the split-search sweep.
 */
template <class DataSource>
class RegressionForestCounter2
{
  public:
    typedef MultiArrayShape<2>::type Shp;
    DataSource const & labels_;      // response matrix (samples x response dims)
    ArrayVector <double> mean_;      // running mean per response dimension
    ArrayVector <double> variance_;  // running sum of squared deviations
    ArrayVector <double> tmp_;       // scratch: deviation of the current sample
    size_t count_;                   // number of accumulated samples
    template<class T>
    RegressionForestCounter2(DataSource const & labels,
                             ProblemSpec<T> const & ext_)
    :
        labels_(labels),
        mean_(ext_.response_size_, 0.0),
        variance_(ext_.response_size_, 0.0),
        tmp_(ext_.response_size_),
        count_(0)
    {}
    /** add samples [begin, end); returns the summed per-dimension variance
        estimate (sum of squared deviations / (count-1)) */
    template<class Iter>
    double increment (Iter begin, Iter end)
    {
        for(Iter iter = begin; iter != end; ++iter)
        {
            ++count_;
            /* unsigned loop indices: mean_.size() is unsigned (was int) */
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                tmp_[ii] = labels_(*iter, ii) - mean_[ii];
            double f = 1.0 / count_,
                   f1 = 1.0 - f;
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                mean_[ii] += f*tmp_[ii];
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                variance_[ii] += f1*sq(tmp_[ii]);
        }
        double res = std::accumulate(variance_.begin(),
                                     variance_.end(),
                                     0.0,
                                     std::plus<double>())
                     /((count_ == 1)? 1:(count_ -1));
        return res;
    }
    /** remove samples [begin, end) by reversing the Welford update
        (marked BROKEN by the original author -- see class comment) */
    template<class Iter> //This is BROKEN
    double decrement (Iter begin, Iter end)
    {
        for(Iter iter = begin; iter != end; ++iter)
        {
            double f = 1.0 / count_,
                   f1 = 1.0 - f;
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                mean_[ii] = (mean_[ii] - f*labels_(*iter,ii))/(1-f);
            for(unsigned int ii = 0; ii < mean_.size(); ++ii)
                variance_[ii] -= f1*sq(labels_(*iter,ii) - mean_[ii]);
            --count_;
        }
        double res = std::accumulate(variance_.begin(),
                                     variance_.end(),
                                     0.0,
                                     std::plus<double>())
                     /((count_ == 1)? 1:(count_ -1));
        return res;
    }
    /** reset and accumulate [begin, end); the class histogram argument is
        not needed for regression and is ignored.
        Fix: the original forwarded resp to increment(), but increment has
        no three-argument overload, so any instantiation of init() failed
        to compile. */
    template<class Iter, class Resp_t>
    double init (Iter begin, Iter end, Resp_t /*resp*/)
    {
        reset();
        return this->increment(begin, end);
    }
    /** current per-dimension mean of the accumulated samples */
    ArrayVector<double> const & response()
    {
        return mean_;
    }
    void reset()
    {
        mean_.init(0.0);
        variance_.init(0.0);
        count_ = 0;
    }
};
// Maps a split-criterion tag to the loss accumulator type implementing
// it.  The primary template is only declared; every supported criterion
// gets an explicit specialization below.
template<class Tag, class Datatyp>
struct LossTraits;

/** Tag selecting least-squares loss (regression splits). */
struct LSQLoss
{};

// Gini impurity -> class-histogram based impurity loss.
template<class Datatype>
struct LossTraits<GiniCriterion, Datatype>
{
    typedef ImpurityLoss<Datatype, GiniCriterion> type;
};

// Entropy impurity -> class-histogram based impurity loss.
template<class Datatype>
struct LossTraits<EntropyCriterion, Datatype>
{
    typedef ImpurityLoss<Datatype, EntropyCriterion> type;
};

// Least squares -> incremental mean/variance counter.
template<class Datatype>
struct LossTraits<LSQLoss, Datatype>
{
    typedef RegressionForestCounter<Datatype> type;
};
/** Given a column, choose a split that minimizes some loss
 *  (the loss is selected via LineSearchLossTag, see LossTraits).
 */
template<class LineSearchLossTag>
class BestGiniOfColumn
{
public:
    ArrayVector<double> class_weights_;        // per-class weights (classification)
    ArrayVector<double> bestCurrentCounts[2];  // responses of best left/right regions
    double min_gini_;                          // smallest loss found so far
    std::ptrdiff_t min_index_;                 // split position within the sorted range
    double min_threshold_;                     // feature threshold of the best split
    ProblemSpec<> ext_param_;                  // problem metadata (class count etc.)

    BestGiniOfColumn()
    {}

    template<class T>
    BestGiniOfColumn(ProblemSpec<T> const & ext)
    :
        class_weights_(ext.class_weights_),
        ext_param_(ext)
    {
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }
    template<class T>
    void set_external_parameters(ProblemSpec<T> const & ext)
    {
        class_weights_ = ext.class_weights_;
        ext_param_ = ext;
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }

    /** calculate the best gini split along a Feature Column
     * \param column  the feature vector - has to support the [] operator
     * \param labels  the label vector
     * \param begin
     * \param end     (in and out)
     *                begin and end iterators to the indices of the
     *                samples in the current region.
     *                the range begin - end is sorted by the column supplied
     *                during function execution.
     * \param region_response
     *                class histogram of the range.
     *
     *  precondition: begin, end valid range,
     *                class_counts positive integer valued array with the
     *                class counts in the current range.
     *                labels.size() >= max(begin, end);
     *  postcondition:
     *                begin, end sorted by column given.
     *                min_gini_ contains the minimum gini found or
     *                NumericTraits<double>::max if no split was found.
     *                min_index_ contains the splitting index in the range
     *                or invalid data if no split was found.
     *                bestCurrentCounts[0] and [1] contain the
     *                class histograms of the left and right regions.
     */
    template< class DataSourceF_t,
              class DataSource_t,
              class I_Iter,
              class Array>
    void operator()(DataSourceF_t const & column,
                    DataSource_t const & labels,
                    I_Iter & begin,
                    I_Iter & end,
                    Array const & region_response)
    {
        // sort sample indices by their value in this feature column
        std::sort(begin, end,
                  SortSamplesByDimensions<DataSourceF_t>(column, 0));
        typedef typename
            LossTraits<LineSearchLossTag, DataSource_t>::type LineSearchLoss;
        LineSearchLoss left(labels, ext_param_); //initialize left and right region
        LineSearchLoss right(labels, ext_param_);

        // start with everything in the right region
        min_gini_ = right.init(begin, end, region_response);
        // NOTE(review): *begin is the first sample *index*, not a feature
        // value -- this is only a placeholder until a split is found
        // (min_index_ == 0 signals "no split yet").
        min_threshold_ = *begin;
        min_index_ = 0;  //the starting point where to split
        DimensionNotEqual<DataSourceF_t> comp(column, 0);

        I_Iter iter = begin;
        // only positions where the feature value changes are candidates
        I_Iter next = std::adjacent_find(iter, end, comp);
        //std::cerr << std::distance(begin, end) << std::endl;
        while( next != end)
        {
            // move the samples [iter, next] from the right region to the
            // left one and evaluate the combined loss of both regions
            double lr = right.decrement(iter, next + 1);
            double ll = left.increment(iter , next + 1);
            double loss = lr +ll;
            //std::cerr <<lr << " + "<< ll << " " << loss << " ";
#ifdef CLASSIFIER_TEST
            if(loss < min_gini_ && !closeAtTolerance(loss, min_gini_))
#else
            if(loss < min_gini_ )
#endif
            {
                // remember the best split seen so far
                bestCurrentCounts[0] = left.response();
                bestCurrentCounts[1] = right.response();
#ifdef CLASSIFIER_TEST
                min_gini_ = loss < min_gini_? loss : min_gini_;
#else
                min_gini_ = loss;
#endif
                min_index_ = next - begin +1 ;
                // threshold halfway between last left and first right value
                min_threshold_ = (double(column(*next,0)) + double(column(*(next +1), 0)))/2.0;
            }
            iter = next +1 ;
            next = std::adjacent_find(iter, end, comp);
        }
        //std::cerr << std::endl << " 000 " << std::endl;
        //int in;
        //std::cin >> in;
    }

    /** Loss of the whole (unsplit) region -- used by callers to decide
     *  whether any split improves on not splitting at all. */
    template<class DataSource_t, class Iter, class Array>
    double loss_of_region(DataSource_t const & labels,
                          Iter & begin,
                          Iter & end,
                          Array const & region_response) const
    {
        typedef typename
            LossTraits<LineSearchLossTag, DataSource_t>::type LineSearchLoss;
        LineSearchLoss region_loss(labels, ext_param_);
        return
            region_loss.init(begin, end, region_response);
    }
};
namespace detail
{
    // Compile-time dispatch: bring cached per-region data up to date
    // before a split is evaluated.  The generic version (used for
    // regression) has nothing to refresh.
    template<class T>
    struct Correction
    {
        template<class Region, class LabelT>
        static void exec(Region & /*in*/, LabelT & /*labels*/)
        {}
    };

    // Classification: recompute the region's class histogram whenever it
    // is stale, i.e. its counts do not add up to the region size.
    template<>
    struct Correction<ClassificationTag>
    {
        template<class Region, class LabelT>
        static void exec(Region & region, LabelT & labels)
        {
            if(std::accumulate(region.classCounts().begin(),
                               region.classCounts().end(), 0.0) != region.size())
            {
                RandomForestClassCounter< LabelT,
                                          ArrayVector<double> >
                    counter(labels, region.classCounts());
                std::for_each( region.begin(), region.end(), counter);
                region.classCountsIsValid = true;
            }
        }
    };
}
/** Chooses mtry columns and applies ColumnDecisionFunctor to each of the
 *  columns. Then chooses the column that is best.
 */
template<class ColumnDecisionFunctor, class Tag = ClassificationTag>
class ThresholdSplit: public SplitBase<Tag>
{
  public:
    typedef SplitBase<Tag> SB;

    // shuffled feature-column indices; the first actual_mtry_ are tried
    ArrayVector<Int32> splitColumns;
    ColumnDecisionFunctor bgfunc;               // per-column split search
    double region_gini_;                        // loss of the unsplit region
    ArrayVector<double> min_gini_;              // best loss found per column
    ArrayVector<std::ptrdiff_t> min_indices_;   // split position per column
    ArrayVector<double> min_thresholds_;        // threshold per column
    int bestSplitIndex;                         // index of the winning column entry

    /** Loss of the best split found by the last findBestSplit() call. */
    double minGini() const
    {
        return min_gini_[bestSplitIndex];
    }
    /** Feature column of the best split. */
    int bestSplitColumn() const
    {
        return splitColumns[bestSplitIndex];
    }
    /** Threshold value of the best split. */
    double bestSplitThreshold() const
    {
        return min_thresholds_[bestSplitIndex];
    }

    template<class T>
    void set_external_parameters(ProblemSpec<T> const & in)
    {
        SB::set_external_parameters(in);
        bgfunc.set_external_parameters( SB::ext_param_);
        int featureCount_ = SB::ext_param_.column_count_;
        splitColumns.resize(featureCount_);
        for(int k=0; k<featureCount_; ++k)
            splitColumns[k] = k;
        min_gini_.resize(featureCount_);
        min_indices_.resize(featureCount_);
        min_thresholds_.resize(featureCount_);
    }

    /** Find the best axis-parallel split of the region, partition the
     *  region's sample indices accordingly, and fill the two child
     *  regions.  Returns the node type created (i_ThresholdNode), or the
     *  result of makeTerminalNode() when the region is pure or no
     *  improving split exists. */
    template<class T, class C, class T2, class C2, class Region, class Random>
    int findBestSplit(MultiArrayView<2, T, C> features,
                      MultiArrayView<2, T2, C2> labels,
                      Region & region,
                      ArrayVector<Region>& childRegions,
                      Random & randint)
    {
        typedef typename Region::IndexIterator IndexIterator;
        if(region.size() == 0)
        {
            std::cerr << "SplitFunctor::findBestSplit(): stackentry with 0 examples encountered\n"
                         "continuing learning process....";
        }
        // calculate things that haven't been calculated yet.
        detail::Correction<Tag>::exec(region, labels);

        // Is the region pure already?
        region_gini_ = bgfunc.loss_of_region(labels,
                                             region.begin(),
                                             region.end(),
                                             region.classCounts());
        if(region_gini_ <= SB::ext_param_.precision_)
            return this->makeTerminalNode(features, labels, region, randint);

        // select columns to be tried (partial Fisher-Yates shuffle of the
        // first actual_mtry_ entries).
        for(int ii = 0; ii < SB::ext_param_.actual_mtry_; ++ii)
            std::swap(splitColumns[ii],
                      splitColumns[ii+ randint(features.shape(1) - ii)]);

        // find the best gini index.  num2try starts at the full column
        // count and drops to actual_mtry_ once an improving split is
        // found, so extra columns are only scanned while nothing improves
        // on the unsplit region.
        bestSplitIndex = 0;
        double current_min_gini = region_gini_;
        int num2try = features.shape(1);
        for(int k=0; k<num2try; ++k)
        {
            //this functor does all the work
            bgfunc(columnVector(features, splitColumns[k]),
                   labels,
                   region.begin(), region.end(),
                   region.classCounts());
            min_gini_[k] = bgfunc.min_gini_;
            min_indices_[k] = bgfunc.min_index_;
            min_thresholds_[k] = bgfunc.min_threshold_;
#ifdef CLASSIFIER_TEST
            if( bgfunc.min_gini_ < current_min_gini
                && !closeAtTolerance(bgfunc.min_gini_, current_min_gini))
#else
            if(bgfunc.min_gini_ < current_min_gini)
#endif
            {
                current_min_gini = bgfunc.min_gini_;
                childRegions[0].classCounts() = bgfunc.bestCurrentCounts[0];
                childRegions[1].classCounts() = bgfunc.bestCurrentCounts[1];
                childRegions[0].classCountsIsValid = true;
                childRegions[1].classCountsIsValid = true;

                bestSplitIndex = k;
                num2try = SB::ext_param_.actual_mtry_;
            }
        }
        //std::cerr << current_min_gini << "curr " << region_gini_ << std::endl;
        // did not find any suitable split
        // FIXME: this is wrong: sometimes we must execute bad splits to make progress,
        // especially near the root.
        if(closeAtTolerance(current_min_gini, region_gini_))
            return this->makeTerminalNode(features, labels, region, randint);

        //create a Node for output
        Node<i_ThresholdNode> node(SB::t_data, SB::p_data);
        SB::node_ = node;
        node.threshold() = min_thresholds_[bestSplitIndex];
        node.column() = splitColumns[bestSplitIndex];

        // partition the range according to the best dimension
        SortSamplesByDimensions<MultiArrayView<2, T, C> >
            sorter(features, node.column(), node.threshold());
        IndexIterator bestSplit =
            std::partition(region.begin(), region.end(), sorter);
        // Save the ranges of the child stack entries.
        childRegions[0].setRange( region.begin() , bestSplit );
        childRegions[0].rule = region.rule;
        childRegions[0].rule.push_back(std::make_pair(1, 1.0));
        childRegions[1].setRange( bestSplit , region.end() );
        childRegions[1].rule = region.rule;
        childRegions[1].rule.push_back(std::make_pair(1, 1.0));

        return i_ThresholdNode;
    }
};
// Ready-made split functors: Gini / entropy impurity for classification,
// least-squares loss for regression.
typedef ThresholdSplit<BestGiniOfColumn<GiniCriterion> > GiniSplit;
typedef ThresholdSplit<BestGiniOfColumn<EntropyCriterion> > EntropySplit;
typedef ThresholdSplit<BestGiniOfColumn<LSQLoss>, RegressionTag> RegressionSplit;
namespace rf
{
/** This namespace contains additional Splitfunctors.
*
* The Split functor classes are designed in a modular fashion because new split functors may
* share a lot of code with existing ones.
*
* ThresholdSplit implements the functionality needed for any split functor, that makes its
* decision via one dimensional axis-parallel cuts. The Template parameter defines how the split
* along one dimension is chosen.
*
* The BestGiniOfColumn class chooses a split that minimizes one of the Loss functions supplied
* (GiniCriterion for classification and LSQLoss for regression). Median chooses the Split in a
* kD tree fashion.
*
*
* Currently defined typedefs:
* \code
* typedef ThresholdSplit<BestGiniOfColumn<GiniCriterion> > GiniSplit;
* typedef ThresholdSplit<BestGiniOfColumn<LSQLoss>, RegressionTag> RegressionSplit;
* typedef ThresholdSplit<Median> MedianSplit;
* \endcode
*/
namespace split
{
/** This Functor chooses the median value of a column
 *  (kD-tree style split: the threshold is the column value at the middle
 *  of the sorted range, adjusted so both sides are non-empty).
 */
class Median
{
public:
    typedef GiniCriterion LineSearchLossTag;
    ArrayVector<double> class_weights_;        // per-class weights
    ArrayVector<double> bestCurrentCounts[2];  // class histograms of the two sides
    double min_gini_;                          // loss of the chosen split
    std::ptrdiff_t min_index_;                 // split position in the range
    double min_threshold_;                     // chosen threshold
    ProblemSpec<> ext_param_;                  // problem metadata

    Median()
    {}

    template<class T>
    Median(ProblemSpec<T> const & ext)
    :
        class_weights_(ext.class_weights_),
        ext_param_(ext)
    {
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }

    template<class T>
    void set_external_parameters(ProblemSpec<T> const & ext)
    {
        class_weights_ = ext.class_weights_;
        ext_param_ = ext;
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }

    /** Pick the median of the column as threshold and evaluate the
     *  resulting split; on return min_gini_/min_index_/min_threshold_
     *  describe it (min_gini_ stays at max() if no split is possible). */
    template< class DataSourceF_t,
              class DataSource_t,
              class I_Iter,
              class Array>
    void operator()(DataSourceF_t const & column,
                    DataSource_t const & labels,
                    I_Iter & begin,
                    I_Iter & end,
                    Array const & region_response)
    {
        std::sort(begin, end,
                  SortSamplesByDimensions<DataSourceF_t>(column, 0));
        typedef typename
            LossTraits<LineSearchLossTag, DataSource_t>::type LineSearchLoss;
        LineSearchLoss left(labels, ext_param_);
        LineSearchLoss right(labels, ext_param_);
        right.init(begin, end, region_response);

        min_gini_ = NumericTraits<double>::max();
        min_index_ = floor(double(end - begin)/2.0);   // median position
        min_threshold_ = column[*(begin + min_index_)];
        SortSamplesByDimensions<DataSourceF_t>
            sorter(column, 0, min_threshold_);
        I_Iter part = std::partition(begin, end, sorter);
        DimensionNotEqual<DataSourceF_t> comp(column, 0);
        if(part == begin)
        {
            // left side empty: move the boundary past the first value change
            part= std::adjacent_find(part, end, comp)+1;
        }
        if(part >= end)
        {
            // column is constant -- no split possible, min_gini_ stays max()
            return;
        }
        else
        {
            min_threshold_ = column[*part];
        }
        min_gini_ = right.decrement(begin, part)
                  + left.increment(begin , part);

        bestCurrentCounts[0] = left.response();
        bestCurrentCounts[1] = right.response();

        min_index_ = part - begin;
    }

    /** Loss of the unsplit region (same contract as BestGiniOfColumn). */
    template<class DataSource_t, class Iter, class Array>
    double loss_of_region(DataSource_t const & labels,
                          Iter & begin,
                          Iter & end,
                          Array const & region_response) const
    {
        typedef typename
            LossTraits<LineSearchLossTag, DataSource_t>::type LineSearchLoss;
        LineSearchLoss region_loss(labels, ext_param_);
        return
            region_loss.init(begin, end, region_response);
    }
};

// kD-tree style split functor built on the median column chooser.
typedef ThresholdSplit<Median> MedianSplit;
/** This Functor chooses a random value of a column as the split
 *  threshold (extremely-randomized-trees style).
 */
class RandomSplitOfColumn
{
public:
    typedef GiniCriterion LineSearchLossTag;
    ArrayVector<double> class_weights_;        // per-class weights
    ArrayVector<double> bestCurrentCounts[2];  // class histograms of the two sides
    double min_gini_;                          // loss of the chosen split
    std::ptrdiff_t min_index_;                 // split position in the range
    double min_threshold_;                     // chosen threshold
    ProblemSpec<> ext_param_;                  // problem metadata
    typedef RandomMT19937 Random_t;
    Random_t random;                           // RNG used to pick the split point

    RandomSplitOfColumn()
    {}

    template<class T>
    RandomSplitOfColumn(ProblemSpec<T> const & ext)
    :
        class_weights_(ext.class_weights_),
        ext_param_(ext),
        random(RandomSeed)
    {
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }

    /** Construct with an externally supplied RNG (copied). */
    template<class T>
    RandomSplitOfColumn(ProblemSpec<T> const & ext, Random_t & random_)
    :
        class_weights_(ext.class_weights_),
        ext_param_(ext),
        random(random_)
    {
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }

    template<class T>
    void set_external_parameters(ProblemSpec<T> const & ext)
    {
        class_weights_ = ext.class_weights_;
        ext_param_ = ext;
        bestCurrentCounts[0].resize(ext.class_count_);
        bestCurrentCounts[1].resize(ext.class_count_);
    }

    /** Pick a uniformly random sample's column value as threshold and
     *  evaluate the resulting split (same output contract as Median). */
    template< class DataSourceF_t,
              class DataSource_t,
              class I_Iter,
              class Array>
    void operator()(DataSourceF_t const & column,
                    DataSource_t const & labels,
                    I_Iter & begin,
                    I_Iter & end,
                    Array const & region_response)
    {
        std::sort(begin, end,
                  SortSamplesByDimensions<DataSourceF_t>(column, 0));
        typedef typename
            LossTraits<LineSearchLossTag, DataSource_t>::type LineSearchLoss;
        LineSearchLoss left(labels, ext_param_);
        LineSearchLoss right(labels, ext_param_);
        right.init(begin, end, region_response);

        min_gini_ = NumericTraits<double>::max();
        int tmp_pt = random.uniformInt(std::distance(begin, end));
        min_index_ = tmp_pt;
        min_threshold_ = column[*(begin + min_index_)];
        SortSamplesByDimensions<DataSourceF_t>
            sorter(column, 0, min_threshold_);
        I_Iter part = std::partition(begin, end, sorter);
        DimensionNotEqual<DataSourceF_t> comp(column, 0);
        if(part == begin)
        {
            // left side empty: move the boundary past the first value change
            part= std::adjacent_find(part, end, comp)+1;
        }
        if(part >= end)
        {
            // column is constant -- no split possible, min_gini_ stays max()
            return;
        }
        else
        {
            min_threshold_ = column[*part];
        }
        min_gini_ = right.decrement(begin, part)
                  + left.increment(begin , part);

        bestCurrentCounts[0] = left.response();
        bestCurrentCounts[1] = right.response();

        min_index_ = part - begin;
    }

    /** Loss of the unsplit region (same contract as BestGiniOfColumn). */
    template<class DataSource_t, class Iter, class Array>
    double loss_of_region(DataSource_t const & labels,
                          Iter & begin,
                          Iter & end,
                          Array const & region_response) const
    {
        typedef typename
            LossTraits<LineSearchLossTag, DataSource_t>::type LineSearchLoss;
        LineSearchLoss region_loss(labels, ext_param_);
        return
            region_loss.init(begin, end, region_response);
    }
};

// Extremely-randomized split functor built on the random column chooser.
typedef ThresholdSplit<RandomSplitOfColumn> RandomSplit;
}
}
} //namespace vigra
#endif // VIGRA_RANDOM_FOREST_SPLIT_HXX
|
raymondchen625/grafana
|
pkg/services/preference/prefimpl/store_test.go
|
<reponame>raymondchen625/grafana
//go:build integration
// +build integration
package prefimpl
import (
"context"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana/pkg/models"
pref "github.com/grafana/grafana/pkg/services/preference"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/stretchr/testify/require"
)
// TestPreferencesDataAccess exercises Get, List, Insert and Update of the
// preference store against a test database.
//
// NOTE(review): the subtests are order-dependent -- later t.Run blocks
// read rows inserted by earlier ones; the store is only re-initialised
// inside the "Update for a user ..." subtest.  Do not reorder or run
// them in isolation without re-checking the expected values.
func TestPreferencesDataAccess(t *testing.T) {
	ss := sqlstore.InitTestDB(t)
	prefStore := sqlStore{db: ss}
	// sample navbar JSON payload used by the Update subtest below
	orgNavbarPreferences := pref.NavbarPreference{
		SavedItems: []pref.NavLink{{
			ID:   "alerting",
			Text: "Alerting",
			Url:  "/alerting",
		}},
	}

	t.Run("Get with saved org and user home dashboard returns not found", func(t *testing.T) {
		// nothing inserted yet, so Get must report ErrPrefNotFound
		query := &pref.Preference{OrgID: 1, UserID: 1, TeamID: 2}
		prefs, err := prefStore.Get(context.Background(), query)
		require.EqualError(t, err, pref.ErrPrefNotFound.Error())
		require.Nil(t, prefs)
	})

	t.Run("Get with saved org and user home dashboard should return user home dashboard", func(t *testing.T) {
		_, err := prefStore.Insert(context.Background(),
			&pref.Preference{
				OrgID:           1,
				UserID:          1,
				HomeDashboardID: 4,
				TeamID:          2,
				Created:         time.Now(),
				Updated:         time.Now(),
			})
		require.NoError(t, err)

		query := &pref.Preference{OrgID: 1, UserID: 1, TeamID: 2}
		prefs, err := prefStore.Get(context.Background(), query)
		require.NoError(t, err)
		require.Equal(t, int64(4), prefs.HomeDashboardID)
	})

	t.Run("List with saved org and user home dashboard should return user home dashboard", func(t *testing.T) {
		// inserts a row for team 3, then lists with team 2: the match is
		// the (org 1, user 1, team 2) row inserted in the previous subtest
		_, err := prefStore.Insert(context.Background(),
			&pref.Preference{
				OrgID:           1,
				UserID:          1,
				TeamID:          3,
				HomeDashboardID: 1,
				Created:         time.Now(),
				Updated:         time.Now(),
			})
		require.NoError(t, err)
		query := &pref.Preference{OrgID: 1, UserID: 1, Teams: []int64{2}}
		prefs, err := prefStore.List(context.Background(), query)
		require.NoError(t, err)
		require.Equal(t, int64(4), prefs[0].HomeDashboardID)
	})

	t.Run("List with saved org and other user home dashboard should return org home dashboard", func(t *testing.T) {
		_, err := prefStore.Insert(context.Background(),
			&pref.Preference{
				OrgID:           1,
				UserID:          2,
				TeamID:          3,
				HomeDashboardID: 1,
				Created:         time.Now(),
				Updated:         time.Now(),
			})
		require.NoError(t, err)
		query := &pref.Preference{OrgID: 1, UserID: 1, Teams: []int64{3}}
		prefs, err := prefStore.List(context.Background(), query)
		require.NoError(t, err)
		require.Equal(t, int64(1), prefs[0].HomeDashboardID)
		require.Equal(t, int64(1), prefs[1].HomeDashboardID)
	})

	t.Run("List with saved org and teams home dashboard should return last team home dashboard", func(t *testing.T) {
		// relies on the rows accumulated by the three subtests above
		query := &pref.Preference{
			OrgID: 1, Teams: []int64{2, 3},
		}
		prefs, err := prefStore.List(context.Background(), query)
		require.NoError(t, err)
		require.Equal(t, int64(4), prefs[0].HomeDashboardID)
		require.Equal(t, int64(1), prefs[1].HomeDashboardID)
		require.Equal(t, int64(1), prefs[2].HomeDashboardID)
	})

	t.Run("List with saved org and other teams home dashboard should return org home dashboard", func(t *testing.T) {
		_, err := prefStore.Insert(context.Background(), &pref.Preference{OrgID: 1, HomeDashboardID: 1, Created: time.Now(), Updated: time.Now()})
		require.NoError(t, err)
		_, err = prefStore.Insert(context.Background(), &pref.Preference{OrgID: 1, TeamID: 2, HomeDashboardID: 2, Created: time.Now(), Updated: time.Now()})
		require.NoError(t, err)
		_, err = prefStore.Insert(context.Background(), &pref.Preference{OrgID: 1, TeamID: 3, HomeDashboardID: 3, Created: time.Now(), Updated: time.Now()})
		require.NoError(t, err)
		// querying without user/teams must fall back to the org-level row
		query := &pref.Preference{OrgID: 1}
		prefs, err := prefStore.List(context.Background(), query)
		require.NoError(t, err)
		require.Equal(t, int64(1), prefs[0].HomeDashboardID)
	})

	t.Run("Update for a user should only modify a single value", func(t *testing.T) {
		// fresh DB for this subtest -- the assertions below expect exactly
		// one row to exist
		ss := sqlstore.InitTestDB(t)
		prefStore := sqlStore{db: ss}
		id, err := prefStore.Insert(context.Background(), &pref.Preference{
			UserID:          models.SignedInUser{}.UserId, // zero-value user id
			Theme:           "dark",
			Timezone:        "browser",
			HomeDashboardID: 5,
			WeekStart:       "1",
			JSONData:        &pref.PreferenceJSONData{Navbar: orgNavbarPreferences},
			Created:         time.Now(),
			Updated:         time.Now(),
		})
		require.NoError(t, err)

		err = prefStore.Update(context.Background(), &pref.Preference{
			ID:              id,
			Theme:           "dark",
			HomeDashboardID: 5,
			Timezone:        "browser",
			WeekStart:       "1",
			Created:         time.Now(),
			Updated:         time.Now(),
			JSONData:        &pref.PreferenceJSONData{}, // clears the navbar payload
		})
		require.NoError(t, err)

		query := &pref.Preference{}
		prefs, err := prefStore.List(context.Background(), query)
		require.NoError(t, err)
		expected := &pref.Preference{
			ID:              prefs[0].ID,
			Version:         prefs[0].Version,
			HomeDashboardID: 5,
			Timezone:        "browser",
			WeekStart:       "1",
			Theme:           "dark",
			JSONData:        prefs[0].JSONData,
			Created:         prefs[0].Created,
			Updated:         prefs[0].Updated,
		}

		if diff := cmp.Diff(expected, prefs[0]); diff != "" {
			t.Fatalf("Result mismatch (-want +got):\n%s", diff)
		}
	})

	t.Run("insert preference that does not exist", func(t *testing.T) {
		_, err := prefStore.Insert(context.Background(),
			&pref.Preference{
				UserID:   models.SignedInUser{}.UserId,
				Created:  time.Now(),
				Updated:  time.Now(),
				JSONData: &pref.PreferenceJSONData{},
			})
		require.NoError(t, err)
	})
}
|
sammympie/scitools
|
lib/scitools/_numpyload.py
|
"""
Note:
This module stems from the days when there were three (almost) competing
Numerical Python implementations around and people wanted to be able
to switch between these implementations in their Python programs.
Nowadays, numpy is the dominating module, and the use of _numpyload and
numpytools is no longer particularly fruitful. For backward compatibility
of scitools, the two modules still exist.
Unified array computing interface
=================================
Numeric, numarray, and numpy can be viewed as three different
implementations of Numerical Python functionality. The present module
enables writing scripts that are independent of the particular choice
of Numeric, numarray, or numpy. That is, the idea is that any of these
modules can be replaced by one of the alternatives, and the script
should still work. This requires the script to only use the set of
instructions that are common to Numeric, numarray, and numpy.
One reason for wanting the flexibility is that the different
implementations may exhibit different computational efficiency in
different applications. It also makes it trivial to adopt new versions
of Numerical Python in old scripts.
Basic Usage
-----------
To achieve a script that makes transparent use of Numeric, numarray, and
numpy, one needs to do one of the following imports::
from scitools.numpytools import *
# or
import scitools.numpytools as N
Then one should never explicitly import Numeric, numarray, or numpy,
and explicitly use functions in these modules as this may cause
different array types to be mixed in the same application. Only call
the functions that were imported by the star or prefix functions by
the N symbol.
What Gets Imported?
-------------------
All symbols from either Numeric, numarray, or numpy are imported
into the global namespace of this numpytools module::
from Numeric import *
#or
from numarray import *
#or
from numpy import *
Also the modules for random arrays, linear algebra, Matlab functions,
and FFT are imported. One problem with switching between Numeric,
numarray, and numpy is the additional modules for random arrays, etc.,
have different names in the three packages. For example::
Numeric has LinearAlgebra
numarray has numarray.linear_algebra.LinearAlgebra2
numpy has numpy.linalg
The Numeric names are always available in addition to the native names.
For example, an import numpy.linalg is associated with a::
LinearAlgebra = numpy.linalg
Note that the MA module is not imported since it redefines
the repr function (array([1,2]) becomes [1,2] as for a list) if
the Numeric is used. The user must always explicitly import this package
if Numeric is used as basic array module.
Note that the numpytools module also makes some extensions of Numerical
Python available, see the section "Functionality of this module that
extends Numerical Python" (below).
What to use: Numeric, numarray, or numpy?
-----------------------------------------
The present module defines a global variable basic_NumPy holding
either "Numeric", "numarray", or "numpy", depending on which module
that was actually imported.
To determine whether Numeric, numarray, or numpy is to be imported,
the following procedure is applied:
1. The command line arguments are checked for a --numarray,
--Numeric, or --numpy option.
2. If the user has already imported Numeric, numarray, or numpy by an::
import Numeric
#or
import numarray
#or
import numpy
statement, the present module continues to import from the same
module (module in sys.modules is used to check whether it should
be Numeric, numarray, or numpy). If the user has imported more than
one of the three module alternatives, numpy is used.
3. The environment variable NUMPYARRAY is checked.
If this variable contains "numarray", "Numeric", or "numpy" the
corresponding module is imported.
If neither 1., 2., nor 3. determines the import, i.e., the user has not
explicitly indicated what to use, the new numpy is the default choice.
Some Functions for Unified Usage
--------------------------------
Some operations, like finding the maximum and minimum values in an array,
or controlling the output format when printing arrays, have different
syntax in the different Numerical Python implementations. The functions
below attempt to provide a uniform syntax to functionality with
different names in Numeric, numarray, and numpy:
- NumPyArray:
the type used in isinstance(a,NumPyArray) for
checking if a is a NumPy array
- arrmin, arrmax:
compute maximum and minimum of all array entries
(same as amin(a,None) and amax(a,None) in scipy)
- array_output_precision(n):
print arrays with n decimals
- NumPy_type:
returns the type of an array, i.e., "Numeric", "numarray",
or "numpy"
- NumPy_dtype:
returns the type of the data in an array, i.e., 'd', 'i', etc.
- fortran_storage:
transparent transform of an array to column major (Fortran) storage
that preserves the nature (Numeric, numarray, numpy) of the array
Some frequently used standard modules, such as sys and os, are
imported into the namespace of the present module.
"""
import sys, os
import collections
import collections.abc
import numbers
# ---------------------------------------------------------------------------
# The first task to accomplish in this module is to determine whether to
# use Numeric, numarray, or numpy.  The sources below are consulted in
# order; the first one that yields an answer wins.
# ---------------------------------------------------------------------------
basic_NumPy = None  # will later hold 'Numeric', 'numarray', or 'numpy'

# 1. check the command line (this code is similar to matplotlib.numerix):
if basic_NumPy is None:
    if hasattr(sys, 'argv'):  # Apache mod_python has no argv
        for _a in sys.argv:
            if _a in ("--Numeric", "--numeric", "--NUMERIC"):
                basic_NumPy = 'Numeric'
                break
            if _a in ("--Numarray", "--numarray", "--NUMARRAY"):
                basic_NumPy = 'numarray'
                break
            if _a in ("--NumPy", "--numpy", "--NUMPY"):
                basic_NumPy = 'numpy'
                break
        else:
            # runs when the loop finished without break; also guarantees
            # that _a is bound even when sys.argv is empty (possible in
            # embedded interpreters), so the del below cannot raise
            _a = None
        del _a  # don't pollute the global namespace

# 2. check if the user has already done an import Numeric, import numarray,
# or import numpy; continue with the module that was imported
# (numpy wins when more than one of them has been imported)
if basic_NumPy is None:
    if 'numpy' in sys.modules:
        basic_NumPy = 'numpy'
    elif 'numarray' in sys.modules:
        basic_NumPy = 'numarray'
    elif 'Numeric' in sys.modules:
        basic_NumPy = 'Numeric'

# 3. check the environment variable NUMPYARRAY:
if basic_NumPy is None:
    if 'NUMPYARRAY' in os.environ:
        if os.environ['NUMPYARRAY'] == 'numpy':
            basic_NumPy = 'numpy'
        elif os.environ['NUMPYARRAY'] == 'numarray':
            basic_NumPy = 'numarray'
        elif os.environ['NUMPYARRAY'] == 'Numeric':
            basic_NumPy = 'Numeric'

# 4. nothing decided the import -> default to numpy:
if basic_NumPy is None: basic_NumPy = 'numpy'

if basic_NumPy not in ('Numeric', 'numarray', 'numpy'):
    raise ImportError('cannot decide which Numerical Python '\
          'implementation to use (ended up with "%s")' % basic_NumPy)
#print 'from', basic_NumPy, 'import *'
# table of equivalent names of Numerical Python modules:
# (used to import modules under Numeric, numarray, or numpy name)
# Each entry is a triple (Numeric name, numarray name, numpy name);
# an empty string means that implementation has no counterpart module.
_NumPy_modules = (
    ('Numeric', 'numarray', 'numpy'),
    # umath and Precision are included as part of Numeric, numarray, numpy
    ('LinearAlgebra', 'numarray.linear_algebra.LinearAlgebra2',
     'numpy.linalg'),
    ('RandomArray', 'numarray.random_array.RandomArray2', 'numpy.random'),
    ('RNG', '', 'numpy.random'),
    ('FFT', 'numarray.fft', 'numpy.fft'),
    ('MLab', 'numarray.linear_algebra.mlab', 'numpy.oldnumeric.mlab'),
    ('MA', 'numarray.ma.MA', 'numpy.ma'),
    )
if basic_NumPy == 'numpy':
    # Import numpy and expose the legacy Numeric module names
    # (LinearAlgebra, RandomArray, ...) as aliases of numpy submodules.
    try:
        # fix backward compatibility with Numeric names:
        import numpy
        oldversion = (numpy.version.version[0] == '0')
        # NOTE(review): this parses only single-digit version components
        # (e.g. '1.26.x' yields minorversion 2) -- adequate for the
        # historical numpy releases this branch was written for.
        majorversion = int(numpy.version.version[0])
        minorversion = int(numpy.version.version[2])
        for _Numeric_name, _dummy1, _numpy_name in _NumPy_modules[1:]:
            if oldversion and (_Numeric_name in ['RNG', 'FFT']):
                n, module = _numpy_name.split('.')
                exec("from %s import %s as %s" %(n, module, _Numeric_name))
            elif oldversion and (_Numeric_name == 'MLab'):
                from numpy.lib import mlab as MLab
            elif (oldversion or (majorversion == 1 and minorversion < 1)) \
                and (_Numeric_name == 'MA'):
                import numpy.core.ma; MA = numpy.core.ma
            elif _numpy_name != '':
                exec('import %s; %s = %s' %
                     (_numpy_name, _Numeric_name, _numpy_name))
        del _Numeric_name, _dummy1, _numpy_name, _NumPy_modules
        from numpy import *
        if not oldversion:
            # get the old names too (NewAxis, Float, etc.):
            from numpy.oldnumeric import *
        del oldversion

        # define new names compatible with Numeric:
        LinearAlgebra.solve_linear_equations = linalg.solve
        LinearAlgebra.inverse = linalg.inv
        LinearAlgebra.determinant = linalg.det
        LinearAlgebra.eigenvalues = linalg.eigvals
        LinearAlgebra.eigenvectors = linalg.eig

    except ImportError as e:
        raise ImportError('%s\nnumpy import failed!\n'\
              'see doc of %s module for how to choose Numeric instead' % \
              (e, __name__))

    def array_output_precision(no_of_decimals):
        """Set no of decimals in printout of arrays."""
        # numpy's public API for controlling print precision (the old
        # arrayprint.set_precision helper is not part of numpy):
        set_printoptions(precision=no_of_decimals)

    def arrmax(a):
        """Compute the maximum of all the entries in a."""
        try:
            return a.max()
        except AttributeError:
            # not a NumPy array
            # (collections.Sequence was removed in Python 3.10; the ABC
            # lives in collections.abc)
            if isinstance(a, collections.abc.Sequence):
                return max(a)  # does not work for nested sequences
            elif isinstance(a, numbers.Number):
                return a
            else:
                raise TypeError('arrmax of %s not supported' % type(a))

    def arrmin(a):
        """Compute the minimum of all the entries in a."""
        try:
            return a.min()
        except AttributeError:
            # not a NumPy array
            if isinstance(a, collections.abc.Sequence):
                return min(a)  # does not work for nested sequences
            elif isinstance(a, numbers.Number):
                return a
            else:
                raise TypeError('arrmin of %s not supported' % type(a))

    # type used in isinstance(a, NumPyArray) checks
    NumPyArray = ndarray
if basic_NumPy == 'numarray':
try:
for _Numeric_name, _numarray_name, _dummy1 in _NumPy_modules[1:]:
if _numarray_name:
exec('import %s; %s = %s' %
(_numarray_name, _Numeric_name, _numarray_name))
# RNG is not supported, make an object that gives an error message:
class __Dummy:
def __getattr__(self, name):
raise ImportError('You have chosen the numarray package, '\
'but it does not have the functionality of the RNG module')
RNG = __Dummy()
del _Numeric_name, _numarray_name, _dummy1, __Dummy, _NumPy_modules
from numarray import *
except ImportError as e:
raise ImportError('%s\nnumarray import failed!\n'\
'see doc of %s module for how to choose Numeric instead' % \
(e, __name__))
def array_output_precision(no_of_decimals):
"""Set no of decimals in printout of arrays."""
arrayprint.set_precision(no_of_decimals)
def arrmax(a):
"""Compute the maximum of all the entries in a."""
try:
return a.max()
except AttributeError:
# not a NumPy array
if isinstance(a, collections.Sequence):
return max(a) # does not work for nested sequences
elif isinstance(a, numbers.Number):
return a
else:
raise TypeError('arrmax of %s not supported' % type(a))
def arrmin(a):
"""Compute the minimum of all the entries in a."""
try:
return a.min()
except AttributeError:
# not a NumPy array
if isinstance(a, collections.Sequence):
return min(a) # does not work for nested sequences
elif isinstance(a, numbers.Number):
return a
else:
raise TypeError('arrmin of %s not supported' % type(a))
    # Backend-agnostic alias for the array base class (numarray branch).
    NumPyArray = NumArray

if basic_NumPy == 'Numeric':
    try:
        # Import the classic Numeric companion modules (RNG, FFT, ...).
        for _Numeric_name, _dummy1, _dummy2 in _NumPy_modules[1:]:
            if _Numeric_name != 'MA':    # exclude MA, see comment above
                exec('import %s' % _Numeric_name)
        del _Numeric_name, _dummy1, _dummy2, _NumPy_modules

        from Numeric import *

        # the following is perhaps not a good idea;
        # Numeric.UserArray and numarray.NumArray have different
        # data attributes!
        from UserArray import UserArray as NumArray

        # define new numpy names:
        newaxis = NewAxis
def linspace(start, stop, num=50, endpoint=True, retstep=False):
return asarray(numpy.linspace(start, stop, num, endpoint, retstep))
        # hack if LinearAlgebra.eigenvalues hang (because of trouble
        # with gcc and Numeric and -ffloat-store flag):
        # NOTE(review): this workaround is disabled (_problems = False) and the
        # wrappers call numpy.linalg.eigenvalues/eigenvectors, which do not
        # exist in modern numpy (use eigvals/eig) -- dead code kept as-is.
        _problems = False
        if _problems:
            def numpy_eigenvalues(A):
                """
                Temporary wrapper for Numeric's LinearAlgebra.eigenvalues.
                Convert A to numpy, call numpy's eigenvalues,
                convert back to Numeric.
                """
                import numpy
                A = numpy.array(A)
                E = numpy.linalg.eigenvalues(A)
                import Numeric
                E = Numeric.array(E)
                return E

            def numpy_eigenvectors(A):
                """
                Temporary wrapper for Numeric's LinearAlgebra.eigenvectors.
                Convert A to numpy, call numpy's eigenvalues,
                convert back to Numeric.
                """
                import numpy
                A = numpy.array(A)
                E, V = numpy.linalg.eigenvectors(A)
                import Numeric
                E = Numeric.array(E)
                V = Numeric.array(V)
                return E, V

            LinearAlgebra.eigenvalues = numpy_eigenvalues
            LinearAlgebra.eigenvectors = numpy_eigenvectors
        del _problems
    except ImportError as e:
        raise ImportError('%s\nNumeric import failed!\n'\
                          'see doc of %s module for how to choose numarray instead' % \
                          (e, __name__))

    # fix of matrixmultiply bug in Numeric (according to <NAME>,
    # SciPy-dev mailing list, Sep 28, 2004:
    # http://www.scipy.net/pipermail/scipy-dev/2004-September/002267.html,
    # matrixmultiply is dot if not dotblas is used, otherwise dot is
    # imported from dotblas, and matrixmultiply becomes the unoptimized
    # version (Perez timed the difference to be 0.55 vs 122.6 on his
    # computer)):
    matrixmultiply = dot
    def array_output_precision(no_of_decimals):
        """Set no of decimals in printout of arrays."""
        # Classic Numeric reads sys.float_output_precision when formatting
        # floating-point output.
        sys.float_output_precision = no_of_decimals
def arrmax(a):
"""Compute the maximum of all the entries in a."""
# could set arrmax = amax in scipy if scipy is installed
try:
return max(a.flat) # use Python's list min
except AttributeError:
# not a NumPy array
if isinstance(a, collections.Sequence):
return max(a)
elif isinstance(a, numbers.Number):
return a
else:
raise TypeError('arrmax of %s not supported' % type(a))
def arrmin(a):
"""Compute the minimum of all the entries in a."""
# could set arrmin = amin in scipy if scipy is installed
try:
return min(a.flat)
except AttributeError:
# not a NumPy array
if isinstance(a, collections.Sequence):
return min(a)
elif isinstance(a, numbers.Number):
return a
else:
raise TypeError('arrmin of %s not supported' % type(a))
    # Backend-agnostic alias for the array base class (Numeric branch).
    NumPyArray = ArrayType

    # support numpy types:
    int_ = Int
    int0 = Int0
    int8 = Int8
    int16 = Int16
    int32 = Int32
    float_ = Float
    float32 = Float32
    float64 = Float64
    complex_ = Complex
    complex64 = Complex64

# Record which backend module and version is actually in use.
_N = __import__(basic_NumPy)
NumPy_version = _N.__version__
del _N

# Short forms:
fft = FFT
mlab = MLab
try:
    ma = MA
except NameError:
    # for Numeric we do not import MA since it affects output format
    pass
ra = RandomArray
la = LinearAlgebra
def NumPy_type(a):
    """
    Identify which array package generated ``a``.

    @param a: NumPy array (or list/tuple).
    @return: "Numeric", "numarray", or "numpy", depending on which
    module that was used to generate the a array;
    "list"/"tuple" for the corresponding builtin containers.
    Falls through (implicitly returning None) when ``a`` matches none
    of the known types.
    """
    # check basic_NumPy type first to avoid possible import errors
    types = {'Numeric': 'Numeric.ArrayType',
             'numarray': 'numarray.NumArray',
             'numpy': 'numpy.ndarray'}
    # Check for non NumPy types first
    if isinstance(a, tuple):
        return "tuple"
    elif isinstance(a, list):
        return "list"
    # NOTE(review): exec of an import inside a function does not reliably bind
    # a local name on Python 3, so the eval below may raise NameError -- verify.
    exec("import %s" % basic_NumPy) # Why isn't basic_NumPy imported?
    if isinstance(a, eval(types[basic_NumPy])):
        return basic_NumPy

    # not the main NumPy type, try the others:
    # NOTE(review): on systems without Numeric/numarray these imports raise
    # ImportError instead of returning None -- confirm intended behaviour.
    import numpy
    if isinstance(a, numpy.ndarray):
        return 'numpy'
    import Numeric
    if isinstance(a, Numeric.ArrayType):
        return 'Numeric'
    import numarray
    if isinstance(a, numarray.NumArray):
        return 'numarray'
def NumPy_dtype(a):
    """
    @param a: NumPy array
    @return: array data type (a character for Numeric/numarray via
    a.typecode(), a dtype object for numpy via a.dtype).
    @raise TypeError: if a is not a recognised NumPy array type.
    """
    # Query the (potentially import-heavy) type detection only once,
    # instead of up to three times as in the original.
    array_module = NumPy_type(a)
    if array_module == 'Numeric':
        return a.typecode()
    elif array_module == 'numarray':
        return a.typecode()
    elif array_module == 'numpy':
        return a.dtype
    else:
        raise TypeError("array should be NumPy array, not %s" % type(a))
def fortran_storage(a):
    """
    Transparent transform of a NumPy array to Fortran (column major)
    storage.

    @param a: NumPy array (generated in Python or C with C storage)
    @return: a new NumPy array with column major storage.

    Method: for numpy arrays, numpy.asfortranarray produces an
    order='F' (column major) array and is a no-op if a already is one.
    (The original called numpy.asarray(a, fortran=True); asarray has no
    'fortran' keyword.)  For Numeric/numarray the array type is
    preserved with the simple (and slower) transpose(transpose(a).copy()).
    """
    array_module = NumPy_type(a)
    if array_module == 'Numeric' or array_module == 'numarray':
        return transpose(transpose(a).copy())
    else:
        import numpy
        return numpy.asfortranarray(a)
|
chewaca/gtsSolution
|
GtsSoftware/src/GtsSoftware/personal/diasnolaboralData.java
|
<reponame>chewaca/gtsSolution
package GtsSoftware.personal;
import java.io.Serializable;
import java.util.Date;
/**
 * Serializable value object describing a non-working day (dia no laboral):
 * an identifying code, a display name, the calendar date and the reason.
 *
 * <p>Note: the lower-case class name is kept for backward compatibility
 * with existing callers, although Java convention would be
 * {@code DiasNoLaboralData}.</p>
 */
public class diasnolaboralData implements Serializable {

    /** Explicit serialization version for this Serializable class. */
    private static final long serialVersionUID = 1L;

    /** Identifier code of the non-working day (nullable). */
    private Integer codigo;
    /** Display name of the day (nullable). */
    private String nombre;
    /** Calendar date of the day; stored as a defensive copy (nullable). */
    private Date fecha;
    /** Reason / motive for the day off (nullable). */
    private String motivo;

    /** @return the identifier code, or null if unset */
    public Integer getCodigo() {
        return codigo;
    }

    /** @param codigo the identifier code to store (may be null) */
    public void setCodigo(Integer codigo) {
        this.codigo = codigo;
    }

    /** @return the display name, or null if unset */
    public String getNombre() {
        return nombre;
    }

    /** @param nombre the display name to store (may be null) */
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }

    /**
     * @return a defensive copy of the stored date ({@link Date} is
     *         mutable), or null if unset
     */
    public Date getFecha() {
        return fecha == null ? null : new Date(fecha.getTime());
    }

    /**
     * Stores a defensive copy so later mutation of the caller's Date
     * cannot change this object's state.
     *
     * @param fecha the date to store (may be null)
     */
    public void setFecha(Date fecha) {
        this.fecha = fecha == null ? null : new Date(fecha.getTime());
    }

    /** @return the reason for the day off, or null if unset */
    public String getMotivo() {
        return motivo;
    }

    /** @param motivo the reason to store (may be null) */
    public void setMotivo(String motivo) {
        this.motivo = motivo;
    }
}
|
ggydush-fn/flytekit
|
tests/flytekit/integration/remote/mock_flyte_repo/workflows/basic/joblib.py
|
"""Test joblib file."""
import os
import typing
import joblib
import flytekit
from flytekit import task, workflow
from flytekit.types.file import JoblibSerializedFile
@task
def joblib_task(obj: typing.List[int]) -> JoblibSerializedFile:
    """Dump the given list to a joblib file inside the task's working directory."""
    out_path = os.path.join(
        flytekit.current_context().working_directory, "object.joblib"
    )
    joblib.dump(obj, out_path)
    return JoblibSerializedFile(path=out_path)
@workflow
def joblib_workflow(obj: typing.List[int]) -> JoblibSerializedFile:
    """Workflow wrapper: serialize obj via joblib_task and return the file."""
    return joblib_task(obj=obj)
|
petergroth/trajectory_forecasting
|
src/training_modules/train_nbody_model.py
|
import argparse
import math
import os
import random
from typing import Union
import hydra
import pytorch_lightning as pl
import torch
import torch_geometric.nn
import torchmetrics
import yaml
from omegaconf import DictConfig, OmegaConf
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.utilities.seed import seed_everything
from torch_geometric.data import Batch, Data
from src.data.dataset_nbody import (OneStepNBodyDataModule,
SequentialNBodyDataModule)
from src.models.model import *
class OneStepModule(pl.LightningModule):
    """One-step dynamics GNN for the n-body problem.

    Training predicts normalised single-step deltas of the dynamic state
    (position + velocity, 4 features).  Validation/test prime the model on
    11 observed frames, roll it out closed-loop for 80 steps and report
    ADE/FDE/velocity MSE.  predict_step returns the full 90-step rollout.

    NOTE(review): the normalisation helpers (update_in_normalisation,
    update_out_normalisation, in_normalise, out_normalise, out_renormalise)
    and the attributes in_std/in_mean/out_std/out_mean are provided
    elsewhere in the project -- confirm before further refactoring.
    """

    def __init__(
        self,
        model_type: Union[None, str],
        model_dict: Union[None, dict],
        noise: Union[None, float] = None,
        lr: float = 1e-4,
        weight_decay: float = 0.0,
        min_dist: int = 0,
        edge_weight: bool = False,
        self_loop: bool = True,
        undirected: bool = False,
        grav_attraction: bool = False,
        node_features: int = 5,
        edge_features: int = 0,
        out_features: int = 4,
        normalise: bool = True,
        log_norm: bool = False,
    ):
        """Instantiate the GNN named by ``model_type`` with ``model_dict`` kwargs.

        All original parameters are preserved; ``log_norm`` is new (default
        False, backward compatible) and gates logging of normalisation
        statistics during validation.
        """
        super().__init__()
        # Instantiate model from its class name (classes come from
        # src.models.model via the star import at the top of the file).
        self.model_type = model_type
        self.model = eval(model_type)(**model_dict)
        # Setup metrics
        self.train_pos_loss = torchmetrics.MeanSquaredError()
        self.train_vel_loss = torchmetrics.MeanSquaredError()
        self.train_yaw_loss = torchmetrics.MeanSquaredError()
        self.val_ade_loss = torchmetrics.MeanSquaredError()
        self.val_fde_loss = torchmetrics.MeanSquaredError()
        self.val_vel_loss = torchmetrics.MeanSquaredError()
        self.val_yaw_loss = torchmetrics.MeanSquaredError()
        # BUG FIX: the original executed ``self.model = model_type`` here,
        # overwriting the instantiated network with its class-name string.
        self.save_hyperparameters()
        self.noise = noise
        self.lr = lr
        self.weight_decay = weight_decay
        self.min_dist = min_dist
        self.edge_weight = edge_weight
        self.self_loop = self_loop
        self.undirected = undirected
        self.grav_attraction = grav_attraction
        self.normalise = normalise
        # BUG FIX: validation_step reads ``self.log_norm`` but the original
        # never set it (AttributeError at runtime when reached).
        self.log_norm = log_norm
        # node_features/edge_features/out_features are captured by
        # save_hyperparameters for checkpointing; not used directly here.

    def _edge_index(self, x, batch, log_edges=True):
        """Build graph connectivity from node positions ``x[:, :2]``.

        Union of a radius graph (r=self.min_dist) and a 1-NN graph so every
        graph stays connected; falls back to batch.edge_index when
        self.min_dist is None.
        """
        if self.min_dist is None:
            return batch.edge_index
        edge_index = torch_geometric.nn.radius_graph(
            x=x[:, :2],
            r=self.min_dist,
            batch=batch.batch,
            loop=self.self_loop,
            max_num_neighbors=30,
            flow="source_to_target",
        )
        # 1 nearest neighbour to ensure connected graphs
        nn_edge_index = torch_geometric.nn.knn_graph(
            x=x[:, :2], k=1, batch=batch.batch
        )
        # Remove duplicates from the union of the two graphs
        edge_index = torch_geometric.utils.coalesce(
            torch.cat((edge_index, nn_edge_index), dim=1)
        )
        if log_edges:
            self.log("average_num_edges", edge_index.shape[1] / batch.num_graphs)
        return edge_index

    def _edge_attrs(self, x, edge_index, batch):
        """Return edge attributes (distance, optionally gravity) or None."""
        if not self.edge_weight:
            return None
        row, col = edge_index
        edge_attr = (x[row, :2] - x[col, :2]).norm(dim=-1).unsqueeze(1)
        if self.grav_attraction:
            # Newtonian attraction between node masses (feature 4, scaled 1e10)
            m1 = x[row, 4] * 1e10
            m2 = x[col, 4] * 1e10
            attraction = m1 * m2 / (edge_attr.squeeze() ** 2) * 6.674e-11
            edge_attr = torch.hstack([edge_attr, attraction.unsqueeze(1)])
            # Replace inf from zero distances (self loops) with 0
            edge_attr = torch.nan_to_num(edge_attr, posinf=0)
        return edge_attr.type_as(batch.x)

    def _advance(self, x, prev_dynamic, static_features, batch, log_edges=True):
        """One rollout step: graph -> normalise -> model -> renormalise -> integrate.

        Returns the next full node state, i.e. [prev_dynamic + predicted
        delta, static features].
        """
        edge_index = self._edge_index(x, batch, log_edges)
        edge_attr = self._edge_attrs(x, edge_index, batch)
        if self.undirected:
            edge_index, edge_attr = torch_geometric.utils.to_undirected(
                edge_index, edge_attr
            )
        x_nrm, edge_attr_nrm = self.in_normalise(x, edge_attr)
        delta = self.model(
            x=x_nrm, edge_index=edge_index, edge_attr=edge_attr_nrm, batch=batch.batch
        )
        delta = self.out_renormalise(delta)
        return torch.cat((prev_dynamic + delta, static_features), dim=-1)

    def training_step(self, batch: Batch, batch_idx: int):
        """Single-step training: predict the normalised dynamic-state delta."""
        x = batch.x
        # The graph is built from the clean positions (as in the original);
        # noise is added afterwards, so edge attributes see noisy positions.
        edge_index = self._edge_index(x, batch, log_edges=True)
        if self.noise is not None:
            # BUG FIX: out-of-place so batch.x is not mutated in place.
            x = x + self.noise * torch.randn_like(x)
        edge_attr = self._edge_attrs(x, edge_index, batch)
        if self.undirected:
            edge_index, edge_attr = torch_geometric.utils.to_undirected(
                edge_index, edge_attr
            )
        # Target delta: noisy input state -> clean next state
        y_target = batch.y[:, :4] - x[:, :4]
        # Update running normalisation statistics, then normalise in/out
        if edge_attr is None:
            self.update_in_normalisation(x.clone())
        else:
            self.update_in_normalisation(x.clone(), edge_attr.clone())
        self.update_out_normalisation(y_target.clone())
        x_nrm, edge_attr_nrm = self.in_normalise(x, edge_attr)
        y_target_nrm = self.out_normalise(y_target)
        # Normalised predicted delta dynamics
        y_hat = self.model(
            x=x_nrm, edge_index=edge_index, edge_attr=edge_attr_nrm, batch=batch.batch
        )
        pos_loss = self.train_pos_loss(y_hat[:, :2], y_target_nrm[:, :2])
        # BUG FIX: the original accumulated the velocity error into the
        # train_pos_loss metric object.
        vel_loss = self.train_vel_loss(y_hat[:, 2:], y_target_nrm[:, 2:])
        self.log("train_pos_loss", pos_loss, on_step=True, on_epoch=True)
        self.log("train_vel_loss", vel_loss, on_step=True, on_epoch=True)
        self.log(
            "train_total_loss", (pos_loss + vel_loss) / 2, on_step=True, on_epoch=True
        )
        return pos_loss + vel_loss

    def validation_step(self, batch: Batch, batch_idx: int):
        """Rollout validation: prime on 11 frames, roll out 80, score MSE."""
        n_nodes = batch.num_nodes
        y_hat = torch.zeros((80, n_nodes, 4))
        y_target = torch.zeros((80, n_nodes, 4))
        static_features = batch.x[:, 0, 4:]
        # History: run the model on each of the 11 observed frames to warm it
        # up; only the prediction from the final frame (t=10) is kept, exactly
        # as in the original (which overwrote index 0 on every iteration).
        for t in range(11):
            predicted_graph = self._advance(
                batch.x[:, t, :], batch.x[:, t, :4], static_features, batch
            )
        y_hat[0, :, :] = predicted_graph[:, :4]
        y_target[0, :, :] = batch.x[:, 11, :4]
        # Future: closed-loop rollout on the model's own predictions.
        for t in range(11, 90):
            predicted_graph = self._advance(
                predicted_graph, predicted_graph[:, :4], static_features, batch
            )
            y_hat[t - 10, :, :] = predicted_graph[:, :4]
            y_target[t - 10, :, :] = batch.x[:, t + 1, :4]
        fde_loss = self.val_fde_loss(y_hat[-1, :, :2], y_target[-1, :, :2])
        ade_loss = self.val_ade_loss(y_hat[:, :, :2], y_target[:, :, :2])
        vel_loss = self.val_vel_loss(y_hat[:, :, 2:], y_target[:, :, 2:])
        self.log("val_ade_loss", ade_loss)
        self.log("val_fde_loss", fde_loss)
        self.log("val_vel_loss", vel_loss)
        self.log("val_total_loss", (ade_loss + vel_loss) / 2)
        if self.log_norm:
            # NOTE(review): self.log with a dict value relies on older
            # pytorch-lightning behaviour -- confirm with installed version.
            self.log("in_std", {f"in_std_{i}": s for i, s in enumerate(self.in_std)})
            self.log("in_mean", {f"in_mean_{i}": m for i, m in enumerate(self.in_mean)})
            self.log("out_std", {f"out_std_{i}": s for i, s in enumerate(self.out_std)})
            self.log(
                "out_mean", {f"out_mean_{i}": m for i, m in enumerate(self.out_mean)}
            )
        return (ade_loss + vel_loss) / 2

    def predict_step(self, batch, batch_idx=None):
        """Full 90-step rollout; returns (predictions, targets) incl. statics."""
        n_nodes = batch.num_nodes
        n_features = 5
        y_hat = torch.zeros((90, n_nodes, n_features))
        y_target = torch.zeros((90, n_nodes, n_features))
        static_features = batch.x[:, 0, 4:]
        # History: open-loop predictions from the observed frames
        for t in range(11):
            predicted_graph = self._advance(
                batch.x[:, t, :], batch.x[:, t, :4], static_features, batch,
                log_edges=False,
            )
            y_hat[t, :, :] = predicted_graph[:, :]
            y_target[t, :, :] = batch.x[:, t + 1, :]
        # Future: closed-loop rollout
        for t in range(11, 90):
            predicted_graph = self._advance(
                predicted_graph, predicted_graph[:, :4], static_features, batch,
                log_edges=False,
            )
            y_hat[t, :, :] = predicted_graph[:, :]
            y_target[t, :, :] = batch.x[:, t + 1, :]
        return y_hat, y_target

    def test_step(self, batch: Batch, batch_idx: int):
        """Testing reuses the rollout validation procedure."""
        return self.validation_step(batch, batch_idx)

    def configure_optimizers(self):
        """Adam with the configured learning rate and weight decay."""
        return torch.optim.Adam(
            self.parameters(), lr=self.lr, weight_decay=self.weight_decay
        )
class SequentialModule(pl.LightningModule):
def __init__(
self,
model_type: Union[None, str],
model_dict: Union[None, dict],
lr: float = 1e-4,
weight_decay: float = 0.0,
noise: Union[None, float] = None,
teacher_forcing_ratio: float = 0.3,
min_dist: int = 0,
n_neighbours: int = 30,
fully_connected: bool = True,
edge_weight: bool = False,
edge_type: str = "distance",
self_loop: bool = True,
undirected: bool = False,
out_features: int = 6,
node_features: int = 9,
edge_features: int = 1,
training_horizon: int = 90,
grav_attraction: bool = False,
):
super().__init__()
# Setup metrics
self.train_ade_loss = torchmetrics.MeanSquaredError()
self.train_fde_loss = torchmetrics.MeanSquaredError()
self.train_vel_loss = torchmetrics.MeanSquaredError()
self.val_ade_loss = torchmetrics.MeanSquaredError()
self.val_fde_loss = torchmetrics.MeanSquaredError()
self.val_vel_loss = torchmetrics.MeanSquaredError()
self.test_ade_loss = torchmetrics.MeanSquaredError()
self.test_fde_loss = torchmetrics.MeanSquaredError()
self.test_vel_loss = torchmetrics.MeanSquaredError()
# Instantiate model
self.model_type = model_type
self.model = eval(model_type)(**model_dict)
# Learning parameters
self.noise = noise
self.lr = lr
self.weight_decay = weight_decay
self.teacher_forcing_ratio = teacher_forcing_ratio
self.training_horizon = training_horizon
# Model parameters
self.rnn_type = (
model_dict["rnn_type"] if "rnn_type" in model_dict.keys() else None
)
self.out_features = out_features
self.edge_features = edge_features
self.node_features = node_features
# Graph parameters
self.min_dist = min_dist
self.edge_weight = edge_weight
self.undirected = undirected
self.self_loop = self_loop
self.grav_attraction = grav_attraction
self.edge_type = edge_type
self.fully_connected = fully_connected
self.n_neighbours = n_neighbours
if self.fully_connected:
self.edge_type = "knn"
self.n_neighbours = 100
self.save_hyperparameters()
def training_step(self, batch: Batch, batch_idx: int):
######################
# Initialisation #
######################
# Extract data from batch
n_nodes = batch.num_nodes
static_features = batch.x[:, 10, 4].unsqueeze(1)
edge_attr = None
# Ignore data after training horizon
batch.x = batch.x[:, : (self.training_horizon + 1)]
# Allocate prediction tensor
y_predictions = torch.zeros((n_nodes, self.training_horizon, self.out_features))
y_predictions = y_predictions.type_as(batch.x)
# Define target tensor
y_target = batch.x[:, 1 : (self.training_horizon + 1), : self.out_features]
y_target = y_target.type_as(batch.x)
assert y_target.shape == y_predictions.shape
# Initial hidden state
if self.rnn_type == "GRU":
h_node = torch.zeros((self.model.num_layers, n_nodes, self.model.rnn_size))
h_edge = torch.zeros(
(self.model.num_layers, n_nodes, self.model.rnn_edge_size)
)
h_node = h_node.type_as(batch.x)
h_edge = h_edge.type_as(batch.x)
c_node, c_edge = None, None
elif self.rnn_type == "LSTM":
h_node = torch.zeros((self.model.num_layers, n_nodes, self.model.rnn_size))
h_edge = torch.zeros(
(self.model.num_layers, n_nodes, self.model.rnn_edge_size)
)
h_node = h_node.type_as(batch.x)
h_edge = h_edge.type_as(batch.x)
c_node = torch.zeros((self.model.num_layers, n_nodes, self.model.rnn_size))
c_edge = torch.zeros(
(self.model.num_layers, n_nodes, self.model.rnn_edge_size)
)
c_node = c_node.type_as(batch.x)
c_edge = c_edge.type_as(batch.x)
else:
h_node, h_edge, c_node, c_edge = None, None, None, None
######################
# History #
######################
for t in range(11):
# Extract current input
x_t = batch.x[:, t, :]
x_t = x_t.type_as(batch.x)
# Add noise if specified
if self.noise is not None:
x_t[:, : self.out_features] += self.noise * torch.randn_like(
x_t[:, : self.out_features]
)
######################
# Graph construction #
######################
# Construct edges
if self.edge_type == "knn":
# Neighbour-based graph
edge_index = torch_geometric.nn.knn_graph(
x=x_t[:, :2],
k=self.n_neighbours,
batch=batch.batch,
loop=self.self_loop,
)
else:
# Distance-based graph
edge_index = torch_geometric.nn.radius_graph(
x=x_t[:, :2],
r=self.min_dist,
batch=batch.batch,
loop=self.self_loop,
max_num_neighbors=self.n_neighbours,
flow="source_to_target",
)
if self.undirected:
edge_index, edge_attr = torch_geometric.utils.to_undirected(edge_index)
# Remove duplicates and sort
edge_index = torch_geometric.utils.coalesce(edge_index)
# Create edge_attr if specified
if self.edge_weight:
# Encode distance between nodes as edge_attr
row, col = edge_index
edge_attr = (x_t[row, :2] - x_t[col, :2]).norm(dim=-1).unsqueeze(1)
edge_attr = edge_attr.type_as(batch.x)
if self.grav_attraction:
# Compute gravitational attraction between all nodes
m1 = x_t[row, 4] * 1e10
m2 = x_t[col, 4] * 1e10
attraction = m1 * m2 / (edge_attr.squeeze() ** 2) * 6.674e-11
edge_attr = torch.hstack([edge_attr, attraction.unsqueeze(1)])
# Replace inf values with 0
edge_attr = torch.nan_to_num(edge_attr, posinf=0)
#######################
# Training 1/2 #
#######################
# Obtain predicted delta dynamics
if self.rnn_type == "GRU":
delta_x, (h_node, h_edge) = self.model(
x=x_t,
edge_index=edge_index,
edge_attr=edge_attr,
batch=batch.batch,
hidden=(h_node, h_edge),
)
elif self.rnn_type == "LSTM": # LSTM
delta_x, ((h_node, c_node), (h_edge, c_edge)) = self.model(
x=x_t,
edge_index=edge_index,
edge_attr=edge_attr,
batch=batch.batch,
hidden=((h_node, c_node), (h_edge, c_edge)),
)
else:
delta_x = self.model(
x=x_t,
edge_index=edge_index,
edge_attr=edge_attr,
batch=batch.batch,
)
# Compute updated positions
vel = delta_x[:, [0, 1]]
pos = batch.x[:, t][:, [0, 1]] + 0.1 * vel
x_t = torch.cat([pos, vel, static_features], dim=-1)
x_t = x_t.type_as(batch.x)
# Save deltas for loss computation
y_predictions[:, t, :] = x_t[:, : self.out_features]
# If using teacher_forcing, draw sample and accept <teach_forcing_ratio*100> % of the time. Else, deny.
use_groundtruth = random.random() < self.teacher_forcing_ratio
######################
# Future #
######################
for t in range(11, self.training_horizon):
# Use groundtruth 'teacher_forcing_ratio' % of the time
if use_groundtruth:
x_t = batch.x[:, t, :].clone()
x_prev = x_t.clone()
######################
# Graph construction #
######################
# Construct edges
if self.edge_type == "knn":
# Neighbour-based graph
edge_index = torch_geometric.nn.knn_graph(
x=x_t[:, :2],
k=self.n_neighbours,
batch=batch.batch,
loop=self.self_loop,
)
else:
# Distance-based graph
edge_index = torch_geometric.nn.radius_graph(
x=x_t[:, :2],
r=self.min_dist,
batch=batch.batch,
loop=self.self_loop,
max_num_neighbors=self.n_neighbours,
flow="source_to_target",
)
if self.undirected:
edge_index, edge_attr = torch_geometric.utils.to_undirected(edge_index)
# Remove duplicates and sort
edge_index = torch_geometric.utils.coalesce(edge_index)
# Create edge_attr if specified
if self.edge_weight:
# Encode distance between nodes as edge_attr
row, col = edge_index
edge_attr = (x_t[row, :2] - x_t[col, :2]).norm(dim=-1).unsqueeze(1)
edge_attr = edge_attr.type_as(batch.x)
if self.grav_attraction:
# Compute gravitational attraction between all nodes
m1 = x_t[row, 4] * 1e10
m2 = x_t[col, 4] * 1e10
attraction = m1 * m2 / (edge_attr.squeeze() ** 2) * 6.674e-11
edge_attr = torch.hstack([edge_attr, attraction.unsqueeze(1)])
# Replace inf values with 0
edge_attr = torch.nan_to_num(edge_attr, posinf=0)
#######################
# Training 2/2 #
#######################
# Obtain predicted delta dynamics
if self.rnn_type == "GRU":
delta_x, (h_node, h_edge) = self.model(
x=x_t,
edge_index=edge_index,
edge_attr=edge_attr,
batch=batch.batch,
hidden=(h_node, h_edge),
)
elif self.rnn_type == "LSTM": # LSTM
delta_x, ((h_node, c_node), (h_edge, c_edge)) = self.model(
x=x_t,
edge_index=edge_index,
edge_attr=edge_attr,
batch=batch.batch,
hidden=((h_node, c_node), (h_edge, c_edge)),
)
else:
delta_x = self.model(
x=x_t,
edge_index=edge_index,
edge_attr=edge_attr,
batch=batch.batch,
)
# Compute updated positions
vel = delta_x[:, [0, 1]]
pos = x_prev[:, [0, 1]] + 0.1 * vel
x_t = torch.cat([pos, vel, static_features], dim=-1)
x_t = x_t.type_as(batch.x)
# Save deltas for loss computation
y_predictions[:, t, :] = x_t[:, : self.out_features]
# Compute and log loss
fde_loss = self.train_fde_loss(y_predictions[:, -1, :2], y_target[:, -1, :2])
ade_loss = self.train_ade_loss(y_predictions[:, :, :2], y_target[:, :, :2])
vel_loss = self.train_vel_loss(
y_predictions[:, :, [2, 3]], y_target[:, :, [2, 3]]
)
self.log(
"train_fde_loss", fde_loss, on_step=True, on_epoch=True, batch_size=n_nodes
)
self.log(
"train_ade_loss", ade_loss, on_step=True, on_epoch=True, batch_size=n_nodes
)
self.log(
"train_vel_loss", vel_loss, on_step=True, on_epoch=True, batch_size=n_nodes
)
loss = ade_loss
self.log(
"train_total_loss", loss, on_step=True, on_epoch=True, batch_size=n_nodes
)
return loss
def validation_step(self, batch: Batch, batch_idx: int):
    """Closed-loop validation rollout.

    The first 11 observed frames prime the model (positions are integrated
    from the *observed* frame at each step); the remaining frames up to
    ``self.training_horizon`` are predicted autoregressively from the
    model's own previous output. Logs FDE/ADE/velocity MSE losses and
    returns the ADE loss.

    Args:
        batch: PyG ``Batch`` whose ``x`` is (n_nodes, time, features);
            columns 0-1 are positions, 2-3 velocities, column 4 a static
            per-node feature (mass-like -- TODO confirm).
        batch_idx: index of the batch (unused beyond Lightning's contract).

    Returns:
        The ADE loss over the predicted future window.
    """
    ######################
    #   Initialisation   #
    ######################
    n_nodes = batch.num_nodes
    # Static per-node input, taken at the last observed frame (t = 10)
    static_features = batch.x[:, 10, 4].unsqueeze(1)
    # Ignore data after the training horizon
    batch.x = batch.x[:, : (self.training_horizon + 1)]
    # Allocate prediction tensor and define target tensor
    y_predictions = torch.zeros(
        (n_nodes, self.training_horizon, self.out_features)
    ).type_as(batch.x)
    y_target = batch.x[:, 1 : (self.training_horizon + 1), : self.out_features]
    y_target = y_target.type_as(batch.x)
    assert y_target.shape == y_predictions.shape

    # Initial recurrent hidden states (None for non-recurrent models)
    h_node = h_edge = c_node = c_edge = None
    if self.rnn_type in ("GRU", "LSTM"):
        h_node = torch.zeros(
            (self.model.num_layers, n_nodes, self.model.rnn_size)
        ).type_as(batch.x)
        h_edge = torch.zeros(
            (self.model.num_layers, n_nodes, self.model.rnn_edge_size)
        ).type_as(batch.x)
        if self.rnn_type == "LSTM":
            c_node = torch.zeros_like(h_node)
            c_edge = torch.zeros_like(h_edge)

    def build_graph(x_t):
        """Construct edge_index (and optional edge_attr) from positions."""
        if self.edge_type == "knn":
            # Neighbour-based graph
            edge_index = torch_geometric.nn.knn_graph(
                x=x_t[:, :2],
                k=self.n_neighbours,
                batch=batch.batch,
                loop=self.self_loop,
            )
        else:
            # Distance-based graph
            edge_index = torch_geometric.nn.radius_graph(
                x=x_t[:, :2],
                r=self.min_dist,
                batch=batch.batch,
                loop=self.self_loop,
                max_num_neighbors=self.n_neighbours,
                flow="source_to_target",
            )
        if self.undirected:
            # FIX: the original unpacked the single tensor returned by
            # to_undirected (edge_attr not passed) into (edge_index,
            # edge_attr), which splits the [2, E] tensor into its two rows.
            edge_index = torch_geometric.utils.to_undirected(edge_index)
        # Remove duplicates and sort
        edge_index = torch_geometric.utils.coalesce(edge_index)
        edge_attr = None
        if self.edge_weight:
            # Encode distance between nodes as the first edge feature
            row, col = edge_index
            edge_attr = (x_t[row, :2] - x_t[col, :2]).norm(dim=-1).unsqueeze(1)
            edge_attr = edge_attr.type_as(batch.x)
            if self.grav_attraction:
                # Gravitational attraction between connected nodes
                m1 = x_t[row, 4] * 1e10
                m2 = x_t[col, 4] * 1e10
                attraction = m1 * m2 / (edge_attr.squeeze() ** 2) * 6.674e-11
                edge_attr = torch.hstack([edge_attr, attraction.unsqueeze(1)])
                # Zero-distance pairs yield inf attraction; replace with 0
                edge_attr = torch.nan_to_num(edge_attr, posinf=0)
        return edge_index, edge_attr

    def step(x_t, pos_base):
        """One model step: predict velocities, integrate positions (dt=0.1)."""
        nonlocal h_node, h_edge, c_node, c_edge
        edge_index, edge_attr = build_graph(x_t)
        if self.rnn_type == "GRU":
            delta_x, (h_node, h_edge) = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
                hidden=(h_node, h_edge),
            )
        elif self.rnn_type == "LSTM":
            delta_x, ((h_node, c_node), (h_edge, c_edge)) = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
                hidden=((h_node, c_node), (h_edge, c_edge)),
            )
        else:
            delta_x = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
            )
        vel = delta_x[:, [0, 1]]
        # Explicit Euler integration with fixed time step 0.1
        pos = pos_base + 0.1 * vel
        return torch.cat([pos, vel, static_features], dim=-1).type_as(batch.x)

    ######################
    #      History       #
    ######################
    # Condition on the 11 observed frames; integrate from observed positions
    for t in range(11):
        x_t = step(batch.x[:, t, :].type_as(batch.x), batch.x[:, t, :2])
        y_predictions[:, t, :] = x_t[:, : self.out_features]

    ######################
    #       Future       #
    ######################
    # Closed-loop rollout: feed the model its own previous prediction
    for t in range(11, self.training_horizon):
        x_t = step(x_t, x_t[:, :2])
        y_predictions[:, t, :] = x_t[:, : self.out_features]

    # Compute and log losses; ADE/velocity are evaluated on the future window
    fde_loss = self.val_fde_loss(y_predictions[:, -1, :2], y_target[:, -1, :2])
    ade_loss = self.val_ade_loss(y_predictions[:, 11:, :2], y_target[:, 11:, :2])
    vel_loss = self.val_vel_loss(
        y_predictions[:, 11:, [2, 3]], y_target[:, 11:, [2, 3]]
    )
    self.log("val_fde_loss", fde_loss, batch_size=n_nodes)
    self.log("val_ade_loss", ade_loss, batch_size=n_nodes)
    self.log("val_vel_loss", vel_loss, batch_size=n_nodes)
    loss = ade_loss
    self.log("val_total_loss", loss, batch_size=n_nodes)
    return loss
def predict_step(self, batch: Batch, batch_idx=None, prediction_horizon: int = 90):
    """Full inference rollout returning predictions and targets.

    Identical rollout scheme to validation: 11 primed frames followed by
    closed-loop prediction, but all ``self.node_features`` columns are
    stored (not just ``out_features``).

    Args:
        batch: PyG ``Batch`` with node features over time.
        batch_idx: unused, kept for Lightning's signature.
        prediction_horizon: number of frames to roll out.
            NOTE(review): the shape assert below only holds when this
            equals ``self.training_horizon`` -- confirm intended usage.

    Returns:
        Tuple ``(y_predictions, y_target)`` of shape
        (n_nodes, horizon, node_features).
    """
    ######################
    #   Initialisation   #
    ######################
    n_nodes = batch.num_nodes
    # Static per-node input, taken at the last observed frame (t = 10)
    static_features = batch.x[:, 10, 4].unsqueeze(1)
    # Ignore data after the training horizon
    batch.x = batch.x[:, : (self.training_horizon + 1)]
    # Allocate prediction tensor and define target tensor
    y_predictions = torch.zeros(
        (n_nodes, prediction_horizon, self.node_features)
    ).type_as(batch.x)
    y_target = batch.x[:, 1 : (self.training_horizon + 1)]
    y_target = y_target.type_as(batch.x)
    assert y_target.shape == y_predictions.shape

    # Initial recurrent hidden states (None for non-recurrent models)
    h_node = h_edge = c_node = c_edge = None
    if self.rnn_type in ("GRU", "LSTM"):
        h_node = torch.zeros(
            (self.model.num_layers, n_nodes, self.model.rnn_size)
        ).type_as(batch.x)
        h_edge = torch.zeros(
            (self.model.num_layers, n_nodes, self.model.rnn_edge_size)
        ).type_as(batch.x)
        if self.rnn_type == "LSTM":
            c_node = torch.zeros_like(h_node)
            c_edge = torch.zeros_like(h_edge)

    def build_graph(x_t):
        """Construct edge_index (and optional edge_attr) from positions."""
        if self.edge_type == "knn":
            # Neighbour-based graph
            edge_index = torch_geometric.nn.knn_graph(
                x=x_t[:, :2],
                k=self.n_neighbours,
                batch=batch.batch,
                loop=self.self_loop,
            )
        else:
            # Distance-based graph
            edge_index = torch_geometric.nn.radius_graph(
                x=x_t[:, :2],
                r=self.min_dist,
                batch=batch.batch,
                loop=self.self_loop,
                max_num_neighbors=self.n_neighbours,
                flow="source_to_target",
            )
        if self.undirected:
            # FIX: the original unpacked the single tensor returned by
            # to_undirected (edge_attr not passed) into (edge_index,
            # edge_attr), which splits the [2, E] tensor into its two rows.
            edge_index = torch_geometric.utils.to_undirected(edge_index)
        # Remove duplicates and sort
        edge_index = torch_geometric.utils.coalesce(edge_index)
        edge_attr = None
        if self.edge_weight:
            # Encode distance between nodes as the first edge feature
            row, col = edge_index
            edge_attr = (x_t[row, :2] - x_t[col, :2]).norm(dim=-1).unsqueeze(1)
            edge_attr = edge_attr.type_as(batch.x)
            if self.grav_attraction:
                # Gravitational attraction between connected nodes
                m1 = x_t[row, 4] * 1e10
                m2 = x_t[col, 4] * 1e10
                attraction = m1 * m2 / (edge_attr.squeeze() ** 2) * 6.674e-11
                edge_attr = torch.hstack([edge_attr, attraction.unsqueeze(1)])
                # Zero-distance pairs yield inf attraction; replace with 0
                edge_attr = torch.nan_to_num(edge_attr, posinf=0)
        return edge_index, edge_attr

    def step(x_t, pos_base):
        """One model step: predict velocities, integrate positions (dt=0.1)."""
        nonlocal h_node, h_edge, c_node, c_edge
        edge_index, edge_attr = build_graph(x_t)
        if self.rnn_type == "GRU":
            delta_x, (h_node, h_edge) = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
                hidden=(h_node, h_edge),
            )
        elif self.rnn_type == "LSTM":
            delta_x, ((h_node, c_node), (h_edge, c_edge)) = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
                hidden=((h_node, c_node), (h_edge, c_edge)),
            )
        else:
            delta_x = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
            )
        vel = delta_x[:, [0, 1]]
        # Explicit Euler integration with fixed time step 0.1
        pos = pos_base + 0.1 * vel
        return torch.cat([pos, vel, static_features], dim=-1).type_as(batch.x)

    ######################
    #      History       #
    ######################
    # Condition on the 11 observed frames; integrate from observed positions
    for t in range(11):
        x_t = step(batch.x[:, t, :].type_as(batch.x), batch.x[:, t, :2])
        y_predictions[:, t, :] = x_t

    ######################
    #       Future       #
    ######################
    # Closed-loop rollout: feed the model its own previous prediction
    for t in range(11, prediction_horizon):
        x_t = step(x_t, x_t[:, :2])
        y_predictions[:, t, :] = x_t

    return y_predictions, y_target
def test_step(self, batch: Batch, batch_idx: int):
    """Closed-loop test rollout; mirrors ``validation_step`` with test metrics.

    The first 11 observed frames prime the model; the remaining frames up
    to ``self.training_horizon`` are predicted autoregressively. Logs
    FDE/ADE/velocity MSE losses and returns the ADE loss.
    """
    ######################
    #   Initialisation   #
    ######################
    n_nodes = batch.num_nodes
    # Static per-node input, taken at the last observed frame (t = 10)
    static_features = batch.x[:, 10, 4].unsqueeze(1)
    # Ignore data after the training horizon
    batch.x = batch.x[:, : (self.training_horizon + 1)]
    # Allocate prediction tensor and define target tensor
    y_predictions = torch.zeros(
        (n_nodes, self.training_horizon, self.out_features)
    ).type_as(batch.x)
    y_target = batch.x[:, 1 : (self.training_horizon + 1), : self.out_features]
    y_target = y_target.type_as(batch.x)
    assert y_target.shape == y_predictions.shape

    # Initial recurrent hidden states (None for non-recurrent models)
    h_node = h_edge = c_node = c_edge = None
    if self.rnn_type in ("GRU", "LSTM"):
        h_node = torch.zeros(
            (self.model.num_layers, n_nodes, self.model.rnn_size)
        ).type_as(batch.x)
        h_edge = torch.zeros(
            (self.model.num_layers, n_nodes, self.model.rnn_edge_size)
        ).type_as(batch.x)
        if self.rnn_type == "LSTM":
            c_node = torch.zeros_like(h_node)
            c_edge = torch.zeros_like(h_edge)

    def build_graph(x_t):
        """Construct edge_index (and optional edge_attr) from positions."""
        if self.edge_type == "knn":
            # Neighbour-based graph
            edge_index = torch_geometric.nn.knn_graph(
                x=x_t[:, :2],
                k=self.n_neighbours,
                batch=batch.batch,
                loop=self.self_loop,
            )
        else:
            # Distance-based graph
            edge_index = torch_geometric.nn.radius_graph(
                x=x_t[:, :2],
                r=self.min_dist,
                batch=batch.batch,
                loop=self.self_loop,
                max_num_neighbors=self.n_neighbours,
                flow="source_to_target",
            )
        if self.undirected:
            # FIX: the original unpacked the single tensor returned by
            # to_undirected (edge_attr not passed) into (edge_index,
            # edge_attr), which splits the [2, E] tensor into its two rows.
            edge_index = torch_geometric.utils.to_undirected(edge_index)
        # Remove duplicates and sort
        edge_index = torch_geometric.utils.coalesce(edge_index)
        edge_attr = None
        if self.edge_weight:
            # Encode distance between nodes as the first edge feature
            row, col = edge_index
            edge_attr = (x_t[row, :2] - x_t[col, :2]).norm(dim=-1).unsqueeze(1)
            edge_attr = edge_attr.type_as(batch.x)
            if self.grav_attraction:
                # Gravitational attraction between connected nodes
                m1 = x_t[row, 4] * 1e10
                m2 = x_t[col, 4] * 1e10
                attraction = m1 * m2 / (edge_attr.squeeze() ** 2) * 6.674e-11
                edge_attr = torch.hstack([edge_attr, attraction.unsqueeze(1)])
                # Zero-distance pairs yield inf attraction; replace with 0
                edge_attr = torch.nan_to_num(edge_attr, posinf=0)
        return edge_index, edge_attr

    def step(x_t, pos_base):
        """One model step: predict velocities, integrate positions (dt=0.1)."""
        nonlocal h_node, h_edge, c_node, c_edge
        edge_index, edge_attr = build_graph(x_t)
        if self.rnn_type == "GRU":
            delta_x, (h_node, h_edge) = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
                hidden=(h_node, h_edge),
            )
        elif self.rnn_type == "LSTM":
            delta_x, ((h_node, c_node), (h_edge, c_edge)) = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
                hidden=((h_node, c_node), (h_edge, c_edge)),
            )
        else:
            delta_x = self.model(
                x=x_t,
                edge_index=edge_index,
                edge_attr=edge_attr,
                batch=batch.batch,
            )
        vel = delta_x[:, [0, 1]]
        # Explicit Euler integration with fixed time step 0.1
        pos = pos_base + 0.1 * vel
        return torch.cat([pos, vel, static_features], dim=-1).type_as(batch.x)

    ######################
    #      History       #
    ######################
    # Condition on the 11 observed frames; integrate from observed positions
    for t in range(11):
        x_t = step(batch.x[:, t, :].type_as(batch.x), batch.x[:, t, :2])
        y_predictions[:, t, :] = x_t[:, : self.out_features]

    ######################
    #       Future       #
    ######################
    # Closed-loop rollout: feed the model its own previous prediction
    for t in range(11, self.training_horizon):
        x_t = step(x_t, x_t[:, :2])
        y_predictions[:, t, :] = x_t[:, : self.out_features]

    # Compute and log losses; ADE/velocity are evaluated on the future window
    fde_loss = self.test_fde_loss(y_predictions[:, -1, :2], y_target[:, -1, :2])
    ade_loss = self.test_ade_loss(y_predictions[:, 11:, :2], y_target[:, 11:, :2])
    vel_loss = self.test_vel_loss(
        y_predictions[:, 11:, [2, 3]], y_target[:, 11:, [2, 3]]
    )
    self.log("test_fde_loss", fde_loss)
    self.log("test_ade_loss", ade_loss)
    self.log("test_vel_loss", vel_loss)
    loss = ade_loss
    return loss
def configure_optimizers(self):
    """Build the optimiser for all trainable parameters.

    Returns a single Adam optimiser configured from the module's ``lr``
    and ``weight_decay`` hyperparameters.
    """
    optimizer = torch.optim.Adam(
        params=self.parameters(),
        lr=self.lr,
        weight_decay=self.weight_decay,
    )
    return optimizer
class ConstantPhysicalBaselineModule(pl.LightningModule):
    """Constant-velocity baseline: each node keeps its last observed velocity
    and positions are integrated forward with a fixed time step of 0.1.

    No parameters are learned; ``training_step`` is a no-op.
    """

    def __init__(self, prediction_horizon: int = 90, out_features: int = 4, **kwargs):
        # **kwargs absorbs extra config entries (e.g. model_type/model_dict)
        # so this module is call-compatible with the learned regressors.
        super().__init__()
        # Setup metrics (all mean-squared-error based)
        self.val_ade_loss = torchmetrics.MeanSquaredError()
        self.val_fde_loss = torchmetrics.MeanSquaredError()
        self.val_vel_loss = torchmetrics.MeanSquaredError()
        self.val_total_loss = torchmetrics.MeanSquaredError()
        # Number of rollout frames and number of predicted feature columns
        self.prediction_horizon = prediction_horizon
        self.out_features = out_features
        self.save_hyperparameters()

    def training_step(self, batch: Batch, batch_idx: int):
        # Baseline has nothing to train.
        pass

    def validation_step(self, batch: Batch, batch_idx: int):
        # Validate on sequential dataset. First 11 observations are used to prime the model.
        # Loss is computed on remaining 80 samples using rollout.
        # Setup target and allocate prediction tensors.
        # y_hat[:, k] / y_target[:, k] both correspond to frame 11 + k.
        y_target = batch.x[:, 11 : (self.prediction_horizon + 1), : self.out_features]
        y_hat = torch.zeros_like(y_target)
        # Static per-node feature taken at frame 0 (the GNN modules in this
        # file read it at frame 10 instead -- NOTE(review): confirm the
        # feature is truly constant over time).
        static_features = batch.x[:, 0, 4].unsqueeze(1)
        # Extract last observed positions/velocities
        last_pos = batch.x[:, 10, :2]
        last_vel = batch.x[:, 10, 2:4]
        # Compute delta change (constant-velocity displacement per step, dt = 0.1)
        delta_pos = last_vel * 0.1
        # First predicted positions
        predicted_pos = last_pos + delta_pos
        predicted_graph = torch.cat((predicted_pos, last_vel, static_features), dim=-1)
        # Save first prediction and target
        y_hat[:, 0, :] = predicted_graph[:, :4]
        # 1 prediction done, 79 remaining
        for t in range(11, self.prediction_horizon):
            predicted_pos += delta_pos
            predicted_graph = torch.cat(
                (predicted_pos, last_vel, static_features), dim=-1
            )
            y_hat[:, t - 10, :] = predicted_graph[:, :4]
        # Compute and log loss.
        # NOTE(review): slicing [:, 11:] here drops the first 11 *predicted*
        # frames (frames 11-21), whereas the GNN modules' [:, 11:] slice drops
        # predictions for frames 1-11 -- the evaluation windows differ; confirm
        # this is intentional before comparing numbers.
        fde_loss = self.val_fde_loss(y_hat[:, -1, :2], y_target[:, -1, :2])
        ade_loss = self.val_ade_loss(y_hat[:, 11:, :2], y_target[:, 11:, :2])
        vel_loss = self.val_vel_loss(y_hat[:, 11:, 2:], y_target[:, 11:, 2:])
        self.log("val_ade_loss", ade_loss)
        self.log("val_fde_loss", fde_loss)
        self.log("val_vel_loss", vel_loss)
        self.log("val_total_loss", ade_loss)
        return ade_loss

    def predict_step(self, batch: Batch, prediction_horizon: int = 51):
        # Roll out constant-velocity predictions and return them together
        # with the ground-truth targets, both shaped (nodes, time, features).
        # Extract dimensions and allocate target/prediction tensors
        n_nodes = batch.num_nodes
        n_features = 5
        y_hat = torch.zeros((prediction_horizon, n_nodes, n_features))
        y_target = torch.zeros((prediction_horizon, n_nodes, n_features))
        # Fill in targets: y_target[t] is frame t + 1
        for t in range(0, prediction_horizon):
            y_target[t, :, :] = batch.x[:, t + 1, :]
        static_features = batch.x[:, 0, 4].unsqueeze(1)
        # First 11 steps use the observed frame as the integration base
        for t in range(11):
            last_pos = batch.x[:, t, :2]
            last_vel = batch.x[:, t, 2:4]
            delta_pos = last_vel * 0.1
            predicted_pos = last_pos + delta_pos
            predicted_graph = torch.cat(
                (predicted_pos, last_vel, static_features), dim=1
            )
            y_hat[t, :, :] = predicted_graph
        # Remaining steps integrate from the previous prediction; velocity
        # (and hence delta_pos) stays frozen at the t = 10 values.
        for t in range(11, prediction_horizon):
            last_pos = predicted_pos
            # velocity no longer changing
            # delta_pos no longer changing
            predicted_pos = last_pos + delta_pos
            predicted_graph = torch.cat(
                (predicted_pos, last_vel, static_features), dim=1
            )
            y_hat[t, :, :] = predicted_graph
        # Reorder from (time, nodes, features) to (nodes, time, features)
        y_hat = torch.permute(y_hat, (1, 0, 2))
        y_target = torch.permute(y_target, (1, 0, 2))
        return y_hat, y_target

    def test_step(self, batch: Batch, batch_idx: int):
        # Test metrics are identical to validation metrics for this baseline.
        return self.validation_step(batch, batch_idx)

    def configure_optimizers(self):
        # Required by Lightning's interface; the optimiser has nothing to
        # update since the baseline holds no learnable parameters.
        return torch.optim.Adam(self.parameters(), lr=1e-4)
@hydra.main(config_path="../../configs/nbody/", config_name="config")
def main(config):
    """Train/validate/test an n-body regressor from a Hydra config.

    The config selects the datamodule, model, and regressor classes by
    name, controls which phases (train/validate/test) run, and provides
    all hyperparameters. Results are logged to Weights & Biases.
    """
    # Print configuration for online monitoring
    print(OmegaConf.to_yaml(config))
    # Save complete yaml file for logging and reproducibility
    log_dir = f"logs/nbody/{config.logger.project}/{config.logger.version}"
    os.makedirs(log_dir, exist_ok=True)
    yaml_path = f"{log_dir}/{config.logger.version}.yaml"
    OmegaConf.save(config, f=yaml_path)
    # Seed for reproducibility
    seed_everything(config["misc"]["seed"], workers=True)
    # Load data module.
    # NOTE(review): eval() executes arbitrary code taken from the config
    # file; prefer an explicit name->class registry if configs are not
    # fully trusted.
    datamodule = eval(config["misc"]["dm_type"])(**config["datamodule"])
    # Define model; the constant baseline carries no learnable model and
    # must never be trained.
    if config["misc"]["model_type"] != "ConstantModel":
        model_dict = dict(config["model"])
        model_type = config["misc"]["model_type"]
    else:
        model_dict, model_type = None, None
        config["misc"]["regressor_type"] = "ConstantPhysicalBaselineModule"
        config["misc"]["train"] = False
    # Define LightningModule
    regressor = eval(config["misc"]["regressor_type"])(
        model_type=model_type, model_dict=model_dict, **config["regressor"]
    )
    # Setup logging (using saved yaml file)
    wandb_logger = WandbLogger(
        entity="petergroth",
        config=OmegaConf.to_container(config, resolve=True),
        **config["logger"],
    )
    wandb_logger.watch(regressor, log_freq=config["misc"]["log_freq"], log_graph=False)
    # Setup callbacks
    checkpoint_callback = pl.callbacks.ModelCheckpoint(
        filename=config["logger"]["version"], monitor="val_total_loss", save_last=True
    )
    # Create trainer, then run the requested phases
    trainer = pl.Trainer(
        logger=wandb_logger, **config["trainer"], callbacks=[checkpoint_callback]
    )
    if config["misc"]["train"]:
        trainer.fit(model=regressor, datamodule=datamodule)
    if config["misc"]["validate"]:
        trainer.validate(regressor, datamodule=datamodule, ckpt_path="best")
    if config["misc"]["test"]:
        # FIX: pass the regressor explicitly (mirroring the validate call);
        # without a model argument, trainer.test fails when training was
        # skipped and no module is attached to the trainer.
        trainer.test(regressor, datamodule=datamodule, ckpt_path="best")


if __name__ == "__main__":
    main()
|
eeyesonme/myspider
|
projects/ireader/src/main/java/com/digitalplay/network/ireader/book/controller/BookController.java
|
package com.digitalplay.network.ireader.book.controller;
import java.util.Map;
import javax.servlet.ServletRequest;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import com.digitalplay.network.ireader.book.domain.Book;
import com.digitalplay.network.ireader.book.service.BookService;
import com.digitalplay.network.ireader.util.Servlets;
import com.google.common.collect.Maps;
/**
 * Spring MVC controller serving book detail pages.
 *
 * <p>NOTE(review): {@code @Transactional} (here the javax.transaction variant)
 * on a web controller is unusual; transaction demarcation normally belongs in
 * the service layer ({@link BookService}). Confirm this is intentional.
 */
@Controller
@Transactional
public class BookController {

    /** Service providing book lookup/persistence operations (injected by Spring). */
    @Autowired
    private BookService bookService;

    // @Autowired
    // private Validator validator;

    /** Default page size (as a String, matching @RequestParam defaultValue usage). */
    private static final String PAGE_SIZE = "10";

    /** Supported sort types, keyed by request-parameter value; populated once at class load. */
    private static Map<String, String> sortTypes = Maps.newLinkedHashMap();

    static {
        sortTypes.put("auto", "自动");
        sortTypes.put("name", "标题");
    }

    // NOTE(review): the blocks below are dead, commented-out code; consider
    // removing them (version control preserves the history).
    /*@RequestMapping(value = "/book",method = RequestMethod.GET)
    public String list(@RequestParam(value = "page", defaultValue = "1") int pageNumber,
            @RequestParam(value = "page.size", defaultValue = PAGE_SIZE) int pageSize,
            @RequestParam(value = "sortType", defaultValue = "auto") String sortType, Model model,
            ServletRequest request) {
        Map<String, Object> searchParams = Servlets.getParametersStartingWith(request, "search_");
        Page<Book> books =bookService.listBooks( searchParams, pageNumber, pageSize, sortType);

        model.addAttribute("books", books);
        model.addAttribute("sortType", sortType);
        model.addAttribute("sortTypes", sortTypes);
        // Encode the search conditions into a string, used in sorting/paging URLs
        model.addAttribute("searchParams", Servlets.encodeParameterStringWithPrefix(searchParams, "search_"));

        return "book/bookList";
    }*/

    /**
     * Renders the detail page for a single book.
     *
     * @param id    database identifier of the book (path variable)
     * @param model Spring MVC model, populated with the {@code "book"} attribute
     * @return the view name of the book detail template
     */
    @RequestMapping(value="/book/{id}" ,method = RequestMethod.GET)
    public String getBookDetail(@PathVariable("id") Long id,Model model){
        Book book= bookService.findOne(id);
        model.addAttribute("book",book);
        return "book/bookDetail";
    }

    /*@RequestMapping(value="/api/book/{id}" ,method = RequestMethod.GET, produces = MediaTypes.JSON_UTF_8)
    public Book getBook(@PathVariable("id") Long id){
        Book book= bookService.findOne(id);
        return book;
    }

    @RequestMapping(value="/api/books" ,produces = MediaTypes.JSON_UTF_8)
    public List<Book> list(Pageable pageable){
        Iterable<Book> book= bookService.findAll(pageable);
        ArrayList<Book> books= new ArrayList<Book>();
        Iterator<Book> iter= book.iterator();
        while(iter.hasNext()){
            books.add(iter.next());
        }
        return books;
    }
    */
}
|
djee-ms/webrtc-scripts
|
defaults.py
|
<gh_stars>1-10
"""
This file holds default settings value and it is used like template for creation userdef.py file.
If you want to change any variable listed below do that in generated userdef.py file.
"""
#args.gn template path
webRTCGnArgsTemplatePath='./webrtc/windows/templates/gns/args.gn'
#Supported platforms for specific host OS
supportedPlatformsForHostOs = {
'windows' : ['win', 'winuwp'],
'darwin' : ['ios', 'mac'],
'linux' : ['android', 'linux']
}
#Supported cpus for specific platform
supportedCPUsForPlatform = {
'winuwp' : ['arm', 'arm64', 'x86', 'x64'],
'win' : ['x86', 'x64'],
'ios' : ['arm'],
'mac' : [ 'x86', 'x64'],
'android' : ['arm'],
'linux' : [ 'x86', 'x64'],
}
#List of targets for which will be performed specified actions. Supported target is webrtc. In future it will be added support for ortc.
targets = [ 'webrtc' ]
#List of target cpus. Supported cpus are arm, x86 and x64
targetCPUs = [ 'arm', 'x86', 'x64' ]
#List of target platforms. Supported cpus are win and winuwp
targetPlatforms = [ 'win', 'winuwp' ]
#List of target configurations. Supported cpus are Release and Debug
targetConfigurations = [ 'Release', 'Debug' ]
#TODO: Implement logic to update zslib_eventing_tool.gni based on list of specified programming languages.
targetProgrammingLanguage = [ 'cx', 'cppwinrt', 'c', 'dotnet', 'python' ]
#=========== Supported actions: clean, createuserdef, prepare, build, backup, createnuget, publishnuget, uploadbackup.
# In future it will be added support updatesample.
#'clean' : Based on cleanup options set in cleanupOptions dict, it can be choosen desired cleanup actions.
#'createuserdef' : Deletes existing userdef.py if exists and create a new from defaults.py.
#'prepare' : Prepares developement environemnt for selected targets for choosen cpus, platforms and configurations.
#'build' : Builds selected targets for choosen cpus, platforms and configurations.
#'backup': Backup latest build.
#'createnuget' : Creates nuget package.
#'releasenote' : Gives user a choice on how to add a release note.
#'publishnuget' : Publishes nuget package
#'uploadbackup' : Creates a zipp file with pdb files and nuget package based on configuration and uploads it to onedrive
#List of actions to perform
actions = [ 'prepare', 'build' ]
buildWithClang = False
#Flag if wrapper library should be built. If it is False, it will be built only native libraries
buildWrapper = True
#Flag if rtc_include_tests should be defined. If False, native tests aren't built
includeTests = False
#=========== cleanupOptions
#'actions' : ['cleanOutput', 'cleanIdls', 'cleanUserDef','cleanPrepare'],
#'targets' : If [], it will use values from targets variable above.
# If ['*'] it will delete output folders for all targets.
# If ['webrtc'] it will delete just webrtc target
#'cpus' : If [], it will use values from targetCPUs variable above.
# If ['*'] it will delete output folders for all cpus.
# If ['x64'] it will delete just x64 output folder
#'platforms' : If [], it will use values from targetPlatforms variable above.
# If ['*'] it will delete output folders for all platforms.
# If ['winuwp'] it will delete just winuwp output folder
#'configurations' : If [], it will use values from targetConfigurations variable above.
# If ['*'] it will delete output folders for all configurations.
# If ['Release'] it will delete just Release output folder
cleanupOptions = {
'actions' : ['cleanOutput'],
'targets' : [],
'cpus' : [],
'platforms' : [],
'configurations' : []
}
"""
Supported formats: %(funcName)s - function name, %(levelname)s - log level name, %(asctime)s - time, %(message)s - log message, %(filename)s - curremt python filename, %(lineno)d - log message line no, %(name)d - module name
For the rest of available attributes you can check on https://docs.python.org/3/library/logging.html#logrecord-attributes
"""
#logFormat = '[%(levelname)-17s] - %(asctime)s - %(message)s (%(filename)s:%(lineno)d)'
logFormat = '[%(levelname)-17s] - [%(name)-15s] - %(funcName)-30s - %(message)s (%(filename)s:%(lineno)d)'
#Supported log levels: DEBUG, INFO, WARNING, ERROR, CRITICAL (case sensitive)
logLevel = 'DEBUG'
#Select ninja environemnt variables whose values will be logged. Available values are 'LIB', 'PATHEXT', 'LIBPATH', 'PATH', 'SYSTEMROOT', 'INCLUDE'
logNinjaEnvironmentFileVariables = ['INCLUDE', 'LIBPATH']
#Log filename. If it is empty string, log will be shown in console.
#In other case, it will log to specified file in folder from where script is run.
logToFile = ''
#If true overwrite old log file, otherwise it will create a new log file with time suffix.
overwriteLogFile = False
#If set to False, log messages for different log levels will be shown colorized.
noColoredOutput = False
#If set to True script execution will be stopped on error.
stopExecutionOnError = False
#If set to True, shows trace log when script execution is stopped on error
showTraceOnError = True
#If set to True, shows all settings values when script execution is stopped on error
showSettingsValuesOnError = True
#If set to True, shows PATH variable when script execution is stopped on error
showPATHOnError = True
#Windows specific variables
#If VS is installed but it is not found,, it is required to set msvsPath variable
msvsPath = ''
#If set to True, output libraries and pdbs will be stored in Backup folder
enabledBackup = False
#Backup folder, in user working directory (folder from where script is run)
libsBackupPath = './Backup'
#Flag for overwriting current backup folder
overwriteBackup = False
#Additional targets that can be built
#'target_name' : Name of target to build. You can name the target as you wish.
# e.g. peercc_server. It is dictionary key for a list
# of gn targets that will be built for target you define,
# flag for linking obj files. (0 don't link, 1 link) and
# flag for copying libs, exes and pdbs to OUTPUT folder.
# {
# 'target_name' : ( [list of gn target paths], merging libs flag, copying to output flag ),
# }
availableTargetsForBuilding = {
'peercc_server' : (
[
'peerconnection_server'
],0,1
),
}
#Path where nuget package and all of the files used to create the package are stored
nugetFolderPath = './nugetpackages'
nugetVersionInfo = {
#Main version number of the NuGet package
'number': '71',
#Use '' if not prerelease, 'Default' is based on previous version, or use some other prerelease ('Alpha', 'Beta', ...)
'prerelease': 'Default',
#Initial version number format
'format': '1.[number].0.1[prerelease]'
}
#Input NuGet package version number manually, used if the selected version number does not exist on nuget.org, E.g., '1.66.0.3-Alpha'
manualNugetVersionNumber = ''
#Path to a release notes file
releaseNotePath = 'releases.txt'
#Information about the sample to be updated
updateSampleInfo = {
'package' : 'default',
'samples' : [
{
'name' : 'PeerCC',
'url' : 'https://github.com/webrtc-uwp/PeerCC-Sample',
'branch': 'webrtc_merge_m66'
}
]
}
#List of NuGet packages used to manually publish nuget packages, E.g., 'webrtc.1.66.0.3-Alpha.nupkg'
#Packages must be placed in a folder referenced in nugetFolderPath variable
nugetPackagesToPublish = []
#API key used to publish nuget packages to nuget.org
nugetAPIKey = ''
#URL for the nuget server, if 'default' nuget.org is used
nugetServerURL = 'default'
#Output path where will be stored nuget package as well as libs and pdbs
#releaseOutputPath = '.'
enableIdlImpl = False
#Put list of unit tests, present in unitTest dictionary, to execute, or '*' to run all unit tests from unitTest
unitTestsToRun = ['*']
#Dictionary of all available unit tests, with list of tests to execute.
#Each unit test is associated with the list of tests. List can contain just '*' which will run all tests, for that unit test.
#List can contain specific tests, that will be run for specific unit test
# (e.g. 'rtc_pc_unittests' : ['ExternalAuth/SrtpTransportTestWithExternalAuth.SendAndRecvPacket_SRTP_AEAD_AES_256_GCM/1',
# ExternalAuth/SrtpTransportTestWithExternalAuth.SendAndRecvPacket_AES_CM_128_HMAC_SHA1_80/0]).
#Also it can be specified to run particular test cases (e.g. 'rtc_pc_unittests' : ['ExternalAuth*','VoiceChannelSingleThreadTest*'],).
#If some of these unit tests are not of interest, remove it from the unitTests dictionary.
#For some specific configuration of unit tests, good practice would be to create a template with a tailored unitTests dictionary
unitTests = {
'audio_codec_speed_tests' : ['*'],
'audio_decoder_unittests' : ['*'],
'common_audio_unittests' : ['*'],
'common_video_unittests' : ['*'],
'fake_network_unittests' : ['*'],
'modules_tests' : ['*'],
'modules_unittests' : [ '*'],
'ortc_unittests' : ['*'],
'peerconnection_unittests' : ['*'],
'rtc_media_unittests' : ['*'],
'rtc_pc_unittests' : ['*'],
'rtc_stats_unittests' : ['*'],
'rtc_unittests' : ['*'],
'system_wrappers_unittests' : ['*'],
'test_packet_masks_metrics' : ['*'],
'tools_unittests' : ['*'],
'video_capture_tests' : ['*'],
'video_engine_tests' : ['*'],
'webrtc_nonparallel_tests' : ['*'],
'webrtc_opus_fec_test' : ['*'],
'webrtc_perf_tests' : ['*'],
}
|
bill1812/wicromatch
|
benchmark/fixtures/match/star-basename-short.js
|
// Benchmark fixture for the "star-basename-short" matching case:
// [list of candidate filenames, glob pattern to match them against].
// NOTE(review): the pattern "c*3.txt" matches none of the listed ".js"
// names — presumably intentional (measures the non-matching path);
// confirm against the benchmark harness.
module.exports = [
  [
    "a00.js",
    "b00.js",
    "c00.js",
    "d00.js",
    "e00.js",
    "f00.js",
    "g00.js",
    "h00.js",
    "i00.js",
    "j00.js",
    "k00.js",
    "l00.js",
    "m00.js",
    "n00.js",
    "o00.js",
    "p00.js",
    "q00.js",
    "r00.js",
    "s00.js",
    "t00.js",
    "u00.js",
    "v00.js"
  ],
  "c*3.txt"
];
|
imgstack/stackpath-cdn-go
|
models/custconf_auth_geo.go
|
<filename>models/custconf_auth_geo.go
// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// CustconfAuthGeo Geographic restrictions allow you to restrict content to end users in specific locations. The IP address of incoming requests is checked against a current list of IP allocations to countries and to states within the US. If an end user's IP address is not found in the list, they are allowed access to the content by default. The feature has both an Include and an Exclude list which are used to target the allowed audience.
//
// swagger:model custconfAuthGeo
type CustconfAuthGeo struct {

	// code
	Code *AuthGeoCodeEnumWrapperValue `json:"code,omitempty"`

	// enabled
	Enabled bool `json:"enabled"`

	// This is used by the API to perform conflict checking
	ID string `json:"id,omitempty"`

	// String of values delimited by a ',' character. These are the region codes you are targeting for this policy. The values that can be supplied within this field are those that are supported by the MaxMind® GeoIP database.
	Values string `json:"values,omitempty"`
}

// Validate validates this custconf auth geo.
// Code is the only field with registered validation rules.
func (m *CustconfAuthGeo) Validate(formats strfmt.Registry) error {
	var res []error

	if err := m.validateCode(formats); err != nil {
		res = append(res, err)
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}

// validateCode validates the optional code field; absent values pass.
func (m *CustconfAuthGeo) validateCode(formats strfmt.Registry) error {
	if swag.IsZero(m.Code) { // not required
		return nil
	}

	if m.Code != nil {
		if err := m.Code.Validate(formats); err != nil {
			if ve, ok := err.(*errors.Validation); ok {
				// Re-root the validation error at this struct's field name.
				return ve.ValidateName("code")
			}
			return err
		}
	}

	return nil
}

// ContextValidate validate this custconf auth geo based on the context it is used
func (m *CustconfAuthGeo) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	var res []error

	if err := m.contextValidateCode(ctx, formats); err != nil {
		res = append(res, err)
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}

// contextValidateCode delegates context validation to the nested code value.
func (m *CustconfAuthGeo) contextValidateCode(ctx context.Context, formats strfmt.Registry) error {
	if m.Code != nil {
		if err := m.Code.ContextValidate(ctx, formats); err != nil {
			if ve, ok := err.(*errors.Validation); ok {
				return ve.ValidateName("code")
			}
			return err
		}
	}

	return nil
}

// MarshalBinary interface implementation
func (m *CustconfAuthGeo) MarshalBinary() ([]byte, error) {
	if m == nil {
		return nil, nil
	}
	return swag.WriteJSON(m)
}

// UnmarshalBinary interface implementation
func (m *CustconfAuthGeo) UnmarshalBinary(b []byte) error {
	var res CustconfAuthGeo
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*m = res
	return nil
}
|
kubemq-hub/kubemq-target-connectors
|
targets/gcp/memorystore/redis/connector.go
|
package redis
import (
"github.com/kubemq-hub/builder/connector/common"
"math"
)
// Connector describes the GCP Memorystore Redis target for the connector
// builder: connection-level properties plus the per-request metadata schema
// (method, key, etag, write concurrency, read consistency).
func Connector() *common.Connector {
	return common.NewConnector().
		SetKind("gcp.cache.redis").
		SetDescription("GCP Memory Store Redis Target").
		SetName("Redis").
		SetProvider("GCP").
		SetCategory("Cache").
		SetTags("db", "memory-store", "cloud", "managed").
		// Connection string is the only required configuration property.
		AddProperty(
			common.NewProperty().
				SetKind("string").
				SetName("url").
				SetTitle("Connection String").
				SetDescription("Set Redis url").
				SetMust(true).
				SetDefault("redis://localhost:6379"),
		).
		AddMetadata(
			common.NewMetadata().
				SetName("method").
				SetKind("string").
				SetDescription("Set Redis execution method").
				SetOptions([]string{"get", "set", "delete"}).
				SetDefault("get").
				SetMust(true),
		).
		AddMetadata(
			common.NewMetadata().
				SetName("key").
				SetKind("string").
				SetDescription("Set Redis key").
				SetMust(true),
		).
		AddMetadata(
			common.NewMetadata().
				SetName("etag").
				SetKind("int").
				SetDescription("Set Redis etag").
				SetDefault("0").
				SetMin(0).
				// NOTE(review): the cap is MaxInt16 although the kind is
				// "int" — confirm this upper bound is intentional.
				SetMax(math.MaxInt16).
				SetMust(false),
		).
		AddMetadata(
			common.NewMetadata().
				SetName("concurrency").
				SetKind("string").
				SetDescription("Set Redis write concurrency").
				SetOptions([]string{"first-write", "last-write", ""}).
				SetDefault("").
				SetMust(false),
		).
		AddMetadata(
			common.NewMetadata().
				SetName("consistency").
				SetKind("string").
				SetDescription("Set Redis read consistency").
				SetOptions([]string{"strong", "eventual", ""}).
				SetDefault("").
				SetMust(false),
		)
}
|
jstokes/secure-data-service
|
tools/odin/lib/Shared/EntityClasses/enum/GraduationPlanType.rb
|
=begin
Copyright 2012-2013 inBloom, Inc. and its affiliates.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require_relative 'Enum.rb'
# Enumerates the types of graduation plans. From Ed-Fi-Core.xsd:
# <xs:simpleType name="GraduationPlanType">
# <xs:annotation>
# <xs:documentation>The type of academic plan the student is following for graduation.</xs:documentation>
# </xs:annotation>
# <xs:restriction base="xs:token">
# <xs:enumeration value="Career and Technical Education"/>
# <xs:enumeration value="Distinguished"/>
# <xs:enumeration value="Minimum"/>
# <xs:enumeration value="Recommended"/>
# <xs:enumeration value="Standard"/>
# </xs:restriction>
# </xs:simpleType>
# Enumerated graduation-plan types; token values come from the Ed-Fi
# GraduationPlanType simpleType quoted above.
class GraduationPlanType
  include Enum

  # Each entry maps a symbolic constant to its Ed-Fi token string.
  GraduationPlanType.define :CAREER_AND_TECHNICAL_EDUCATION, "Career and Technical Education"
  GraduationPlanType.define :DISTINGUISHED, "Distinguished"
  GraduationPlanType.define :MINIMUM, "Minimum"
  GraduationPlanType.define :RECOMMENDED, "Recommended"
  GraduationPlanType.define :STANDARD, "Standard"
end
|
adesutherland/crexx
|
re2c/test/american_fuzzy_lop/005.c
|
<filename>re2c/test/american_fuzzy_lop/005.c
american_fuzzy_lop/005.re:2:9: error: syntax error in escape sequence
|
hernad/zimbra9
|
zm-mailbox/store/src/java-test/com/zimbra/cs/mailbox/acl/AclPushTest.java
|
<filename>zm-mailbox/store/src/java-test/com/zimbra/cs/mailbox/acl/AclPushTest.java
/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Server
* Copyright (C) 2014, 2016 Synacor, Inc.
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software Foundation,
* version 2 of the License.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License along with this program.
* If not, see <https://www.gnu.org/licenses/>.
* ***** END LICENSE BLOCK *****
*/
package com.zimbra.cs.mailbox.acl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Date;
import java.util.HashMap;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.Multimap;
import com.zimbra.common.account.Key;
import com.zimbra.common.localconfig.LC;
import com.zimbra.cs.account.Account;
import com.zimbra.cs.account.MockProvisioning;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.db.DbPendingAclPush;
import com.zimbra.cs.db.DbPool;
import com.zimbra.cs.db.DbPool.DbConnection;
import com.zimbra.cs.db.HSQLDB;
import com.zimbra.cs.mailbox.ACL;
import com.zimbra.cs.mailbox.Folder;
import com.zimbra.cs.mailbox.MailItem;
import com.zimbra.cs.mailbox.Mailbox;
import com.zimbra.cs.mailbox.MailboxManager;
import com.zimbra.cs.mailbox.MailboxTestUtil;
import com.zimbra.cs.mailbox.OperationContext;
import com.zimbra.cs.mailbox.ScheduledTaskManager;
/**
 * Tests for the pending-ACL-push queue ({@link DbPendingAclPush}): granting
 * folder access enqueues entries which the scheduled AclPush task later
 * drains.
 *
 * @author zimbra
 */
public class AclPushTest {

    private DbConnection connection;

    @BeforeClass
    public static void init() throws Exception {
        // Mock provisioning + in-memory HSQLDB so no real server is required.
        Provisioning.setInstance(new MockProvisioning());
        LC.zimbra_class_database.setDefault(HSQLDB.class.getName());
        DbPool.startup();
        HSQLDB.createDatabase();
        MailboxTestUtil.initServer();
        Provisioning prov = Provisioning.getInstance();
        // Two accounts with fixed zimbraIds: an owner and a grantee.
        HashMap<String, Object> attrs = new HashMap<String, Object>();
        attrs.put(Provisioning.A_zimbraId,
                "17dd075e-2b47-44e6-8cb8-7fdfa18c1a9f");
        prov.createAccount("<EMAIL>", "secret", attrs);
        attrs = new HashMap<String, Object>();
        attrs.put(Provisioning.A_zimbraId,
                "a4e41fbe-9c3e-4ab5-8b34-c42f17e251cd");
        prov.createAccount("<EMAIL>", "secret", attrs);
        // Starts the scheduler that runs the ACL-push task awaited below.
        ScheduledTaskManager.startup();
    }

    @Before
    public void setUp() throws Exception {
        MailboxTestUtil.clearData();
        HSQLDB.clearDatabase();
        connection = DbPool.getConnection();
    }

    @After
    public void tearDown() throws Exception {
        connection.close();
    }

    /**
     * Two grants on the same folder for the same grantee should coalesce: the
     * queue drains and the grantee ends up with the combined rights.
     */
    @Test
    public void getAclPushEntriesMultipleGrantForSameItem() throws Exception {
        Account owner = Provisioning.getInstance().get(Key.AccountBy.name,
                "<EMAIL>");
        Account grantee = Provisioning.getInstance().get(Key.AccountBy.name,
                "<EMAIL>");
        Mailbox mbox = MailboxManager.getInstance().getMailboxByAccount(owner);
        Folder folder = mbox.createFolder(null, "shared",
                new Folder.FolderOptions()
                        .setDefaultView(MailItem.Type.DOCUMENT));
        OperationContext octxt = new OperationContext(owner);
        Multimap<Integer, Integer> mboxIdToItemIds = null;
        mbox.lock.lock();
        try {
            mbox.grantAccess(octxt, folder.getId(), grantee.getId(),
                    ACL.GRANTEE_USER, ACL.stringToRights("r"), null);
            mbox.grantAccess(octxt, folder.getId(), grantee.getId(),
                    ACL.GRANTEE_USER, ACL.stringToRights("rw"), null);
            mboxIdToItemIds = DbPendingAclPush
                    .getEntries(new Date());
        } finally {
            mbox.lock.release();
        }
        // NOTE(review): the size==1 assertion below was disabled — presumably
        // the pending-entry count proved timing-dependent; confirm.
        // assertTrue(mboxIdToItemIds.size() == 1);
        // NOTE(review): fixed sleep waits for the scheduled push task to drain
        // the queue — inherently flaky; a poll/await would be more robust.
        Thread.sleep(1000);
        mboxIdToItemIds = DbPendingAclPush.getEntries(new Date());
        assertTrue(mboxIdToItemIds.size() == 0);
        short rights = folder.getACL().getGrantedRights(grantee);
        // 3 is presumably the "rw" bitmask — confirm against ACL constants.
        assertEquals(3, rights);
    }

    /**
     * Folder names containing ';' must not break ACL-push bookkeeping.
     */
    @Test
    public void getAclPushEntriesFolderNameWithSemiColon() throws Exception {
        // NOTE(review): the catch-all below converts any failure into a
        // generic fail() message, hiding the real stack trace — consider
        // letting the exception propagate instead.
        try {
            Account owner = Provisioning.getInstance().get(Key.AccountBy.name,
                    "<EMAIL>");
            Account grantee = Provisioning.getInstance().get(Key.AccountBy.name,
                    "<EMAIL>");
            Mailbox mbox = MailboxManager.getInstance().getMailboxByAccount(owner);
            Folder folder = mbox.createFolder(null, "shared",
                    new Folder.FolderOptions()
                            .setDefaultView(MailItem.Type.DOCUMENT));
            Folder folder2 = mbox.createFolder(null, "shared; hello",
                    new Folder.FolderOptions()
                            .setDefaultView(MailItem.Type.DOCUMENT));
            OperationContext octxt = new OperationContext(owner);
            Multimap<Integer, Integer> mboxIdToItemIds = null;
            mbox.lock.lock();
            try {
                mbox.grantAccess(octxt, folder.getId(), grantee.getId(),
                        ACL.GRANTEE_USER, ACL.stringToRights("r"), null);
                mbox.grantAccess(octxt, folder2.getId(), grantee.getId(),
                        ACL.GRANTEE_USER, ACL.stringToRights("rw"), null);
                mboxIdToItemIds = DbPendingAclPush
                        .getEntries(new Date());
            } finally {
                mbox.lock.release();
            }
            // assertTrue(mboxIdToItemIds.size() == 2);
            Thread.sleep(1000);
            mboxIdToItemIds = DbPendingAclPush.getEntries(new Date());
            assertTrue(mboxIdToItemIds.size() == 0);
        } catch (Exception e) {
            fail("Should not throw an exception.");
        }
    }
}
|
eID-Testbeds/server
|
eidsrv-testbed-runner/src/main/java/com/secunet/eidserver/testbed/tlsclient/TLSConnection.java
|
package com.secunet.eidserver.testbed.tlsclient;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Field;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.crypto.params.AsymmetricKeyParameter;
import org.bouncycastle.crypto.tls.AlertDescription;
import org.bouncycastle.crypto.tls.AlertLevel;
import org.bouncycastle.crypto.tls.Certificate;
import org.bouncycastle.crypto.tls.CertificateRequest;
import org.bouncycastle.crypto.tls.CipherSuite;
import org.bouncycastle.crypto.tls.CompressionMethod;
import org.bouncycastle.crypto.tls.DefaultTlsClient;
import org.bouncycastle.crypto.tls.DefaultTlsSignerCredentials;
import org.bouncycastle.crypto.tls.HashAlgorithm;
import org.bouncycastle.crypto.tls.ProtocolVersion;
import org.bouncycastle.crypto.tls.ServerOnlyTlsAuthentication;
import org.bouncycastle.crypto.tls.SignatureAlgorithm;
import org.bouncycastle.crypto.tls.SignatureAndHashAlgorithm;
import org.bouncycastle.crypto.tls.TlsAuthentication;
import org.bouncycastle.crypto.tls.TlsCredentials;
import org.bouncycastle.crypto.tls.TlsUtils;
public class TLSConnection extends DefaultTlsClient
{
private static final Logger logger = LogManager.getLogger(TLSConnection.class);
private int[] supported_suites;
private final Certificate client_cert;
private final AsymmetricKeyParameter client_key;
private final boolean clientAuthentication;
private String compressionMethod, cipherSuite, serverVersion;
// Assume that client authentication is required if a certificate with key
// is supplied
TLSConnection(Certificate cert, AsymmetricKeyParameter keys, int[] supported_cipher_suites)
{
supported_suites = supported_cipher_suites;
client_cert = cert;
client_key = keys;
clientAuthentication = true;
}
// Assume that client authentication is required if a certificate with key
// is supplied
TLSConnection(Certificate cert, AsymmetricKeyParameter keys)
{
supported_suites = new int[] { CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256, CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA, CipherSuite.TLS_RSA_PSK_WITH_AES_128_CBC_SHA256,
CipherSuite.TLS_PSK_WITH_AES_256_CBC_SHA, CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 };
client_cert = cert;
client_key = keys;
clientAuthentication = true;
}
// Perform a Server-Only Authentication if client certificate is not
// supplied
TLSConnection(int[] supported_cipher_suites)
{
supported_suites = supported_cipher_suites;
client_cert = null;
client_key = null;
clientAuthentication = false;
}
// Perform a Server-Only Authentication if client certificate is not
// supplied
TLSConnection()
{
supported_suites = new int[] { CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256, CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA, CipherSuite.TLS_RSA_PSK_WITH_AES_128_CBC_SHA256,
CipherSuite.TLS_PSK_WITH_AES_256_CBC_SHA };
client_cert = null;
client_key = null;
clientAuthentication = false;
}
// Allows the manipulation cipher suites that can be used for the connection
public void setCipherSuites(int[] supported_cipher_suites)
{
supported_suites = supported_cipher_suites;
}
// Returns the cipher suites that can be used for the connection
@Override
public int[] getCipherSuites()
{
return supported_suites;
}
// We support only TLS1.2
@Override
public ProtocolVersion getMinimumVersion()
{
return ProtocolVersion.SSLv3;
}
@Override
public void notifySecureRenegotiation(boolean secureRenegotiation) throws IOException
{
// do not terminate
}
@Override
public void notifyAlertRaised(short level, short description, String message, Throwable cause)
{
if (level == AlertLevel.fatal)
{
logger.fatal("TLS error during sending: " + AlertLevel.getName(description) + System.getProperty("line.separator") + AlertLevel.getText(description) + System.getProperty("line.separator")
+ message);
}
else
{
logger.warn("TLS warning during sending: " + AlertLevel.getName(description) + System.getProperty("line.separator") + AlertLevel.getText(description) + System.getProperty("line.separator")
+ message);
}
super.notifyAlertRaised(level, description, message, cause);
}
@Override
public void notifyAlertReceived(short level, short description)
{
if (level == AlertLevel.fatal)
{
logger.info("TLS error during reading: " + AlertLevel.getName(description) + System.getProperty("line.separator") + AlertLevel.getText(description));
}
else
{
logger.debug("TLS warning during reading: " + AlertLevel.getName(description) + System.getProperty("line.separator") + AlertLevel.getText(description));
}
super.notifyAlertReceived(level, description);
}
// This function is called when establishing the connection to verify/supply
// certificates
@Override
public TlsAuthentication getAuthentication() throws IOException
{
if (clientAuthentication)
{
logger.debug("Using client and server certificates");
return new TlsAuthentication() {
@Override
public void notifyServerCertificate(Certificate arg0) throws IOException
{
// we do not need to perform any checks here
}
@Override
public TlsCredentials getClientCredentials(CertificateRequest certificateRequest) throws IOException
{
logger.debug("Answering request for client certificates");
// for TLS 1.2 there MUST be this object
SignatureAndHashAlgorithm saha = null;
if (TlsUtils.isTLSv12(context))
{
// TODO replace static values
saha = new SignatureAndHashAlgorithm(HashAlgorithm.sha256, SignatureAlgorithm.rsa);
}
return new DefaultTlsSignerCredentials(context, client_cert, client_key, saha);
}
};
}
else
{
logger.debug("Using only server certificates");
return new ServerOnlyTlsAuthentication() {
@Override
public void notifyServerCertificate(Certificate arg0) throws IOException
{
// we do not need to perform any checks here
}
};
}
}
/*
* (non-Javadoc)
*
* @see com.secunet.bouncycastle.crypto.tls.AbstractTlsClient#notifyServerVersion(com.secunet.bouncycastle.crypto.tls.ProtocolVersion)
*/
@Override
public void notifyServerVersion(ProtocolVersion serverVersion) throws IOException
{
this.serverVersion = serverVersion.toString();
super.notifyServerVersion(serverVersion);
}
/*
* (non-Javadoc)
*
* @see com.secunet.bouncycastle.crypto.tls.AbstractTlsClient#notifySelectedCompressionMethod(short)
*/
@Override
public void notifySelectedCompressionMethod(short selectedCompressionMethod)
{
for (Field f : CompressionMethod.class.getFields())
{
try
{
if (f.getShort(null) == selectedCompressionMethod)
{
this.compressionMethod = f.getName();
}
}
catch (IllegalArgumentException | IllegalAccessException e)
{
StringWriter trace = new StringWriter();
e.printStackTrace(new PrintWriter(trace));
logger.error("Could not decode compression method:" + System.getProperty("line.separator") + trace.toString());
}
}
super.notifySelectedCompressionMethod(selectedCompressionMethod);
}
/*
* (non-Javadoc)
*
* @see com.secunet.bouncycastle.crypto.tls.AbstractTlsClient#notifySelectedCipherSuite(int)
*/
@Override
public void notifySelectedCipherSuite(int selectedCipherSuite)
{
for (Field f : CipherSuite.class.getFields())
{
try
{
if (f.getInt(null) == selectedCipherSuite)
{
this.cipherSuite = f.getName();
}
}
catch (IllegalArgumentException | IllegalAccessException e)
{
StringWriter trace = new StringWriter();
e.printStackTrace(new PrintWriter(trace));
logger.error("Could not decode ciphersuite:" + System.getProperty("line.separator") + trace.toString());
}
}
super.notifySelectedCipherSuite(selectedCipherSuite);
}
}
|
ShreyanshRoyGeek/Java-Logical_Programming
|
Moderate Program/src/Rectangle.java
|
/**
 * Tiny demo class: stores a rectangle's dimensions and prints its area.
 */
public class Rectangle {
    int width;
    int length;

    /** Records the given dimensions and prints length * width. */
    void area(int length, int width) {
        this.length = length;
        this.width = width;
        System.out.println(this.length * this.width);
    }

    public static void main(String[] args) {
        new Rectangle().area(4, 5);
    }
}
|
nikiqta/HomeWork
|
node-express-mongoose/MongoDBAndMongoose/index.js
|
// Minimal MongoDB driver demo: connect to the local liveDemoDB database and
// dump the contents of the "dogs" collection to the console.
const mongodb = require('mongodb');

let connectionStr = 'mongodb://localhost:27017/liveDemoDB';

mongodb.MongoClient.connect(connectionStr)
  .then(client => {
    let db = client.db('liveDemoDB');
    let dogs = db.collection('dogs');

    // Sample insert kept for reference; enable to seed the collection.
    /*
    dogs.insert({
        name: 'Hektor',
        age: 4,
        color: 'Black/Brown',
        breed: 'Foxy-Terrier'
    });
    */

    // NOTE(review): the client is never closed, so the process stays alive
    // after the query — presumably fine for a demo; confirm.
    dogs.find({}).toArray((err, dogs) => console.log(dogs));
  })
  // Connection failures are only warned about, not rethrown.
  .catch(err => console.warn(err.message)
  );
|
santosh-shaastry/lithium-sdk
|
test/lib/responsive-options-test.js
|
<gh_stars>10-100
'use strict';
var rewire = require('rewire');
var nock = require('nock');
var chai = require('chai');
var expect = chai.expect;
var path = require('path');
var gutil = require('gulp-util');
var fs = require('fs');
var through = require('through2');
var testRoot = path.resolve(__dirname) + '/..';
var serverMocks = require('./server-mocks');
var apiHost = 'https://mycommunity.com';
var badErrorRespnse = 'Bad response';
describe('test responsive options', function() {
var apiPath = "/restapi/ldntool/plugins/responsive?format=json";
function createErrorRequest(errMsg, requestBody) {
var n = nock(apiHost).log(console.log);
n = requestBody ? n.post(apiPath, requestBody) : n.get(apiPath);
n = n.replyWithError(errMsg);
return n;
}
function createErrorResponse(requestBody) {
var n = nock(apiHost).log(console.log);
n = requestBody ? n.post(apiPath, requestBody) : n.get(apiPath);
n = n.reply(200, '{"status": "error"}');
return n;
}
function createMangledResponse(requestBody) {
var n = nock(apiHost).log(console.log);
n = requestBody ? n.post(apiPath, requestBody) : n.get(apiPath);
n = n.reply(500, '<html><body>NonXMLResponse</body><body>test</body></html>');
return n;
}
function createResponse(config, requestBody) {
var n = nock(apiHost).log(console.log);
n = requestBody ? n.post(apiPath, requestBody) : n.get(apiPath);
n = n.reply(200, JSON.stringify(config));
return n;
}
function createInvalidPluginTokenResponse(requestBody) {
var n = nock(apiHost).log(console.log);
n = requestBody ? n.post(apiPath, requestBody) : n.get(apiPath);
n = n.reply(200, '{"service-response": {"hard-failures": [ ], "message": "Anonymous users cannot view or modify community plugins. Go to Studio > SDK and confirm that your upload token has not expired.' +
'", "soft-failures":[ ], "status":"UPLOAD_FAIL", "success": false }}');
return n;
}
describe('responsive options put', function() {
var server;
function check(done, expects, config, opts) {
if (!opts.pluginType) {
opts.pluginType = 'responsive';
}
if (!opts.doPut) {
opts.doPut = false;
}
if (!opts.noCache) {
opts.noCache = true;
}
if (!opts.skinOpts) {
opts.skinOpts = {};
}
opts.verboseMode = true;
opts.debugMode = true;
opts.configDir = 'configs';
opts.skinOpts = {
enabled: true,
id: 'my_responsive_skin',
url: 'http://localhost:9000/styles/my_responsive_skin.css',
anonymous_viewing: true
};
if (expects.serverError) {
createErrorRequest(badErrorRespnse, {
"dev_skin":{
"enabled":true,
"id":"my_responsive_skin",
"url":"http://localhost:9000/styles/my_responsive_skin.css",
"anonymous_viewing":true
}
});
} else if (expects.respondSuccess) {
createResponse(opts.doPut ? { status:'OK', message:''} : config, {
"dev_skin":{
"enabled":true,
"id":"my_responsive_skin",
"url":"http://localhost:9000/styles/my_responsive_skin.css",
"anonymous_viewing":true
}
});
createResponse(config);
} else if (expects.errorResponse) {
createErrorResponse({
"dev_skin":{
"enabled":true,
"id":"my_responsive_skin",
"url":"http://localhost:9000/styles/my_responsive_skin.css",
"anonymous_viewing":true
}
});
} else if (expects.mangledResponse) {
createMangledResponse({
"dev_skin":{
"enabled":true,
"id":"my_responsive_skin",
"url":"http://localhost:9000/styles/my_responsive_skin.css",
"anonymous_viewing":true
}
});
} else if (expects.invalidPluginTokenResponse) {
createInvalidPluginTokenResponse({
"dev_skin":{
"enabled":true,
"id":"my_responsive_skin",
"url":"http://localhost:9000/styles/my_responsive_skin.css",
"anonymous_viewing":true
}
});
}
var gulp = {
task: function(name, required, fn) {
}
};
var fixDir = function(path) {
if (path == 'configs') {
path = testRoot + '/lib/' + path;
}
return path;
};
var configFileValue = null;
var fsMock = {
existsSync: function(path) {
if (path == 'configs/responsive.conf.json') {
return configFileValue != null;
}
return fs.existsSync(fixDir(path));
},
readFileSync: function(path) {
if (path == 'configs/responsive.conf.json') {
return configFileValue;
}
return fs.readFileSync(fixDir(path));
},
writeFile: function(path, val, cb) {
configFileValue = val;
return cb();
},
mkdirSync: function(path) {
return fs.mkdirSync(fixDir(path));
}
};
var responsiveOptions = rewire(testRoot + '/../lib/responsive-options.js');
responsiveOptions.__set__({
fs: fsMock
});
var cb = opts.cb ? opts.cb : function(err) {
done();
};
responsiveOptions(gulp, gutil).putOptions(server, opts, cb).pipe(through.obj());;
}
before(function() {
server = serverMocks.createDefaultServerMock();
});
beforeEach(function() {
nock.cleanAll();
});
it('should return error response from server', function(done) {
check(done, { serverError: true }, undefined,
{ cb: function(err) {
expect(err.message).to.contain('Error making request to save responsive options');
done();
}, debugMode: false
});
});
it('should return error for bad response from server', function(done) {
var cb = function(err) {
expect(err.message).to.contain('Error making request to save responsive options');
done();
};
check(done, { errorResponse: true }, undefined, { cb: cb, debugMode: false });
});
it('should return error for mangled response from server', function(done) {
var cb = function(err) {
expect(err.message).to.contain('server returned status code 500');
done();
};
check(done, { mangledResponse: true }, undefined, { cb: cb, debugMode: false });
});
it('should return error for bad plugin token response from server', function(done) {
var cb = function(err) {
expect(err.message).to.contain('Error making request to save responsive options');
done();
};
check(done, { invalidPluginTokenResponse : true }, undefined, { cb: cb, debugMode: false });
});
it('should return success', function(done) {
var cb = function(err, config, msg) {
expect(err).to.be.null;
expect(config).to.not.be.null;
expect(msg).to.equal('saved configs/responsive.conf.json');
done();
};
check(done, { respondSuccess: true }, {
status: 'OK',
features: ['responsivepeak', 'responsivebase'],
feature: {
responsivepeak: {
id: 'responsivepeak',
version: '1.6',
path: '/res/feature/responsivepeak/v1.6-lia16.1',
fq_version: 'v1.6-lia16.1'
},
responsivebase:{
id: 'responsivebase',
version: '1.6',
path: '/res/feature/responsivebase/v1.6-lia16.1',
fq_version: 'v1.6-lia16.1'
}
},
skins: ['responsive_base', 'responsive_peak'],
skin: {
responsive_base: {
id: 'responsive_base',
feature_id: 'responsivebase'
},
responsive_peak: {
id: 'responsive_peak',
feature_id: 'responsivepeak'
}
},
dev_skin:{
enabled: true,
id: 'my_first_responsive_skin',
url: 'http://localhost:9000/skins/my_first_responsive_skin',
anonymous_viewing: true
}
}, { cb: cb, debugMode: false, configDir: 'configs' });
});
});
});
|
abodacs/django-fullstack-biolerplate
|
backend/apps/projects/models.py
|
<reponame>abodacs/django-fullstack-biolerplate
import datetime
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from common.models import IndexedTimeStampedModel
class ProjectQueryset(models.QuerySet):
    def active(self, date=None):
        """Return projects whose date range contains ``date`` (default: today).

        A project with no end date is treated as open-ended.
        """
        if date is None:
            date = datetime.date.today()
        return self.filter(Q(end_date__isnull=True) | Q(end_date__gte=date), start_date__lte=date,)
# Model manager exposing the custom queryset's ``active`` filter.
class ProjectManager(models.Manager):
    def get_queryset(self):
        # Use ProjectQueryset so chained filters keep the ``active`` method.
        return ProjectQueryset(self.model, using=self._db)

    def active(self):
        """Shortcut for ``self.get_queryset().active()``."""
        return self.get_queryset().active()
class Project(IndexedTimeStampedModel):
    """A project with an optional date range, case types and assigned envoys."""

    title = models.CharField(verbose_name=_("Project title"), max_length=128)
    project_class = models.ForeignKey(
        "meta.ProjectClass", verbose_name=_("Project Class"), on_delete=models.CASCADE,
    )
    case_types = models.ManyToManyField("meta.CaseType", related_name="project_case_types")
    envoys = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name="project_envoys")
    # Both dates are optional; a missing end date means the project is open-ended.
    start_date = models.DateField(verbose_name=_("Start Date"), null=True)
    end_date = models.DateField(verbose_name=_("End Date"), null=True)
    description = models.TextField(
        verbose_name=_("Project description"),
        blank=True,
        null=False,
        help_text=_("Long description for the Project"),
    )

    objects = ProjectManager()

    def clean(self):
        """Reject inverted date ranges (end before start).

        Bug fix: the previous error message ("Project start date can be less
        than end date.") described the *valid* case; the wording now states
        the actual constraint that was violated.
        """
        if self.end_date and self.start_date and self.end_date < self.start_date:
            raise ValidationError(_("Project end date cannot be earlier than start date."))

    class Meta:
        app_label = "projects"
        verbose_name = _("Project")
        verbose_name_plural = _("Projects")

    def __str__(self):
        return self.title or "Project"
class Case(IndexedTimeStampedModel):
    """A beneficiary case handled by an envoy.

    Classification fields (types, problems) link into the shared
    "meta" app; contact fields are all optional.
    """

    name = models.CharField(_("Name"), max_length=128)
    # Unique slug used to reference the case externally.
    code = models.SlugField(_("Code"), max_length=128, unique=True,)
    famous_name = models.CharField(_("Famous Name"), max_length=128, blank=True, null=True)
    mobile = models.CharField(_("Mobile"), max_length=32, blank=True, null=True)
    address = models.CharField(_("Address"), max_length=150, blank=True, null=True)
    national_id = models.CharField(
        max_length=32, verbose_name=_("National ID"), blank=True, null=True
    )
    description = models.TextField(
        verbose_name=_("Description"),
        blank=True,
        null=False,
        help_text=_("Long description for the Case"),
    )
    types = models.ManyToManyField("meta.CaseType", related_name="case_types")
    problems = models.ManyToManyField("meta.Problem", related_name="case_problems")
    # The user responsible for this case; deleting that user cascades and
    # removes the case as well.
    envoy = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        verbose_name=_("Case Envoy"),
        on_delete=models.CASCADE,
        related_name="cases",
    )

    class Meta:
        app_label = "projects"
        verbose_name = _("Case")
        verbose_name_plural = _("Cases")

    def __str__(self):
        # NOTE(review): famous_name is nullable, so this can render as
        # "<name>, None" — confirm whether a fallback is desired.
        return f"{self.name}, {self.famous_name}"
|
Oldes/rs
|
projects/ucs2/latest/charmaps/CP1256.d.rb
|
<reponame>Oldes/rs
; CP1256 UCS-2 decoding rule
; source: ftp://dkuug.dk/i18n/charmaps/CP1256
any [
#{060C} (insert tail result #{80}) |
#{0660} (insert tail result #{81}) |
#{201A} (insert tail result #{82}) |
#{0661} (insert tail result #{83}) |
#{201E} (insert tail result #{84}) |
#{2026} (insert tail result #{85}) |
#{2020} (insert tail result #{86}) |
#{2021} (insert tail result #{87}) |
#{0662} (insert tail result #{88}) |
#{0663} (insert tail result #{89}) |
#{0664} (insert tail result #{8A}) |
#{2039} (insert tail result #{8B}) |
#{0665} (insert tail result #{8C}) |
#{0666} (insert tail result #{8D}) |
#{0667} (insert tail result #{8E}) |
#{0668} (insert tail result #{8F}) |
#{0669} (insert tail result #{90}) |
#{2018} (insert tail result #{91}) |
#{2019} (insert tail result #{92}) |
#{201C} (insert tail result #{93}) |
#{201D} (insert tail result #{94}) |
#{2022} (insert tail result #{95}) |
#{2013} (insert tail result #{96}) |
#{2014} (insert tail result #{97}) |
#{061B} (insert tail result #{98}) |
#{2122} (insert tail result #{99}) |
#{061F} (insert tail result #{9A}) |
#{203A} (insert tail result #{9B}) |
#{0621} (insert tail result #{9C}) |
#{0622} (insert tail result #{9D}) |
#{0623} (insert tail result #{9E}) |
#{0178} (insert tail result #{9F}) |
#{0624} (insert tail result #{A1}) |
#{0625} (insert tail result #{A2}) |
#{0626} (insert tail result #{A5}) |
#{0627} (insert tail result #{A8}) |
#{0628} (insert tail result #{AA}) |
#{067E} (insert tail result #{AF}) |
#{0629} (insert tail result #{B2}) |
#{062A} (insert tail result #{B3}) |
#{062B} (insert tail result #{B4}) |
#{062C} (insert tail result #{B8}) |
#{0686} (insert tail result #{B9}) |
#{062D} (insert tail result #{BA}) |
#{062E} (insert tail result #{BC}) |
#{062F} (insert tail result #{BD}) |
#{0630} (insert tail result #{BE}) |
#{0631} (insert tail result #{BF}) |
#{0632} (insert tail result #{C1}) |
#{0698} (insert tail result #{C3}) |
#{0633} (insert tail result #{C4}) |
#{0634} (insert tail result #{C5}) |
#{0635} (insert tail result #{C6}) |
#{0636} (insert tail result #{CC}) |
#{0637} (insert tail result #{CD}) |
; fixed: was #{3113} (a Bopomofo codepoint); the surrounding run is the
; contiguous Arabic block 0636, 0637, _, 0639, 063A, so this entry is
; U+0638 ARABIC LETTER ZAH.
#{0638} (insert tail result #{D0}) |
#{0639} (insert tail result #{D1}) |
#{063A} (insert tail result #{D2}) |
#{0640} (insert tail result #{D3}) |
#{0641} (insert tail result #{D5}) |
#{0642} (insert tail result #{D6}) |
#{0643} (insert tail result #{D8}) |
#{06AF} (insert tail result #{DA}) |
#{0644} (insert tail result #{DD}) |
#{0645} (insert tail result #{DE}) |
#{0646} (insert tail result #{DF}) |
#{0647} (insert tail result #{E1}) |
#{0681} (insert tail result #{E3}) |
#{0648} (insert tail result #{E4}) |
#{0649} (insert tail result #{E5}) |
#{064A} (insert tail result #{E6}) |
#{064B} (insert tail result #{EC}) |
#{064C} (insert tail result #{ED}) |
#{064D} (insert tail result #{F0}) |
#{064E} (insert tail result #{F1}) |
#{064F} (insert tail result #{F2}) |
#{0650} (insert tail result #{F3}) |
#{0651} (insert tail result #{F5}) |
#{0652} (insert tail result #{F6}) |
#{200E} (insert tail result #{FD}) |
#{200F} (insert tail result #{FE}) |
#{00} copy c 1 skip (insert tail result c) |
copy c 2 skip (decodeUnknownChar c)
]
|
academyyyy12/LIA-LEARNING
|
src/app/pages/pages.module.js
|
<filename>src/app/pages/pages.module.js<gh_stars>1-10
/**
 * Directive factory: renders a per-row checkbox that toggles multi-row
 * selection on the enclosing smart-table (stTable) controller and mirrors
 * the row's `isSelected` flag onto the checkbox and the parent element's
 * CSS state.
 */
function rowSelect() {
    return {
        require: '^stTable',
        template: '<input type="checkbox" class="s-row-select">',
        scope: {
            row: '=rowSelect'
        },
        link: function (scope, element, attr, ctrl) {
            // Delegate clicks to smart-table so selection state stays in
            // sync with the table controller ('multiple' selection mode).
            element.bind('click', function () {
                scope.$apply(function () {
                    ctrl.select(scope.row, 'multiple');
                });
            });
            // Reflect model state into the DOM: highlight the parent row
            // and check/uncheck the box whenever `row.isSelected` changes.
            scope.$watch('row.isSelected', function (newValue) {
                if (newValue === true) {
                    element.parent().addClass('st-selected');
                    element.find('input').prop('checked', true);
                } else {
                    element.parent().removeClass('st-selected');
                    element.find('input').prop('checked', false);
                }
            });
        }
    };
}
// /** @ngInject */
/**
 * Directive factory: renders a month-by-month schedule grid (template:
 * schedule-template.html) spanning the user-selected start/end dates, and
 * tags leaf schedule entries with random colours.
 */
function incScheduler() {
    return {
        templateUrl: 'app/pages/schedule/schedule-template.html',
        link: function ($scope, elem, attr) {
            // Indonesian month labels, indexed by Date#getMonth().
            $scope.indexMonths = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"];
            $scope.scheduleSch = [];
            // Expand/collapse a grid row.
            $scope.toggleRow = function (x) {
                x.expanded = !x.expanded;
            }
            // Produce a random "#RRGGBB" hex colour string.
            function getRandomColor() {
                var letters = '0123456789ABCDEF';
                var color = '#';
                for (var i = 0; i < 6; i++) {
                    color += letters[Math.floor(Math.random() * 16)];
                }
                return color;
            }
            // Presence check on the chosen range: returns true when both
            // dates are filled, otherwise alerts and returns undefined.
            function validate() {
                if ($scope.dateSch.start == "" || $scope.dateSch.start == undefined) {
                    alert("start date must be filled !");
                    return;
                }
                if ($scope.dateSch.end == "" || $scope.dateSch.end == undefined) {
                    alert("End date must be filled !");
                    return;
                }
                return true;
            }
            // Build one {month, year, numberOfDays} entry per calendar
            // month between start and end (inclusive). Both dates are
            // normalised to the 1st; note `end` is mutated in place, so
            // callers pass freshly-constructed Date objects.
            function resolveSchedule(start, end) {
                var current = angular.copy(start);
                current.setDate(1);
                end.setDate(1);
                var sch = [];
                while (current.getTime() <= end.getTime()) {
                    console.log(current.getMonth());
                    var x = {
                        month: current.getMonth(),
                        year: current.getFullYear(),
                        numberOfDays: getDaysInMonth(current)
                    };
                    sch.push(x);
                    current.setMonth(current.getMonth() + 1);
                }
                return sch;
            }
            // Count the days in x's month by stepping a copy day-by-day
            // until the month rolls over.
            function getDaysInMonth(x) {
                var date = angular.copy(x);
                var i = 0;
                const month = date.getMonth();
                while (date.getMonth() == month) {
                    i++;
                    date.setDate(date.getDate() + 1);
                }
                return i;
            }
            // Click handler for a day cell: alerts "weekend" for Sundays,
            // or "active" when one of data.events starts on that exact day.
            $scope.check = function (data, day, schedule) {
                var date = new Date(schedule.year, schedule.month, day);
                var res;
                console.log(data);
                if (date.getDay() == 0) {
                    res = "weekend";
                } else if (data == null) {
                    return;
                } else {
                    for (var i in data.events) {
                        var eventDate = data.events[i].start;
                        console.log(eventDate, date);
                        if (eventDate.getTime() == date.getTime()) {
                            res = "active";
                            break;
                        }
                    }
                }
                if (res == "weekend") {
                    alert("weekend");
                } else if (res == "active") {
                    alert("active");
                }
            }
            // Same classification as $scope.check, but returns a colour
            // for rendering instead of alerting: "gray" for Sundays, the
            // row's assigned colour for active days, undefined otherwise.
            $scope.inspectDay = function (data, day, schedule) {
                var date = new Date(schedule.year, schedule.month, day);
                var res;
                if (date.getDay() == 0) {
                    res = "weekend";
                } else if (data == null) {
                    return;
                } else {
                    for (var i in data.events) {
                        var eventDate = data.events[i].start;
                        if (eventDate.getTime() == date.getTime()) {
                            res = "active";
                            break;
                        }
                    }
                }
                if (res == "weekend") {
                    return "gray";
                } else if (res == "active") {
                    return data.color;
                }
            }
            // Assign a random colour to every third-level (leaf) child in
            // the data tree; mutates and returns the same structure.
            function resolveColor(data) {
                for (var i in data) {
                    for (var j in data[i].children) {
                        for (var k in data[i].children[j].children) {
                            var x = data[i].children[j].children[k];
                            x.color = getRandomColor();
                        }
                    }
                }
                return data;
            }
            // Entry point bound from the template: validates the chosen
            // range, then builds the month grid and colours the data rows.
            $scope.generateScheduler = function () {
                $scope.dateSch = $scope.schDate;
                if (!validate()) {
                    return;
                };
                $scope.scheduleSch = resolveSchedule(new Date($scope.schDate.start), new Date($scope.schDate.end));
                $scope.dataSch = resolveColor($scope.schData);
            }
        }
    };
}
/**
 * Directive factory: header "select all" checkbox for a smart-table.
 * `all` is the full row collection; `selected` is the list of currently
 * selected ids. Also publishes $rootScope.selectReset() to clear the
 * selection from elsewhere in the app.
 */
function rowSelectAll($rootScope) {
    return {
        require: '^stTable',
        template: '<input type="checkbox" class="top-min-11">',
        scope: {
            all: '=rowSelectAll',
            selected: '='
        },
        link: function (scope, element, attr, prop) {
            scope.isAllSelected = false;
            // On click, copy the pre-computed isAllSelected flag onto every
            // row. The flag is maintained by the watcher below so that it
            // already holds the *next* desired state (true = select all,
            // false = deselect all) before the click happens.
            element.bind('click', function (evt) {
                scope.$apply(function () {
                    scope.all.forEach(function (val) {
                        val.isSelected = scope.isAllSelected;
                    });
                });
            });
            // Global reset hook: uncheck the header box, clear every row's
            // selection and empty the selected list.
            $rootScope.selectReset = function () {
                element.find('input').prop('checked', false);
                scope.isAllSelected = true;
                scope.all.forEach(function (val) {
                    val.isSelected = false;
                });
                scope.selected = [];
            }
            // Keep the header checkbox and the isAllSelected flag in sync
            // with the selection: when every row is selected the box is
            // checked and the next click will deselect all (flag = false);
            // otherwise the box is unchecked and the next click selects
            // all (flag = true).
            scope.$watchCollection('selected', function (newVal) {
                var s = newVal.length;
                var a = scope.all.length;
                if ((s == a) && s > 0 && a > 0) {
                    element.find('input').prop('checked', true);
                    scope.isAllSelected = false;
                } else {
                    element.find('input').prop('checked', false);
                    scope.isAllSelected = true;
                }
            });
        }
    };
}
(function () {
'use strict';
angular.module('BlurAdmin.pages', [
'ui.router',
'BlurAdmin.pages.dashboard',
'BlurAdmin.pages.CourseForm',
'BlurAdmin.pages.CourseList',
'BlurAdmin.pages.TopicForm',
'BlurAdmin.pages.LessonActivity',
'BlurAdmin.pages.ContentPages',
'BlurAdmin.pages.CurriculumAdministration',
'BlurAdmin.pages.Test',
'BlurAdmin.pages.SessionActivity',
'BlurAdmin.pages.Grading',
'BlurAdmin.pages.Resource',
'BlurAdmin.pages.ResourceTag',
'BlurAdmin.pages.ResourceCategory',
])
.factory('AuthenticationSvc', function ($http, config) {
return {
//service
Authentication: function (data) {
var url = config.apiUserManagement + 'authtoken';
return $http.post(url, data)
},
getById: function (key) {
var url = config.apiGeneral + 'enumeration?Criteria=Key:' + key
return $http.get(url)
},
};
})
.directive('incScheduler', incScheduler)
.directive('fileModel', ['$parse', function ($parse) {
return {
restrict: 'A',
link: function (scope, element, attrs) {
var model = $parse(attrs.fileModel);
console.log("masuk pak eko")
var modelSetter = model.assign;
element.bind('change', function () {
scope.$apply(function () {
modelSetter(scope, element[0].files[0]);
});
});
}
};
}])
.directive('sglclick', ['$parse', function ($parse) {
return {
restrict: 'A',
link: function (scope, element, attr) {
var fn = $parse(attr['sglclick']);
var delay = 300,
clicks = 0,
timer = null;
element.on('click', function (event) {
clicks++; //count clicks
if (clicks === 1) {
timer = setTimeout(function () {
scope.$apply(function () {
fn(scope, { $event: event });
});
clicks = 0; //after action performed, reset counter
}, delay);
} else {
clearTimeout(timer); //prevent single-click action
clicks = 0; //after action performed, reset counter
}
});
}
};
}])
.directive('checkList', function () {
return {
scope: {
list: '=checkList',
value: '@'
},
link: function (scope, elem, attrs) {
var handler = function (setup) {
var checked = elem.prop('checked');
var index = scope.list.indexOf(scope.value);
if (checked && index == -1) {
if (setup) elem.prop('checked', false);
else scope.list.push(scope.value);
} else if (!checked && index != -1) {
if (setup) elem.prop('checked', true);
else scope.list.splice(index, 1);
}
};
var setupHandler = handler.bind(null, true);
var changeHandler = handler.bind(null, false);
elem.on('change', function () {
scope.$apply(changeHandler);
});
scope.$watch('list', setupHandler, true);
}
};
})
.filter('range', function () {
return function (input, total) {
total = parseInt(total);
for (var i = 0; i < total; i++)
input.push(i);
return input;
};
})
.directive('numericOnly', function ($filter) {
return {
require: 'ngModel',
link: function (scope, element, attrs, modelCtrl) {
function numberLoad() {
var strinput = modelCtrl.$$rawModelValue;
//filter user input
var transformedInput = strinput ? strinput.replace(/[^,\d,-]/g, '') : null;
//remove trailing 0
if (transformedInput.charAt(0) <= '0') {
transformedInput = null;
modelCtrl.$setViewValue(transformedInput);
modelCtrl.$render();
} else {
var decimalSplit = transformedInput.split(",")
var intPart = decimalSplit[0];
var decPart = decimalSplit[1];
//remove previously formated number
intPart = intPart.replace(/,/g, "");
//split whole number into array of 3 digits
if (intPart.length > 3) {
var intDiv = Math.floor(intPart.length / 3);
var strfraction = [];
var i = intDiv,
j = 3;
while (intDiv > 0) {
strfraction[intDiv] = intPart.slice(intPart.length - j, intPart.length - (j - 3));
j = j + 3;
intDiv--;
}
var k = j - 3;
if ((intPart.length - k) > 0) {
strfraction[0] = intPart.slice(0, intPart.length - k);
}
}
//join arrays
if (strfraction == undefined) {
return;
}
var currencyformat = strfraction.join('.');
//check for leading comma
if (currencyformat.charAt(0) == '.') {
currencyformat = currencyformat.slice(1);
}
if (decPart == undefined) {
modelCtrl.$setViewValue(currencyformat);
modelCtrl.$render();
return;
} else {
currencyformat = currencyformat + "," + decPart.slice(0, 2);
modelCtrl.$setViewValue(currencyformat);
modelCtrl.$render();
}
}
}
element.bind('keyup', function (inputValue, e) {
numberLoad();
});
element.ready(function (inputValue, e) {
numberLoad();
});
}
}
})
.filter("formatPrice", function () {
return function (price, digits, thoSeperator, decSeperator, bdisplayprice) {
var i;
digits = (typeof digits === "undefined") ? 2 : digits;
bdisplayprice = (typeof bdisplayprice === "undefined") ? true : bdisplayprice;
thoSeperator = (typeof thoSeperator === "undefined") ? "." : thoSeperator;
decSeperator = (typeof decSeperator === "undefined") ? "," : decSeperator;
price = price.toString();
var _temp = price.split(".");
var dig = (typeof _temp[1] === "undefined") ? "00" : _temp[1];
if (bdisplayprice && parseInt(dig, 10) === 0) {
dig = "-";
} else {
dig = dig.toString();
if (dig.length > digits) {
dig = (Math.round(parseFloat("0." + dig) * Math.pow(10, digits))).toString();
}
for (i = dig.length; i < digits; i++) {
dig += "0";
}
}
var num = _temp[0];
var s = "",
ii = 0;
for (i = num.length - 1; i > -1; i--) {
s = ((ii++ % 3 === 2) ? ((i > 0) ? thoSeperator : "") : "") + num.substr(i, 1) + s;
}
return s;
// return s + decSeperator + dig;
}
})
.directive('rowSelect', rowSelect)
.directive('rowSelectAll', rowSelectAll)
.directive('contextMenu', function ($timeout) {
return {
restrict: 'A',
scope: '@&',
compile: function compile(tElement, tAttrs, transclude) {
return {
post: function postLink(scope, iElement, iAttrs, controller) {
var ul;
var last = null;
// ul.css({ 'display': 'none' });
$(iElement).on('contextmenu', function (event) {
event.preventDefault();
// $("a.jstree-anchor").removeClass('jstree-clicked');
var newId = $(iElement).data('id');
scope.saveId(newId);
if (typeof iAttrs.custom === 'undefined') {
var newValue = $(iElement).find('input').prop('checked');
var selected = scope.selected.length;
if (newValue == false) {
if (selected == 1) {
$timeout(function () {
$(".custom-table").find('.s-row-select').each(function () {
var prop = $(this).prop('checked');
if (prop == true) {
$(this).parent().triggerHandler("click");
}
});
});
}
}
if (newValue === false) {
$timeout(function () {
$(iElement).find('.row-select').triggerHandler("click");
});
}
ul = $('#' + iAttrs.contextMenu);
} else {
if ($(this).is(".jstree-clicked")) {
var setShow = true;
} else {
var setShow = false;
}
$timeout(function () {
scope.contextmenu(iAttrs.custom, setShow);
ul = $('#' + iAttrs.contextMenu);
});
$(this).addClass('jstree-clicked2');
}
$timeout(function () {
var X = event.clientX;
var Y = event.clientY;
ul.css({
display: "block",
position: "fixed",
top: Y + 'px',
left: X + 'px'
});
last = event.timeStamp;
})
});
$(document).on('click', function (event) {
ul = $('#' + iAttrs.contextMenu);
var target = $(event.target);
if (!target.is(".popover") && !target.parents().is(".popover")) {
if (last === event.timeStamp)
return;
ul.css({
'display': 'none'
});
$(iElement).removeClass('jstree-clicked2');
}
});
}
};
}
};
})
.factory('lia', function ($timeout, $uibModal, toastr, $filter, $state, $window) {
var self = {};
self.jstree = function ($scope) {
var refreshJs = function () {
angular.element(document).injector().invoke(function ($compile) {
$compile($("#js-tree").contents())($scope);
});
}
$scope.$watch(
function () {
return {
w: $("#js-tree").width(),
h: $("#js-tree").height()
};
},
function (newValue, oldValue) {
if (newValue.w != oldValue.w || newValue.h != oldValue.h) {
// Do something ...
$timeout(function () {
refreshJs();
});
}
},
true
);
}
self.init = function ($scope, data) {
$scope.form = {};
$scope.view_object = function (id) {
$scope.form = {};
var selected;
selected = id == undefined ? $scope.getId : id;
// if (typeof $scope.getParams == "undefined") {
// var obj = self.viewByAttr(data, 'id', selected);
// $scope.form.id = selected;
// } else {
var obj = self.viewByAttr(data, $scope.getParams, selected);
// }
$scope.form = obj;
console.log(selected)
};
$scope.doEdit = function () {
if (typeof $scope.getParams == "undefined") {
var id = $scope.form.id;
} else {
var id = $scope.form[$scope.getParams];
}
var index = data.findIndex(x => x.id === id);
data[index] = $scope.form;
console.log(id);
$scope.showEditMsg();
}
$scope.doDelete = function () {
var id = $scope.selected.length;
if (typeof $scope.getParams == "undefined") {
while (id--) {
self.removeByAttr(data, 'id', $scope.selected[id]);
}
} else {
while (id--) {
self.removeByAttr(data, $scope.getParams, $scope.selected[id]);
}
}
$scope.selected = [];
$scope.showDeleteMsg();
// console.log(id);
}
}
self.select_control = function ($scope) {
$scope.selected = [];
$scope.selectedByAttr = [];
$scope.selectAll = function (collection, attr = 'Id') {
self.selectAll($scope, collection, attr);
};
$scope.select = function (id) {
self.select($scope, id);
}
}
self.behaviour = function ($scope) {
$scope.loading = false;
$scope.saveId = function (newId) {
$scope.getId = newId.toString();
}
$scope.showSuccessMsg = function () {
toastr.success("Data Berhasil Di Save");
}
$scope.showEditMsg = function () {
toastr.success("Data Berhasil Di Edit");
}
$scope.showDeleteMsg = function () {
toastr.success("Data Berhasil Di Hapus");
}
var full_control = function () {
$scope.control = true;
$scope.add = false;
$scope.edit = false;
$scope.edits = false;
$scope.view = false;
}
$scope.behaviour_add = function () {
$scope.form = {};
full_control();
$scope.add = true;
}
$scope.behaviour_view = function () {
$scope.selected = [];
full_control();
$scope.control = false;
$scope.view = true;
}
$scope.behaviour_edit = function () {
full_control();
$scope.edit = true;
}
}
self.modal = function ($scope) {
$scope.doRefresh = function () {
$window.location.reload();
}
$scope.showError = function () {
// $uibModal.open({
// animation: true,
// template: '<div class="modal-content modal-delete"> <div class="modal-body"> <p>Mohon Maaf, Ada Kesalahan Dalam Memuat Data</p> </div> <div class="modal-footer"><button ng-click="doRefresh();" class="btn btn-danger">Muat Ulang</button></div> </div>',
// size: 'sm',
// scope: $scope
// });
}
$scope.open = function (page, size) {
// $scope.selected = [];
var modal = $uibModal.open({
animation: true,
templateUrl: page,
size: size,
scope: $scope,
controller: function ($scope, $uibModalInstance, $rootScope) {
$rootScope.closemodal = function () {
$uibModalInstance.dismiss('cancel');
}
},
resolve: {
items: function () {
return $scope.items;
}
}
}).closed.then(function(){
$scope.selected=[];
angular.element('.st-selected').removeClass("st-selected");
angular.element('.s-row-select').prop("checked", false);
$scope.results.forEach(function (val) {
val.isSelected = false;
});
});
};
$scope.openDelete = function (page, size) {
var modal = $uibModal.open({
animation: true,
template: '<div class="modal-content modal-delete"> <div class="modal-body"> <p>Are you sure want to delete selected data?</p> </div> <div class="modal-footer"><button class="btn btn-default" ng-click="$close()">No</button> <button ng-click="doDelete();$close();" class="btn btn-danger">Yes</button></div> </div>',
size: 'sm',
scope: $scope
});
};
}
self.selectAll = function (scope, collection, attr = 'Id') {
if (scope.selected.length === 0) {
angular.forEach(collection, function (val) {
scope.selected.push(val[attr]);
});
} else if (scope.selected.length > 0 && scope.selected.length != collection.length) {
angular.forEach(collection, function (val) {
var found = scope.selected.indexOf(val[attr]);
if (found == -1) scope.selected.push(val[attr]);
});
} else {
scope.selected = [];
}
}
self.select = function (scope, id) {
var found = scope.selected.indexOf(id);
if (found == -1) scope.selected.push(id);
else scope.selected.splice(found, 1);
}
self.removeByAttr = function (arr, attr, value) {
var i = arr.length;
while (i--) {
if (arr[i] &&
arr[i].hasOwnProperty(attr) &&
(arguments.length > 2 && arr[i][attr] === value)) {
arr.splice(i, 1);
}
}
return arr;
}
self.viewByAttr = function (arr, attr, value) {
var i = arr.length;
while (i--) {
if (arr[i] &&
arr[i].hasOwnProperty(attr) &&
(arguments.length > 2 && arr[i][attr] === value)) {
// arr.splice(i, 1);
return angular.copy(arr[i]);
}
}
};
self.contextmenu = function ($scope, custom = [], privileges) {
var induk = "";
induk += '<ul id="menuOptions" class="dropdown-menu context-menu" style="min-width:130px !important;border-radius:0;cursor:pointer !important;"></ul>';
$("#context").html(induk);
$scope.$watchCollection('selected', function (newVal, oldVal) {
var html = "";
if (custom.length > 0) {
if (custom.indexOf('setuju') != -1) html += '<li><a ng-click="doSetuju()"><i class="fa fa-check text-success"></i> Approve</a></li>';
if (custom.indexOf('tolak') != -1) html += '<li><a ng-click="doTolak()"><i class="fa fa-close text-warning"></i> Reject</a></li>';
if ($scope.selected.length == 1 && (privileges != undefined && privileges.CanUpdate)) {
if (custom.indexOf('edit') != -1) html += '<li><a ng-click="showForm();behaviour_edit();view_object();"><i class="fa fa-edit text-primary"></i> Edit</a></li>';
}
if (custom.indexOf('hapus') != -1 && (privileges != undefined && privileges.CanDelete)) html += '<li><a ng-click="openDelete()"><i class="fa fa-trash text-danger"></i> Delete</a></li>';
} else {
html += '<li><a ng-click="test()"><i class="fa fa-check text-success"></i> Approve</a></li>';
html += '<li><a ng-click="test()"><i class="fa fa-close text-warning"></i> Reject</a></li>';
if ($scope.selected.length == 1 && (privileges != undefined && privileges.CanUpdate)) {
html += '<li><a ng-click="showForm();behaviour_edit();view_object();"><i class="fa fa-edit text-primary"></i> Edit</a></li>';
}
if (privileges != undefined && privileges.CanDelete) {
html += '<li><a ng-click="openDelete()"><i class="fa fa-trash text-danger"></i> Delete</a></li>';
}
}
$("#menuOptions").html(html);
angular.element(document).injector().invoke(function ($compile) {
$compile($("#context").contents())($scope);
});
});
}
return self;
})
.filter('groupSelectpickerOptions', GroupSelectpickerOptions)
.config(routeConfig)
.run(run);
/**
 * Filter factory: keeps items for which ANY key in `props` matches —
 * i.e. the item's value for that key contains the key's search text as a
 * case-insensitive substring. Non-array inputs pass through untouched.
 *
 * @returns {function(Array, Object): Array} the filter function
 */
function GroupSelectpickerOptions() {
    return function (items, props) {
        var out = [];
        if (angular.isArray(items)) {
            var keys = Object.keys(props);
            items.forEach(function (item) {
                var itemMatches = false;
                for (var i = 0; i < keys.length; i++) {
                    var prop = keys[i];
                    var text = props[prop].toLowerCase();
                    var value = item[prop];
                    // Guard: the original called item[prop].toString()
                    // unconditionally and threw on null/undefined values;
                    // such items are now simply treated as non-matching.
                    if (value != null && value.toString().toLowerCase().indexOf(text) !== -1) {
                        itemMatches = true;
                        break;
                    }
                }
                if (itemMatches) {
                    out.push(item);
                }
            });
        } else {
            // Let the output be the input untouched
            out = items;
        }
        return out;
    };
}
function run(baSidebarService, $rootScope, $localStorage, config, $window, AuthenticationSvc, toastr, $sce, $filter) {
var array_move = function (arr, old_index, new_index) {
if (new_index >= arr.length) {
var k = new_index - arr.length + 1;
while (k--) {
arr.push(undefined);
}
}
arr.splice(new_index, 0, arr.splice(old_index, 1)[0]);
return arr; // for testing
};
// var server = 'http://172.16.17.32:3010'
var server = 'http://192.168.127.12:3010'
var appId = '98134469-b6ee-4720-827e-788559ee2zbv'
$localStorage.AppId=appId;
var getMenues=async function(Privilege,AppId){
var Privileges = _.where(Privilege, {
MenuAppID: AppId,
Hidden: false
});
var allMenues = _.findWhere(Privileges,{AllMenues : true,MenuAppID: AppId});
var data = [];
Privileges.sort((a, b) => parseFloat(a.MenuSequence) - parseFloat(b.MenuSequence));
for (let i = 0; i < Privileges.length; i++) {
Privileges[i].title = Privileges[i].MenuLabel;
Privileges[i].icon = Privileges[i].MenuIconUrl;
Privileges[i].stateRef = Privileges[i].MenuMenuName;
if(allMenues != undefined){
Privileges[i].CanDelete = allMenues.CanDelete;
Privileges[i].CanUpdate = allMenues.CanUpdate;
Privileges[i].CanCreate = allMenues.CanCreate;
Privileges[i].CanRead = allMenues.CanRead;
}
}
var subMenu = function (menuId,row) {
var menuData = _.findWhere(Privileges,{MenuId : menuId});
var subMenues = _.where(Privileges, {
ParentId: menuId
});
if(subMenues.length == 0) return null;
var newSubMenu = [];
for (let x = 0; x < subMenues.length; x++) {
subMenu(subMenues[x].MenuId)
newSubMenu.push(subMenues[x]);
}
menuData.subMenu = newSubMenu;
if(row != undefined) row = menuData;
}
for (let i = 0; i < Privileges.length; i++) {
if (Privileges[i].ParentMenuName == null) {
data.push(Privileges[i]);
subMenu(Privileges[i].MenuId,data[data.length-1]);
}
}
console.log(data, 'Ini')
var showMenu = function () {
for (let x = 0; x < data.length; x++) {
if (data[x].AllMenues == false) {
baSidebarService.addStaticItem(data[x])
}
}
}
await showMenu();
$localStorage.Menues = Privileges;
}
var validate = function () {
console.log($window.location.href)
var getauthkey = $window.location.href.split('?AuthKey=')
if (getauthkey.length == 2) {
var authkey = getauthkey[1].split('#/')
$localStorage.AuthKey = authkey[0]
$window.location.href = getauthkey[0]
$localStorage.isLogoutSession = false
}
if ($localStorage.AuthKey == undefined && $localStorage.isLogoutSession == true) {
var originurl = $window.location.href.split('#/')
$window.location.href = server + '/login.html?origin_url=' + originurl[0] + '&app_id=' + appId + '&sessionexpired=true'
} else if ($localStorage.AuthKey == undefined) {
var originurl = $window.location.href.split('#/')
$window.location.href = server + '/login.html?origin_url=' + originurl[0]+ '&app_id=' + appId
} else {
var url = {
appId: appId
}
AuthenticationSvc.Authentication(url).then(async function (res) {
if (res.data.ErrorCode == 0) {
await getMenues(res.data.Data.Privileges,$localStorage.AppId);
console.log($localStorage,"sebelum")
$localStorage = angular.extend($localStorage, res.data.Data)
// $localStorage.$reset(res.data.Data);
console.log($localStorage,"sesudah")
$rootScope.FullUserName = $localStorage.FullName;
$rootScope.Unit = []
for (var i = 0; i < $localStorage.Roles.length; i++) {
if (i == 0) {
if ($localStorage.Roles[i].UnitId == 0) {
$rootScope.Unit.push({ UnitCode: $localStorage.Roles[i].UnitName, UnitId: $localStorage.Roles[i].UnitId, templateUrl: 'myPopoverTemplate.html' });
} else {
$rootScope.Unit.push({ UnitCode: $localStorage.Roles[i].UnitCode, UnitId: $localStorage.Roles[i].UnitId, templateUrl: 'myPopoverTemplate.html' });
}
} else {
for (var x = 0; x < $rootScope.Unit.length; x++) {
if ($rootScope.Unit[x].UnitId != $localStorage.Roles[i].UnitId) {
if ($localStorage.Roles[i].UnitIdId == 0) {
$rootScope.Unit.push({ UnitCode: $localStorage.Roles[i].UnitName, UnitId: $localStorage.Roles[i].UnitId, templateUrl: 'myPopoverTemplate.html' });
} else {
$rootScope.Unit.push({ UnitCode: $localStorage.Roles[i].UnitCode, UnitId: $localStorage.Roles[i].UnitId, templateUrl: 'myPopoverTemplate.html' });
}
array_move($rootScope.Unit, $rootScope.Unit.length - 1, 0)
break;
} else {
break;
}
}
}
}
$rootScope.Roles = function (id) {
$rootScope.isOpen = true;
// console.log(id.UnitId,'Id Unit');
$rootScope.Role = [];
for (var i = 0; i < $localStorage.Roles.length; i++) {
if (id.UnitId == $localStorage.Roles[i].UnitId) {
$rootScope.Role.push({ UnitName: $localStorage.Roles[i].UnitName, RoleName: $localStorage.Roles[i].Name, Branches: $localStorage.Roles[i].Branches });
}
}
var html = '<div class="table-responsive">' +
'<table class="table table-bordered table-popover">' +
'<thead>' +
'<tr>' +
'<th class="sm-head">Role Name</th>' +
'<th class="sm-head">Branch</th>' +
'</tr>' +
'</thead>' +
'<tbody>';
for (var i = 0; i < $rootScope.Role.length; i++) {
html = html + '<tr> <td class="sm-body">' + $rootScope.Role[i].RoleName + '</td><td class="sm-body">';
for (var x = 0; x < $rootScope.Role[i].Branches.length; x++) {
html = html + ($rootScope.Role[i].Branches.length - 1 == x ? $rootScope.Role[i].Branches[x].BranchName : $rootScope.Role[i].Branches[x].BranchName + ', ');
}
html = html + '</tr>';
}
html = html + '</tbody></table></div>'
// if(id !=undefined) angular.element('#preview').html(html);
return $sce.trustAsHtml(html);
}
$rootScope.isTeacher = $filter('filter')($localStorage.Roles, {'RoleId': 205 }, true)[0];
console.log($rootScope.isTeacher,'isi isteacher')
} else {
toastr.error('Session Expired')
var originurl = $window.location.href
$window.location.href = server + '/login.html?origin_url=' + originurl + '&app_id=' + appId + '&sessionexpired=true'
}
}).catch(async (err)=>{
$rootScope.LoadingAuthToken = false;
await getMenues($localStorage.Privileges,$localStorage.AppId);
})
}
}
$rootScope.$on('$stateChangeStart', async function (event, toState, toParams, fromState, fromParams, options) {
baSidebarService.setMenuCollapsed(false);
await validate();
$rootScope.privileges = _.findWhere($localStorage.Menues,{MenuAppID : $localStorage.AppId,MenuMenuName:toState.name});
console.log($localStorage.Menues,"menues")
console.log($rootScope.privileges,"privileges")
});
$rootScope.logout = function () {
$localStorage.$reset();
$localStorage.isLogoutSession = true
var originurl = $window.location.href.split('#/')
$window.location.href = server + '/login.html?origin_url=' + originurl[0] + '&app_id=' + appId + '&sessionexpired=true'
}
}
/** @ngInject */
function routeConfig($urlRouterProvider, baSidebarServiceProvider, $stateProvider, $httpProvider) {
$urlRouterProvider.otherwise('/dashboard');
$httpProvider.interceptors.push(function ($q, $location, $localStorage, $window) {
return {
'request': function (config) {
config.headers = config.headers || {};
var authkey = $localStorage.AuthKey
if (authkey) {
config.headers.AuthKey = authkey;
}
return config;
},
// 'responseError': function (response) {
// $localStorage.$reset();
// if (response.data.ErrorCode === 500 && response.data.Message === '') {
// // toastr.error('Session Expired')
// var originurl = $window.location.href.split('#/')
// $window.location.href = 'http://172.16.17.32:3010/login.html/login.html?origin_url=' + originurl[0] + '&sessionexpired=true'
// }
// return $q.reject(response);
// }
};
});
baSidebarServiceProvider.addStaticItem(
{
icon: 'ion-grid',
title: 'Activity',
stateRef: 'curriculum-administration'
},
{
icon: 'ion-grid',
title: 'Session Activity',
stateRef: 'session-activity'
},
{
title: 'Resource',
icon: 'book',
subMenu: [
{
title: 'Resource Repository',
stateRef: 'resource',
},
{
title: 'Category',
stateRef: 'resource-category',
},
// {
// title: 'Tag',
// stateRef: 'resource-tag',
// }
]
});
baSidebarServiceProvider.addStaticItem({
title: 'Online Course',
icon: 'ion-university',
subMenu: [
{
title: 'Running Online Course',
// stateView: 'form-branchlist',
// stateRef: 'list-branchlist'
},
{
title: 'Participants',
// stateView: 'form-branchlist',
// stateRef: 'list-branchlist'
},
{
title: 'Makeup Class Participant',
// stateView: 'form-branchlist',
// stateRef: 'list-branchlist'
},
{
title: 'Grade',
stateView: 'form-grading',
stateRef: 'list-grading'
},
{
title: 'Test',
stateView: 'form-test',
stateRef: 'list-test'
},
]
});
}
})();
|
EchoThreeLLC/echothree
|
ui/web/main/src/java/com/echothree/ui/web/main/action/configuration/postaladdresslineelement/AddActionForm.java
|
<reponame>EchoThreeLLC/echothree
// --------------------------------------------------------------------------------
// Copyright 2002-2022 Echo Three, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// --------------------------------------------------------------------------------
package com.echothree.ui.web.main.action.configuration.postaladdresslineelement;
import com.echothree.control.user.contact.common.ContactUtil;
import com.echothree.control.user.contact.common.form.GetPostalAddressElementTypeChoicesForm;
import com.echothree.control.user.contact.common.result.GetPostalAddressElementTypeChoicesResult;
import com.echothree.model.control.contact.common.choice.PostalAddressElementTypeChoicesBean;
import com.echothree.util.common.command.CommandResult;
import com.echothree.util.common.command.ExecutionResult;
import com.echothree.view.client.web.struts.BaseActionForm;
import com.echothree.view.client.web.struts.sprout.annotation.SproutForm;
import java.util.List;
import javax.naming.NamingException;
import javax.servlet.http.HttpServletRequest;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.util.LabelValueBean;
/**
 * Struts form bean backing the "Add Postal Address Line Element" page in the
 * configuration area. Holds the target format/line keys, the selected postal
 * address element type, and the optional prefix/suffix settings; the
 * element-type drop-down choices are fetched lazily from the contact service.
 */
@SproutForm(name="PostalAddressLineElementAdd")
public class AddActionForm
        extends BaseActionForm {

    // Lazily populated on first call to setupPostalAddressElementTypeChoices().
    private PostalAddressElementTypeChoicesBean postalAddressElementTypeChoices;

    private String postalAddressFormatName;
    private String postalAddressLineSortOrder;
    private String postalAddressLineElementSortOrder;
    private String postalAddressElementTypeChoice;
    private String prefix;
    private Boolean alwaysIncludePrefix;
    private String suffix;
    private Boolean alwaysIncludeSuffix;

    /**
     * Loads the element-type choices from the contact service on first use and,
     * when no selection has been made yet, defaults it to the service-provided
     * default value. On failure the choices remain {@code null}.
     */
    public void setupPostalAddressElementTypeChoices() {
        if(postalAddressElementTypeChoices == null) {
            try {
                GetPostalAddressElementTypeChoicesForm form = ContactUtil.getHome().getGetPostalAddressElementTypeChoicesForm();

                form.setDefaultPostalAddressElementTypeChoice(postalAddressElementTypeChoice);
                form.setAllowNullChoice(Boolean.FALSE.toString());

                CommandResult commandResult = ContactUtil.getHome().getPostalAddressElementTypeChoices(userVisitPK, form);
                ExecutionResult executionResult = commandResult.getExecutionResult();
                GetPostalAddressElementTypeChoicesResult result = (GetPostalAddressElementTypeChoicesResult)executionResult.getResult();
                postalAddressElementTypeChoices = result.getPostalAddressElementTypeChoices();

                if(postalAddressElementTypeChoice == null) {
                    postalAddressElementTypeChoice = postalAddressElementTypeChoices.getDefaultValue();
                }
            } catch (NamingException ne) {
                // NOTE(review): only prints to stderr; the page degrades to an
                // empty drop-down. Consider routing through proper logging.
                ne.printStackTrace();
                // failed, postalAddressElementTypeChoices remains null, no default
            }
        }
    }

    public String getPostalAddressFormatName() {
        return postalAddressFormatName;
    }

    public void setPostalAddressFormatName(String postalAddressFormatName) {
        this.postalAddressFormatName = postalAddressFormatName;
    }

    public String getPostalAddressLineSortOrder() {
        return postalAddressLineSortOrder;
    }

    public void setPostalAddressLineSortOrder(String postalAddressLineSortOrder) {
        this.postalAddressLineSortOrder = postalAddressLineSortOrder;
    }

    public String getPostalAddressLineElementSortOrder() {
        return postalAddressLineElementSortOrder;
    }

    public void setPostalAddressLineElementSortOrder(String postalAddressLineElementSortOrder) {
        this.postalAddressLineElementSortOrder = postalAddressLineElementSortOrder;
    }

    public String getPostalAddressElementTypeChoice() {
        return postalAddressElementTypeChoice;
    }

    public void setPostalAddressElementTypeChoice(String postalAddressElementTypeChoice) {
        this.postalAddressElementTypeChoice = postalAddressElementTypeChoice;
    }

    /** Choices for the element-type drop-down, as Struts label/value beans. */
    public List<LabelValueBean> getPostalAddressElementTypeChoices() {
        List<LabelValueBean> choices = null;

        setupPostalAddressElementTypeChoices();
        if(postalAddressElementTypeChoices != null) {
            choices = convertChoices(postalAddressElementTypeChoices);
        }

        return choices;
    }

    public String getPrefix() {
        return prefix;
    }

    public void setPrefix(String prefix) {
        this.prefix = prefix;
    }

    public Boolean getAlwaysIncludePrefix() {
        return alwaysIncludePrefix;
    }

    public void setAlwaysIncludePrefix(Boolean alwaysIncludePrefix) {
        this.alwaysIncludePrefix = alwaysIncludePrefix;
    }

    public String getSuffix() {
        return suffix;
    }

    public void setSuffix(String suffix) {
        this.suffix = suffix;
    }

    public Boolean getAlwaysIncludeSuffix() {
        return alwaysIncludeSuffix;
    }

    public void setAlwaysIncludeSuffix(Boolean alwaysIncludeSuffix) {
        this.alwaysIncludeSuffix = alwaysIncludeSuffix;
    }

    /**
     * Resets the checkbox-backed booleans to {@code false} for each request,
     * since unchecked HTML checkboxes are not submitted with the form.
     */
    @Override
    public void reset(ActionMapping mapping, HttpServletRequest request) {
        super.reset(mapping, request);

        setAlwaysIncludePrefix(Boolean.FALSE);
        setAlwaysIncludeSuffix(Boolean.FALSE);
    }

}
|
QuietSugar/run-java
|
project-mybatis-generator/src/test/java/name/xu/utils/VelocityUtilTest.java
|
package name.xu.utils;
import org.junit.Test;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
/**
 * @author Created by HuoXu
 */
public class VelocityUtilTest {

    /**
     * Renders the "template/Java.vm" Velocity template with a dummy model
     * into a file named "a.java" in the current working directory.
     * NOTE(review): there are no assertions and the generated file is never
     * deleted; also confirm that VelocityUtil.gen2File closes the writer.
     */
    @Test
    public void gen2File() throws IOException {
        HashMap<String, Object> map = new HashMap<>();
        map.put("modelName","modelName");
        map.put("packageName","packageName");
        FileWriter out = new FileWriter("a.java");
        VelocityUtil.gen2File("template/Java.vm", map, out);
    }
}
|
Degget1986/neo-mamba
|
neo3/network/payloads/inventory.py
|
<reponame>Degget1986/neo-mamba<gh_stars>10-100
from __future__ import annotations
from enum import IntEnum
from typing import List
from neo3.core import types
from neo3.core import Size as s, utils
from neo3.core import serialization
import abc
from .verification import IVerifiable
class InventoryType(IntEnum):
    """Wire-level identifiers for the kinds of objects an inventory payload can reference."""
    TX = 0x2b
    BLOCK = 0x2c
    CONSENSUS = 0x2d
    EXTENSIBLE = 0x2e
class InventoryPayload(serialization.ISerializable):
    """
    A payload used to share inventory hashes.

    See also:
        - :ref:`getblocks <message-usage-getblocks>`
        - :ref:`getdata <message-usage-getdata>`
        - :ref:`mempool <message-usage-mempool>`
    """

    def __init__(self, type: InventoryType, hashes: List[types.UInt256]):
        """
        Create payload.

        Args:
            type: indicator to what type of object the hashes of this payload relate to.
            hashes: hashes of "type" objects.
        """
        self.type = type
        self.hashes = hashes

    def __len__(self):
        """ Get the total size in bytes of the object: 1 type byte plus the var-sized hash list. """
        return s.uint8 + utils.get_var_size(self.hashes)

    def serialize(self, writer: serialization.BinaryWriter) -> None:
        """
        Serialize the object into a binary stream.

        Args:
            writer: instance.
        """
        writer.write_uint8(self.type)
        # Length-prefixed list: var-int count followed by each 32-byte hash.
        writer.write_var_int(len(self.hashes))
        for h in self.hashes:  # type: types.UInt256
            writer.write_bytes(h.to_array())

    def deserialize(self, reader: serialization.BinaryReader) -> None:
        """
        Deserialize the object from a binary stream.

        Args:
            reader: instance.
        """
        self.type = InventoryType(reader.read_uint8())
        # Mirrors serialize(): reads the var-int count and then each hash.
        self.hashes = reader.read_serializable_list(types.UInt256)

    @classmethod
    def _serializable_init(cls):
        # Placeholder instance used by the deserialization framework; the
        # real values are filled in by deserialize().
        return cls(InventoryType.BLOCK, [])
class IInventory(IVerifiable):
    """Interface for objects that can be announced and requested via inventory messages."""

    # NOTE(review): unlike inventory_type below, hash is not declared as a
    # @property here — confirm whether that asymmetry is intentional.
    @abc.abstractmethod
    def hash(self) -> types.UInt256:
        """Unique identifying hash of the object."""

    @property
    @abc.abstractmethod
    def inventory_type(self) -> InventoryType:
        """Inventory category (see InventoryType) of the object."""
|
simokhov/schemas44
|
src/main/java/ru/gov/zakupki/oos/eptypes/_1/AppCorrespondenceType.java
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2019.07.02 at 03:35:23 PM MSK
//
package ru.gov.zakupki.oos.eptypes._1;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
import ru.gov.zakupki.oos.common._1.PreferenseType;
import ru.gov.zakupki.oos.common._1.RequirementRestrictionType;
/**
* Тип: Соответствие участника преимуществам, требованиям к участникам (согласно действующему извещению о закупке / изменению извещения)
*
* <p>Java class for appCorrespondenceType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="appCorrespondenceType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="compatible" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* <choice>
* <element name="preferenseInfo" type="{http://zakupki.gov.ru/oos/common/1}preferenseType"/>
* <element name="requirementInfo" type="{http://zakupki.gov.ru/oos/common/1}requirementRestrictionType"/>
* <element name="restrictionInfo" type="{http://zakupki.gov.ru/oos/common/1}requirementRestrictionType"/>
* </choice>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// Generated JAXB binding class — regenerated from the schema; manual edits
// are lost on recompilation (see the file header). Exactly one of the three
// info fields is expected to be populated, per the schema <choice>.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "appCorrespondenceType", propOrder = {
    "compatible",
    "preferenseInfo",
    "requirementInfo",
    "restrictionInfo"
})
public class AppCorrespondenceType {

    protected boolean compatible;
    protected PreferenseType preferenseInfo;
    protected RequirementRestrictionType requirementInfo;
    protected RequirementRestrictionType restrictionInfo;

    /**
     * Gets the value of the compatible property.
     *
     */
    public boolean isCompatible() {
        return compatible;
    }

    /**
     * Sets the value of the compatible property.
     *
     */
    public void setCompatible(boolean value) {
        this.compatible = value;
    }

    /**
     * Gets the value of the preferenseInfo property.
     *
     * @return
     *     possible object is
     *     {@link PreferenseType }
     *
     */
    public PreferenseType getPreferenseInfo() {
        return preferenseInfo;
    }

    /**
     * Sets the value of the preferenseInfo property.
     *
     * @param value
     *     allowed object is
     *     {@link PreferenseType }
     *
     */
    public void setPreferenseInfo(PreferenseType value) {
        this.preferenseInfo = value;
    }

    /**
     * Gets the value of the requirementInfo property.
     *
     * @return
     *     possible object is
     *     {@link RequirementRestrictionType }
     *
     */
    public RequirementRestrictionType getRequirementInfo() {
        return requirementInfo;
    }

    /**
     * Sets the value of the requirementInfo property.
     *
     * @param value
     *     allowed object is
     *     {@link RequirementRestrictionType }
     *
     */
    public void setRequirementInfo(RequirementRestrictionType value) {
        this.requirementInfo = value;
    }

    /**
     * Gets the value of the restrictionInfo property.
     *
     * @return
     *     possible object is
     *     {@link RequirementRestrictionType }
     *
     */
    public RequirementRestrictionType getRestrictionInfo() {
        return restrictionInfo;
    }

    /**
     * Sets the value of the restrictionInfo property.
     *
     * @param value
     *     allowed object is
     *     {@link RequirementRestrictionType }
     *
     */
    public void setRestrictionInfo(RequirementRestrictionType value) {
        this.restrictionInfo = value;
    }

}
|
MrLys/verbose-waffle
|
src/main/java/no/rutta/rutta/common/Identifiable.java
|
package no.rutta.rutta.common;
/**
 * Contract for domain objects addressable by a numeric id.
 */
public interface Identifiable {

    /**
     * @return the numeric id of this object
     */
    int getId();
}
|
shuaicj/hello-java
|
hello-string/src/test/java/shuaicj/hello/string/StringTest.java
|
package shuaicj.hello.string;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test String.
*
* Conclusions:
* 1. Literal Strings are always in the pool.
* 2. Strings by 'new String(..)' are not in the pool.
* 3. Strings by 'String.intern()' are always in the pool.
* 4. Strings by 'String.valueOf(..)' are not in the pool.
* 5. Strings by 'StringBuilder.toString()' are not in the pool.
* 6. Strings by 'literal append' are always in the pool because of compiler optimization.
* 7. Strings by 'variable append' are not in the pool because it's done via StringBuilder.
*
* 8. String.valueOf(char[]) and String.valueOf(Object) act weird while passing in the same char[].
*
* @author shuaicj 2017/02/05
*/
@SuppressWarnings({"RedundantStringConstructorCall", "StringEquality"})
public class StringTest {

    // Literal strings are interned, so identical literals share one instance;
    // new String(..) always allocates a fresh object outside the pool.
    @Test
    public void literal() throws Exception {
        String s1 = "abc";
        String s2 = new String("abc");
        String s3 = "abc";
        assertTrue(s1 != s2);
        assertTrue(s1 == s3);
    }

    // intern() returns the pooled instance: identical to the literal, never
    // identical to a new String(..) instance.
    @Test
    public void intern() throws Exception {
        String s1 = "abc";
        String s2 = s1.intern();
        String s3 = new String("abc");
        String s4 = s3.intern();
        assertTrue(s1 == s2);
        assertTrue(s3 != s4);
        assertTrue(s3 != s1);
        assertTrue(s4 == s1);

        // "def" reaches the pool via intern() before the literal is evaluated;
        // both still resolve to the same pooled instance.
        String s5 = new String("def");
        String s6 = s5.intern();
        String s7 = "def";
        assertTrue(s5 != s6);
        assertTrue(s5 != s7);
        assertTrue(s6 == s7);
    }

    // String.valueOf(..) builds a fresh, un-pooled string.
    @SuppressWarnings("ConstantConditions")
    @Test
    public void valueOf() throws Exception {
        String s1 = "123";
        String s2 = String.valueOf(123);
        String s3 = s2.intern();
        assertTrue(s1 != s2);
        assertTrue(s1 == s3);
        assertTrue(s2 != s3);
    }

    // StringBuilder.toString() allocates outside the pool: equal, not identical.
    @SuppressWarnings("StringBufferReplaceableByString")
    @Test
    public void builder() throws Exception {
        String s1 = "abc";
        StringBuilder sb = new StringBuilder();
        sb.append("a");
        sb.append("b");
        sb.append("c");
        String s2 = sb.toString();
        assertTrue(s1.equals(s2));
        assertTrue(s1 != s2);
    }

    // Concatenation of literals is folded at compile time into one pooled literal.
    @Test
    public void literalAppend() throws Exception {
        String s1 = "abc";
        String s2 = "ab" + "c";
        assertTrue(s1 == s2);
    }

    // Concatenation involving a variable is compiled to StringBuilder — result
    // is not pooled.
    @Test
    public void variableAppend() throws Exception {
        String abc = "abc";
        String ab = "ab";
        String a = "a";
        String s1 = a + "b";
        String s2 = s1 + "c";
        assertTrue(s1.equals(ab));
        assertTrue(s1 != ab);
        assertTrue(s2.equals(abc));
        assertTrue(s2 != abc);
    }

    // final local String variables are compile-time constants, so their
    // concatenation is folded like literals.
    @Test
    public void variableAppendFinal() throws Exception {
        String ab = "ab";
        String a = "a";
        String b = "b";
        final String fa = "a";
        final String fb = "b";
        assertTrue(a + b != ab);
        assertTrue(a + fb != ab);
        assertTrue("a" + fb == ab);
        assertTrue(fa + b != ab);
        assertTrue(fa + "b" == ab);
        assertTrue(fa + fb == ab);
    }

    // valueOf(char[]) builds a string from the chars, while the statically
    // chosen valueOf(Object) overload ends up using the array's toString() —
    // hence the two results differ.
    @SuppressWarnings("UnnecessaryLocalVariable")
    @Test
    public void valueOfConfusion() throws Exception {
        char[] arr = new char[]{'a', 'b', 'c'};
        String s1 = String.valueOf(arr);
        Object obj = arr;
        String s2 = String.valueOf(obj);
        assertFalse(s1.equals(s2));
    }
}
|
theatrus/drivers
|
ads1x15/driver_test.go
|
package ads1x15
import (
"testing"
"github.com/reef-pi/hal"
)
// params mimics a user-supplied driver configuration. Gains are mostly
// strings; "Gain 4" is an int — presumably to exercise type coercion in the
// driver's parameter validation (TODO confirm against NewDriver).
var params = map[string]interface{}{
	"Address": 72,
	"Gain 1":  "2/3",
	"Gain 2":  "1",
	"Gain 3":  "2",
	"Gain 4":  4,
}
// mock is a scripted stand-in for an I2C bus: register reads are served two
// bytes at a time from the Bytes queue, writes are accepted and discarded.
type mock struct {
	Bytes []byte
}

func (m *mock) SetAddress(_ byte) error                       { return nil }
func (m *mock) ReadBytes(addr byte, num int) ([]byte, error)  { return m.Bytes, nil }
func (m *mock) WriteBytes(addr byte, value []byte) error      { return nil }

// ReadFromReg pops the next two queued bytes into value; when fewer than two
// remain, value is left untouched (the caller sees zeroes).
func (m *mock) ReadFromReg(addr, reg byte, value []byte) error {
	if len(m.Bytes) >= 2 {
		value[0] = m.Bytes[0]
		value[1] = m.Bytes[1]
		m.Bytes = m.Bytes[2:]
	}
	return nil
}
func (m *mock) WriteToReg(addr, reg byte, value []byte) error { return nil }
func (m *mock) Close() error                                  { return nil }
// TestAds1015Driver exercises the ADS1015 factory against the scripted i2c
// mock: nil-config rejection, metadata, parameter count, capability flags,
// pin enumeration, and a raw conversion read.
// Fixes: message typos ("cpability", "Unexepected") and the dead
// reassignment of pins for the DigitalOutput check.
func TestAds1015Driver(t *testing.T) {
	bus := mocki2cBus() //i2c.MockBus()
	f := Ads1015Factory()

	// A nil configuration must be rejected.
	_, err := f.NewDriver(nil, bus)
	if err == nil {
		t.Error("Adapter creation should fail when configuration is null")
	}
	metadata := f.Metadata()
	if metadata.Name != "ADS1015" {
		t.Error("Incorrect metadata received")
	}
	parameters := f.GetParameters()
	if len(parameters) != 5 {
		t.Error("Incorrect number of parameters received")
	}
	driver, err := f.NewDriver(params, bus)
	if err != nil {
		t.Error(err)
	}
	if driver.Metadata().Name != "ADS1015" {
		t.Error("Unexpected name")
	}
	if !driver.Metadata().HasCapability(hal.AnalogInput) {
		t.Error("analog input capability should exist")
	}
	if driver.Metadata().HasCapability(hal.DigitalInput) {
		t.Error("Digital input Capability should not exist")
	}
	pins, err := driver.Pins(hal.AnalogInput)
	if err != nil {
		t.Error(err)
	}
	if len(pins) != 4 {
		t.Error("Unexpected number of pins returned by driver")
	}
	// Requesting an unsupported capability must fail; the pins are unused.
	if _, err = driver.Pins(hal.DigitalOutput); err == nil {
		t.Error("ADS1015 should not support Digital Output")
	}
	d := driver.(hal.AnalogInputDriver)
	if len(d.AnalogInputPins()) != 4 {
		t.Error("Expected 4 channels")
	}
	if _, err := d.AnalogInputPin(5); err == nil {
		t.Error("Expected error for invalid channel name")
	}
	ch, err := d.AnalogInputPin(0)
	if err != nil {
		t.Error(err)
	}
	if ch.Name() != "0" {
		t.Error("Unexpected channel name")
	}
	// With no bytes queued, the config read-back cannot match what was written.
	_, err = ch.Read()
	if err == nil {
		t.Error("Read should fail due to config mismatch")
	}
	// Queue a config read-back (0xC1, 0x83) followed by a conversion (0x6F, 0xF0).
	bus.Bytes = []byte{0xC1, 0x83, 0x6F, 0xF0}
	v, err := ch.Read()
	if err != nil {
		t.Error(err)
	}
	if v != 1791 {
		t.Error("Unexpected value")
	}
	if err := d.Close(); err != nil {
		t.Error(err)
	}
}
// TestAds1115Driver mirrors TestAds1015Driver for the 16-bit ADS1115 part.
// Fixes: copy-paste failure message naming the wrong chip ("ADS1015 should
// not support Digital Output"), message typos, and the dead reassignment of
// pins for the DigitalOutput check.
func TestAds1115Driver(t *testing.T) {
	bus := mocki2cBus() //i2c.MockBus()
	f := Ads1115Factory()

	// A nil configuration must be rejected.
	_, err := f.NewDriver(nil, bus)
	if err == nil {
		t.Error("Adapter creation should fail when configuration is null")
	}
	metadata := f.Metadata()
	if metadata.Name != "ADS1115" {
		t.Error("Incorrect metadata received")
	}
	parameters := f.GetParameters()
	if len(parameters) != 5 {
		t.Error("Incorrect number of parameters received")
	}
	driver, err := f.NewDriver(params, bus)
	if err != nil {
		t.Error(err)
	}
	if driver.Metadata().Name != "ADS1115" {
		t.Error("Unexpected name")
	}
	if !driver.Metadata().HasCapability(hal.AnalogInput) {
		t.Error("analog input capability should exist")
	}
	if driver.Metadata().HasCapability(hal.DigitalInput) {
		t.Error("Digital input Capability should not exist")
	}
	pins, err := driver.Pins(hal.AnalogInput)
	if err != nil {
		t.Error(err)
	}
	if len(pins) != 4 {
		t.Error("Unexpected number of pins returned by driver")
	}
	// Requesting an unsupported capability must fail; the pins are unused.
	if _, err = driver.Pins(hal.DigitalOutput); err == nil {
		t.Error("ADS1115 should not support Digital Output")
	}
	d := driver.(hal.AnalogInputDriver)
	if len(d.AnalogInputPins()) != 4 {
		t.Error("Expected 4 channels")
	}
	if _, err := d.AnalogInputPin(5); err == nil {
		t.Error("Expected error for invalid channel name")
	}
	ch, err := d.AnalogInputPin(0)
	if err != nil {
		t.Error(err)
	}
	if ch.Name() != "0" {
		t.Error("Unexpected channel name")
	}
	// With no bytes queued, the config read-back cannot match what was written.
	_, err = ch.Read()
	if err == nil {
		t.Error("Read should fail due to config mismatch")
	}
	// Queue a config read-back (0xC1, 0x83) followed by a conversion (0x6F, 0xF0).
	bus.Bytes = []byte{0xC1, 0x83, 0x6F, 0xF0}
	v, err := ch.Read()
	if err != nil {
		t.Error(err)
	}
	if v != 28656 {
		t.Error("Unexpected value")
	}
	if err := d.Close(); err != nil {
		t.Error(err)
	}
}
|
test-coverage-enforce-bot/wso2-ode
|
axis2/src/main/java/org/apache/ode/axis2/httpbinding/ProxyConf.java
|
<reponame>test-coverage-enforce-bot/wso2-ode
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.axis2.httpbinding;
import org.apache.axis2.transport.http.HttpTransportProperties;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpState;
import org.apache.commons.httpclient.NTCredentials;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.params.HttpParams;
import org.apache.ode.utils.Properties;
/**
* The purpose of this class is to configure proxy for HttpClient.
*/
public class ProxyConf {
    // these properties are java system properties
    // see http://java.sun.com/j2se/1.5.0/docs/guide/net/properties.html
    public static final String HTTP_PROXY_HOST = "http.proxyHost";
    public static final String HTTP_PROXY_PORT = "http.proxyPort";
    public static final String HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts";

    /**
     * Applies the proxy host/port and credentials to the given HttpClient
     * host configuration and state. Java networking system properties
     * ({@code http.proxyHost}/{@code http.proxyPort}), when set, override the
     * host and port from the Axis2 proxy properties — and in that case the
     * credentials are reset to empty ones.
     */
    public static void configure(HostConfiguration hostConfig, HttpState state, HttpTransportProperties.ProxyProperties proxyProperties) {

        String proxyHost = proxyProperties.getProxyHostName();
        int proxyPort = proxyProperties.getProxyPort();

        //Setting credentials
        String userName = proxyProperties.getUserName();
        String password = proxyProperties.getPassWord();
        String domain = proxyProperties.getDomain();

        // NTLM when a domain is given, basic user/password otherwise,
        // anonymous (empty) credentials as the fallback.
        Credentials proxyCred;
        if (userName != null && password != null && domain != null) {
            proxyCred = new NTCredentials(userName, password, proxyHost, domain);
        } else if (userName != null) {
            proxyCred = new UsernamePasswordCredentials(userName, password);
        } else {
            proxyCred = new UsernamePasswordCredentials("", "");
        }

        //Using Java Networking Properties
        String host = System.getProperty(HTTP_PROXY_HOST);
        if (host != null) {
            proxyHost = host;
            proxyCred = new UsernamePasswordCredentials("", "");
        }
        String port = System.getProperty(HTTP_PROXY_PORT);
        if (port != null) {
            // NOTE(review): a non-numeric value throws NumberFormatException.
            proxyPort = Integer.parseInt(port);
        }
        state.setProxyCredentials(AuthScope.ANY, proxyCred);
        hostConfig.setProxy(proxyHost, proxyPort);
    }

    /**
     * @return true if a proxy is set in the params, or in the system property "http.proxyHost"
     *         and the host is not mentioned in the system property "http.nonProxyHosts"
     * @see Properties#PROP_HTTP_PROXY_PREFIX
     */
    public static boolean isProxyEnabled(HttpParams params, String targetHost) throws URIException {

        // from IL properties
        boolean isSet = params.isParameterSet(Properties.PROP_HTTP_PROXY_PREFIX);

        // from Java Networking Properties
        isSet |= System.getProperty(HTTP_PROXY_HOST) != null;

        boolean isNonProxyHost = isNonProxyHost(targetHost);

        return isSet && !isNonProxyHost;
    }

    /**
     * @return true if the targetHost matches one of the patterns in the
     *         system property "http.nonProxyHosts" (pipe-separated, each
     *         entry treated as a regular expression)
     */
    public static boolean isNonProxyHost(String targetHost) {
        String nonProxyHosts = System.getProperty(HTTP_NON_PROXY_HOSTS);
        if (nonProxyHosts != null) {
            String[] splitted = nonProxyHosts.split("\\|");
            for (int i = 0; i < splitted.length; i++) {
                if (targetHost.matches(splitted[i])) return true;
            }
        }
        return false;
    }
}
|
danil/logastic
|
uint8p.go
|
<filename>uint8p.go
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package log0
// Uint8p wraps a uint8 pointer in a value implementing fmt.Stringer,
// encoding.TextMarshaler and json.Marshaler.
func Uint8p(p *uint8) uint8P {
	return uint8P{P: p}
}

// uint8P carries an optional uint8; a nil pointer renders as "null".
type uint8P struct{ P *uint8 }

// String renders the pointed-to value via uint8V, or "null" for a nil pointer.
func (p uint8P) String() string {
	ptr := p.P
	if ptr == nil {
		return "null"
	}
	return uint8V{V: *ptr}.String()
}

// MarshalText implements encoding.TextMarshaler using the String form.
func (p uint8P) MarshalText() ([]byte, error) {
	return []byte(p.String()), nil
}

// MarshalJSON implements json.Marshaler by delegating to MarshalText.
func (p uint8P) MarshalJSON() ([]byte, error) {
	return p.MarshalText()
}
|
billlin0904/xamp2
|
src/xamp_output_device/include/output_device/win32/wasapi.h
|
<reponame>billlin0904/xamp2<filename>src/xamp_output_device/include/output_device/win32/wasapi.h
//=====================================================================================================================
// Copyright (c) 2018-2021 xamp project. All rights reserved.
// More license information, please see LICENSE file in module root folder.
//=====================================================================================================================
#pragma once
#include <base/base.h>
#ifdef XAMP_OS_WIN
#pragma comment(lib, "avrt.lib")
#pragma comment(lib, "Mfplat.lib")
#pragma comment(lib, "Propsys.lib")
#include <chrono>
#include <atlcomcli.h>
#include <mmdeviceapi.h>
#include <atlbase.h>
#include <mmdeviceapi.h>
#include <Audioclient.h>
#include <audiopolicy.h>
#include <mfapi.h>
#include <strmif.h>
#include <endpointvolume.h>
#include <functiondiscoverykeys_devpkey.h>
#include <base/stl.h>
#include <base/windows_handle.h>
#include <base/uuid.h>
#include <avrt.h>
#include <Mferror.h>
#include <output_device/deviceinfo.h>
#include <output_device/win32/mmcss_types.h>
struct IMMDevice;
struct IMFAsyncResult;
struct IAudioClient;
struct IAudioRenderClient;
struct IMMNotificationClient;
struct IMMDeviceEnumerator;
namespace xamp::output_device::win32::helper {

// REFERENCE_TIME is expressed in 100-nanosecond ticks: 10,000 ticks per ms.
inline constexpr int32_t kWasapiReftimesPerMillisec = 10000;
inline constexpr double kWasapiReftimesPerSec = 10000000;

// Converts a REFERENCE_TIME (100ns units) to seconds.
XAMP_ALWAYS_INLINE constexpr double Nano100ToSeconds(REFERENCE_TIME ref) noexcept {
    // 1 nano = 0.000000001 seconds
    //100 nano = 0.0000001 seconds
    //100 nano = 0.0001 milliseconds
    constexpr double ratio = 0.0000001;
    return (static_cast<double>(ref) * ratio);
}

// Converts a REFERENCE_TIME to whole std::chrono milliseconds (truncating).
XAMP_ALWAYS_INLINE constexpr std::chrono::milliseconds ConvertToMilliseconds(REFERENCE_TIME ref) noexcept {
    const LONGLONG kMilliseconds = (1000); // 10 ^ 3
    const LONGLONG kNanoSeconds = (1000000000); // 10 ^ 9
    const LONGLONG kUnit = (kNanoSeconds / 100); // 10 ^ 7
    return std::chrono::milliseconds(ref / (kUnit / kMilliseconds));
}

// Number of audio frames covered by `period` (100ns units, "hns") at the
// given sample rate, rounded to nearest.
XAMP_ALWAYS_INLINE constexpr UINT32 ReferenceTimeToFrames(REFERENCE_TIME period, UINT32 samplerate) noexcept {
    return static_cast<UINT32>(
        1.0 * period * // hns *
        samplerate / // (frames / s) /
        1000 / // (ms / s) /
        10000 // (hns / s) /
        + 0.5 // rounding
    );
}

// Inverse of ReferenceTimeToFrames: duration in 100ns units of `frames`
// frames at `samplerate`, rounded to nearest.
XAMP_ALWAYS_INLINE constexpr REFERENCE_TIME MakeHnsPeriod(UINT32 frames, UINT32 samplerate) noexcept {
    return static_cast<REFERENCE_TIME>(10000.0 * 1000.0 / double(samplerate) * double(frames) + 0.5);
}

// Device/enumeration helpers implemented in the corresponding .cpp file.
DeviceInfo GetDeviceInfo(CComPtr<IMMDevice>& device, Uuid const& device_type_id);
CComPtr<IMMDeviceEnumerator> CreateDeviceEnumerator();
HashMap<std::string, std::wstring> GetDeviceProperty(CComPtr<IMMDevice>& device);
// Current stream position in milliseconds read from an IAudioClock.
double GetStreamPosInMilliseconds(CComPtr<IAudioClock>& clock);
}
#endif
|
Jumpscale/jumpscale_examples7
|
examples/serializers/multiserialze.py
|
import os
import struct
from JumpScale import j
import JumpScale.baselib.serializers

# Python 2 smoke/perf script for the JumpScale serializer chain named "mcb",
# constructed with a secret key (the app name suggests a Blowfish stage).
j.application.start("blowfishtest")

from random import randrange

# 500 bytes of random payload.
msg = ""
for i in range(500):
    msg += chr(randrange(0, 256))

# 56-byte random key — presumably Blowfish's maximum key length; confirm
# against the serializer's documentation.
key = ""
for i in range(56):
    key += chr(randrange(0, 256))

serializationstr = "mcb"
s = j.db.serializers.get(serializationstr, key=key)

# Round-trip sanity check (Python 2 print statement).
print s.loads(s.dumps("some data"))

def perftest(data):
    # Times 10k dumps/loads round-trips using the shared JumpScale timer;
    # uses the module-level serializer `s`.
    nr = 10000
    j.base.timer.start()
    for i in range(nr):
        data1 = s.dumps(data)
        data2 = s.loads(data1)
    j.base.timer.stop(nr)

serializationstr = "mcb"
s = j.db.serializers.get(serializationstr, key=key)
perftest(data=msg)

j.application.stop()
|
lucascassilha/react-fastfeet
|
src/pages/Providers/index.js
|
import React, { useState, useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { produce } from 'immer';
import { toast } from 'react-toastify';
import { IoIosMore, IoIosArrowBack, IoIosArrowForward } from 'react-icons/io';
import { FaTrashAlt, FaPen } from 'react-icons/fa';
import history from '~/services/history';
import {
providerRequest,
providerDeleteRequest,
} from '~/store/modules/providers/actions';
import TableHeader from '~/components/TableHeader/index';
import { Container, Holder, Table, Action, NameDiv, Pages } from './styles';
export default function Provider() {
const loading = useSelector(state => state.providers.loading);
const providerLoad = useSelector(state => state.providers.data);
const [providers, setProviders] = useState([]);
const [page, setPage] = useState(1);
const [input, setInput] = useState('');
const dispatch = useDispatch();
useEffect(() => {
dispatch(providerRequest(null, 1));
}, []);
useEffect(() => {
setProviders(providerLoad);
}, [providerLoad]);
const handleActions = ({ id, visible }) => {
setProviders(
produce(providers, draft => {
draft.map(item => {
if (item.id === id) {
item.visible = !visible;
} else {
item.visible = false;
}
});
})
);
};
const handlePageAdd = () => {
if (providers.length < 10) {
return toast.info('There are no more pages!');
}
const pageSwitch = page + 1;
setPage(page + 1);
dispatch(providerRequest(input, pageSwitch));
};
const handlePageSub = () => {
if (page === 1) {
return toast.info('This is already the first page!');
}
const pageSwitch = page - 1;
setPage(page - 1);
console.log(page);
dispatch(providerRequest(input, pageSwitch));
};
const handleDelete = id => {
// eslint-disable-next-line no-restricted-globals
const confirmation = confirm(
'Are you sure you want to delete this provider?'
);
if (confirmation) {
dispatch(providerDeleteRequest(id));
}
};
const handleSearch = e => {
if (e.key === 'Enter') {
dispatch(providerRequest(input, 1));
}
setInput('');
};
const handleRegister = () => {
history.push('/providers/register');
};
const handleEdition = id => {
history.push(`providers/edit/${id}`);
};
return (
<Container>
<Holder>
<header>
<h1>Managing Providers</h1>
</header>
<div>
<TableHeader
loading={loading}
onChange={e => setInput(e.target.value)}
onKeyDown={e => handleSearch(e)}
onClick={handleRegister}
/>
<Table>
<thead>
<tr>
<th>ID</th>
<th>Picture</th>
<th>Name</th>
<th>Email</th>
<th style={{ textAlign: 'right' }}>Actions</th>
</tr>
</thead>
<tbody>
{providers.map(item => (
<tr>
<td>{`#${item.id}`}</td>
<NameDiv>
{item.avatar ? (
<img src={item.avatar.url} alt="Avatar" />
) : (
<div>{item.nullImageString}</div>
)}
</NameDiv>
<td>{item.name}</td>
<td>{item.email}</td>
<td>
<button type="button" onClick={() => handleActions(item)}>
<IoIosMore size={25} />
<Action visible={item.visible}>
<div>
<button
type="button"
onClick={() => handleEdition(item.id)}
>
<FaPen
size={14}
color="#4D85EE"
style={{ marginRight: 10 }}
/>
<p>Edit</p>
</button>
</div>
<div>
<button
type="button"
onClick={() => handleDelete(item.id)}
>
<FaTrashAlt
size={14}
color="#DE3B3B"
style={{ marginRight: 10 }}
/>
<p>Delete</p>
</button>
</div>
</Action>
</button>
</td>
</tr>
))}
</tbody>
</Table>
</div>
<Pages>
<IoIosArrowBack onClick={handlePageSub} />
<strong>{page}</strong>
<IoIosArrowForward onClick={handlePageAdd} />
</Pages>
</Holder>
</Container>
);
}
|
Copeland87/Nuanced
|
models/user.js
|
<filename>models/user.js
var passportLocalSequelize = require("passport-local-sequelize");

// Sequelize model factory: defines the "User" model through
// passport-local-sequelize's defineUser, which supplies the library's
// standard auth columns alongside the custom favoriteColor field
// (see the passport-local-sequelize docs for the exact columns).
module.exports = function(sequelize, DataTypes) {
  var User = passportLocalSequelize.defineUser(sequelize, {
    favoriteColor: DataTypes.STRING
  });
  return User;
}
|
siahr/basex
|
basex-tests/src/test/java/org/basex/qt3ts/op/OpSubtractYearMonthDurations.java
|
<gh_stars>0
package org.basex.qt3ts.op;
import org.basex.tests.bxapi.XQuery;
import org.basex.tests.qt3ts.*;
/**
* Tests for the subtract-yearMonthDurations() function.
*
* @author BaseX Team 2005-13, BSD License
* @author <NAME>
*/
@SuppressWarnings("all")
public class OpSubtractYearMonthDurations extends QT3TestSet {
  /**
   * K-YearMonthDurationSubtract-1: subtracting the zero duration P0M from an
   * xs:yearMonthDuration leaves it unchanged.
   * (Generated QT3 test; {@code result}, {@code ctx}, {@code test} and the
   * assertion helpers come from the QT3TestSet base class.)
   */
  @org.junit.Test
  public void kYearMonthDurationSubtract1() {
    final XQuery query = new XQuery(
      "xs:yearMonthDuration(\"P3Y3M\") - xs:yearMonthDuration(\"P0M\") eq xs:yearMonthDuration(\"P3Y3M\")",
      ctx);
    try {
      result = new QT3Result(query.value());
    } catch(final Throwable trw) {
      result = new QT3Result(trw);
    } finally {
      query.close();
    }

    test(
      assertBoolean(true)
    );
  }
  /**
   * K-YearMonthDurationSubtract-2: subtracting an xs:yearMonthDuration from
   * the zero duration P0M yields its negation.
   * (Generated QT3 test; shared members come from the QT3TestSet base class.)
   */
  @org.junit.Test
  public void kYearMonthDurationSubtract2() {
    final XQuery query = new XQuery(
      "xs:yearMonthDuration(\"P0M\") - xs:yearMonthDuration(\"P3Y3M\") eq xs:yearMonthDuration(\"-P3Y3M\")",
      ctx);
    try {
      result = new QT3Result(query.value());
    } catch(final Throwable trw) {
      result = new QT3Result(trw);
    } finally {
      query.close();
    }

    test(
      assertBoolean(true)
    );
  }
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-3
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of substraction two arbitrary xs:yearMonthDurations.
* *******************************************************
* .
*/
@org.junit.Test
public void kYearMonthDurationSubtract3() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P32Y43M\") - xs:yearMonthDuration(\"P12Y13M\") eq xs:yearMonthDuration(\"P22Y6M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-4
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: The addition operator is not available between xs:dayTimeDuration and xs:yearMonthDuration.
* *******************************************************
* .
*/
  // Despite the "Subtract" name (it belongs to the official suite's
  // subtract-yearMonthDurations group), this test deliberately uses '+':
  // it verifies that the addition operator is NOT defined between
  // xs:dayTimeDuration and xs:yearMonthDuration, and must raise the static
  // type error XPTY0004.
  @org.junit.Test
  public void kYearMonthDurationSubtract4() {
    final XQuery query = new XQuery(
      "xs:dayTimeDuration(\"P3D\") + xs:yearMonthDuration(\"P3Y3M\")",
      ctx);
    try {
      // 'result' is a protected field inherited from QT3TestSet.
      result = new QT3Result(query.value());
    } catch(final Throwable trw) {
      // The expected path: evaluation throws, and test() matches the error code.
      result = new QT3Result(trw);
    } finally {
      query.close();
    }
    test(
      error("XPTY0004")
    );
  }
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-5
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: The addition operator is not available between xs:yearMonthDuration and xs:dayTimeDuration.
* *******************************************************
* .
*/
@org.junit.Test
public void kYearMonthDurationSubtract5() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P3Y3M\") + xs:dayTimeDuration(\"P3D\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPTY0004")
);
}
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-6
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: The addition operator is not available between xs:duration and xs:yearMonthDuration.
* *******************************************************
* .
*/
@org.junit.Test
public void kYearMonthDurationSubtract6() {
final XQuery query = new XQuery(
"xs:duration(\"P3D\") + xs:yearMonthDuration(\"P3Y3M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPTY0004")
);
}
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-7
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: The addition operator is not available between xs:yearMonthDuration and xs:duration.
* *******************************************************
* .
*/
@org.junit.Test
public void kYearMonthDurationSubtract7() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P3Y3M\") + xs:duration(\"P3D\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPTY0004")
);
}
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-8
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: The addition operator is not available between xs:dayTimeDuration and xs:duration.
* *******************************************************
* .
*/
@org.junit.Test
public void kYearMonthDurationSubtract8() {
final XQuery query = new XQuery(
"xs:dayTimeDuration(\"P3D\") + xs:duration(\"P3Y3M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPTY0004")
);
}
/**
*
* *******************************************************
* Test: K-YearMonthDurationSubtract-9
* Written by: <NAME>
* Date: 2007-11-22T11:31:21+01:00
* Purpose: The addition operator is not available between xs:duration and xs:dayTimeDuration.
* *******************************************************
* .
*/
@org.junit.Test
public void kYearMonthDurationSubtract9() {
final XQuery query = new XQuery(
"xs:duration(\"P3Y3M\") + xs:dayTimeDuration(\"P3D\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPTY0004")
);
}
/**
* test subtraction of zero duration from yearMonthDurations .
*/
@org.junit.Test
public void cbclSubtractYearMonthDurations001() {
final XQuery query = new XQuery(
"\n" +
" declare function local:yearMonthDuration($years as xs:integer, $months as xs:integer ) { xs:yearMonthDuration(concat('P', $years, 'Y', $months, 'M')) };\n" +
" local:yearMonthDuration(1, 1) - xs:yearMonthDuration(\"P0Y\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P1Y1M")
);
}
/**
* test subtraction of yearMonthDurations .
*/
@org.junit.Test
public void cbclSubtractYearMonthDurations002() {
final XQuery query = new XQuery(
"\n" +
" declare function local:yearMonthDuration($years as xs:integer, $months as xs:integer ) { xs:yearMonthDuration(concat('P', $years, 'Y', $months, 'M')) };\n" +
" local:yearMonthDuration(1, 1) - local:yearMonthDuration(1, 1)",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P0M")
);
}
/**
* test subtraction of large yearMonthDuration .
*/
@org.junit.Test
public void cbclSubtractYearMonthDurations003() {
final XQuery query = new XQuery(
"\n" +
" declare function local:yearMonthDuration($years as xs:integer, $months as xs:integer ) { xs:yearMonthDuration(concat('P', $years, 'Y', $months, 'M')) };\n" +
" xs:yearMonthDuration(\"-P768614336404564650Y\") - local:yearMonthDuration(768614336404564650, 1)",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("FODT0002")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-1
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function
* As per example 1 (for this function)of the F&O specs.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations1() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P2Y11M\") - xs:yearMonthDuration(\"P3Y3M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "-P4M")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-10
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with an "or" expression.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations10() {
final XQuery query = new XQuery(
"fn:string((xs:yearMonthDuration(\"P05Y02M\") - xs:yearMonthDuration(\"P03Y04M\"))) or fn:string((xs:yearMonthDuration(\"P05Y03M\") - xs:yearMonthDuration(\"P01Y03M\")))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-11
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with a "div" expression.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations11() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P42Y10M\") - xs:yearMonthDuration(\"P20Y10M\")) div (xs:yearMonthDuration(\"P20Y11M\") - xs:yearMonthDuration(\"P18Y11M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "11")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-12
* Written By: <NAME>
* Date: June 29, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" operators used
* with a boolean expression and the "fn:true" function.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations12() {
final XQuery query = new XQuery(
"fn:string((xs:yearMonthDuration(\"P10Y11M\") - xs:yearMonthDuration(\"P05Y07M\"))) and (fn:true())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-13
* Written By: <NAME>
* Date: June 29, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with the numeric-equal- operator "eq".
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations13() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P23Y11M\") - xs:yearMonthDuration(\"P23Y11M\")) eq xs:yearMonthDuration(\"P23Y11M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-14
* Written By: <NAME>
* Date: June 29, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with the numeric-equal operator "ne".
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations14() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P21Y12M\") - xs:yearMonthDuration(\"P08Y05M\")) ne xs:yearMonthDuration(\"P08Y05M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-15
* Written By: <NAME>
* Date: June 29, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with the numeric-equal operator "le".
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations15() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P10Y01M\") - xs:yearMonthDuration(\"P17Y02M\")) le xs:yearMonthDuration(\"P17Y02M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-16
* Written By: <NAME>
* Date: June 29, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with the numeric-equal operator "ge".
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations16() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P13Y09M\") - xs:yearMonthDuration(\"P18Y02M\")) ge xs:yearMonthDuration(\"P18Y02M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-2
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function
* used as part of a boolean expression (and operator) and the "fn:false" function.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations2() {
final XQuery query = new XQuery(
"fn:string(xs:yearMonthDuration(\"P10Y11M\") - xs:yearMonthDuration(\"P12Y07M\")) and fn:false()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-3
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function as
* part of a boolean expression (or operator) and the "fn:boolean" function.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations3() {
final XQuery query = new XQuery(
"fn:string(xs:yearMonthDuration(\"P20Y10M\") - xs:yearMonthDuration(\"P19Y10M\")) or fn:false()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-4
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function that
* return true and used together with fn:not.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations4() {
final XQuery query = new XQuery(
"fn:not(fn:string(xs:yearMonthDuration(\"P11Y04M\") - xs:yearMonthDuration(\"P02Y11M\")))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-5
* Written By: <NAME>
* Date: June 28 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function that
* is used as an argument to the fn:boolean function.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations5() {
final XQuery query = new XQuery(
"fn:boolean(fn:string(xs:yearMonthDuration(\"P05Y08M\") - xs:yearMonthDuration(\"P03Y06M\")))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-6
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" operator that
* is used as an argument to the fn:number function.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations6() {
final XQuery query = new XQuery(
"fn:number(xs:yearMonthDuration(\"P02Y09M\") - xs:yearMonthDuration(\"P10Y01M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "NaN")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-7
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* as an argument to the "fn:string" function).
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations7() {
final XQuery query = new XQuery(
"fn:string(xs:yearMonthDuration(\"P03Y08M\") - xs:yearMonthDuration(\"P01Y02M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P2Y6M")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-8
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function that
* returns a negative value.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations8() {
final XQuery query = new XQuery(
"(xs:yearMonthDuration(\"P10Y01M\") - xs:yearMonthDuration(\"P11Y02M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "-P1Y1M")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations-9
* Written By: <NAME>
* Date: June 28, 2005
* Purpose: Evaluates The "subtract-yearMonthDurations" function used
* together with and "and" expression.
* Apply "fn:string" function to account for new EBV.
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations9() {
final XQuery query = new XQuery(
"fn:string((xs:yearMonthDuration(\"P01Y01M\") - xs:yearMonthDuration(\"P02Y02M\"))) and fn:string((xs:yearMonthDuration(\"P02Y03M\") - xs:yearMonthDuration(\"P04Y04M\")))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations2args-1
* Written By: <NAME>
* Date: Tue Apr 12 16:29:08 GMT-05:00 2005
* Purpose: Evaluates The "op:subtract-yearMonthDurations" operator
* with the arguments set as follows:
* $arg1 = xs:yearMonthDuration(lower bound)
* $arg2 = xs:yearMonthDuration(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations2args1() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P0Y0M\") - xs:yearMonthDuration(\"P0Y0M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P0M")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations2args-2
* Written By: <NAME>
* Date: Tue Apr 12 16:29:08 GMT-05:00 2005
* Purpose: Evaluates The "op:subtract-yearMonthDurations" operator
* with the arguments set as follows:
* $arg1 = xs:yearMonthDuration(mid range)
* $arg2 = xs:yearMonthDuration(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations2args2() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P1000Y6M\") - xs:yearMonthDuration(\"P0Y0M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P1000Y6M")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations2args-3
* Written By: <NAME>
* Date: Tue Apr 12 16:29:08 GMT-05:00 2005
* Purpose: Evaluates The "op:subtract-yearMonthDurations" operator
* with the arguments set as follows:
* $arg1 = xs:yearMonthDuration(upper bound)
* $arg2 = xs:yearMonthDuration(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations2args3() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P2030Y12M\") - xs:yearMonthDuration(\"P0Y0M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P2031Y")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations2args-4
* Written By: <NAME>
* Date: Tue Apr 12 16:29:08 GMT-05:00 2005
* Purpose: Evaluates The "op:subtract-yearMonthDurations" operator
* with the arguments set as follows:
* $arg1 = xs:yearMonthDuration(lower bound)
* $arg2 = xs:yearMonthDuration(mid range)
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations2args4() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P0Y0M\") - xs:yearMonthDuration(\"P1000Y6M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "-P1000Y6M")
);
}
/**
*
* *******************************************************
* Test: op-subtract-yearMonthDurations2args-5
* Written By: <NAME>
* Date: Tue Apr 12 16:29:08 GMT-05:00 2005
* Purpose: Evaluates The "op:subtract-yearMonthDurations" operator
* with the arguments set as follows:
* $arg1 = xs:yearMonthDuration(lower bound)
* $arg2 = xs:yearMonthDuration(upper bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opSubtractYearMonthDurations2args5() {
final XQuery query = new XQuery(
"xs:yearMonthDuration(\"P0Y0M\") - xs:yearMonthDuration(\"P2030Y12M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "-P2031Y")
);
}
}
|
jianjunchu/pentaho-kettle
|
plugins/pentaho-obs-vfs/src/test/java/org/pentaho/obs/vfs/OBSFileProviderTest.java
|
/*******************************************************************************
*
*
*
* Copyright (C) 2011-2019 by Sun : http://www.kingbase.com.cn
*
*******************************************************************************
*
*
* Email : <EMAIL>
*
*
******************************************************************************/
package org.pentaho.obs.vfs;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileSystemOptions;
import org.junit.Before;
import org.junit.Test;
/**
*
*
* @author Sun
* @since 2019年8月23日
* @version
*
*/
/**
 * Unit test for {@code OBSFileProvider}: verifies that
 * {@code doCreateFileSystem} yields a non-null file system for an arbitrary
 * file name / options pair.
 */
public class OBSFileProviderTest {

  OBSFileProvider provider;

  @Before
  public void setUp() throws Exception {
    provider = new OBSFileProvider();
  }

  @Test
  public void testDoCreateFileSystem() throws Exception {
    // The provider should not care about the concrete FileName, so a plain
    // mock plus empty options is sufficient.
    final FileName anyFileName = mock(FileName.class);
    final FileSystemOptions emptyOptions = new FileSystemOptions();
    assertNotNull(provider.doCreateFileSystem(anyFileName, emptyOptions));
  }
}
|
npocmaka/Windows-Server-2003
|
inetsrv/iis/iisrearc/iisplus/mbid/mbid.cxx
|
/*++
Copyright (c) 1996 Microsoft Corporation
Module Name:
mbid.cxx
Abstract:
Builds a library that defines the MB guids.
Author:
<NAME> (TaylorW) 13-Dec-1999
Revision History:
--*/
#include <windows.h>
#include <initguid.h>
#include <iadmw.h>
|
cloudfoundry-attic/perm
|
internal/migrations/7_combine_actor_and_role_assignment_tables.go
|
package migrations
import (
"context"
"code.cloudfoundry.org/perm/internal/sqlx"
"code.cloudfoundry.org/perm/logx"
"github.com/Masterminds/squirrel"
uuid "github.com/satori/go.uuid"
)
// DDL for the consolidated assignment table. role_id_actor_hash is a MySQL
// virtual column (SHA-256 of the concatenated key parts) used to enforce
// uniqueness of (role_id, actor_id, actor_namespace) without directly
// indexing the long VARCHAR columns.
var createAssignmentTable = `
CREATE TABLE IF NOT EXISTS assignment
(
id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY,
uuid BINARY(16) NOT NULL UNIQUE,
role_id BIGINT NOT NULL,
actor_id VARCHAR(511) NOT NULL,
actor_namespace VARCHAR(2047) NOT NULL,
role_id_actor_hash VARCHAR(64) AS (SHA2(CONCAT(role_id, actor_id, actor_namespace), 256)) VIRTUAL UNIQUE
)
`

// Cascading foreign key: deleting a role also removes its assignments.
var addAssignmentRoleIDForeignKey = `
ALTER TABLE
assignment
ADD CONSTRAINT
assignment_role_id_fkey
FOREIGN KEY(role_id) REFERENCES role(id)
ON DELETE CASCADE
`

// Cleanup statements used by the up/down migrations below.
var dropActorTable = `DROP TABLE IF EXISTS actor`
var dropAssignmentTable = `DROP TABLE IF EXISTS assignment`
var dropRoleAssignmentTable = `DROP TABLE IF EXISTS role_assignment`
// combineActorAndRoleAssignmentTablesUp folds the separate actor and
// role_assignment tables into the single assignment table: it creates the new
// table, copies every (role, actor) pair across, then drops the old tables.
// Runs entirely inside the supplied transaction; returns the first error
// encountered.
func combineActorAndRoleAssignmentTablesUp(ctx context.Context, logger logx.Logger, tx *sqlx.Tx) error {
	logger = logger.WithName("create-actor-and-role-assignment-tables")
	logger.Debug(starting)
	defer logger.Debug(finished)

	// Create the new table and its role FK before copying any data into it.
	if _, err := tx.ExecContext(ctx, createAssignmentTable); err != nil {
		return err
	}
	if _, err := tx.ExecContext(ctx, addAssignmentRoleIDForeignKey); err != nil {
		return err
	}

	rows, err := squirrel.Select("role_assignment.role_id", "actor.domain_id", "actor.issuer").
		From("role_assignment").
		JoinClause("INNER JOIN actor ON role_assignment.actor_id = actor.id").
		RunWith(tx).
		QueryContext(ctx)
	if err != nil {
		return err
	}
	defer rows.Close()

	type roleAssignment struct {
		RoleID         int64
		ActorID        string
		ActorNamespace string
	}

	// Buffer all rows before issuing inserts so we do not interleave new
	// statements on the transaction while the cursor is still being consumed.
	var roleAssignments []roleAssignment
	for rows.Next() {
		ra := roleAssignment{}
		if err := rows.Scan(&ra.RoleID, &ra.ActorID, &ra.ActorNamespace); err != nil {
			return err
		}
		roleAssignments = append(roleAssignments, ra)
	}
	// BUG FIX: the original never checked rows.Err(), so an iteration error
	// would silently truncate the migrated data instead of failing the
	// migration.
	if err := rows.Err(); err != nil {
		return err
	}

	for _, ra := range roleAssignments {
		u := uuid.NewV4().Bytes()
		_, err = squirrel.Insert("assignment").
			Columns("uuid", "role_id", "actor_id", "actor_namespace").
			Values(u, ra.RoleID, ra.ActorID, ra.ActorNamespace).
			RunWith(tx).
			ExecContext(ctx)
		if err != nil {
			return err
		}
	}

	if _, err := tx.ExecContext(ctx, dropRoleAssignmentTable); err != nil {
		return err
	}
	_, err = tx.ExecContext(ctx, dropActorTable)
	return err
}
// combineActorAndRoleAssignmentTablesDown reverses the up migration: it
// recreates the actor and role_assignment tables, re-derives their contents
// from the assignment table, then drops assignment. Runs entirely inside the
// supplied transaction; returns the first error encountered.
func combineActorAndRoleAssignmentTablesDown(ctx context.Context, logger logx.Logger, tx *sqlx.Tx) error {
	logger = logger.WithName("create-actor-and-role-assignment-tables")
	logger.Debug(starting)
	defer logger.Debug(finished)

	if err := createActorsTableUp(ctx, logger, tx); err != nil {
		return err
	}
	if err := createRoleAssignmentsTableUp(ctx, logger, tx); err != nil {
		return err
	}

	// Phase 1: collect (actor_id, actor_namespace) pairs and re-populate actor.
	var entityData [][]string
	rows, err := squirrel.Select("actor_id", "actor_namespace").
		From("assignment").
		RunWith(tx).
		QueryContext(ctx)
	if err != nil {
		return err
	}
	for rows.Next() {
		var (
			actorID        string
			actorNamespace string
		)
		// BUG FIX: check the Scan error *before* using the scanned values; the
		// original appended first and could record garbage on a failed scan.
		// Also close rows on every early return — the original leaked the
		// cursor on a mid-loop error.
		if err := rows.Scan(&actorID, &actorNamespace); err != nil {
			rows.Close()
			return err
		}
		entityData = append(entityData, []string{actorID, actorNamespace})
	}
	// BUG FIX: surface iteration errors instead of silently truncating.
	if err := rows.Err(); err != nil {
		rows.Close()
		return err
	}
	rows.Close()

	for _, actor := range entityData {
		u := uuid.NewV4().Bytes()
		_, err = squirrel.Insert("actor").
			Columns("uuid", "domain_id", "issuer").
			Values(u, actor[0], actor[1]).
			RunWith(tx).
			ExecContext(ctx)
		if err != nil {
			return err
		}
	}

	// Phase 2: map the freshly inserted actors back to their roles and
	// re-populate role_assignment.
	var roleAssignmentData [][]int64
	rows, err = squirrel.Select("actor.id", "assignment.role_id").
		From("actor").
		JoinClause("INNER JOIN assignment ON actor.domain_id = assignment.actor_id and actor.issuer = assignment.actor_namespace").
		RunWith(tx).
		QueryContext(ctx)
	if err != nil {
		return err
	}
	for rows.Next() {
		var (
			actorID int64
			roleID  int64
		)
		if err := rows.Scan(&actorID, &roleID); err != nil {
			rows.Close()
			return err
		}
		roleAssignmentData = append(roleAssignmentData, []int64{actorID, roleID})
	}
	if err := rows.Err(); err != nil {
		rows.Close()
		return err
	}
	rows.Close()

	for _, roleAssignment := range roleAssignmentData {
		_, err = squirrel.Insert("role_assignment").
			Columns("actor_id", "role_id").
			Values(roleAssignment[0], roleAssignment[1]).
			RunWith(tx).
			ExecContext(ctx)
		if err != nil {
			return err
		}
	}

	if _, err := tx.ExecContext(ctx, dropAssignmentTable); err != nil {
		return err
	}
	return nil
}
|
timxor/leetcode-journal
|
solutions/LeetCode/Java/49.java
|
<filename>solutions/LeetCode/Java/49.java
__________________________________________________________________________________________________
6ms
class Solution {
    // One distinct prime per lowercase letter: the product of a word's letter
    // primes is identical exactly for anagrams (unique factorization).
    // BUG FIX: renamed from 'hash' — the original field was shadowed by a
    // local 'int hash' inside groupAnagrams.
    private static final long[] PRIMES = {
        2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 47, 53, 59, 67, 71,
        83, 89, 101, 107, 109, 113, 127, 131
    };

    /**
     * Groups the given words so that each group contains mutually
     * anagrammatic strings. Groups appear in the order of their first member;
     * words within a group keep input order.
     *
     * @param strs words consisting of lowercase letters 'a'-'z'
     * @return list of anagram groups
     */
    public List<List<String>> groupAnagrams(String[] strs) {
        List<List<String>> res = new ArrayList<>();
        Map<Long, List<String>> map = new HashMap<>();
        for (String s : strs) {
            long key = getHash(s);
            List<String> group = map.get(key);
            if (group == null) {
                group = new ArrayList<>();
                map.put(key, group);
                res.add(group);
            }
            group.add(s);
        }
        return res;
    }

    // Product of the primes of each character. BUG FIX: widened from int to
    // long — an int product overflows after roughly 10 letters, risking
    // collisions between non-anagrams.
    private long getHash(String s) {
        long hashValue = 1;
        for (int i = 0; i < s.length(); i++) {
            hashValue *= PRIMES[s.charAt(i) - 'a'];
        }
        return hashValue;
    }
}
__________________________________________________________________________________________________
7ms
class Solution {
    /**
     * Groups words into anagram classes. Two words are in the same class when
     * their letters, sorted, form the same string. Groups are listed in the
     * order their first member occurs in the input.
     *
     * @param strs input words
     * @return list of anagram groups
     */
    public List<List<String>> groupAnagrams(String[] strs) {
        Map<String, List<String>> groupsByKey = new HashMap<>();
        List<List<String>> anagramList = new ArrayList<>();
        for (String word : strs) {
            // computeIfAbsent registers a fresh bucket in the output list the
            // first time a canonical key is seen.
            List<String> bucket = groupsByKey.computeIfAbsent(sortedKey(word), key -> {
                List<String> fresh = new ArrayList<>();
                anagramList.add(fresh);
                return fresh;
            });
            bucket.add(word);
        }
        return anagramList;
    }

    // Canonical form of a word: its characters in ascending order.
    private String sortedKey(String word) {
        char[] letters = word.toCharArray();
        Arrays.sort(letters);
        return new String(letters);
    }
}
__________________________________________________________________________________________________
8ms
class Solution {
    /**
     * Groups anagrams by sorting each word's characters and using the sorted
     * string as a bucket key. Null or empty input yields an empty list.
     *
     * @param strs input words (may be null or empty)
     * @return list of anagram groups
     */
    public List<List<String>> groupAnagrams(String[] strs) {
        List<List<String>> res = new ArrayList<>();
        if (strs == null || strs.length == 0) {
            return res;
        }
        Map<String, List<String>> buckets = new HashMap<>();
        for (String word : strs) {
            char[] chars = word.toCharArray();
            Arrays.sort(chars);
            String key = String.valueOf(chars);
            // One call replaces the original containsKey/put branching.
            buckets.computeIfAbsent(key, k -> new ArrayList<>()).add(word);
        }
        return new ArrayList<>(buckets.values());
    }
}
__________________________________________________________________________________________________
38392 kb
class Solution {
    /**
     * Groups anagrams by their sorted-character signature.
     *
     * @param strs input words
     * @return list of anagram groups
     */
    public List<List<String>> groupAnagrams(String[] strs) {
        Map<String, List<String>> grouped = new HashMap<>();
        for (String word : strs) {
            char[] letters = word.toCharArray();
            Arrays.sort(letters);
            // All anagrams share the same sorted spelling, so it keys the bucket.
            grouped.computeIfAbsent(new String(letters), key -> new ArrayList<>()).add(word);
        }
        return new ArrayList<>(grouped.values());
    }
}
__________________________________________________________________________________________________
38436 kb
class Solution {
    /**
     * Groups anagrams using a letter-frequency signature instead of sorting:
     * a 26-slot char array of per-letter counts, viewed as a String, is equal
     * exactly for anagrams.
     *
     * @param strs input words of lowercase letters 'a'-'z'
     * @return list of anagram groups
     */
    public List<List<String>> groupAnagrams(String[] strs) {
        Map<String, List<String>> buckets = new HashMap<>();
        List<List<String>> result = new LinkedList<>();
        for (String word : strs) {
            char[] counts = new char[26];
            for (char letter : word.toCharArray()) {
                counts[letter - 'a']++;
            }
            String signature = new String(counts);
            buckets.computeIfAbsent(signature, key -> new LinkedList<>()).add(word);
        }
        result.addAll(buckets.values());
        return result;
    }
}
__________________________________________________________________________________________________
|
chinhung/demoshop
|
src/main/java/net/chinhung/application/endpoint/order/impl/create_order/CreateOrderService.java
|
<filename>src/main/java/net/chinhung/application/endpoint/order/impl/create_order/CreateOrderService.java
package net.chinhung.application.endpoint.order.impl.create_order;
import net.chinhung.application.endpoint.order.impl.OrderConverter;
import net.chinhung.application.endpoint.order.CreateOrder;
import net.chinhung.application.endpoint.order.OrderDTO;
import net.chinhung.application.order.Item;
import net.chinhung.application.order.Order;
import net.chinhung.application.component.order.OrderComponent;
import net.chinhung.application.component.order.Create;
import net.chinhung.core.order.CoreItem;
import net.chinhung.fundamental.aspect.CommandService;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Application service that handles the {@code CreateOrder} command:
 * translates the incoming order lines into domain items, asks the order
 * component to create the order, and maps the result to a DTO.
 */
public class CreateOrderService implements CommandService<CreateOrder, OrderDTO> {

    private final OrderComponent orderComponent;
    private final OrderConverter orderConverter;

    public CreateOrderService(
        final OrderComponent orderComponent,
        final OrderConverter orderConverter
    ) {
        this.orderComponent = orderComponent;
        this.orderConverter = orderConverter;
    }

    @Override
    public OrderDTO execute(final CreateOrder createOrder) {
        // One domain Item per incoming order line.
        final List<Item> items = createOrder.getOrderLines().stream()
            .map(line -> new Item(new CoreItem(line.getName(), line.getProductId(), line.getQuantity())))
            .collect(Collectors.toList());
        final Order order = orderComponent.create(new Create(items));
        return orderConverter.toOrderDTO(order);
    }
}
|
hyunaaaah/0000
|
src/js/markets.js
|
// Entry module for the markets page bundle: loads shared assets/behaviour
// and renders the section matching the ?id= query-string parameter.
require('bootstrap');
require('../less/markets.less');
require('../less/market-intro.less');
require('../less/common.less');
require('./common');
require('./market-tab');
require('./headline');

// Read the requested market section id from the page URL (?id=...).
var URLSearchParams = require('url-search-params');
var params = new URLSearchParams(location.search);
var marketId = params.get('id');

// Fills the .market-contents area according to marketId. Each case group
// maps a family of ids to a Handlebars template; data-driven sections also
// load a matching data module and render the template once per entry.
function initContents() {
    switch (marketId) {
        // Market information / Q&A: pick one of two static templates.
        case 'marketMI':
        case 'marketQnA':
            var MIcontents = require('../template/markets/marketMI.hbs');
            var charContents = require('../template/markets/marketQnA.hbs');
            $('.market-contents').empty();
            if (marketId === 'marketMI') {
                $('.market-contents').append(MIcontents);
            }
            else {
                $('.market-contents').append(charContents);
            }
            break;
        // Introduction pages: per-market data module + shared template.
        case 'marketIntroduce':
        case 'yydIntroduce':
        case 'ddpIntroduce':
        case 'cgcIntroduce':
        case 'banpoIntroduce':
        case 'cggjIntroduce':
            var introContents = require('./markets/introduce/' + marketId);
            var template = require('../template/markets/introduce.hbs');
            $('.market-contents').empty();
            for (var i = 0; i < introContents.length; i++) {
                var contentsHtml = template(introContents[i]);
                $('.market-contents').append(contentsHtml);
            }
            break;
        // Per-market detail pages: single static template.
        // NOTE(review): unlike the branches above, nothing calls .empty()
        // before appending here — confirm this is intended.
        case 'yydMarket':
        case 'ddpMarket':
        case 'cgcMarket':
        case 'banpoMarket':
        case 'cggjMarket':
            var template = require('../template/markets/marketDetail.hbs');
            $('.market-contents').append(template);
            break;
        // Map pages: per-market data module + shared template.
        case 'yydMap':
        case 'ddpMap':
        case 'cgcMap':
        case 'banpoMap':
        case 'cggjMap':
            var mapContents = require('./markets/map/' + marketId);
            var template = require('../template/markets/map.hbs');
            $('.market-contents').empty();
            for (var i = 0; i < mapContents.length; i++) {
                var contentsHtml = template(mapContents[i]);
                $('.market-contents').append(contentsHtml);
            }
            break;
        // Notice pages: static template plus an active-tab toggle.
        case 'yydNotice':
        case 'ddpNotice':
        case 'cgcNotice':
        case 'cggjNotice':
        case 'banpoNotice':
        case 'marketNotice':
            /*
            var noticeContents = require('./markets/notice/' + marketId);
            var template = require('../template/markets/map.hbs');
            $('.market-contents').empty();
            for (var i = 0; i < noticeContents.length; i++) {
                var contentsHtml = template(noticeContents[i]);
                $('.market-contents').append(contentsHtml);
            }
            */
            var template = require('../template/markets/notice.hbs');
            $('.market-contents').append(template);
            // Highlight the clicked tab cell; ignore clicks on the active one.
            $('.notice-list tr td').on('click', function () {
                if ($(this).hasClass('active')) {
                    return;
                }
                else {
                    var tabIndex = $(this).index();
                    var tabBtns = $(this).parent('.notice-list tr').find('td');
                    tabBtns.removeClass('active');
                    $(tabBtns[tabIndex]).addClass('active');
                }
            });
            break;
        // Sketch pages: static template; clicking a tab navigates to the
        // corresponding market's sketch page (full page load).
        case 'yydSketch':
        case 'ddpSketch':
        case 'cgcSketch':
        case 'banpoSketch':
        case 'cggjSketch':
        case 'marketSketch':
            var template = require('../template/markets/sketch.hbs');
            $('.market-contents').append(template);
            $('.sketch-list tr td').on('click', function () {
                if ($(this).hasClass('active')) {
                    return;
                }
                else {
                    var tabIndex = $(this).index();
                    var tabBtns = $(this).parent('.sketch-list tr').find('td');
                    tabBtns.removeClass('active');
                    $(tabBtns[tabIndex]).addClass('active');
                    // Map the clicked tab's element id to a destination page.
                    var ID = $(tabBtns[tabIndex]).attr('id');
                    if ( ID === 'bamMarkets') {
                        location.href = './markets.html?id=marketSketch';
                    }
                    else if ( ID === 'bamYyd') {
                        location.href = './markets.html?id=yydSketch';
                    }
                    else if ( ID === 'bamDDP') {
                        location.href = './markets.html?id=ddpSketch';
                    }
                    else if ( ID === 'bamCgc') {
                        location.href = './markets.html?id=cgcSketch';
                    }
                    else if ( ID === 'bamBanpo') {
                        location.href = './markets.html?id=banpoSketch';
                    }
                    else if ( ID === 'bamCgcj') {
                        location.href = './markets.html?id=cggjSketch';
                    }
                }
            });
            break;
        // Concert / press / media pages: single static template each.
        case 'yydConcert':
        case 'ddpConcert':
        case 'cgcConcert':
        case 'banpoConcert':
        case 'cggjConcert':
            var template = require('../template/markets/concert.hbs');
            $('.market-contents').append(template);
            break;
        case 'marketPress':
            var template = require('../template/markets/marketPress.hbs');
            $('.market-contents').append(template);
            break;
        case 'marketMedia':
            var template = require('../template/markets/marketMedia.hbs');
            $('.market-contents').append(template);
            break;
    }
}

// Render immediately on module load.
initContents();
|
dmgerman/zephyrd3
|
boards/arm/nrf52_vbluno52/board.h
|
DECL|BUT_GPIO_NAME|macro|BUT_GPIO_NAME
DECL|BUT_GPIO_PIN|macro|BUT_GPIO_PIN
DECL|LED0_GPIO_PIN|macro|LED0_GPIO_PIN
DECL|LED0_GPIO_PORT|macro|LED0_GPIO_PORT
DECL|LED_GPIO_PIN|macro|LED_GPIO_PIN
DECL|LED_GPIO_PORT|macro|LED_GPIO_PORT
DECL|SW0_GPIO_NAME|macro|SW0_GPIO_NAME
DECL|SW0_GPIO_PIN|macro|SW0_GPIO_PIN
DECL|__INC_BOARD_H|macro|__INC_BOARD_H
|
tadvi/uva
|
UVa 1548 - The Game of Master-Mind/sample/1548 - The Game of Master-Mind.cpp
|
<gh_stars>1-10
// UVa 1548 - The Game of Master-Mind.
// Given M guesses with black/white-peg feedback, reconstruct one secret code
// consistent with all of them via DFS over positions, pruning with
// incrementally-maintained per-guess match counters.
#include <stdio.h>
#include <string.h>
#include <algorithm>
using namespace std;

const int MAXM = 128;  // max number of guesses
const int MAXP = 11;   // max number of peg positions
const int MAXC = 128;  // max number of colors

int P, C, M;  // #positions, #colors, #guesses
// G[i][p]: color of guess i at position p.  CG[i][c]: count of color c in guess i.
// B[i]/W[i]: black/white feedback reported for guess i.
int B[MAXM], W[MAXM], G[MAXM][MAXP], CG[MAXM][MAXC];
// path:     candidate secret built so far (positions 0..idx-1 fixed).
// bcnt[i]:  exact position+color matches of path against guess i so far.
// bwcnt[i]: multiset color matches of path against guess i so far.
// CGcnt[c]: how many times color c has been placed in path so far.
int path[MAXP], bcnt[MAXM], bwcnt[MAXM], CGcnt[MAXC];
// B[i] = |vector_intersection(G, S)|
// W[i] = |set_intersection(G, S)| - B[i]
// ===>|set_intersection(G, S)| = B[i] + W[i]

// Returns 1 iff placing color c at position p cannot exceed any guess's
// feedback: neither the black count B[i] nor the combined count B[i]+W[i].
int checkValid(int p, int c) {
    for (int i = 0; i < M; i++) {
        if (G[i][p] == c && bcnt[i] == B[i])
            return 0; // BLACK exceeded
        if (CGcnt[c] < CG[i][c] && bwcnt[i] == B[i] + W[i])
            return 0; // |set_intersection(G, S)| > B[i] + W[i]
    }
    return 1;
}

// Commits color c at position p: updates per-guess match counters. The
// multiset match bwcnt[i] grows only while path still holds fewer copies of
// c than guess i (checked against the pre-increment CGcnt[c]).
void remove(int p, int c) {
    for (int i = 0; i < M; i++) {
        if (G[i][p] == c)
            bcnt[i]++;
        if (CGcnt[c] < CG[i][c])
            bwcnt[i]++;
    }
    CGcnt[c]++;
}

// Undoes remove(p, c). CGcnt[c] is still post-increment here, so the mirror
// condition is CGcnt[c] <= CG[i][c] (equivalent to the pre-increment '<').
void resume(int p, int c) {
    for (int i = 0; i < M; i++) {
        if (G[i][p] == c)
            bcnt[i]--;
        if (CGcnt[c] <= CG[i][c])
            bwcnt[i]--;
    }
    CGcnt[c]--;
}

// DFS over positions; at idx == P accept only if every guess's counters
// equal its feedback exactly. Returns 1 with the answer left in path[].
int dfs(int idx) {
    if (idx == P) {
        int ok = 1;
        for (int i = 0; i < M && ok; i++)
            ok &= bcnt[i] == B[i] && bwcnt[i] == B[i] + W[i];
        return ok;
    }
    for (int i = 1; i <= C; i++) {
        if (!checkValid(idx, i))
            continue;
        remove(idx, i);
        path[idx] = i;
        if (dfs(idx+1))
            return 1;
        resume(idx, i);
    }
    return 0;
}

// Reads each test case, precomputes per-guess color counts, resets the
// incremental counters, and prints a consistent code or a cheating verdict.
int main() {
    int testcase;
    scanf("%d", &testcase);
    while (testcase--) {
        scanf("%d %d %d", &P, &C, &M);
        for (int i = 0; i < M; i++) {
            memset(CG[i], 0, sizeof(CG[i]));
            for (int j = 0; j < P; j++) {
                scanf("%d", &G[i][j]);
                CG[i][G[i][j]]++;
            }
            scanf("%d %d", &B[i], &W[i]);
        }
        memset(bcnt, 0, sizeof(bcnt));
        memset(bwcnt, 0, sizeof(bwcnt));
        memset(CGcnt, 0, sizeof(CGcnt));
        int f = dfs(0);
        if (f) {
            for (int i = 0; i < P; i++)
                printf("%d%c", path[i], i == P-1 ? '\n' : ' ');
        } else {
            puts("You are cheating!");
        }
    }
    return 0;
}
|
ejkim-dev/ejkim-dev
|
TIL/Android/MyTravelDiary/MyTravelDiary/app/src/main/java/com/example/mytraveldiary/FindPWActivity.java
|
<gh_stars>0
package com.example.mytraveldiary;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.example.mytraveldiary.sharedPreferences.AccountData;
import java.util.Random;
/**
 * Screen that issues a random temporary password, stores it for the account,
 * and emails it via the Gmail app to the account's registered address.
 * Returning to this screen afterwards forces the user back to the login page.
 */
public class FindPWActivity extends AppCompatActivity {
    TextView tv_userEmail; // only the email registered to the account may receive the message
    Button bt_sendEmail;   // triggers composing/sending the email
    private Context mContext;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_find_pw);
        mContext = this;
        tv_userEmail = (TextView) findViewById(R.id.tv_userEmail);
        bt_sendEmail = (Button) findViewById(R.id.bt_sendEmail);
        // Email address handed over from the previous screen.
        try {
            final Intent intent = getIntent();
            String getUserEmail = intent.getExtras().getString("passEmail");
            tv_userEmail.setText(getUserEmail);
            // Send the temporary-password email on button press.
            bt_sendEmail.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Random rd = new Random();
                    // Bug fix: nextInt(999999)+100000 could produce up to
                    // 1099998 (7 digits); nextInt(900000)+100000 is always a
                    // 6-digit value in [100000, 999999] as intended.
                    int impw = rd.nextInt(900000) + 100000;
                    Intent email = new Intent(Intent.ACTION_SEND);
                    // Recipient: the registered address shown on screen.
                    email.putExtra(android.content.Intent.EXTRA_EMAIL, new String[] {tv_userEmail.getText().toString()});
                    // Subject/body kept verbatim (user-facing Korean text).
                    email.putExtra(Intent.EXTRA_SUBJECT, "<여행기록 임시 비밀번호 발급>");
                    email.putExtra(Intent.EXTRA_TEXT, "임시 비밀번호는 "+impw+" 입니다.");
                    // Persist the temporary password keyed by the user's email.
                    AccountData.setString(mContext, tv_userEmail.getText().toString(), impw+"");
                    email.setType("message/html"); // rfc822-style send intent
                    email.setPackage("com.google.android.gm"); // route directly to the Gmail app
                    startActivity(email);
                }
            });
        } catch (NullPointerException e) {
            // No "passEmail" extra was supplied; leave the screen inert.
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onRestart() {
        // Back from the mail app: send the user to login with the new password.
        super.onRestart();
        Intent intent = new Intent(FindPWActivity.this, LoginActivity.class);
        startActivity(intent);
        Toast.makeText(getApplicationContext(), "다시 로그인 해주세요", Toast.LENGTH_LONG).show(); // "Please log in again"
        finish(); // close this activity
    }
}
|
natdurlik/chicken-challenge-game
|
core/src/com/mygdx/game/ChangeChickenSkinCommand.java
|
package com.mygdx.game;
/** Command that swaps the player's chicken texture when executed. */
public class ChangeChickenSkinCommand implements Command {

    /**
     * Applies the skin change; the frame-delta argument is ignored because
     * the swap is instantaneous.
     */
    @Override
    public void execute(float delta) {
        final Player player = Player.getInstance();
        player.changeTexture();
    }
}
|
Mortano/Prismatic
|
PrismaticCore/Headers/Rendering/Utility/peBxDF.h
|
<gh_stars>0
#pragma once
#include "Type/peColor.h"
#include "Type\peBitmask.h"
#include "peCoreDefs.h"
#include <glm/detail/type_vec2.hpp>
#undef max
#include <gsl.h>
#include <random>
#pragma warning(push)
#pragma warning(disable : 4251)
/*
* Everything in here is taking more or less one-to-one from 'Physically based
* rendering' (<NAME>, 2010)
*/
namespace pe {
struct Sample;
class peCoordSys;
//! \brief Represents a color spectrum. We will use RGB with floating-point
//! precision here
using Spectrum_t = RGB_32BitFloat;
//! \brief Types of BRDFs and BTDFs
enum class BxDFType {
Reflection = 1 << 0,
Transmission = 1 << 1,
Diffuse = 1 << 2,
Glossy = 1 << 3,
Specular = 1 << 4,
AllTypes = Diffuse | Glossy | Specular,
AllReflection = Reflection | AllTypes,
AllTransmission = Transmission | AllTypes,
All = AllReflection | AllTransmission
};
template <> struct EnableEnumBitmask<BxDFType> : std::true_type {};
//! \brief BRDF or BTDF base class
class PE_CORE_API peBxDF {
public:
  virtual ~peBxDF() = default;
  explicit peBxDF(BxDFType type);
  bool HasFlags(BxDFType flags) const;
  auto Type() const { return _type; }
  //! \brief Evaluate this BxDF for the given outgoing and incoming vectors
  //! \param wo Outgoing vector
  //! \param wi Incoming vector
  //! \returns Value of the distribution function for the two vectors
  virtual Spectrum_t Eval(const glm::vec3 &wo, const glm::vec3 &wi) const = 0;
  //! \brief Sampling function for BxDF distributions that utilize delta
  //! distributions. Here, the incident direction wi cannot be known to the
  //! user, hence the BxDF computes it
  //! \param wo Outgoing vector
  //! \param wi Incoming vector, will be computed by the BxDF
  //! \param rnd1 First uniform random variable
  //! \param rnd2 Second uniform random variable
  //! \param pdf Probability density function for this BxDF and the given pair
  //! of vectors
  //! \returns Value of the distribution function for the two vectors
  virtual Spectrum_t Sample_f(const glm::vec3 &wo, glm::vec3 &wi,
                              const float rnd1, const float rnd2,
                              float &pdf) const;
  //! \brief Computes the hemispherical-directional reflectance, which is the
  //! total reflection in the given direction due to constant illumination over
  //! the hemisphere. Not every BxDF will be able to compute this in closed
  //! form, so they will use something like a monte carlo algorithm to compute
  //! it, hence the <paramref name="samples"/> member
  //! \param wo Outgoing vector
  //! \param samples Samples
  //! \returns Hemispherical reflection around <paramref name="wo"/>
  virtual Spectrum_t rho(const glm::vec3 &wo,
                         const gsl::span<glm::vec2> &samples) const;
  //! \brief Computes the hemispherical-hemispherical reflectance, which is the
  //! fraction of incident light reflected by the surface when the incident
  //! light is the same from all directions. Not every BxDF will be able to
  //! compute this in closed form, so they will use something like a monte carlo
  //! algorithm to compute it
  //! \param samples1 First set of samples for monte carlo method
  //! \param samples2 Second set of samples for monte carlo method
  //! \returns Direction-independent reflectance
  virtual Spectrum_t rho(const gsl::span<glm::vec2> &samples1,
                         const gsl::span<glm::vec2> &samples2) const;
  //! \brief Returns the probability density function for the given pair of
  //! vectors
  //! \param wo Outgoing vector in shading space
  //! \param wi Incoming vector in shading space
  //! \returns PDF
  virtual float Pdf(const glm::vec3 &wo, const glm::vec3 &wi) const;

private:
  const BxDFType _type;
};
#pragma region Fresnel
//! \brief Helper class to encapsulate fresnel reflectance
struct PE_CORE_API peFresnel {
virtual ~peFresnel() {}
virtual Spectrum_t Eval(float cosi) const = 0;
};
//! \brief Fresnel reflectance for a conductor
class PE_CORE_API peFresnelConductor : public peFresnel {
public:
//! \brief Initializes this FresnelConductor structure with the given index of
//! refraction and absorption
//! \param eta Index of refraction of conductor
//! \param k Absorption index of conductor
peFresnelConductor(const Spectrum_t &eta, const Spectrum_t &k);
Spectrum_t Eval(float cosi) const override;
private:
const Spectrum_t _eta;
const Spectrum_t _k;
};
//! \brief Fresnel reflectance for dielectric material
class PE_CORE_API peFresnelDielectric : public peFresnel {
public:
//! \brief Initializes this FresnelDielectric structure with the given
//! incident and transmitted medium indices of refraction
//! \param etaIncident Index of refraction for incident material
//! \param etaTransmitted Index of refraction for transmitted material
peFresnelDielectric(float etaIncident, float etaTransmitted);
Spectrum_t Eval(float cosi) const override;
private:
const float _etaIndicent, _etaTransmitted;
};
#pragma endregion
//! \brief BRDF for specular reflection
class PE_CORE_API peSpecularReflection : public peBxDF {
public:
peSpecularReflection(const Spectrum_t &spectrum, const peFresnel &fresnel);
Spectrum_t Eval(const glm::vec3 &wo, const glm::vec3 &wi) const override;
Spectrum_t Sample_f(const glm::vec3 &wo, glm::vec3 &wi, const float rnd1,
const float rnd2, float &pdf) const override;
private:
const Spectrum_t _color;
const peFresnel &_fresnel;
};
//! \brief Lambertian diffuse reflection
class PE_CORE_API peLambert : public peBxDF {
public:
explicit peLambert(const Spectrum_t &color);
Spectrum_t Eval(const glm::vec3 &wo, const glm::vec3 &wi) const override;
Spectrum_t rho(const glm::vec3 &wo,
const gsl::span<glm::vec2> &samples) const override;
Spectrum_t rho(const gsl::span<glm::vec2> &samples1,
const gsl::span<glm::vec2> &samples2) const override;
private:
const Spectrum_t _color;
};
//! \brief Random values for BSDF sampling
struct PE_CORE_API BSDFSample {
BSDFSample() = default;
BSDFSample(const glm::vec2 &dir, float component);
template <typename Rnd> explicit BSDFSample(Rnd &rng) {
std::uniform_real_distribution<float> dist{0.f, 1.f};
dir = {dist(rng), dist(rng)};
component = dist(rng);
}
glm::vec2 dir;
float component;
};
//! \brief Bidirectional scattering distribution function. Determines surface
//! properties of an object by aggregating up to MaxBxDF individual BxDFs
class PE_CORE_API BSDF {
public:
  BSDF();
  BSDF(const BSDF &other);
  BSDF(BSDF &&other) noexcept;
  BSDF &operator=(const BSDF &);
  BSDF &operator=(BSDF &&) noexcept;
  void Add(peBxDF const *bxdf);
  uint32_t NumBxDFs() const;
  uint32_t NumBxDFsWithFlags(BxDFType flags) const;
  //! \brief Evaluate the BSDF for the given set of incoming and outgoing
  //! vectors
  //! \param outgoingWorld Outgoing vector in world space
  //! \param incomingWorld Incoming vector in world space
  //! \param shadingCoordSys Shading coordinate system
  //! \param flags Types of BxDFs to sample
  //! \param geometricNormal The normal of the actual geometry for the evaluated point
  //! \returns Evaluated spectrum
  Spectrum_t Eval(const glm::vec3 &outgoingWorld,
                  const glm::vec3 &incomingWorld,
                  const peCoordSys &shadingCoordSys,
                  const glm::vec3 &geometricNormal, BxDFType flags) const;
  //! \brief Sample this BSDF by evaluating a random BxDF
  Spectrum_t Sample_f(const glm::vec3 &wo, glm::vec3 &wi,
                      const peCoordSys &shadingCoordSys,
                      const glm::vec3 &geometryNormal, const BSDFSample &sample,
                      float &pdf, BxDFType flags, BxDFType &sampledType) const;
  //! \brief Sums up the hemispherical-hemispherical reflectance values of
  //! all assigned BxDFs
  //! \param rnd Random number generator for monte carlo method
  //! \param flags Types of BxDFs to sample
  //! \param sqrtSamples Sampling parameter for monte carlo
  //! \returns Summed reflectance
  Spectrum_t rho(std::default_random_engine &rnd,
                 BxDFType flags = BxDFType::All,
                 uint32_t sqrtSamples = 6) const;
  //! \brief Sums up the hemispherical-directional reflectance values of all
  //! assigned BxDFs
  //! \param wo Outgoing direction
  //! \param rnd Random number generator for monte carlo method
  //! \param flags Types of BxDFs to sample
  //! \param sqrtSamples Sampling parameter for monte carlo
  //! \returns Summed reflectance
  Spectrum_t rho(const glm::vec3 &wo, std::default_random_engine &rnd,
                 BxDFType flags = BxDFType::All,
                 uint32_t sqrtSamples = 6) const;
  //! \brief Probability density function of this BSDF for the given pair of
  //! vectors
  //! \param wo Outgoing vector in world space
  //! \param wi Incoming vector in world space
  float Pdf(const glm::vec3 &wo, const glm::vec3 &wi,
            const peCoordSys &shadingCoordSys,
            BxDFType flags = BxDFType::All) const;

private:
  constexpr static size_t MaxBxDF = 8;
  uint32_t _numBxdfs;
  // Raw BxDF pointers; presumably non-owning (Add takes a const pointer and
  // there is no destructor) — verify against the implementation file.
  std::array<peBxDF const *, MaxBxDF> _bxdfs;
};
#pragma region HelperFunctions
//! \brief Computes fresnel reflectance for dielectric materials
//! \param cosIncident Cosine of angle of incident direction
//! \param cosTransmitted Cosine of angle of transmitted direction
//! \param etaIncident Index of refraction for incident medium
//! \param etaTransmitted Index of refraction for transmitted medium
//! \returns Fresnel reflectance
Spectrum_t PE_CORE_API FresnelDielectric(float cosIncident,
float cosTransmitted,
const Spectrum_t &etaIncident,
const Spectrum_t &etaTransmitted);
//! \brief Computes the fresnel reflectance for a conducting material
//! \param cosIndicent Cosine of the angle of incident direction
//! \param eta Index of refraction of the conductor
//! \param k Absorption coefficient
//! \returns Fresnel reflectance
Spectrum_t PE_CORE_API FresnelConductor(float cosIndicent,
const Spectrum_t &eta,
const Spectrum_t &k);
//! \brief Helper to evaluate fresnel shading for dieletric materials
//! \param cosIncident Cosine of angle of incident direction
//! \param etaIncident Index of refraction of incident medium
//! \param etaTransmitted Index of refraction of transmitted medium
//! \returns Fresnel reflectance
Spectrum_t PE_CORE_API EvalFresnelDielectric(float cosIncident,
float etaIncident,
float etaTransmitted);
#pragma endregion
} // namespace pe
#pragma warning(pop)
|
augustinebest/app
|
tests/saas-tests/dashboard/StatusPage.test.js
|
<filename>tests/saas-tests/dashboard/StatusPage.test.js
const puppeteer = require('puppeteer');
const utils = require('../../test-utils');
const init = require('../../test-init');
require('should');
// user credentials
const email = utils.generateRandomBusinessEmail();
const password = '<PASSWORD>';
const componentName = 'hackerbay';
const monitorName = 'fyipe';
const monitorName1 = 'testFyipe';
let browser, page;
// Navigates from the dashboard to the first status page in the list.
// Note: the final row click is intentionally not awaited, mirroring the
// original flow — the resulting navigation is picked up by the caller's
// next waitForSelector.
const gotoTheFirstStatusPage = async page => {
    await page.goto(utils.DASHBOARD_URL, { waitUntil: ['networkidle2'] });
    await init.pageWaitForSelector(page, '#statusPages');
    await init.page$Eval(page, '#statusPages', e => e.click());
    const firstRow = await init.pageWaitForSelector(
        page,
        '#statusPagesListContainer > tr',
        { visible: true, timeout: init.timeout }
    );
    firstRow.click();
};
describe('Status Page', () => {
const operationTimeOut = init.timeout;
beforeAll(async () => {
jest.setTimeout(init.timeout);
browser = await puppeteer.launch(utils.puppeteerLaunchConfig);
page = await browser.newPage();
await page.setUserAgent(utils.agent);
const user = {
email,
password,
};
// user
await init.registerUser(user, page);
// await init.loginUser(user, page);
//project + status page
await init.addProject(page);
await init.addStatusPageToProject('test', 'test', page);
//component + monitor
await init.addComponent(componentName, page);
await init.addNewMonitorToComponent(page, componentName, monitorName);
// Creates the second monitor
await init.addAdditionalMonitorToComponent(
page,
componentName,
monitorName1
);
});
afterAll(async done => {
await browser.close();
done();
});
test(
'should indicate that no monitor is set yet for a status page',
async done => {
await gotoTheFirstStatusPage(page);
const elem = await init.pageWaitForSelector(page, '#app-loading', {
visible: true,
timeout: init.timeout,
});
expect(elem).toBeTruthy();
const element = await init.page$Eval(page, '#app-loading', e => {
return e.innerHTML;
});
expect(element).toContain(
'No monitors are added to this status page.'
);
done();
},
operationTimeOut
);
test(
'should show error message and not submit the form if no monitor is selected and user clicks on save.',
async done => {
await gotoTheFirstStatusPage(page);
await init.pageWaitForSelector(page, '#addMoreMonitors', {
visible: true,
timeout: init.timeout,
});
await init.pageClick(page, '#addMoreMonitors');
await init.pageWaitForSelector(page, '#monitor-0');
await init.pageClick(page, '#btnAddStatusPageMonitors');
await init.pageWaitForSelector(page, '#monitor-0', {
visible: true,
timeout: init.timeout,
});
const textContent = await init.page$Eval(
page,
'#monitor-0',
e => e.textContent
);
expect(textContent.includes('A monitor must be selected.')).toEqual(
true
);
await page.reload({ waitUntil: 'networkidle2' });
const monitor = await init.pageWaitForSelector(page, '#monitor-0', {
hidden: true,
});
expect(monitor).toBeNull();
done();
},
operationTimeOut
);
test(
    'should show an error message and not submit the form if the users select the same monitor twice.',
    async done => {
        await gotoTheFirstStatusPage(page);
        await init.pageWaitForSelector(page, '#addMoreMonitors', {
            visible: true,
            timeout: init.timeout,
        });
        await init.pageClick(page, '#addMoreMonitors');
        await init.pageWaitForSelector(page, '#monitor-0');
        await init.selectDropdownValue(
            '#monitor-0 .db-select-nw',
            `${componentName} / ${monitorName}`,
            page
        );
        // Select the same monitor a second time to trigger the validation.
        await init.pageClick(page, '#addMoreMonitors');
        await init.pageWaitForSelector(page, '#monitor-1');
        await init.selectDropdownValue(
            '#monitor-1 .db-select-nw',
            `${componentName} / ${monitorName}`,
            page
        );
        await init.pageClick(page, '#btnAddStatusPageMonitors');
        await init.pageWaitForSelector(page, '#monitor-1', {
            visible: true,
            timeout: init.timeout,
        });
        const textContent = await init.page$Eval(
            page,
            '#monitor-1',
            e => e.textContent
        );
        expect(
            textContent.includes('This monitor is already selected.')
        ).toEqual(true);
        // After a reload neither row should have been persisted.
        await page.reload({ waitUntil: 'networkidle2' });
        const monitor = await init.pageWaitForSelector(page, '#monitor-0', {
            hidden: true,
        });
        expect(monitor).toBeNull();
        // Bug fix: the selector was misspelled '#montior-1', which made this
        // final assertion vacuous (a typo'd id can never match an element).
        const monitor1 = await init.pageWaitForSelector(
            page,
            '#monitor-1',
            {
                hidden: true,
            }
        );
        expect(monitor1).toBeNull();
        done();
    },
    operationTimeOut
);
test(
'should add a new monitor.',
async done => {
await gotoTheFirstStatusPage(page);
await init.pageWaitForSelector(page, '#addMoreMonitors', {
visible: true,
timeout: init.timeout,
});
await init.pageClick(page, '#addMoreMonitors');
await init.pageWaitForSelector(page, '#monitor-0');
await init.selectDropdownValue(
'#monitor-0 .db-select-nw',
`${componentName} / ${monitorName}`,
page
);
await init.pageClick(page, '#btnAddStatusPageMonitors');
await page.reload({ waitUntil: 'networkidle2' });
const elem = await init.pageWaitForSelector(page, '#monitor-0', {
visible: true,
timeout: init.timeout,
});
expect(elem).toBeDefined();
done();
},
operationTimeOut
);
test(
'should remove monitor.',
async done => {
await gotoTheFirstStatusPage(page);
await init.pageWaitForSelector(page, '#monitor-0');
await init.pageClick(page, '#delete-monitor-0');
await init.pageClick(page, '#btnAddStatusPageMonitors');
await page.reload({ waitUntil: 'networkidle2' });
const elem = await init.pageWaitForSelector(page, '#app-loading', {
visible: true,
timeout: init.timeout,
});
expect(elem).toBeTruthy();
const element = await init.page$Eval(page, '#app-loading', e => {
return e.innerHTML;
});
expect(element).toContain(
'No monitors are added to this status page.'
);
done();
},
operationTimeOut
);
// Test Splits
});
|
devovi/ia-node
|
app/iot/app/router/http/building_router.js
|
<filename>app/iot/app/router/http/building_router.js
/**
 * Registers the HTTP routes for the "building" resource on the given
 * Express app. Controllers are required lazily inside each handler,
 * matching the file's existing convention.
 *
 * Bug fix: `buildingRouter` was assigned without a declaration keyword,
 * creating an implicit global (and a ReferenceError under strict mode);
 * it is now declared with `const`.
 *
 * @param {object} app Express application instance.
 */
const buildingRouter = function (app) {
    const colors = require('colors');

    //**************************** Collections ****************************

    // GET /building/findall — fetches all documents of the collection.
    app.route('/building/findall')
        .get(function (req, res) {
            console.log(colors.bgBlue(req.method + " " + req.route.path + " " + res.statusCode));
            const findAllbuildingController = require('../../building/controller/findall_building_contr.js');
            findAllbuildingController.findall_building_contr_fn(req, res);
        });

    // GET /building/findbyid — fetches one document by the given ID.
    app.route('/building/findbyid')
        .get(function (req, res) {
            console.log(colors.bgBlue(req.method + " " + req.route.path + " " + res.statusCode));
            const findByIDbuildingController = require('../../building/controller/findbyid_building_contr.js');
            findByIDbuildingController.findbyid_building_contr_fn(req, res);
        });
};

module.exports = buildingRouter;
|
zchee/protoc-gen-jsonschema
|
vendor/github.com/golang/protobuf/proto/message_set.go
|
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proto
/*
* Support for message sets.
*/
import (
"errors"
"reflect"
"google.golang.org/protobuf/reflect/protoreflect"
)
// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID.
// A message type ID is required for storing a protocol buffer in a message set.
var errNoMessageTypeID = errors.New("proto does not have a message type ID")
// The first two types (_MessageSet_Item and messageSet)
// model what the protocol compiler produces for the following protocol message:
// message MessageSet {
// repeated group Item = 1 {
// required int32 type_id = 2;
// required string message = 3;
// };
// }
// That is the MessageSet wire format. We can't use a proto to generate these
// because that would introduce a circular dependency between it and this package.
type _MessageSet_Item struct {
TypeId *int32 `protobuf:"varint,2,req,name=type_id"`
Message []byte `protobuf:"bytes,3,req,name=message"`
}
type messageSet struct {
Item []*_MessageSet_Item `protobuf:"group,1,rep"`
XXX_unrecognized []byte
// TODO: caching?
}
// Make sure messageSet is a Message.
var _ Message = (*messageSet)(nil)
// messageTypeIder is an interface satisfied by a protocol buffer type
// that may be stored in a MessageSet.
type messageTypeIder interface {
MessageTypeId() int32
}
func (ms *messageSet) find(pb Message) *_MessageSet_Item {
mti, ok := pb.(messageTypeIder)
if !ok {
return nil
}
id := mti.MessageTypeId()
for _, item := range ms.Item {
if *item.TypeId == id {
return item
}
}
return nil
}
func (ms *messageSet) Has(pb Message) bool {
return ms.find(pb) != nil
}
func (ms *messageSet) Unmarshal(pb Message) error {
if item := ms.find(pb); item != nil {
return Unmarshal(item.Message, pb)
}
if _, ok := pb.(messageTypeIder); !ok {
return errNoMessageTypeID
}
return nil // TODO: return error instead?
}
func (ms *messageSet) Marshal(pb Message) error {
msg, err := Marshal(pb)
if err != nil {
return err
}
if item := ms.find(pb); item != nil {
// reuse existing item
item.Message = msg
return nil
}
mti, ok := pb.(messageTypeIder)
if !ok {
return errNoMessageTypeID
}
mtid := mti.MessageTypeId()
ms.Item = append(ms.Item, &_MessageSet_Item{
TypeId: &mtid,
Message: msg,
})
return nil
}
func (ms *messageSet) Reset() { *ms = messageSet{} }
func (ms *messageSet) String() string { return CompactTextString(ms) }
func (*messageSet) ProtoMessage() {}
// Support for the message_set_wire_format message option.
func skipVarint(buf []byte) []byte {
i := 0
for ; buf[i]&0x80 != 0; i++ {
}
return buf[i+1:]
}
// unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option.
func unmarshalMessageSet(buf []byte, mi Message, exts interface{}) error {
ms := new(messageSet)
if err := Unmarshal(buf, ms); err != nil {
return err
}
unrecognized := reflect.ValueOf(mi).Elem().FieldByName("XXX_unrecognized").Addr().Interface().(*[]byte)
for _, item := range ms.Item {
id := protoreflect.FieldNumber(*item.TypeId)
msg := item.Message
// Restore wire type and field number varint, plus length varint.
b := EncodeVarint(uint64(id)<<3 | WireBytes)
b = append(b, EncodeVarint(uint64(len(msg)))...)
b = append(b, msg...)
*unrecognized = append(*unrecognized, b...)
}
return unmarshalExtensions(mi, unrecognized)
}
|
Beguiled/omi
|
Unix/samples/Providers/Color/ColorBase.h
|
/* @migen@ */
/*
**==============================================================================
**
** WARNING: THIS FILE WAS AUTOMATICALLY GENERATED. PLEASE DO NOT EDIT.
**
**==============================================================================
*/
#ifndef _ColorBase_h
#define _ColorBase_h
#include <MI.h>
/*
**==============================================================================
**
** ColorBase [XYZ_ColorBase]
**
** Keys:
** Id
**
**==============================================================================
*/
typedef struct _ColorBase
{
MI_Instance __instance;
/* ColorBase properties */
/*KEY*/ MI_ConstUint32Field Id;
}
ColorBase;
typedef struct _ColorBase_Ref
{
ColorBase* value;
MI_Boolean exists;
MI_Uint8 flags;
}
ColorBase_Ref;
typedef struct _ColorBase_ConstRef
{
MI_CONST ColorBase* value;
MI_Boolean exists;
MI_Uint8 flags;
}
ColorBase_ConstRef;
typedef struct _ColorBase_Array
{
struct _ColorBase** data;
MI_Uint32 size;
}
ColorBase_Array;
typedef struct _ColorBase_ConstArray
{
struct _ColorBase MI_CONST* MI_CONST* data;
MI_Uint32 size;
}
ColorBase_ConstArray;
typedef struct _ColorBase_ArrayRef
{
ColorBase_Array value;
MI_Boolean exists;
MI_Uint8 flags;
}
ColorBase_ArrayRef;
typedef struct _ColorBase_ConstArrayRef
{
ColorBase_ConstArray value;
MI_Boolean exists;
MI_Uint8 flags;
}
ColorBase_ConstArrayRef;
MI_EXTERN_C MI_CONST MI_ClassDecl ColorBase_rtti;
/* NOTE(review): this header is generated (see the banner above); the comments
   below are review annotations only and will be lost on regeneration. */

/* Initializes `self` in-place as a ColorBase instance bound to `context`. */
MI_INLINE MI_Result MI_CALL ColorBase_Construct(
    ColorBase* self,
    MI_Context* context)
{
    return MI_ConstructInstance(context, &ColorBase_rtti,
        (MI_Instance*)&self->__instance);
}

/* Deep-copies `self` into a newly allocated instance (*newInstance). */
MI_INLINE MI_Result MI_CALL ColorBase_Clone(
    const ColorBase* self,
    ColorBase** newInstance)
{
    return MI_Instance_Clone(
        &self->__instance, (MI_Instance**)newInstance);
}

/* Returns MI_TRUE only if `self` is (a subclass of) ColorBase. */
MI_INLINE MI_Boolean MI_CALL ColorBase_IsA(
    const MI_Instance* self)
{
    MI_Boolean res = MI_FALSE;
    return MI_Instance_IsA(self, &ColorBase_rtti, &res) == MI_RESULT_OK && res;
}

/* Releases resources of a Construct-ed (stack) instance; no free(). */
MI_INLINE MI_Result MI_CALL ColorBase_Destruct(ColorBase* self)
{
    return MI_Instance_Destruct(&self->__instance);
}

/* Destructs and frees a Clone-d (heap) instance. */
MI_INLINE MI_Result MI_CALL ColorBase_Delete(ColorBase* self)
{
    return MI_Instance_Delete(&self->__instance);
}

/* Posts `self` as a result instance to the operation context. */
MI_INLINE MI_Result MI_CALL ColorBase_Post(
    const ColorBase* self,
    MI_Context* context)
{
    return MI_PostInstance(context, &self->__instance);
}

/* Sets the key property Id and marks it as present. */
MI_INLINE MI_Result MI_CALL ColorBase_Set_Id(
    ColorBase* self,
    MI_Uint32 x)
{
    ((MI_Uint32Field*)&self->Id)->value = x;
    ((MI_Uint32Field*)&self->Id)->exists = 1;
    return MI_RESULT_OK;
}

/* Clears Id (value and exists flag) back to the absent state. */
MI_INLINE MI_Result MI_CALL ColorBase_Clear_Id(
    ColorBase* self)
{
    memset((void*)&self->Id, 0, sizeof(self->Id));
    return MI_RESULT_OK;
}
/*
**==============================================================================
**
** ColorBase provider function prototypes
**
**==============================================================================
*/
/* The developer may optionally define this structure */
typedef struct _ColorBase_Self ColorBase_Self;
MI_EXTERN_C void MI_CALL ColorBase_Load(
ColorBase_Self** self,
MI_Module_Self* selfModule,
MI_Context* context);
MI_EXTERN_C void MI_CALL ColorBase_Unload(
ColorBase_Self* self,
MI_Context* context);
MI_EXTERN_C void MI_CALL ColorBase_EnumerateInstances(
ColorBase_Self* self,
MI_Context* context,
const MI_Char* nameSpace,
const MI_Char* className,
const MI_PropertySet* propertySet,
MI_Boolean keysOnly,
const MI_Filter* filter);
MI_EXTERN_C void MI_CALL ColorBase_GetInstance(
ColorBase_Self* self,
MI_Context* context,
const MI_Char* nameSpace,
const MI_Char* className,
const ColorBase* instanceName,
const MI_PropertySet* propertySet);
MI_EXTERN_C void MI_CALL ColorBase_CreateInstance(
ColorBase_Self* self,
MI_Context* context,
const MI_Char* nameSpace,
const MI_Char* className,
const ColorBase* newInstance);
MI_EXTERN_C void MI_CALL ColorBase_ModifyInstance(
ColorBase_Self* self,
MI_Context* context,
const MI_Char* nameSpace,
const MI_Char* className,
const ColorBase* modifiedInstance,
const MI_PropertySet* propertySet);
MI_EXTERN_C void MI_CALL ColorBase_DeleteInstance(
ColorBase_Self* self,
MI_Context* context,
const MI_Char* nameSpace,
const MI_Char* className,
const ColorBase* instanceName);
/*
**==============================================================================
**
** ColorBase_Class
**
**==============================================================================
*/
#ifdef __cplusplus
# include <micxx/micxx.h>
MI_BEGIN_NAMESPACE
/* C++ convenience wrapper over the C-level ColorBase instance.
   NOTE(review): generated code (see banner); comments are review-only. */
class ColorBase_Class : public Instance
{
public:

    typedef ColorBase Self;

    /* Creates an empty instance typed by the ColorBase RTTI. */
    ColorBase_Class() :
        Instance(&ColorBase_rtti)
    {
    }

    /* Wraps an existing C instance; keysOnly restricts to key properties. */
    ColorBase_Class(
        const ColorBase* instanceName,
        bool keysOnly) :
        Instance(
            &ColorBase_rtti,
            &instanceName->__instance,
            keysOnly)
    {
    }

    ColorBase_Class(
        const MI_ClassDecl* clDecl,
        const MI_Instance* instance,
        bool keysOnly) :
        Instance(clDecl, instance, keysOnly)
    {
    }

    ColorBase_Class(
        const MI_ClassDecl* clDecl) :
        Instance(clDecl)
    {
    }

    /* Assignment shares the underlying instance (reference copy). */
    ColorBase_Class& operator=(
        const ColorBase_Class& x)
    {
        CopyRef(x);
        return *this;
    }

    ColorBase_Class(
        const ColorBase_Class& x) :
        Instance(x)
    {
    }

    static const MI_ClassDecl* GetClassDecl()
    {
        return &ColorBase_rtti;
    }

    //
    // ColorBase_Class.Id
    // Accessors locate the field by its offset in the C struct layout.
    //

    const Field<Uint32>& Id() const
    {
        const size_t n = offsetof(Self, Id);
        return GetField<Uint32>(n);
    }

    void Id(const Field<Uint32>& x)
    {
        const size_t n = offsetof(Self, Id);
        GetField<Uint32>(n) = x;
    }

    const Uint32& Id_value() const
    {
        const size_t n = offsetof(Self, Id);
        return GetField<Uint32>(n).value;
    }

    void Id_value(const Uint32& x)
    {
        const size_t n = offsetof(Self, Id);
        GetField<Uint32>(n).Set(x);
    }

    bool Id_exists() const
    {
        const size_t n = offsetof(Self, Id);
        return GetField<Uint32>(n).exists ? true : false;
    }

    void Id_clear()
    {
        const size_t n = offsetof(Self, Id);
        GetField<Uint32>(n).Clear();
    }
};
typedef Array<ColorBase_Class> ColorBase_ClassA;
MI_END_NAMESPACE
#endif /* __cplusplus */
#endif /* _ColorBase_h */
|
MrDML/LinKingSDKWLZG
|
LinKingSDK/Products/LinKingSDK.framework/Headers/LKOrderApi.h
|
<filename>LinKingSDK/Products/LinKingSDK.framework/Headers/LKOrderApi.h
//
// LKOrderApi.h
// LinKingSDK
//
// Created by leoan on 2020/7/22.
// Copyright © 2020 <EMAIL>. All rights reserved.
//
#import "LKBaseApi.h"
NS_ASSUME_NONNULL_BEGIN
/// Order-related API endpoints. All methods are class-level and report
/// completion through an optional callback (error-first convention).
@interface LKOrderApi : LKBaseApi

/// Queries order records — presumably for the given date/month; verify
/// the expected formats of `fullDate` and `month` against the server API.
+ (void)orderRecordQuery:(NSString *)fullDate month:(NSString *)month complete:(void(^_Nullable)(NSError *error,NSArray *records))complete;

/// Creates an order of the given type with request parameters `parames`.
+ (void)createOrderType:(NSString *)type withParameters:(NSDictionary *)parames complete:(void(^_Nullable)(NSError *error, NSDictionary*result))complete;

/// Finalizes an Apple (StoreKit) order by verifying `receipt`;
/// `subscribe` presumably distinguishes subscription purchases — confirm.
+ (void)appleFinishOrderNum:(NSString *)orderNum receipt:(NSString *)receipt subscribe:(BOOL)subscribe complete:(void(^_Nullable)(NSError *error, NSDictionary*result))complete;

/// Fetches the Apple product catalog.
/// NOTE(review): "fetcht...Datas" typo is part of the public selector and
/// cannot be renamed without breaking callers.
+ (void)fetchtAppleProductDatasComplete:(void(^_Nullable)(NSError *error, NSArray*results))complete;

/// Queries the state of a subscription product by its identifier.
+ (void)querySubscribeProduct:(NSString *)productId Complete:(void(^_Nullable)(NSError *_Nullable error, NSDictionary*results))complete;

@end
NS_ASSUME_NONNULL_END
|
TRibeiro94/ISEP-ARQSI-2020
|
Semester_Project/SPA/src/components/showPathsComp/ShowPathsComp.js
|
import React, { Component } from "react";
import { Table, Tag, Button } from "antd";
import masterDataRede from "../../apis/masterDataRede";
class ShowPathsComp extends Component {
constructor(props) {
super(props);
this.state = {
paths: null,
pathNodes: null,
nodes: null
};
this.getPathsRequest();
this.getPathNodesRequest();
this.getNodesRequest();
}
getPathsRequest = async () => {
const response = await masterDataRede.get("/paths");
this.setState({
paths: response.data.paths,
});
};
getPathNodesRequest = async () => {
const response = await masterDataRede.get("/pathNodes/complete");
this.setState({
pathNodes: response.data.pathNodes,
});
};
getNodesRequest = async () => {
const response = await masterDataRede.get("/nodes/complete");
this.setState({
nodes: response.data.nodes,
});
};
reload = async () => {
await this.getPathsRequest();
await this.getPathNodesRequest();
await this.getNodesRequest();
}
render() {
let pageContent = <div></div>;
if (this.props.display) {
const columns = [
{
title: 'Name',
dataIndex: 'key',
key: 'key',
},
{
title: 'Nodes',
key: 'pathNodes',
dataIndex: 'pathNodes',
render: pathNodes => (
<>
{pathNodes.map(pathNode => {
let color = 'purple';
return (
<Tag color={color} key={pathNode}>
{pathNode.toUpperCase()}
</Tag>
);
})}
</>
),
}
];
var arrayTemp = [];
const data = [];
this.state.paths.map(async (path) => {
arrayTemp = [];
for(var i = 0; i < path.pathNodes.length; i++){
const pathNodeByID = this.state.pathNodes.find(pathNode => pathNode._id === path.pathNodes[i]);
const nodeByID = this.state.nodes.find(node => node._id === pathNodeByID.node);
arrayTemp.push(nodeByID.shortName)
}
data.push({
key: path.key,
pathNodes: arrayTemp
});
})
pageContent = (
<div>
<p><Button onClick={this.reload} style={{width: 75, height: 30}} type="primary">Reload</Button></p>
<p><Table columns={columns} dataSource={data} /></p>
</div>
);
}
return <div>{pageContent}</div>;
}
}
export default ShowPathsComp;
|
anruky/Zeta
|
zds-server/src/main/java/com/ebay/dss/zds/runner/CustomizableThreadFactory.java
|
<reponame>anruky/Zeta
package com.ebay.dss.zds.runner;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
import java.io.Serializable;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A {@link ThreadFactory} that creates threads with a configurable name
 * prefix, priority, daemon flag and (optionally) thread group.
 *
 * <p>Thread names are {@code <prefix><n>} with {@code n} starting at 1.
 * Configure the factory before handing it to an executor; the setters are
 * not synchronized.
 *
 * Created by tatian on 2020-09-07.
 */
public class CustomizableThreadFactory implements ThreadFactory, Serializable {

  /** Prefix prepended to every created thread's name. */
  private String threadNamePrefix;

  /** Priority for new threads; defaults to {@link Thread#NORM_PRIORITY}. */
  private int threadPriority = Thread.NORM_PRIORITY;

  /** Whether new threads are daemon threads. */
  private boolean daemon = false;

  /** Group for new threads; null means the creating thread's group. */
  @Nullable
  private ThreadGroup threadGroup;

  /** Monotonic counter used to build unique thread-name suffixes. */
  private final AtomicInteger threadCount = new AtomicInteger(0);

  /** Creates a factory whose prefix is derived from the concrete class name. */
  public CustomizableThreadFactory() {
    this.threadNamePrefix = this.getDefaultThreadNamePrefix();
  }

  /**
   * @param threadNamePrefix prefix for thread names; when null, falls back
   *     to the class-derived default
   */
  public CustomizableThreadFactory(@Nullable String threadNamePrefix) {
    this.threadNamePrefix = threadNamePrefix != null ? threadNamePrefix : this.getDefaultThreadNamePrefix();
  }

  public void setThreadNamePrefix(@Nullable String threadNamePrefix) {
    this.threadNamePrefix = threadNamePrefix != null ? threadNamePrefix : this.getDefaultThreadNamePrefix();
  }

  public String getThreadNamePrefix() {
    return this.threadNamePrefix;
  }

  public void setThreadPriority(int threadPriority) {
    this.threadPriority = threadPriority;
  }

  public int getThreadPriority() {
    return this.threadPriority;
  }

  public void setDaemon(boolean daemon) {
    this.daemon = daemon;
  }

  public boolean isDaemon() {
    return this.daemon;
  }

  /** Creates (and uses) a fresh thread group with the given name. */
  public void setThreadGroupName(String name) {
    this.threadGroup = new ThreadGroup(name);
  }

  public void setThreadGroup(@Nullable ThreadGroup threadGroup) {
    this.threadGroup = threadGroup;
  }

  @Nullable
  public ThreadGroup getThreadGroup() {
    return this.threadGroup;
  }

  /**
   * Builds a thread configured with this factory's group, name, priority
   * and daemon flag. The thread is not started.
   */
  public Thread createThread(Runnable runnable) {
    Thread thread = new Thread(this.getThreadGroup(), runnable, this.nextThreadName());
    thread.setPriority(this.getThreadPriority());
    thread.setDaemon(this.isDaemon());
    return thread;
  }

  /** Next unique thread name: prefix + 1-based counter. */
  protected String nextThreadName() {
    return this.getThreadNamePrefix() + this.threadCount.incrementAndGet();
  }

  /** Default prefix, e.g. {@code "CustomizableThreadFactory-"}. */
  protected String getDefaultThreadNamePrefix() {
    return ClassUtils.getShortName(this.getClass()) + "-";
  }

  @Override
  public Thread newThread(Runnable runnable) {
    return this.createThread(runnable);
  }
}
|
acidicMercury8/xray-1.0
|
sdk/MagicSoftware/FreeMagic/Source/Numerics/MgcSpecialFunction.cpp
|
// Magic Software, Inc.
// http://www.magic-software.com
// Copyright (c) 2000-2002. All Rights Reserved
//
// Source code from Magic Software is supplied under the terms of a license
// agreement and may not be copied or disclosed except in accordance with the
// terms of that agreement. The various license agreements may be found at
// the Magic Software web site. This file is subject to the license
//
// FREE SOURCE CODE
// http://www.magic-software.com/License/free.pdf
#include "MgcSpecialFunction.h"
using namespace Mgc;
//----------------------------------------------------------------------------
// Natural log of the Gamma function via a Lanczos-style series
// (coefficients match Numerical Recipes' "gammln"); single precision.
// Valid for fX > 0 — TODO confirm callers never pass non-positive values.
Real SpecialFunction::LogGamma (Real fX)
{
    static const Real s_afCoeff[6] =
    {
        76.18009173f, -86.50532033f, 24.01409822f, -1.231739516f,
        0.120858003e-2f, -0.536382e-5f
    };

    fX -= 1.0f;
    Real fTmp = fX + 5.5f;
    fTmp -= (fX+0.5f)*Math::Log(fTmp);
    // Accumulate the rational series term by term.
    Real fSeries = 1.0f;
    for (int j = 0; j <= 5; j++)
    {
        fX += 1.0f;
        fSeries += s_afCoeff[j]/fX;
    }
    // 2.50662827465 = sqrt(2*pi).
    return -fTmp + Math::Log(2.50662827465f*fSeries);
}
//----------------------------------------------------------------------------
// Gamma(x) = exp(LogGamma(x)); inherits LogGamma's domain and accuracy.
Real SpecialFunction::Gamma (Real fX)
{
    return Math::Exp(LogGamma(fX));
}
//----------------------------------------------------------------------------
// Regularized lower incomplete gamma P(a,x) by series expansion.
// Converges fastest for x < a+1 (cf. Numerical Recipes "gser").
Real SpecialFunction::IncompleteGammaS (Real fA, Real fX)
{
    const int iMaxIterations = 100;
    const Real fTolerance = 3e-07f;

    if ( fX > 0.0f )
    {
        Real fAp = fA;
        Real fSum = 1.0f/fA, fDel = fSum;
        for (int i = 1; i <= iMaxIterations; i++)
        {
            fAp += 1.0f;
            fDel *= fX/fAp;
            fSum += fDel;
            // Stop when the next term is negligible relative to the sum.
            if ( Math::FAbs(fDel) < Math::FAbs(fSum)*fTolerance )
            {
                Real fArg = -fX+fA*Math::Log(fX)-LogGamma(fA);
                return fSum*Math::Exp(fArg);
            }
        }
    }

    if ( fX == 0.0f )
        return 0.0f;

    return Math::MAX_REAL;  // error sentinel: LogGamma not defined for x < 0
}
//----------------------------------------------------------------------------
// Regularized UPPER incomplete gamma Q(a,x) by continued fraction.
// Converges fastest for x >= a+1 (cf. Numerical Recipes "gcf").
Real SpecialFunction::IncompleteGammaCF (Real fA, Real fX)
{
    const int iMaxIterations = 100;
    const Real fTolerance = 3e-07f;

    Real fA0 = 1.0f, fA1 = fX;
    Real fB0 = 0, fB1 = 1.0f;
    Real fGold = 0.0, fFac = 1.0f;

    for (int i = 1; i <= iMaxIterations; i++)
    {
        Real fI = (Real) i;
        Real fImA = fI - fA;
        // Recurrence for the continued-fraction convergents; fFac rescales
        // to avoid overflow.
        fA0 = (fA1 + fA0*fImA)*fFac;
        fB0 = (fB1 + fB0*fImA)*fFac;
        Real fItF = fI*fFac;
        fA1 = fX*fA0 + fItF*fA1;
        fB1 = fX*fB0 + fItF*fB1;
        if ( fA1 != 0.0f )
        {
            fFac = 1.0f/fA1;
            Real fG = fB1*fFac;
            // Converged when successive convergents agree to tolerance.
            if ( Math::FAbs((fG-fGold)/fG) < fTolerance)
            {
                Real fArg = -fX + fA*Math::Log(fX) - LogGamma(fA);
                return fG*Math::Exp(fArg);
            }
            fGold = fG;
        }
    }

    return Math::MAX_REAL;  // numerical error if you get here
}
//----------------------------------------------------------------------------
// Regularized lower incomplete gamma P(a,x), dispatching to whichever of
// the two expansions converges faster for the given arguments.
Real SpecialFunction::IncompleteGamma (Real fA, Real fX)
{
    if ( fX < 1.0f + fA )
        return IncompleteGammaS(fA,fX);
    else
        return 1.0f-IncompleteGammaCF(fA,fX);
}
//----------------------------------------------------------------------------
// erf(x) = 1 - erfc(x); accuracy inherited from Erfc's approximation.
Real SpecialFunction::Erf (Real fX)
{
    return 1.0f-Erfc(fX);
}
//----------------------------------------------------------------------------
// Complementary error function via the rational/exponential approximation
// of Numerical Recipes' "erfcc" (fractional error below ~1.2e-7).
Real SpecialFunction::Erfc (Real fX)
{
    static const Real s_afCoeff[10] =
    {
        -1.26551223f, 1.00002368f, 0.37409196f, 0.09678418f, -0.18628806f,
        0.27886807f, -1.13520398f, 1.48851587f, -0.82215223f, 0.17087277f
    };

    Real fZ = Math::FAbs(fX);
    Real fT = 1.0f/(1.0f+0.5f*fZ);

    // Horner evaluation of the degree-9 polynomial in fT: seed with the
    // highest-order coefficient, then fold in s_afCoeff[8]..s_afCoeff[0].
    // BUG FIX: the loop previously started at i = 9, counting s_afCoeff[9]
    // twice (once as the seed, once in the first iteration). That inflated
    // the exponent by c9*t^10, e.g. Erfc(0) evaluated to ~1.186 instead
    // of exactly 1.
    Real fSum = s_afCoeff[9];
    for (int i = 8; i >= 0; i--)
        fSum = fT*fSum + s_afCoeff[i];

    Real fResult = fT*Math::Exp(-fZ*fZ + fSum);
    // Use the symmetry erfc(-x) = 2 - erfc(x) for negative inputs.
    return fX >= 0.0f ? fResult : 2.0f - fResult;
}
//----------------------------------------------------------------------------
// Modified Bessel function of the first kind, order 0 (I0).
// Polynomial approximations from Abramowitz & Stegun / Numerical Recipes:
// a direct polynomial for |x| <= 3.75, and an asymptotic form
// exp(x)/sqrt(x) * poly(3.75/x) otherwise.
Real SpecialFunction::ModBessel0 (Real fX)
{
    if ( fX < 0.0f )  // function is even, so work with |x|
        fX = -fX;

    Real fT, fResult;
    int i;

    if ( fX <= 3.75f )
    {
        static const Real s_afCoeff[7] =
        {
            +1.0000000f, +3.5156229f, +3.0899424f, +1.2067492f, +0.2659732f,
            +0.0360768f, +0.0045813f
        };
        fT = fX/3.75f;
        Real fT2 = fT*fT;
        // Horner evaluation of the polynomial in t^2.
        fResult = s_afCoeff[6];
        for (i = 5; i >= 0; i--)
        {
            fResult *= fT2;
            fResult += s_afCoeff[i];
        }
        // |error| < 1.6e-07
    }
    else
    {
        static const Real s_afCoeff[9] =
        {
            +0.39894228f, +0.01328592f, +0.00225319f, -0.00157565f,
            +0.00916281f, -0.02057706f, +0.02635537f, -0.01647633f,
            +0.00392377f
        };
        fT = fX/3.75f;
        Real fInvT = 1.0f/fT;
        // Horner evaluation of the polynomial in 1/t.
        fResult = s_afCoeff[8];
        for (i = 7; i >= 0; i--)
        {
            fResult *= fInvT;
            fResult += s_afCoeff[i];
        }
        // Asymptotic prefactor exp(x)/sqrt(x); may overflow for large x —
        // TODO confirm callers keep x in range.
        fResult *= Math::Exp(fX);
        fResult /= Math::Sqrt(fX);
        // |error| < 1.9e-07
    }

    return fResult;
}
//----------------------------------------------------------------------------
// Modified Bessel function of the first kind, order 1 (I1).
// Same two-regime approximation as ModBessel0; I1 is odd, so the sign of
// the input is factored out and reapplied at the end.
Real SpecialFunction::ModBessel1 (Real fX)
{
    int iSign;
    if ( fX > 0.0f )
    {
        iSign = 1;
    }
    else if ( fX < 0.0f )
    {
        fX = -fX;
        iSign = -1;
    }
    else
    {
        // I1(0) = 0 exactly.
        return 0.0f;
    }

    Real fT, fResult;
    int i;

    if ( fX <= 3.75f )
    {
        static const Real s_afCoeff[7] =
        {
            +0.50000000f, +0.87890549f, +0.51498869f, +0.15084934f,
            +0.02658733f, +0.00301532f, +0.00032411f
        };
        fT = fX/3.75f;
        Real fT2 = fT*fT;
        fResult = s_afCoeff[6];
        for (i = 5; i >= 0; i--)
        {
            fResult *= fT2;
            fResult += s_afCoeff[i];
        }
        fResult *= fX;
        // |error| < 8e-09
    }
    else
    {
        static const Real s_afCoeff[9] =
        {
            +0.39894228f, -0.03988024f, -0.00362018f, +0.00163801f,
            -0.01031555f, +0.02282967f, -0.02895312f, +0.01787654f,
            -0.00420059f
        };
        fT = fX/3.75f;
        Real fInvT = 1.0f/fT;
        fResult = s_afCoeff[8];
        for (i = 7; i >= 0; i--)
        {
            fResult *= fInvT;
            fResult += s_afCoeff[i];
        }
        fResult *= Math::Exp(fX);
        fResult /= Math::Sqrt(fX);
        // |error| < 2.2e-07
    }

    // Restore the sign stripped from the (odd) input.
    fResult *= iSign;
    return fResult;
}
//----------------------------------------------------------------------------
|
ahodanenok/gwt-rpc-stub
|
src/main/java/ahodanenok/gwt/stub/gui/dialog/ServiceStubDialog.java
|
<filename>src/main/java/ahodanenok/gwt/stub/gui/dialog/ServiceStubDialog.java
package ahodanenok.gwt.stub.gui.dialog;
import ahodanenok.gwt.stub.gui.component.ComponentUtils;
import ahodanenok.gwt.stub.core.Profile;
import ahodanenok.gwt.stub.core.ServiceStub;
import ahodanenok.gwt.stub.core.Stubs;
import ahodanenok.gwt.stub.core.StubsException;
import ahodanenok.gwt.stub.gui.StubsExceptionHandler;
import javafx.collections.FXCollections;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.control.*;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Priority;
import javafx.util.Callback;
import javafx.util.StringConverter;
import org.apache.commons.lang3.StringUtils;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Optional;
/**
 * Modal dialog for creating or editing a {@link ServiceStub}.
 *
 * <p>Construct with a null stub to create a new one, or with an existing
 * stub to edit it (profile becomes read-only in edit mode). The dialog's
 * result is the configured stub, or null if cancelled or invalid.
 */
public final class ServiceStubDialog extends Dialog<ServiceStub> {
    // True when editing an existing stub (constructor received non-null stub).
    private boolean edit;
    private Stubs stubs;
    private StubsExceptionHandler exceptionHandler;
    // Form controls, built once in createForm().
    private TextField idTextField;
    private ComboBox<Profile> profileComboBox;
    private TextField serviceClassTextField;
    private TextField pathTextField;

    /** Creation-mode constructor (no existing stub). */
    public ServiceStubDialog(Stubs stubs, StubsExceptionHandler exceptionHandler) {
        this(stubs, null, exceptionHandler);
    }

    /**
     * @param stub existing stub to edit, or null to create a new one
     */
    public ServiceStubDialog(Stubs stubs, ServiceStub stub, StubsExceptionHandler exceptionHandler) {
        this.stubs = stubs;
        this.exceptionHandler = exceptionHandler;
        this.edit = stub != null;
        // Convert the pressed button into the dialog result: build a stub
        // from the form on OK, return null otherwise.
        setResultConverter(new Callback<ButtonType, ServiceStub>() {
            @Override
            public ServiceStub call(ButtonType param) {
                if (param == ButtonType.OK) {
                    ServiceStub resultStub = new ServiceStub();
                    // Keep the original id/profile when editing; otherwise a
                    // fresh id is generated elsewhere and the selected
                    // profile is used.
                    resultStub.setId(stub != null ? stub.getId() : null);
                    resultStub.setProfileId(stub != null ? stub.getProfileId() : profileComboBox.getValue().getId());
                    try {
                        resultStub.setServiceClass(stubs.loadServiceClass(serviceClassTextField.getText()));
                    } catch (ClassNotFoundException e) {
                        // validate() already checked loadability; this is a
                        // defensive fallback that aborts the result.
                        exceptionHandler.handleNotifyUser(e);
                        return null;
                    }
                    resultStub.setUrl(pathTextField.getText());
                    return resultStub;
                } else {
                    return null;
                }
            }
        });
        createForm();
        if (edit) {
            initForEdit(stub);
        } else {
            initForCreate();
        }
    }

    /** Builds the form grid, buttons and the OK-button validation filter. */
    private void createForm() {
        Label idTitleLabel = ComponentUtils.createItemTitleLabel("ID:");
        idTextField = new TextField();
        // The id is assigned by the system, never typed by the user.
        idTextField.setEditable(false);
        idTextField.setDisable(true);
        Label profileTitleLabel = ComponentUtils.createItemTitleLabel("Profile:");
        profileComboBox = new ComboBox<>();
        profileComboBox.setMaxWidth(Double.MAX_VALUE);
        // Display profiles by their display name; the combo box is
        // non-editable so fromString is never needed.
        profileComboBox.setConverter(new StringConverter<Profile>() {
            @Override
            public String toString(Profile object) {
                return object.getDisplayName();
            }
            @Override
            public Profile fromString(String string) {
                throw new UnsupportedOperationException();
            }
        });
        Label classTitleLabel = ComponentUtils.createMandatoryItemTitleLabel("Class", ":");
        Label urlTitleLabel = ComponentUtils.createMandatoryItemTitleLabel("Path", ":");
        serviceClassTextField = new TextField();
        pathTextField = new TextField();
        // Two-column grid: titles on the left, growing inputs on the right.
        GridPane formPane = new GridPane();
        formPane.setHgap(5);
        formPane.setVgap(5);
        formPane.add(idTitleLabel, 0, 0);
        formPane.add(idTextField, 1, 0);
        GridPane.setHgrow(idTextField, Priority.ALWAYS);
        formPane.add(profileTitleLabel, 0, 1);
        formPane.add(profileComboBox, 1, 1);
        GridPane.setHgrow(profileComboBox, Priority.ALWAYS);
        formPane.add(classTitleLabel, 0, 2);
        formPane.add(serviceClassTextField, 1, 2);
        GridPane.setHgrow(serviceClassTextField, Priority.ALWAYS);
        formPane.add(urlTitleLabel, 0, 3);
        formPane.add(pathTextField, 1, 3);
        GridPane.setHgrow(pathTextField, Priority.ALWAYS);
        getDialogPane().setContent(formPane);
        getDialogPane().getButtonTypes().add(ButtonType.OK);
        getDialogPane().getButtonTypes().add(ButtonType.CANCEL);
        // Consume the OK action when validation fails so the dialog stays
        // open instead of closing with an invalid result.
        getDialogPane()
                .lookupButton(ButtonType.OK)
                .addEventFilter(ActionEvent.ACTION, new EventHandler<ActionEvent>() {
                    @Override
                    public void handle(ActionEvent event) {
                        if (!validate()) {
                            event.consume();
                        }
                    }
                });
        setWidth(700);
        getDialogPane().setPrefWidth(700);
    }

    /** Creation mode: all profiles selectable, active profile preselected. */
    private void initForCreate() {
        setTitle("Create service stub");
        idTextField.setText("< generated automatically >");
        try {
            List<Profile> profiles = stubs.listProfiles();
            profileComboBox.setItems(FXCollections.observableList(profiles));
            profileComboBox.setValue(stubs.getActiveProfile());
        } catch (StubsException e) {
            exceptionHandler.handleNotifyUser(e);
        }
    }

    /** Edit mode: populate from the stub; the profile cannot be changed. */
    private void initForEdit(ServiceStub stub) {
        setTitle("Edit service stub");
        idTextField.setText(stub.getId());
        profileComboBox.setDisable(true);
        profileComboBox.setEditable(false);
        try {
            Profile p = stubs.getProfile(stub.getProfileId());
            profileComboBox.setItems(FXCollections.observableArrayList(p));
            profileComboBox.setValue(p);
        } catch (StubsException e) {
            exceptionHandler.handleNotifyUser(e);
        }
    }

    /**
     * Validates the form, marking every invalid control, and returns true
     * only when all checks pass: profile selected, class name present and
     * loadable (and, when creating, not already stubbed), path present and
     * a syntactically valid URI.
     */
    private boolean validate() {
        boolean valid = true;
        // Clear previous markers so stale errors do not linger.
        ComponentUtils.clearInvalid(profileComboBox);
        ComponentUtils.clearInvalid(serviceClassTextField);
        ComponentUtils.clearInvalid(pathTextField);
        if (profileComboBox.getValue() == null) {
            ComponentUtils.markInvalid(profileComboBox, "Profile isn't selected");
            valid = false;
        }
        String className = serviceClassTextField.getText();
        if (StringUtils.isBlank(className)) {
            ComponentUtils.markInvalid(serviceClassTextField, "Service class name is empty");
            valid = false;
        }
        Class<?> serviceClass = null;
        if (StringUtils.isNotBlank(serviceClassTextField.getText())) {
            try {
                serviceClass = stubs.loadServiceClass(className);
            } catch (ClassNotFoundException e) {
                ComponentUtils.markInvalid(serviceClassTextField, "Service class can't be loaded: not present in classpath");
                valid = false;
            }
        }
        try {
            // Duplicate-stub check applies only when creating a new stub.
            if (!edit && serviceClass != null && stubs.getServiceStub(serviceClass) != null) {
                ComponentUtils.markInvalid(serviceClassTextField, "Stub already exists for this class");
                valid = false;
            }
        } catch (StubsException e) {
            exceptionHandler.handle(e);
        }
        String url = pathTextField.getText();
        if (StringUtils.isBlank(url)) {
            ComponentUtils.markInvalid(pathTextField, "Service path is empty");
            valid = false;
        }
        if (StringUtils.isNotBlank(pathTextField.getText())) {
            try {
                // Parsed only to validate syntax; the result is discarded.
                new URI(pathTextField.getText());
            } catch (URISyntaxException e) {
                ComponentUtils.markInvalid(pathTextField, "Invalid path format: " + e.getMessage());
                valid = false;
            }
        }
        return valid;
    }

    /**
     * Shows the dialog and blocks until it is closed.
     *
     * @return the configured stub, or null if the dialog was cancelled
     */
    public ServiceStub getServiceStub() {
        Optional<ServiceStub> result = showAndWait();
        if (result.isPresent()) {
            return result.get();
        } else {
            return null;
        }
    }
}
|
twigkit/aws-sdk-java
|
aws-java-sdk-support/src/main/java/com/amazonaws/services/support/model/TrustedAdvisorResourcesSummary.java
|
<filename>aws-java-sdk-support/src/main/java/com/amazonaws/services/support/model/TrustedAdvisorResourcesSummary.java
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.support.model;
import java.io.Serializable;
import java.util.Objects;
/**
* <p>
* Details about AWS resources that were analyzed in a call to Trusted Advisor
* <a>DescribeTrustedAdvisorCheckSummaries</a>.
* </p>
*/
public class TrustedAdvisorResourcesSummary implements Serializable, Cloneable {
/**
* <p>
* The number of AWS resources that were analyzed by the Trusted Advisor
* check.
* </p>
*/
private Long resourcesProcessed;
/**
* <p>
* The number of AWS resources that were flagged (listed) by the Trusted
* Advisor check.
* </p>
*/
private Long resourcesFlagged;
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
* </p>
*/
private Long resourcesIgnored;
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because they were
* marked as suppressed by the user.
* </p>
*/
private Long resourcesSuppressed;
/**
* <p>
* The number of AWS resources that were analyzed by the Trusted Advisor
* check.
* </p>
*
* @param resourcesProcessed
* The number of AWS resources that were analyzed by the Trusted
* Advisor check.
*/
public void setResourcesProcessed(Long resourcesProcessed) {
this.resourcesProcessed = resourcesProcessed;
}
/**
* <p>
* The number of AWS resources that were analyzed by the Trusted Advisor
* check.
* </p>
*
* @return The number of AWS resources that were analyzed by the Trusted
* Advisor check.
*/
public Long getResourcesProcessed() {
return this.resourcesProcessed;
}
/**
* <p>
* The number of AWS resources that were analyzed by the Trusted Advisor
* check.
* </p>
*
* @param resourcesProcessed
* The number of AWS resources that were analyzed by the Trusted
* Advisor check.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public TrustedAdvisorResourcesSummary withResourcesProcessed(
Long resourcesProcessed) {
setResourcesProcessed(resourcesProcessed);
return this;
}
/**
* <p>
* The number of AWS resources that were flagged (listed) by the Trusted
* Advisor check.
* </p>
*
* @param resourcesFlagged
* The number of AWS resources that were flagged (listed) by the
* Trusted Advisor check.
*/
public void setResourcesFlagged(Long resourcesFlagged) {
this.resourcesFlagged = resourcesFlagged;
}
/**
* <p>
* The number of AWS resources that were flagged (listed) by the Trusted
* Advisor check.
* </p>
*
* @return The number of AWS resources that were flagged (listed) by the
* Trusted Advisor check.
*/
public Long getResourcesFlagged() {
return this.resourcesFlagged;
}
/**
* <p>
* The number of AWS resources that were flagged (listed) by the Trusted
* Advisor check.
* </p>
*
* @param resourcesFlagged
* The number of AWS resources that were flagged (listed) by the
* Trusted Advisor check.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public TrustedAdvisorResourcesSummary withResourcesFlagged(
Long resourcesFlagged) {
setResourcesFlagged(resourcesFlagged);
return this;
}
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
* </p>
*
* @param resourcesIgnored
* The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
*/
public void setResourcesIgnored(Long resourcesIgnored) {
this.resourcesIgnored = resourcesIgnored;
}
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
* </p>
*
* @return The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
*/
public Long getResourcesIgnored() {
return this.resourcesIgnored;
}
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
* </p>
*
* @param resourcesIgnored
* The number of AWS resources ignored by Trusted Advisor because
* information was unavailable.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public TrustedAdvisorResourcesSummary withResourcesIgnored(
Long resourcesIgnored) {
setResourcesIgnored(resourcesIgnored);
return this;
}
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because they were
* marked as suppressed by the user.
* </p>
*
* @param resourcesSuppressed
* The number of AWS resources ignored by Trusted Advisor because
* they were marked as suppressed by the user.
*/
public void setResourcesSuppressed(Long resourcesSuppressed) {
this.resourcesSuppressed = resourcesSuppressed;
}
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because they were
* marked as suppressed by the user.
* </p>
*
* @return The number of AWS resources ignored by Trusted Advisor because
* they were marked as suppressed by the user.
*/
public Long getResourcesSuppressed() {
return this.resourcesSuppressed;
}
/**
* <p>
* The number of AWS resources ignored by Trusted Advisor because they were
* marked as suppressed by the user.
* </p>
*
* @param resourcesSuppressed
* The number of AWS resources ignored by Trusted Advisor because
* they were marked as suppressed by the user.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public TrustedAdvisorResourcesSummary withResourcesSuppressed(
Long resourcesSuppressed) {
setResourcesSuppressed(resourcesSuppressed);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getResourcesProcessed() != null)
sb.append("ResourcesProcessed: " + getResourcesProcessed() + ",");
if (getResourcesFlagged() != null)
sb.append("ResourcesFlagged: " + getResourcesFlagged() + ",");
if (getResourcesIgnored() != null)
sb.append("ResourcesIgnored: " + getResourcesIgnored() + ",");
if (getResourcesSuppressed() != null)
sb.append("ResourcesSuppressed: " + getResourcesSuppressed());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof TrustedAdvisorResourcesSummary == false)
return false;
TrustedAdvisorResourcesSummary other = (TrustedAdvisorResourcesSummary) obj;
if (other.getResourcesProcessed() == null
^ this.getResourcesProcessed() == null)
return false;
if (other.getResourcesProcessed() != null
&& other.getResourcesProcessed().equals(
this.getResourcesProcessed()) == false)
return false;
if (other.getResourcesFlagged() == null
^ this.getResourcesFlagged() == null)
return false;
if (other.getResourcesFlagged() != null
&& other.getResourcesFlagged().equals(
this.getResourcesFlagged()) == false)
return false;
if (other.getResourcesIgnored() == null
^ this.getResourcesIgnored() == null)
return false;
if (other.getResourcesIgnored() != null
&& other.getResourcesIgnored().equals(
this.getResourcesIgnored()) == false)
return false;
if (other.getResourcesSuppressed() == null
^ this.getResourcesSuppressed() == null)
return false;
if (other.getResourcesSuppressed() != null
&& other.getResourcesSuppressed().equals(
this.getResourcesSuppressed()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getResourcesProcessed() == null) ? 0
: getResourcesProcessed().hashCode());
hashCode = prime
* hashCode
+ ((getResourcesFlagged() == null) ? 0 : getResourcesFlagged()
.hashCode());
hashCode = prime
* hashCode
+ ((getResourcesIgnored() == null) ? 0 : getResourcesIgnored()
.hashCode());
hashCode = prime
* hashCode
+ ((getResourcesSuppressed() == null) ? 0
: getResourcesSuppressed().hashCode());
return hashCode;
}
@Override
public TrustedAdvisorResourcesSummary clone() {
try {
return (TrustedAdvisorResourcesSummary) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
|
vlsi/pru-emulator
|
src/main/java/com/github/vlsi/pru/plc110/CodeEmitter.java
|
<reponame>vlsi/pru-emulator
package com.github.vlsi.pru.plc110;
import com.github.vlsi.pru.plc110.debug.RegisterVariableLocation;
import java.util.ArrayList;
import java.util.List;
/**
 * Collects instructions and debug metadata, then resolves label-based jumps
 * into absolute offsets when {@link #visitEnd()} is called.
 */
public class CodeEmitter {
  /** Emitted instructions, in program order. */
  final List<Instruction> result = new ArrayList<>();
  /** Indices into {@link #result} of jumps whose label must be resolved. */
  final List<Integer> jumpInstructions = new ArrayList<>();
  /** Debug info: which register holds which variable over which range. */
  final List<RegisterVariableLocation> varLocations = new ArrayList<>();

  /** Appends an instruction, remembering label-targeted jumps for later patching. */
  public void visitInstruction(Instruction instruction) {
    if (instruction instanceof Jump && ((Jump) instruction).getTarget() != null) {
      // Record the position this jump will occupy so visitEnd() can patch it.
      jumpInstructions.add(result.size());
    }
    result.add(instruction);
  }

  /** Pins a label to the current instruction offset. */
  public void visitLabel(Label label) {
    label.setAbsoluteOffset(result.size());
  }

  /** Records the register location of a named variable for debug output. */
  public void visitRegisterVariable(String name, String typeName, Label start, Label end, Register reg) {
    varLocations.add(new RegisterVariableLocation(name, typeName, start, end, reg));
  }

  /**
   * Resolves all pending jumps and validates variable ranges, then packages
   * everything into a {@link BinaryCode}.
   *
   * @throws IllegalStateException if any referenced label was never visited
   */
  public BinaryCode visitEnd() {
    for (int position : jumpInstructions) {
      Jump pending = (Jump) result.get(position);
      Label destination = pending.getTarget();
      if (!destination.isInitialized()) {
        throw new IllegalStateException("Unresolved jump target " + destination + " for jump instruction " + pending);
      }
      pending.resolveTarget(position);
    }
    for (RegisterVariableLocation location : varLocations) {
      if (!location.start.isInitialized()) {
        throw new IllegalStateException("Unresolved start location for variable " + location.name);
      }
      if (!location.end.isInitialized()) {
        throw new IllegalStateException("Unresolved end location for variable " + location.name);
      }
    }
    return new BinaryCode(result, varLocations);
  }
}
|
asiboro/asiboro.github.io
|
vsdoc/search--/s_3731.js
|
// Auto-generated search-index entry: [target page, display title, extra].
search_result['3731']=["topic_00000000000008F0.html","StageTabListDto.StageTabList Property",""];
|
darlyhellen/oto
|
DLClent_A/src/design/patterns/structural/Decorator/DecoratorSource.java
|
<filename>DLClent_A/src/design/patterns/structural/Decorator/DecoratorSource.java
/**
 * DecoratorSource.java
 * Created at 10:44:01 AM.
 *
 * @author zhangyh2
 */
package design.patterns.structural.Decorator;
/**
* @author zhangyh2 DecoratorSource 上午10:44:01 TODO Decorator类是一个装饰类
*/
/**
 * Decorator pattern example: wraps another {@link Sourceable} component and
 * adds behavior immediately before and after delegating the call to it.
 *
 * <p>The wrapped component is supplied at construction time, so decorators can
 * be stacked by passing one decorator into another.
 */
public class DecoratorSource implements Sourceable {

  /** The wrapped component that receives the delegated call. */
  private Sourceable source;

  public DecoratorSource(Sourceable source) {
    this.source = source;
  }

  /**
   * Prints a marker, delegates to the wrapped {@code source}, then prints a
   * closing marker — the decorator's "added" behavior around the original call.
   */
  @Override
  public void simplename() {
    System.out.println("before decorator!");
    source.simplename();
    System.out.println("after decorator!");
  }
}
|
ernestyalumni/HrdwCCppCUDA
|
Constructicon/Source/Performance/CPrimer/pointer.c
|
<filename>Constructicon/Source/Performance/CPrimer/pointer.c
/*******************************************************************************
* @ref MIT 6.172 Fall 2018 Hwk 1, c-primer
******************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
// Prints the in-memory size of an expression; T_STR is a human-readable label.
// Note sizeof's operand is not evaluated, so T may even be uninitialized.
#define PRINT_SIZE_1(T_STR, T) \
  printf("size of %s : %zu bytes \n", T_STR, sizeof(T));
// char * argv is an array of pointers to char.
int main(int argc, char * argv[]) {
  if (argc > 1)
  {
    for (int i = 1; i < argc; ++i)
    {
      // sizeof(argv[i]) is the size of a char* (the pointer), not the string length.
      printf("value: %s, %zu Size bytes \n", argv[i], sizeof(argv[i]));
    }
  }
  int i = 5;
  // The & operator here gets the address of i and stores it into pi
  int * pi = &i;
  // The * operator here dereferences pi and stores the value -- 5 --
  // into j.
  int j = *pi;
  // NOTE(review): &j is an int*, so this reports the size of a pointer, not of
  // the int itself (the label "deferenced" is also a typo for "dereferenced").
  PRINT_SIZE_1("deferenced int", &j);
  char c[] = "6.172";
  char * pc = c; // Valid assignment: c acts like a pointer to c[0] here.
  char d = *pc;
  printf("char d = %c\n", d); // This prints "6"
  // Compound types are read right to left in C.
  // pcp is a pointer to a pointer to a char, meaning that
  // pcp stores the address of a char pointer.
  // Originally,
  // char ** pcp;
  char* *pcp; // pointer to a pointer to a char.
  // Using the uninitialized pcp here is well-defined: sizeof does not
  // evaluate its operand, it only reports the size of a char**.
  PRINT_SIZE_1("\n char** pcp \n", pcp);
  pcp = argv; // Why is this assignment valid?
  // argv is an array of char ptrs, but itself points to first char*.
  PRINT_SIZE_1("\n After assignment: char** pcp \n", pcp);
  const char* pcc = c; // pcc is a pointer to char constant.
  PRINT_SIZE_1("\n const char* pcc \n", pcc);
  char const* pcc2 = c; // What is the type of pcc2?
  // pcc2 has exactly the same type as pcc: a pointer to const char. Writing
  // "const" before or after "char" makes no difference — both qualify the
  // pointed-to char, not the pointer itself.
  PRINT_SIZE_1("\n char const* pcc2 \n", pcc2);
  //----------------------------------------------------------------------------
  // For each of the following, why is the assignment:
  // error: assignment of read only location.
  //*pcc = '7'; // invalid?
  pcc = *pcp; // The char pointer on the right gets assigned to a pointer to a
  // char constant. valid.
  pcc = argv[0]; // valid?
  // argv is an array of char pointers, so a right-hand side char pointer is
  // assigned to a pointer to a char constant.
  //----------------------------------------------------------------------------
  char * const cp = c; // cp is a const pointer to char.
  // For each of the following, why is assignment:
  //cp = *pcp; // invalid?
  // Char pointer on the right is being assigned to something that is const.
  // error: assignment of read-only variable.
  //cp = *argv; // invalid?
  // Char pointer on right begin assigned to something that's const.
  *cp = '!'; // valid?
  // cp points to a char. cp has address of the char. We can go to that address
  // and directly change the value that is stored at that address.
  printf("char c[] = %c %c %c %c \n", c[0], c[1], c[2], c[3]); // This prints
  // '!', '.', '1', '7'
  //----------------------------------------------------------------------------
  const char * const cpc = c; // cpc is a const pointer to char const
  // For each of the following, why is the assignment:
  //cpc = *pcp; // invalid? char pointer gets assigned to const pointer.
  //cpc = argv[0]; // invalid? Char pointer gets assigned to const pointer.
  //*cpc = '@'; // invalid? Const char can't be changed.
}
|
phpc0de/idea-android
|
transport-database/testSrc/com/android/tools/datastore/database/MemoryStatsTableTest.java
|
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.datastore.database;
import static com.google.common.truth.Truth.assertThat;
import com.android.tools.profiler.proto.Common;
import com.android.tools.profiler.proto.Memory.*;
import com.android.tools.profiler.proto.MemoryProfiler.*;
import com.android.tools.idea.protobuf.ByteString;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
/**
 * Unit tests for {@link MemoryStatsTable}: verifies that memory samples,
 * allocation/GC stats, heap-dump infos and allocations infos can be inserted
 * and queried back with startTime-exclusive / endTime-inclusive window
 * semantics, and that data is isolated per session.
 */
public class MemoryStatsTableTest extends DatabaseTest<MemoryStatsTable> {
  // Session whose data is inserted and expected back from queries.
  private static final Common.Session VALID_SESSION = Common.Session.newBuilder().setSessionId(1L).setStreamId(1234).setPid(1).build();
  // Session that never receives data; querying it must return empty results.
  private static final Common.Session INVALID_SESSION = Common.Session.newBuilder().setSessionId(-1L).setStreamId(4321).setPid(-1).build();

  @Override
  protected MemoryStatsTable createTable() {
    return new MemoryStatsTable();
  }

  /**
   * Returns one invocation per public table method so the base class can
   * exercise each of them for verification (e.g. against a closed database).
   */
  @Override
  protected List<Consumer<MemoryStatsTable>> getTableQueryMethodsForVerification() {
    List<Consumer<MemoryStatsTable>> methodCalls = new ArrayList<>();
    Common.Session session = Common.Session.getDefaultInstance();
    methodCalls.add((table) -> assertThat(table.getAllocationsInfo(session, 0)).isNull());
    methodCalls.add((table) -> assertThat(table.getHeapDumpInfoByRequest(session, ListDumpInfosRequest.getDefaultInstance())).isEmpty());
    methodCalls.add((table) -> {
      ArrayList<MemoryData.AllocStatsSample> stats = new ArrayList<>();
      stats.add(MemoryData.AllocStatsSample.getDefaultInstance());
      table.insertAllocStats(session, stats);
    });
    methodCalls.add((table) -> {
      ArrayList<MemoryData.GcStatsSample> samples = new ArrayList<>();
      samples.add(MemoryData.GcStatsSample.getDefaultInstance());
      table.insertGcStats(session, samples);
    });
    methodCalls.add((table) -> {
      List<MemoryData.MemorySample> samples = new ArrayList<>();
      samples.add(MemoryData.MemorySample.getDefaultInstance());
      table.insertMemory(session, samples);
    });
    methodCalls.add((table) -> table.insertOrReplaceAllocationsInfo(session, AllocationsInfo.getDefaultInstance()));
    methodCalls.add((table) -> table.insertOrReplaceHeapInfo(session, HeapDumpInfo.getDefaultInstance()));
    methodCalls.add((table) -> table.getData(MemoryRequest.getDefaultInstance()));
    return methodCalls;
  }

  @Test
  public void testInsertAndGetData() {
    /*
     * Insert a cascading sequence of sample data into the database:
     * Timestamp:     0  1  2  3  4  5  6  7  8  9
     * mem               |
     * allocStats           |
     * ongoing heap            |---------->
     * finished heap              |-|
     * ongoing alloc                    |---->
     * finished alloc                      |-|
     * gcStats                               |-|
     */
    MemoryData.MemorySample memSample = MemoryData.MemorySample.newBuilder().setTimestamp(1).build();
    MemoryData.AllocStatsSample allocStatsSample = MemoryData.AllocStatsSample.newBuilder().setTimestamp(2).build();
    // Long.MAX_VALUE end time marks a still-ongoing capture.
    HeapDumpInfo ongoingHeapSample =
      HeapDumpInfo.newBuilder().setStartTime(3).setEndTime(Long.MAX_VALUE).build();
    HeapDumpInfo finishedHeapSample = HeapDumpInfo.newBuilder().setStartTime(4).setEndTime(5).build();
    AllocationsInfo ongoingAllocSample =
      AllocationsInfo.newBuilder().setStartTime(6).setEndTime(Long.MAX_VALUE).build();
    AllocationsInfo finishedAllocSample = AllocationsInfo.newBuilder().setStartTime(7).setEndTime(8).build();
    MemoryData.GcStatsSample gcStatsSample = MemoryData.GcStatsSample.newBuilder().setStartTime(8).setEndTime(9).build();
    getTable().insertMemory(VALID_SESSION, Collections.singletonList(memSample));
    getTable().insertAllocStats(VALID_SESSION, Collections.singletonList(allocStatsSample));
    getTable().insertGcStats(VALID_SESSION, Collections.singletonList(gcStatsSample));
    getTable().insertOrReplaceHeapInfo(VALID_SESSION, finishedHeapSample);
    getTable().insertOrReplaceHeapInfo(VALID_SESSION, ongoingHeapSample);
    getTable().insertOrReplaceAllocationsInfo(VALID_SESSION, ongoingAllocSample);
    getTable().insertOrReplaceAllocationsInfo(VALID_SESSION, finishedAllocSample);
    // Perform a sequence of queries to ensure we are getting startTime-exclusive and endTime-inclusive data.
    MemoryData result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(-1).setEndTime(0).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 0, 0);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(0).setEndTime(1).build());
    verifyMemoryDataResultCounts(result, 1, 0, 0, 0, 0);
    assertThat(result.getMemSamples(0)).isEqualTo(memSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(1).setEndTime(2).build());
    verifyMemoryDataResultCounts(result, 0, 1, 0, 0, 0);
    assertThat(result.getAllocStatsSamples(0)).isEqualTo(allocStatsSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(2).setEndTime(3).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 1, 0);
    assertThat(result.getHeapDumpInfos(0)).isEqualTo(ongoingHeapSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(3).setEndTime(4).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 2, 0);
    assertThat(result.getHeapDumpInfosList()).contains(ongoingHeapSample);
    assertThat(result.getHeapDumpInfosList()).contains(finishedHeapSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(4).setEndTime(5).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 2, 0);
    assertThat(result.getHeapDumpInfosList()).contains(ongoingHeapSample);
    assertThat(result.getHeapDumpInfosList()).contains(finishedHeapSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(5).setEndTime(6).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 1, 1);
    assertThat(result.getHeapDumpInfos(0)).isEqualTo(ongoingHeapSample);
    assertThat(result.getAllocationsInfo(0)).isEqualTo(ongoingAllocSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(6).setEndTime(7).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 1, 2);
    assertThat(result.getHeapDumpInfos(0)).isEqualTo(ongoingHeapSample);
    assertThat(result.getAllocationsInfoList()).contains(ongoingAllocSample);
    assertThat(result.getAllocationsInfoList()).contains(finishedAllocSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(7).setEndTime(8).build());
    verifyMemoryDataResultCounts(result, 0, 0, 1, 1, 2);
    assertThat(result.getGcStatsSamples(0)).isEqualTo(gcStatsSample);
    assertThat(result.getHeapDumpInfos(0)).isEqualTo(ongoingHeapSample);
    assertThat(result.getAllocationsInfoList()).contains(ongoingAllocSample);
    assertThat(result.getAllocationsInfoList()).contains(finishedAllocSample);
    result = getTable().getData(MemoryRequest.newBuilder().setSession(VALID_SESSION).setStartTime(8).setEndTime(9).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 1, 1);
    assertThat(result.getHeapDumpInfos(0)).isEqualTo(ongoingHeapSample);
    assertThat(result.getAllocationsInfo(0)).isEqualTo(ongoingAllocSample);
    // Test that querying for an invalid session returns no data.
    result = getTable().getData(MemoryRequest.newBuilder().setSession(INVALID_SESSION).setStartTime(0).setEndTime(9).build());
    verifyMemoryDataResultCounts(result, 0, 0, 0, 0, 0);
  }

  // Asserts the exact number of each sample kind contained in a query result.
  private static void verifyMemoryDataResultCounts(@NotNull MemoryData result,
                                                   int numMemSample,
                                                   int numAllocStatsSample,
                                                   int numGcStatsSample,
                                                   int numHeapInfoSample,
                                                   int numAllocInfoSample) {
    assertThat(result.getMemSamplesCount()).isEqualTo(numMemSample);
    assertThat(result.getAllocStatsSamplesCount()).isEqualTo(numAllocStatsSample);
    assertThat(result.getGcStatsSamplesCount()).isEqualTo(numGcStatsSample);
    assertThat(result.getHeapDumpInfosCount()).isEqualTo(numHeapInfoSample);
    assertThat(result.getAllocationsInfoCount()).isEqualTo(numAllocInfoSample);
  }
}
|
tobiasvasquez/P2Tec
|
controllers/perfilController.js
|
<filename>controllers/perfilController.js
const jsonModel = require('../models/jsonModel');
const productModel = jsonModel('products');
let db = require("../database/models");
const sequelize = require('sequelize');
const Op = sequelize.Op;
const {
validationResult
} = require("express-validator");
const bcrypt = require("bcryptjs");
/**
 * Profile ("perfil") controller: renders the logged-in user's profile,
 * handles account edits, shipping data, role administration, password
 * changes and product reviews.
 *
 * FIX: the original source contained the anonymized token
 * `<PASSWORD>.hashSync(...)` in `updatecontra`, which is invalid JavaScript;
 * restored to `bcrypt.hashSync(...)`, matching the `bcryptjs` require at the
 * top of this file.
 */
const controller = {
    // GET /perfil — refresh the session user from the DB and render the profile.
    root: (req, res) => {
        const user = req.session.user;
        if (user) {
            db.User.findByPk(req.session.user.id)
                .then(function (user) {
                    // Keep the session copy in sync with the latest DB state.
                    req.session.user = user
                    const currentUser = req.session.user;
                    if (currentUser) {
                        return res.render('perfil', {
                            user: currentUser
                        })
                    } else {
                        return res.render('not-found', {
                            user: currentUser
                        });
                    }
                })
        } else {
            return res.render('not-found', {
                user
            });
        }
    },
    // POST — persist edited username/email, then redirect back to the profile.
    edit: (req, res) => {
        const errors = validationResult(req);
        if (errors.isEmpty()) {
            db.User.update({
                username: req.body.username,
                email: req.body.email
            }, {
                where: {
                    id: req.params.id
                }
            })
                .then(() => {
                    db.User.findByPk(req.params.id).then(function (user) {
                        req.session.user = user
                        return res.redirect(req.params.id + "")
                    })
                })
        } else {
            // Validation failed: re-render the form with errors and old input.
            return res.render("perfil", {
                errors: errors.mapped(),
                old: req.body,
                user: req.session.user
            });
        }
    },
    // POST — save the user's shipping/address details.
    datos: (req, res) => {
        db.User.update({
            address: req.body.Direccion,
            floor: req.body.Direccion2,
            PostalCode: req.body.CodPostal,
            location: req.body.Localidad,
            Province: req.body.Provincia,
        }, {
            where: {
                id: req.session.user.id
            }
        })
            .then(() => {
                return res.redirect("/perfil");
            })
    },
    // GET — admin-only (rol === 1) control panel listing every user.
    controlarea: (req, res) => {
        const user = req.session.user;
        if (typeof user !== 'undefined' && user.rol === 1) {
            db.User.findAll({
                include: {
                    all: true,
                    nested: true
                }
            })
                .then(function (results) {
                    const UserAll = results;
                    return res.render("paneldecontrol", {
                        data: UserAll,
                        user
                    })
                })
                .catch(e => console.log(e))
        } else {
            return res.render('not-found', {
                user
            });
        }
    },
    // POST — update a user's role from the control panel (looked up by username).
    updaterol: (req, res) => {
        db.User.update({
            username: req.body.username,
            rol: req.body.rol
        }, {
            where: {
                username: req.body.username
            }
        })
            .then(() => {
                return res.redirect("/perfil/controlarea/")
            })
    },
    // GET — render the change-password form for the logged-in user.
    editpas: (req, res) => {
        const user = req.session.user;
        if (user) {
            return res.render('cambiarcontra', {
                user
            })
        } else {
            return res.render('not-found', {
                user
            });
        }
    },
    // POST — change the password; an empty input keeps the stored hash.
    updatecontra: (req, res) => {
        let errors = validationResult(req);
        if (errors.isEmpty()) {
            db.User.findByPk(req.params.id)
                .then(function (user) {
                    return db.User.update({
                        // Only re-hash when a new password was provided.
                        password: req.body.newPassword != "" ? bcrypt.hashSync(req.body.newPassword, 10) : user.password
                    }, {
                        where: {
                            id: req.params.id
                        }
                    })
                })
                .then(() => {
                    return res.redirect('/perfil/' + req.session.user.id);
                })
        } else {
            console.log(JSON.stringify(errors));
            console.log(errors.mapped())
            return res.render('cambiarcontra', {
                user: req.session.user,
                errors: errors.mapped(),
                old: req.body
            })
        }
    },
    // GET — render the review form with the user's purchased (state 0) items.
    comentario: (req, res) => {
        const user = req.session.user;
        if (user) {
            db.Item.findAll(
                {
                    where: {
                        state: 0
                    },
                    include: [{association: "product"}],
                })
                .then((item) => {
                    return res.render('reseña', {
                        user,
                        item
                    });
                })
                .catch(e => console.log(e));
        } else {
            return res.render('not-found', {
                user
            });
        }
    },
    // POST — store a product review linked to the current user, then go to the list.
    createComentario: (req, res) => {
        const user = req.session.user;
        if (user) {
            db.Comment.create({
                name: req.body.name,
                userId: req.session.user.id,
                productId: req.body.productId,
            })
                .then(() => {
                    return res.redirect('/lista');
                })
        } else {
            return res.render('not-found', {
                user
            });
        }
    }
};

module.exports = controller;
|
enterpact/enterchain
|
ethereum/eth/src/test/java/org/enterchain/enter/ethereum/eth/manager/task/GetReceiptsFromPeerTaskTest.java
|
<filename>ethereum/eth/src/test/java/org/enterchain/enter/ethereum/eth/manager/task/GetReceiptsFromPeerTaskTest.java
/*
* Copyright ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.enterchain.enter.ethereum.eth.manager.task;
import static org.assertj.core.api.Assertions.assertThat;
import org.enterchain.enter.ethereum.core.BlockHeader;
import org.enterchain.enter.ethereum.core.TransactionReceipt;
import org.enterchain.enter.ethereum.eth.manager.ethtaskutils.PeerMessageTaskTest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Exercises {@link GetReceiptsFromPeerTask} through the generic
 * {@link PeerMessageTaskTest} harness, using transaction receipts for a small
 * range of blocks as the requested data.
 */
public class GetReceiptsFromPeerTaskTest
    extends PeerMessageTaskTest<Map<BlockHeader, List<TransactionReceipt>>> {

  /** Builds the expected header → receipts mapping for blocks 10, 11 and 12. */
  @Override
  protected Map<BlockHeader, List<TransactionReceipt>> generateDataToBeRequested() {
    final Map<BlockHeader, List<TransactionReceipt>> receiptsByHeader = new HashMap<>();
    for (long offset = 0; offset < 3; offset++) {
      final BlockHeader blockHeader = blockchain.getBlockHeader(10 + offset).get();
      receiptsByHeader.put(blockHeader, blockchain.getTxReceipts(blockHeader.getHash()).get());
    }
    return receiptsByHeader;
  }

  /** Creates the task under test for the given set of requested headers. */
  @Override
  protected EthTask<AbstractPeerTask.PeerTaskResult<Map<BlockHeader, List<TransactionReceipt>>>>
      createTask(final Map<BlockHeader, List<TransactionReceipt>> requestedData) {
    return GetReceiptsFromPeerTask.forHeaders(ethContext, requestedData.keySet(), metricsSystem);
  }

  /**
   * A partial response must be a non-empty subset of the request, and each
   * returned receipt list must match what was asked for that header.
   */
  @Override
  protected void assertPartialResultMatchesExpectation(
      final Map<BlockHeader, List<TransactionReceipt>> requestedData,
      final Map<BlockHeader, List<TransactionReceipt>> partialResponse) {
    assertThat(partialResponse.size()).isLessThanOrEqualTo(requestedData.size());
    assertThat(partialResponse.size()).isGreaterThan(0);
    for (final Map.Entry<BlockHeader, List<TransactionReceipt>> entry :
        partialResponse.entrySet()) {
      assertThat(requestedData).containsKey(entry.getKey());
      assertThat(requestedData.get(entry.getKey())).isEqualTo(entry.getValue());
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.