repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
redhatanalytics/oshinko-cli
|
vendor/github.com/openshift/origin/pkg/oc/admin/diagnostics/diagnostics/cluster/registry.go
|
<reponame>redhatanalytics/oshinko-cli
package cluster
import (
"bufio"
"fmt"
"reflect"
"regexp"
"strings"
kerrs "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/kubernetes/pkg/apis/authorization"
kapi "k8s.io/kubernetes/pkg/apis/core"
kclientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset"
osapi "github.com/openshift/origin/pkg/image/apis/image"
imagetypedclient "github.com/openshift/origin/pkg/image/generated/internalclientset/typed/image/internalversion"
"github.com/openshift/origin/pkg/oc/admin/diagnostics/diagnostics/types"
)
// ClusterRegistry is a Diagnostic to check that there is a working Docker registry.
type ClusterRegistry struct {
	// KubeClient is used for services, pods, endpoints, and pod-log access.
	KubeClient kclientset.Interface
	// ImageStreamClient creates/gets/deletes the test ImageStream used to
	// verify the registry address cached by the master.
	ImageStreamClient imagetypedclient.ImageStreamsGetter
	// PreventModification, when true, skips checks that would create API
	// objects (see verifyRegistryImageStream).
	PreventModification bool
}
const (
	// Diagnostic identity and the conventional registry object names.
	ClusterRegistryName = "ClusterRegistry"
	registryName        = "docker-registry"
	registryVolume      = "registry-storage"

	// Message templates for diagnostic results. The %-verbs are filled in
	// by the checks below, typically with the registry/pod/service names
	// and any error or log line involved.
	clGetRegNone = `
There is no "%s" service in project "%s". This is not strictly required to
be present; however, it is required for builds, and its absence probably
indicates an incomplete installation.
Please consult the documentation and use the 'oc adm registry' command
to create a Docker registry.`

	clGetRegFailed = `
Client error while retrieving registry service. Client retrieved records
during discovery, so this is likely to be a transient error. Try running
diagnostics again. If this message persists, there may be a permissions
problem with getting records. The error was:
(%T) %[1]v `

	clRegNoPods = `
The "%s" service exists but has no associated pods, so it
is not available. Builds and deployments that use the registry will fail.`

	clRegNoRunningPods = `
The "%s" service exists but no pods currently running, so it
is not available. Builds and deployments that use the registry will fail.`

	clRegMultiPods = `
The "%s" service has multiple associated pods each using
ephemeral storage. These are likely to have inconsistent stores of
images. Builds and deployments that use images from the registry may
fail sporadically. Use a single registry or add a shared storage volume
to the registries.`

	clRegMultiCustomCfg = `
The "%s" service has multiple associated pods each mounted with
ephemeral storage, but also has a custom config %s
mounted; assuming storage config is as desired.`

	clRegPodDown = `
The "%s" pod for the "%s" service is not running.
This may be transient, a scheduling error, or something else.`

	clRegPodLog = `
Failed to read the logs for the "%s" pod belonging to
the "%s" service. This is not a problem by itself but
prevents diagnostics from looking for errors in those logs. The
error encountered was:
%s`

	clRegPodConn = `
The pod logs for the "%s" pod belonging to
the "%s" service indicated a problem connecting to the
master to notify it about a new image. This typically results in builds
succeeding but not triggering deployments (as they wait on notifications
to the ImageStream from the build).
There are many reasons for this step to fail, including invalid
credentials, master outages, DNS failures, network errors, and so on. It
can be temporary or ongoing. Check the most recent error message from the
registry pod logs to determine the nature of the problem:
%s`

	clRegPodErr = `
The pod logs for the "%s" pod belonging to
the "%s" service indicated unknown errors.
This could result in problems with builds or deployments.
Please examine the log entries to determine if there might be
any related problems:
%s`

	clRegSelinuxErr = `
The pod logs for the "%s" pod belonging to
the "%s" service indicated the registry is unable to write to disk.
This may indicate an SELinux denial, or problems with volume
ownership/permissions.
For volume permission problems please consult the Persistent Storage section
of the Administrator's Guide.
%s`

	clRegNoEP = `
The "%[1]s" service exists with %d associated pod(s), but there
are %d endpoints in the "%[1]s" service.
This mismatch likely indicates a system bug, and builds and
deployments that require the registry may fail sporadically.`

	clRegISDelFail = `
The diagnostics created an ImageStream named "%[1]s"
for test purposes and then attempted to delete it, which failed. This
should be an unusual, transient occurrence. The error encountered in
deleting it was:
%s
This message is just to notify you that this object exists.
You ought to be able to delete this object with:
oc delete imagestream/%[1]s -n default
`

	clRegISMismatch = `
Diagnostics created a test ImageStream and compared the registry
it received to the registry IP and host available via the %[1]s service.
%[1]s by IP: %[2]s
%[1]s by host: %[3]s
ImageStream registry : %[4]s
Neither matches, which could mean that the master has cached an old
service; possibly an administrator re-created the %[1]s service with
a different IP address. Builds or deployments that use ImageStreams
with the wrong %[1]s IP will fail under this condition. If this is the
case, restarting the master (to clear the cache) should resolve the
issue. Existing ImageStreams may need to be re-created.`
)
// Name returns the diagnostic's identifier, used to tag its results.
func (d *ClusterRegistry) Name() string {
	return ClusterRegistryName
}
// Description returns a one-line, human-readable summary of the diagnostic.
func (d *ClusterRegistry) Description() string {
	return "Check that there is a working Docker registry"
}
// CanRun reports whether the diagnostic has the clients it needs and whether
// the current user may read the registry service in the default namespace.
func (d *ClusterRegistry) CanRun() (bool, error) {
	if d.KubeClient == nil || d.ImageStreamClient == nil {
		return false, fmt.Errorf("must have kube and os clients")
	}
	// Permission probe: "get services/docker-registry" in the default namespace.
	attrs := &authorization.ResourceAttributes{
		Namespace: metav1.NamespaceDefault,
		Verb:      "get",
		Group:     kapi.GroupName,
		Resource:  "services",
		Name:      registryName,
	}
	return userCan(d.KubeClient.Authorization(), attrs)
}
// Check runs the registry diagnostic: locate the registry service, confirm it
// has running pods backed by matching endpoints, and finally verify the
// registry address the master hands out via ImageStreams.
func (d *ClusterRegistry) Check() types.DiagnosticResult {
	result := types.NewDiagnosticResult(ClusterRegistryName)

	service := d.getRegistryService(result)
	if service == nil {
		return result
	}

	runningPods := d.getRegistryPods(service, result)
	if len(runningPods) == 0 {
		// getRegistryPods has already recorded why there is nothing running.
		return result
	}

	// Only bother with the ImageStream round-trip when the endpoints line up.
	if d.checkRegistryEndpoints(runningPods, result) {
		d.verifyRegistryImageStream(service, result)
	}
	return result
}
// getRegistryService fetches the "docker-registry" service from the default
// namespace. On any failure it records a diagnostic and returns nil.
func (d *ClusterRegistry) getRegistryService(r types.DiagnosticResult) *kapi.Service {
	service, err := d.KubeClient.Core().Services(metav1.NamespaceDefault).Get(registryName, metav1.GetOptions{})
	// NOTE(review): a *kerrs.StatusError (matched here via reflect rather than
	// a type assertion) is treated as "service not present" — presumably a
	// NotFound, though any API status error takes this branch.
	if err != nil && reflect.TypeOf(err) == reflect.TypeOf(&kerrs.StatusError{}) {
		r.Warn("DClu1002", err, fmt.Sprintf(clGetRegNone, registryName, metav1.NamespaceDefault))
		return nil
	} else if err != nil {
		// Non-status errors (transport, permissions, ...) are likely transient.
		r.Error("DClu1003", err, fmt.Sprintf(clGetRegFailed, err))
		return nil
	}
	r.Debug("DClu1004", fmt.Sprintf("Found %s service with ports %v", registryName, service.Spec.Ports))
	return service
}
// getRegistryPods lists the pods selected by the registry service, records
// diagnostics about their count/storage/phase, and returns the running ones.
// Non-running pods are warned about; logs of running pods are scanned for
// common problems via checkRegistryLogs.
func (d *ClusterRegistry) getRegistryPods(service *kapi.Service, r types.DiagnosticResult) []*kapi.Pod {
	runningPods := []*kapi.Pod{}
	pods, err := d.KubeClient.Core().Pods(metav1.NamespaceDefault).List(metav1.ListOptions{LabelSelector: labels.SelectorFromSet(service.Spec.Selector).String()})
	if err != nil {
		r.Error("DClu1005", err, fmt.Sprintf("Finding pods for '%s' service failed. This should never happen. Error: (%T) %[2]v", registryName, err))
		return runningPods
	} else if len(pods.Items) < 1 {
		r.Error("DClu1006", nil, fmt.Sprintf(clRegNoPods, registryName))
		return runningPods
	} else if len(pods.Items) > 1 {
		emptyDir := false     // multiple registry pods using EmptyDir will be inconsistent
		customConfig := false // ... unless the user has configured them for e.g. S3
		configPath := "/config.yml"
		// look through the pod volumes to see if that might be a problem
		podSpec := pods.Items[0].Spec
		container := podSpec.Containers[0]
		for _, volume := range podSpec.Volumes {
			if volume.Name == registryVolume && volume.EmptyDir != nil {
				emptyDir = true
			}
		}
		for _, env := range container.Env {
			if env.Name == "REGISTRY_CONFIGURATION_PATH" {
				configPath = env.Value // look for custom config here
			}
		}
		for _, vmount := range container.VolumeMounts {
			if strings.HasPrefix(configPath, vmount.MountPath) {
				customConfig = true // if something's mounted there, assume custom config.
			}
		}
		if emptyDir {
			if customConfig { // assume they know what they're doing
				r.Info("DClu1020", fmt.Sprintf(clRegMultiCustomCfg, registryName, configPath))
			} else { // assume they scaled up with ephemeral storage
				r.Error("DClu1007", nil, fmt.Sprintf(clRegMultiPods, registryName))
			}
		}
	}
	for i := range pods.Items {
		// BUG FIX: take the address of the slice element, not of the range
		// variable. The original `append(runningPods, &pod)` aliased the single
		// loop variable (pre-Go 1.22 semantics), so every returned pointer
		// referred to the last pod iterated.
		pod := &pods.Items[i]
		r.Debug("DClu1008", fmt.Sprintf("Found %s pod with name %s", registryName, pod.ObjectMeta.Name))
		if pod.Status.Phase != kapi.PodRunning {
			r.Warn("DClu1009", nil, fmt.Sprintf(clRegPodDown, pod.ObjectMeta.Name, registryName))
		} else {
			runningPods = append(runningPods, pod)
			// Check the logs for that pod for common issues (credentials, DNS resolution failure)
			d.checkRegistryLogs(pod, r)
		}
	}
	return runningPods
}
// checkRegistryLogs streams the logs of one registry pod and scans them for
// known failure signatures: client/master connection errors, SELinux or
// volume-permission write denials, and generic registry errors. Findings are
// recorded on r; nothing is returned.
func (d *ClusterRegistry) checkRegistryLogs(pod *kapi.Pod, r types.DiagnosticResult) {
	// pull out logs from the pod
	readCloser, err := d.KubeClient.Core().RESTClient().Get().
		Namespace(metav1.NamespaceDefault).Name(pod.ObjectMeta.Name).
		Resource("pods").SubResource("log").
		Param("follow", "false").
		Param("container", pod.Spec.Containers[0].Name).
		Stream()
	if err != nil {
		r.Warn("DClu1010", nil, fmt.Sprintf(clRegPodLog, pod.ObjectMeta.Name, registryName, fmt.Sprintf("(%T) %[1]v", err)))
		return
	}
	defer readCloser.Close()
	// Indicator that selinux is blocking the registry from writing to disk.
	// The patterns are constant, so MustCompile cannot panic here; the
	// previous regexp.Compile calls silently discarded their error returns.
	selinuxErrorRegex := regexp.MustCompile(".*level=error.*mkdir.*permission denied.*")
	// If seen after the above error regex, we know the problem has since been fixed:
	selinuxSuccessRegex := regexp.MustCompile(".*level=info.*response completed.*http.request.method=PUT.*")
	clientError := ""
	registryError := ""
	selinuxError := ""
	scanner := bufio.NewScanner(readCloser)
	for scanner.Scan() {
		logLine := scanner.Text()
		// TODO: once the logging API gets "since" and "tail" and "limit", limit to more recent log entries
		// https://github.com/kubernetes/kubernetes/issues/12447
		if strings.Contains(logLine, `level=error msg="client error:`) {
			clientError = logLine // end up showing only the most recent client error
		} else if selinuxErrorRegex.MatchString(logLine) {
			selinuxError = logLine
		} else if selinuxSuccessRegex.MatchString(logLine) {
			// Check for a successful registry push, if this occurs after a selinux error
			// we can safely clear it, the problem has already been fixed.
			selinuxError = ""
		} else if strings.Contains(logLine, "level=error msg=") {
			registryError += "\n" + logLine // gather generic errors
		}
	}
	// A mid-stream read error means the scan above may be incomplete; report
	// it the same way as a failure to fetch the logs at all (previously the
	// scanner error was silently dropped).
	if err := scanner.Err(); err != nil {
		r.Warn("DClu1010", nil, fmt.Sprintf(clRegPodLog, pod.ObjectMeta.Name, registryName, fmt.Sprintf("(%T) %[1]v", err)))
	}
	if clientError != "" {
		r.Error("DClu1011", nil, fmt.Sprintf(clRegPodConn, pod.ObjectMeta.Name, registryName, clientError))
	}
	if selinuxError != "" {
		r.Error("DClu1020", nil, fmt.Sprintf(clRegSelinuxErr, pod.ObjectMeta.Name, registryName, selinuxError))
	}
	if registryError != "" {
		r.Warn("DClu1012", nil, fmt.Sprintf(clRegPodErr, pod.ObjectMeta.Name, registryName, registryError))
	}
}
// checkRegistryEndpoints verifies that the registry service has exactly one
// endpoint address per running pod. A lookup failure or a count mismatch is
// recorded and false is returned so later checks are skipped.
func (d *ClusterRegistry) checkRegistryEndpoints(pods []*kapi.Pod, r types.DiagnosticResult) bool {
	endPoint, err := d.KubeClient.Core().Endpoints(metav1.NamespaceDefault).Get(registryName, metav1.GetOptions{})
	if err != nil {
		r.Error("DClu1013", err, fmt.Sprintf(`Finding endpoints for "%s" service failed. This should never happen. Error: (%[2]T) %[2]v`, registryName, err))
		return false
	}
	// Total the endpoint addresses across all subsets.
	addressCount := 0
	for _, subset := range endPoint.Subsets {
		addressCount += len(subset.Addresses)
	}
	if addressCount == len(pods) {
		return true
	}
	r.Warn("DClu1014", nil, fmt.Sprintf(clRegNoEP, registryName, len(pods), addressCount))
	return false
}
// verifyRegistryImageStream creates a throwaway ImageStream in the default
// namespace and checks that the registry host recorded on it matches the
// actual registry service — either by cluster IP:port or by the
// <name>.<namespace>.svc:port hostname. A mismatch suggests the master has
// cached a stale service address. The test object is always deleted (via
// defer); the whole check is skipped when PreventModification is set.
func (d *ClusterRegistry) verifyRegistryImageStream(service *kapi.Service, r types.DiagnosticResult) {
	if d.PreventModification {
		r.Info("DClu1021", "Skipping creating an ImageStream to test registry service address, because you requested no API modifications.")
		return
	}
	imgStream, err := d.ImageStreamClient.ImageStreams(metav1.NamespaceDefault).Create(&osapi.ImageStream{ObjectMeta: metav1.ObjectMeta{GenerateName: "diagnostic-test"}})
	if err != nil {
		r.Error("DClu1015", err, fmt.Sprintf("Creating test ImageStream failed. Error: (%T) %[1]v", err))
		return
	}
	defer func() { // delete what we created, or notify that we couldn't
		if err := d.ImageStreamClient.ImageStreams(metav1.NamespaceDefault).Delete(imgStream.ObjectMeta.Name, nil); err != nil {
			r.Warn("DClu1016", err, fmt.Sprintf(clRegISDelFail, imgStream.ObjectMeta.Name, fmt.Sprintf("(%T) %[1]s", err)))
		}
	}()
	imgStream, err = d.ImageStreamClient.ImageStreams(metav1.NamespaceDefault).Get(imgStream.ObjectMeta.Name, metav1.GetOptions{}) // status is filled in post-create
	if err != nil {
		r.Error("DClu1017", err, fmt.Sprintf("Getting created test ImageStream failed. Error: (%T) %[1]v", err))
		return
	}
	r.Debug("DClu1018", fmt.Sprintf("Created test ImageStream: %[1]v", imgStream))
	// The registry host is whatever precedes the first "/" in the stream's
	// DockerImageRepository status field.
	cacheHost := strings.SplitN(imgStream.Status.DockerImageRepository, "/", 2)[0]
	// the registry for imagestreams was previously recorded as an IP, which could change if the registry service were re-created.
	// Now it is a cluster hostname, which should be unchanging even if re-created. Just ensure it is the right hostname.
	serviceIpPort := fmt.Sprintf("%s:%d", service.Spec.ClusterIP, service.Spec.Ports[0].Port)
	serviceHostPort := fmt.Sprintf("%s.%s.svc:%d", registryName, service.ObjectMeta.Namespace, service.Spec.Ports[0].Port)
	if cacheHost != serviceIpPort && cacheHost != serviceHostPort {
		r.Error("DClu1019", nil, fmt.Sprintf(clRegISMismatch, registryName, serviceIpPort, serviceHostPort, cacheHost))
	}
}
|
yanaspaula/rdf4j
|
tools/federation/src/main/java/org/eclipse/rdf4j/federated/endpoint/provider/RemoteRepositoryProvider.java
|
/*******************************************************************************
* Copyright (c) 2019 Eclipse RDF4J contributors.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*******************************************************************************/
package org.eclipse.rdf4j.federated.endpoint.provider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.eclipse.rdf4j.federated.endpoint.Endpoint;
import org.eclipse.rdf4j.federated.endpoint.EndpointClassification;
import org.eclipse.rdf4j.federated.endpoint.ManagedRepositoryEndpoint;
import org.eclipse.rdf4j.federated.exception.FedXException;
import org.eclipse.rdf4j.http.client.SharedHttpClientSessionManager;
import org.eclipse.rdf4j.repository.http.HTTPRepository;
/**
 * Provider for an Endpoint that uses a RDF4J {@link HTTPRepository} as underlying repository. All SPARQL endpoints are
 * considered Remote.
 *
 * @author <NAME>
 */
public class RemoteRepositoryProvider implements EndpointProvider<RemoteRepositoryRepositoryInformation> {

	@Override
	public Endpoint loadEndpoint(RemoteRepositoryRepositoryInformation repoInfo)
			throws FedXException {

		// Both settings are mandatory: the remote repository is addressed as
		// <repositoryServer>/<repositoryName>.
		String repositoryServer = repoInfo.get("repositoryServer");
		String repositoryName = repoInfo.get("repositoryName");
		if (repositoryServer == null || repositoryName == null) {
			throw new FedXException("Invalid configuration, repositoryServer and repositoryName are required for "
					+ repoInfo.getName());
		}

		try {
			HTTPRepository repo = new HTTPRepository(repositoryServer, repositoryName);
			// Pooled HTTP client (20 connections total / per route) so concurrent
			// federated sub-queries do not exhaust connections.
			HttpClientBuilder httpClientBuilder = HttpClients.custom()
					.useSystemProperties()
					.setMaxConnTotal(20)
					.setMaxConnPerRoute(20);
			((SharedHttpClientSessionManager) repo.getHttpClientSessionManager())
					.setHttpClientBuilder(httpClientBuilder);

			// init() followed immediately by shutDown() — presumably a
			// reachability probe before handing the repository to the managed
			// endpoint, which controls its lifecycle from here on.
			// NOTE(review): confirm the endpoint re-initializes the repository.
			try {
				repo.init();
			} finally {
				repo.shutDown();
			}

			String location = repositoryServer + "/" + repositoryName;
			EndpointClassification epc = EndpointClassification.Remote;

			ManagedRepositoryEndpoint res = new ManagedRepositoryEndpoint(repoInfo, location, epc, repo);
			res.setEndpointConfiguration(repoInfo.getEndpointConfiguration());
			return res;
		} catch (Exception e) {
			// Wrap anything (config, network, protocol) in a FedXException with context.
			throw new FedXException("Repository " + repoInfo.getId() + " could not be initialized: " + e.getMessage(),
					e);
		}
	}
}
|
anandchouhan/nomo
|
db/migrate/20171111202334_add_airport_to_trips.rb
|
<filename>db/migrate/20171111202334_add_airport_to_trips.rb
# Migration: adds airport name and airport code string columns to trips.
class AddAirportToTrips < ActiveRecord::Migration
  # Columns managed by this migration, keyed by name with their type.
  COLUMNS = { airport: :string, airport_code: :string }.freeze

  def up
    COLUMNS.each { |name, type| add_column :trips, name, type }
  end

  def down
    COLUMNS.each { |name, type| remove_column :trips, name, type }
  end
end
|
jacadcaps/webkitty
|
Source/WebKitLegacy/Storage/WebDatabaseProvider.h
|
/*
* Copyright (C) 2015, 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <WebCore/DatabaseProvider.h>
#include <wtf/Forward.h>
#include <wtf/HashMap.h>
#include <wtf/RefPtr.h>
#if ENABLE(INDEXED_DATABASE)
#include "InProcessIDBServer.h"
#endif
// Database provider for WebKitLegacy. Inherits WebSQL support from
// WebCore::DatabaseProvider and, when IndexedDB is enabled, serves
// per-session IndexedDB connections backed by in-process IDB servers.
class WebDatabaseProvider final : public WebCore::DatabaseProvider {
    // NeverDestroyed needs access to the private constructor for singleton().
    friend class NeverDestroyed<WebDatabaseProvider>;
public:
    // Process-wide shared instance.
    static WebDatabaseProvider& singleton();
    virtual ~WebDatabaseProvider();
#if ENABLE(INDEXED_DATABASE)
    WebCore::IDBClient::IDBConnectionToServer& idbConnectionToServerForSession(const PAL::SessionID&) override;
    void deleteAllDatabases();
#endif
private:
    explicit WebDatabaseProvider();
    static String indexedDatabaseDirectoryPath();
#if ENABLE(INDEXED_DATABASE)
    // One in-process IDB server per session; keyed by session ID.
    HashMap<PAL::SessionID, RefPtr<InProcessIDBServer>> m_idbServerMap;
#endif
};
|
local-motion/smokefree-initiative-service
|
src/main/java/io/localmotion/user/event/UserRenamedEvent.java
|
package io.localmotion.user.event;
import lombok.*;
/**
 * Domain event signalling that a user was renamed. The new name itself is
 * not carried here; it lives in a separate PII record referenced by
 * {@code piiRecordId}.
 */
@Getter
@NoArgsConstructor(access = AccessLevel.PRIVATE)
@AllArgsConstructor
@ToString
public class UserRenamedEvent {
	// Identifier of the renamed user.
	String userId;
	Long piiRecordId; // UserPII record type, only userName property is valid
}
|
Bridge-Mutual/Bridgemutual-v2
|
test/PolicyBookRegistry.js
|
<gh_stars>0
const PolicyBookRegistry = artifacts.require("PolicyBookRegistry");
const PolicyBookFabric = artifacts.require("PolicyBookFabric");
const ContractsRegistry = artifacts.require("ContractsRegistry");
const STBLMock = artifacts.require("STBLMock");
const BSCSTBLMock = artifacts.require("BSCSTBLMock");
const MATICSTBLMock = artifacts.require("MATICSTBLMock");
const BMICoverStaking = artifacts.require("BMICoverStaking");
const BMICoverStakingView = artifacts.require("BMICoverStakingView");
const RewardsGenerator = artifacts.require("RewardsGenerator");
const PolicyBookAdmin = artifacts.require("PolicyBookAdmin");
const PolicyQuote = artifacts.require("PolicyQuote");
const ClaimingRegistry = artifacts.require("ClaimingRegistry");
const PolicyRegistry = artifacts.require("PolicyRegistry");
const LiquidityRegistry = artifacts.require("LiquidityRegistry");
const NFTStaking = artifacts.require("NFTStaking");
const BMIUtilityNFT = artifacts.require("BMIUtilityNFT");
const LiquidityMiningStakingMock = artifacts.require("LiquidityMiningStakingMock");
const CapitalPool = artifacts.require("CapitalPool");
const ReinsurancePool = artifacts.require("ReinsurancePool");
const ShieldMining = artifacts.require("ShieldMining");
const UserLeveragePool = artifacts.require("UserLeveragePool");
const LeveragePortfolioView = artifacts.require("LeveragePortfolioView");
const YieldGenerator = artifacts.require("YieldGenerator");
const PolicyBook = artifacts.require("PolicyBook");
const PolicyBookFacade = artifacts.require("PolicyBookFacade");
const Reverter = require("./helpers/reverter");
const BigNumber = require("bignumber.js");
const truffleAssert = require("truffle-assertions");
const { assert } = require("chai");
const { getStableAmount, getNetwork, Networks } = require("./helpers/utils");
// Contract categories accepted by PolicyBookFabric; presumably mirrors the
// on-chain ContractType enum — confirm against the Solidity sources.
const ContractType = {
  CONTRACT: 0,
  STABLECOIN: 1,
  SERVICE: 2,
  EXCHANGE: 3,
  VARIOUS: 4,
};
// Wrap a numeric value in a BigNumber instance for overflow-safe arithmetic.
function toBN(number) {
  const wrapped = new BigNumber(number);
  return wrapped;
}
// Shorthand: convert an ether-denominated string amount to wei.
const wei = web3.utils.toWei;
contract("PolicyBookRegistry", async (accounts) => {
const zeroAddress = "0x0000000000000000000000000000000000000000";
const reverter = new Reverter(web3);
let policyBookRegistry;
let stbl;
let policyBookFabric;
let nftStaking;
let policyBookAdmin;
let policyBookFacade;
let userLeveragePool;
let network;
const DISTRIBUTOR = accounts[2];
const NON_FABRIC = accounts[3];
const NOTHING = accounts[9];
const PRECISION = toBN(10).pow(25);
let initialDeposit, stblInitialDeposit;
before("setup", async () => {
network = await getNetwork();
const contractsRegistry = await ContractsRegistry.new();
if (network == Networks.ETH) {
stbl = await STBLMock.new("stbl", "stbl", 6);
} else if (network == Networks.BSC) {
stbl = await BSCSTBLMock.new();
} else if (network == Networks.POL) {
stbl = await MATICSTBLMock.new();
await stbl.initialize("stbl", "stbl", 6, accounts[0]);
}
const _capitalPool = await CapitalPool.new();
const _bmiCoverStaking = await BMICoverStaking.new();
const _bmiCoverStakingView = await BMICoverStakingView.new();
const _rewardsGenerator = await RewardsGenerator.new();
const _policyBookRegistry = await PolicyBookRegistry.new();
const _policyBookFabric = await PolicyBookFabric.new();
const _policyBookAdmin = await PolicyBookAdmin.new();
const _policyQuote = await PolicyQuote.new();
const _policyRegistry = await PolicyRegistry.new();
const _claimingRegistry = await ClaimingRegistry.new();
const _liquidityRegistry = await LiquidityRegistry.new();
const _nftStaking = await NFTStaking.new();
const _bmiUtilityNFT = await BMIUtilityNFT.new();
const _stakingMock = await LiquidityMiningStakingMock.new();
const _yieldGenerator = await YieldGenerator.new();
const _reinsurancePool = await ReinsurancePool.new();
const _shieldMining = await ShieldMining.new();
const _leveragePortfolioView = await LeveragePortfolioView.new();
const _policyBookImpl = await PolicyBook.new();
const _policyBookFacadeImpl = await PolicyBookFacade.new();
const _userLeveragePoolImpl = await UserLeveragePool.new();
await contractsRegistry.__ContractsRegistry_init();
await contractsRegistry.addContract(await contractsRegistry.PRICE_FEED_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.CLAIM_VOTING_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.REINSURANCE_POOL_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.BMI_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.BMI_STAKING_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.BMI_UTILITY_NFT_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.BMI_TREASURY_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.CAPITAL_POOL_NAME(), _capitalPool.address);
await contractsRegistry.addContract(await contractsRegistry.USDT_NAME(), stbl.address);
await contractsRegistry.addContract(await contractsRegistry.DEFI_PROTOCOL_1_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.DEFI_PROTOCOL_2_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.DEFI_PROTOCOL_3_NAME(), NOTHING);
await contractsRegistry.addContract(await contractsRegistry.LIQUIDITY_BRIDGE_NAME(), NOTHING);
await contractsRegistry.addProxyContract(
await contractsRegistry.CLAIMING_REGISTRY_NAME(),
_claimingRegistry.address
);
await contractsRegistry.addProxyContract(await contractsRegistry.POLICY_REGISTRY_NAME(), _policyRegistry.address);
await contractsRegistry.addProxyContract(
await contractsRegistry.BMI_COVER_STAKING_NAME(),
_bmiCoverStaking.address
);
await contractsRegistry.addProxyContract(
await contractsRegistry.BMI_COVER_STAKING_VIEW_NAME(),
_bmiCoverStakingView.address
);
await contractsRegistry.addProxyContract(
await contractsRegistry.REWARDS_GENERATOR_NAME(),
_rewardsGenerator.address
);
await contractsRegistry.addProxyContract(
await contractsRegistry.POLICY_BOOK_REGISTRY_NAME(),
_policyBookRegistry.address
);
await contractsRegistry.addProxyContract(
await contractsRegistry.POLICY_BOOK_FABRIC_NAME(),
_policyBookFabric.address
);
await contractsRegistry.addProxyContract(
await contractsRegistry.POLICY_BOOK_ADMIN_NAME(),
_policyBookAdmin.address
);
await contractsRegistry.addProxyContract(await contractsRegistry.POLICY_QUOTE_NAME(), _policyQuote.address);
await contractsRegistry.addProxyContract(
await contractsRegistry.LIQUIDITY_REGISTRY_NAME(),
_liquidityRegistry.address
);
await contractsRegistry.addProxyContract(await contractsRegistry.NFT_STAKING_NAME(), _nftStaking.address);
await contractsRegistry.addProxyContract(await contractsRegistry.BMI_UTILITY_NFT_NAME(), _bmiUtilityNFT.address);
await contractsRegistry.addProxyContract(
await contractsRegistry.LIQUIDITY_MINING_STAKING_ETH_NAME(),
_stakingMock.address
);
await contractsRegistry.addProxyContract(
await contractsRegistry.LIQUIDITY_MINING_STAKING_USDT_NAME(),
_stakingMock.address
);
await contractsRegistry.addProxyContract(await contractsRegistry.REINSURANCE_POOL_NAME(), _reinsurancePool.address);
await contractsRegistry.addProxyContract(await contractsRegistry.SHIELD_MINING_NAME(), _shieldMining.address);
await contractsRegistry.addProxyContract(
await contractsRegistry.LEVERAGE_PORTFOLIO_VIEW_NAME(),
_leveragePortfolioView.address
);
await contractsRegistry.addProxyContract(await contractsRegistry.YIELD_GENERATOR_NAME(), _yieldGenerator.address);
policyBookAdmin = await PolicyBookAdmin.at(await contractsRegistry.getPolicyBookAdminContract());
const bmiCoverStaking = await BMICoverStaking.at(await contractsRegistry.getBMICoverStakingContract());
const bmiCoverStakingView = await BMICoverStakingView.at(await contractsRegistry.getBMICoverStakingViewContract());
const rewardsGenerator = await RewardsGenerator.at(await contractsRegistry.getRewardsGeneratorContract());
const claimingRegistry = await ClaimingRegistry.at(await contractsRegistry.getClaimingRegistryContract());
policyBookFabric = await PolicyBookFabric.at(await contractsRegistry.getPolicyBookFabricContract());
policyBookRegistry = await PolicyBookRegistry.at(await contractsRegistry.getPolicyBookRegistryContract());
nftStaking = await NFTStaking.at(await contractsRegistry.getNFTStakingContract());
const capitalPool = await CapitalPool.at(await contractsRegistry.getCapitalPoolContract());
const policyRegistry = await PolicyRegistry.at(await contractsRegistry.getPolicyRegistryContract());
const policyQuote = await PolicyQuote.at(await contractsRegistry.getPolicyQuoteContract());
const liquidityRegistry = await LiquidityRegistry.at(await contractsRegistry.getLiquidityRegistryContract());
const reinsurancePool = await ReinsurancePool.at(await contractsRegistry.getReinsurancePoolContract());
const shieldMining = await ShieldMining.at(await contractsRegistry.getShieldMiningContract());
const yieldGenerator = await YieldGenerator.at(await contractsRegistry.getYieldGeneratorContract());
await policyBookAdmin.__PolicyBookAdmin_init(
_policyBookImpl.address,
_policyBookFacadeImpl.address,
_userLeveragePoolImpl.address
);
await policyBookFabric.__PolicyBookFabric_init();
await claimingRegistry.__ClaimingRegistry_init();
await rewardsGenerator.__RewardsGenerator_init();
await capitalPool.__CapitalPool_init();
await reinsurancePool.__ReinsurancePool_init();
await bmiCoverStaking.__BMICoverStaking_init();
await nftStaking.__NFTStaking_init();
await yieldGenerator.__YieldGenerator_init(network);
await contractsRegistry.injectDependencies(await contractsRegistry.CAPITAL_POOL_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.BMI_COVER_STAKING_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.BMI_COVER_STAKING_VIEW_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.POLICY_BOOK_REGISTRY_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.POLICY_BOOK_FABRIC_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.POLICY_REGISTRY_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.REWARDS_GENERATOR_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.CLAIMING_REGISTRY_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.LIQUIDITY_REGISTRY_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.REINSURANCE_POOL_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.SHIELD_MINING_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.NFT_STAKING_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.LEVERAGE_PORTFOLIO_VIEW_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.POLICY_BOOK_ADMIN_NAME());
await contractsRegistry.injectDependencies(await contractsRegistry.POLICY_QUOTE_NAME());
await policyBookAdmin.setupPricingModel(
PRECISION.times(80),
PRECISION.times(80),
PRECISION.times(2),
PRECISION.times(2),
wei("10"),
PRECISION.times(10),
PRECISION.times(50),
PRECISION.times(25),
PRECISION.times(100)
);
const tx = await policyBookFabric.createLeveragePools(NOTHING, ContractType.VARIOUS, "User Leverage Pool", "USDT");
const userLeveragePoolAddress = tx.logs[0].args.at;
userLeveragePool = await UserLeveragePool.at(userLeveragePoolAddress);
initialDeposit = wei("1000");
stblInitialDeposit = getStableAmount("1000");
await stbl.approve(policyBookFabric.address, stblInitialDeposit);
await reverter.snapshot();
});
afterEach("revert", reverter.revert);
describe("add", async () => {
  // Plain accounts standing in for an insured contract, a policy book,
  // and its facade when calling the registry directly.
  const insuredContract = accounts[3];
  const bookStandIn = accounts[4];
  const facadeStandIn = accounts[5];

  beforeEach("setup", async () => {
    initialDeposit = wei("1000");
    stblInitialDeposit = getStableAmount("1000");
  });

  it("should not allow not fabric to add", async () => {
    // Only the PolicyBookFabric may register new books.
    await truffleAssert.reverts(
      policyBookRegistry.add(insuredContract, ContractType.CONTRACT, bookStandIn, facadeStandIn, { from: NON_FABRIC }),
      "PolicyBookRegistry: Not a PolicyBookFabric"
    );
  });

  it("should not allow to add duplicate by the same address", async () => {
    await policyBookFabric.create(insuredContract, ContractType.CONTRACT, "TestBook", "TB", initialDeposit, zeroAddress);
    // A second book for the same insured contract must be rejected.
    await truffleAssert.reverts(
      policyBookFabric.create(insuredContract, ContractType.CONTRACT, "TestBook", "TB", initialDeposit, zeroAddress),
      "PolicyBookRegistry: PolicyBook for the contract is already created"
    );
  });

  it("should increase count of books", async () => {
    // One book (the user leverage pool) already exists from the outer setup.
    assert.equal(await policyBookRegistry.count(), 1);
    await policyBookFabric.create(insuredContract, ContractType.CONTRACT, "TestBook", "TB", initialDeposit, zeroAddress);
    assert.equal(await policyBookRegistry.count(), 2);
    assert.equal(await policyBookRegistry.countByType(ContractType.CONTRACT), 1);
  });

  it("should save policy book by address", async () => {
    assert.equal(await policyBookRegistry.policyBookFor(insuredContract), zeroAddress);
    const creationTx = await policyBookFabric.create(
      insuredContract,
      ContractType.CONTRACT,
      "TestBook",
      "TB",
      initialDeposit,
      zeroAddress
    );
    const createdBook = creationTx.logs[0].args.at;
    assert.equal(await policyBookRegistry.policyBookFor(insuredContract), createdBook);
  });

  it("should save policy book", async () => {
    assert.deepEqual(await policyBookRegistry.list(0, 10), [userLeveragePool.address]);
    const creationTx = await policyBookFabric.create(
      insuredContract,
      ContractType.CONTRACT,
      "TestBook",
      "TB",
      initialDeposit,
      zeroAddress
    );
    const createdBook = creationTx.logs[0].args.at;
    assert.deepEqual(await policyBookRegistry.list(0, 10), [userLeveragePool.address, createdBook]);
    assert.deepEqual(await policyBookRegistry.listByType(ContractType.CONTRACT, 0, 10), [createdBook]);
  });
});
// Covers PolicyBookRegistry.buyPolicyBatch: buying coverage across several
// policy books with a single registry call.
describe("buyPolicyBatch", async () => {
  const CONTRACT1 = accounts[3];
  const CONTRACT2 = accounts[4];
  beforeEach("setup", async () => {
    initialDeposit = wei("1000");
    stblInitialDeposit = getStableAmount("1000");
  });
  // Skipped: per the TODO below, buyPolicyBatch does not yet move/approve the
  // premium funds itself, so the purchase cannot succeed end to end.
  it.skip("should buy policy batch", async () => {
    // TODO on buyPolicyBatch function before calling buyPolicyFor() :
    // · transfer funds (getPolicyPrice) to address(this) for each purchase
    // · approve PolicyBook to spend address(this) funds
    await stbl.approve(policyBookFabric.address, 0);
    await stbl.approve(policyBookFabric.address, stblInitialDeposit);
    // Create two books with their facades.
    await policyBookFabric.create(CONTRACT1, ContractType.CONTRACT, "TestBook", "TB1", initialDeposit, zeroAddress);
    policyBook1 = await PolicyBook.at(await policyBookRegistry.policyBookFor(CONTRACT1));
    await policyBookFabric.create(CONTRACT2, ContractType.CONTRACT, "TestBook", "TB2", initialDeposit, zeroAddress);
    policyBook2 = await PolicyBook.at(await policyBookRegistry.policyBookFor(CONTRACT2));
    const policyBookFacadeAddress1 = await policyBook1.policyBookFacade();
    policyBookFacade1 = await PolicyBookFacade.at(policyBookFacadeAddress1);
    const policyBookFacadeAddress2 = await policyBook2.policyBookFacade();
    policyBookFacade2 = await PolicyBookFacade.at(policyBookFacadeAddress2);
    // Fund both books with liquidity, then buy coverage in both at once.
    await stbl.approve(policyBook1.address, getStableAmount("5000"));
    await stbl.approve(policyBook2.address, getStableAmount("5000"));
    await policyBookFacade1.addLiquidity(wei("1000"));
    await policyBookFacade2.addLiquidity(wei("999"));
    await policyBookRegistry.buyPolicyBatch(
      [policyBook1.address, policyBook2.address],
      [5, 6],
      [wei("1000"), wei("999")]
    );
    // The buyer's cover in each book should equal the requested amounts.
    const info1 = await policyBook1.userStats(accounts[0]);
    const info2 = await policyBook2.userStats(accounts[0]);
    assert.equal(toBN(info1.coverTokens).toString(), toBN(wei("1000")).toString());
    assert.equal(toBN(info2.coverTokens).toString(), toBN(wei("999")).toString());
  });
});
// Checks the aggregate stats tuple returned by listWithStats for a freshly
// created book (index 0 of the result is the leverage pool from the outer setup).
describe("listWithStats", async () => {
  const CONTRACT = accounts[3];
  beforeEach("setup", async () => {
    initialDeposit = wei("1000");
    stblInitialDeposit = getStableAmount("1000");
  });
  it("should return correct values", async () => {
    await stbl.approve(policyBookFabric.address, 0);
    await stbl.approve(policyBookFabric.address, stblInitialDeposit.times(2));
    // Create with a 2000 (2x) initial deposit.
    await policyBookFabric.create(
      CONTRACT,
      ContractType.CONTRACT,
      "TestBook",
      "TB",
      toBN(initialDeposit).times(2),
      zeroAddress
    );
    const result = await policyBookRegistry.listWithStats(0, 2);
    // result[0] = addresses, result[1] = per-book stats tuples; index 1 is the new book.
    assert.equal(result[0][1], await policyBookRegistry.policyBookFor(CONTRACT));
    assert.equal(result[1][1][0], "bmiV2TBCover");
    assert.equal(result[1][1][1], CONTRACT);
    assert.equal(result[1][1][2], ContractType.CONTRACT);
    // Slots 3 and 4 both equal the 2000 deposit.
    assert.equal(toBN(result[1][1][3]).toString(), toBN(wei("2000")).toString());
    assert.equal(toBN(result[1][1][4]).toString(), toBN(wei("2000")).toString());
    assert.equal(result[1][1][5], 0);
    assert.equal(result[1][1][6], 0);
    assert.equal(result[1][1][7], 0);
    // NOTE(review): 15.625 and 1 are presumably the APY and token price fields —
    // confirm the tuple layout against the registry contract.
    assert.equal(toBN(result[1][1][8]).toString(), toBN(wei("15.625")).toString());
    assert.equal(toBN(result[1][1][9]).toString(), toBN(wei("1")).toString());
    assert.equal(result[1][1][10], false);
  });
});
describe("listWithStatsByType", async () => {
  const insuredContract = accounts[3];

  it("should return correct values", async () => {
    await policyBookFabric.create(insuredContract, ContractType.CONTRACT, "TestBook", "TB", initialDeposit, zeroAddress);

    // Unpack the (addresses, stats) pair returned by the registry.
    const result = await policyBookRegistry.listWithStatsByType(ContractType.CONTRACT, 0, 1);
    const addresses = result[0];
    const stats = result[1];
    const bookStats = stats[0];

    assert.equal(addresses[0], await policyBookRegistry.policyBookFor(insuredContract));
    assert.equal(bookStats[0], "bmiV2TBCover");
    assert.equal(bookStats[1], insuredContract);
    assert.equal(bookStats[2], ContractType.CONTRACT);
    assert.equal(toBN(bookStats[3]).toString(), toBN(wei("1000")).toString());
    assert.equal(toBN(bookStats[4]).toString(), toBN(wei("1000")).toString());
    assert.equal(bookStats[5], 0);
    assert.equal(bookStats[6], 0);
    assert.equal(bookStats[7], 0);
    assert.equal(toBN(bookStats[8]).toString(), toBN(wei("100")).toString());
    assert.equal(toBN(bookStats[9]).toString(), toBN(wei("1")).toString());
    assert.equal(bookStats[10], false);
  });
});
// Paging behavior of list / listByType with three CONTRACT books plus the
// leverage pool created by the outer setup (4 entries total).
describe("getBooks", async () => {
  const contracts = accounts.slice(3, 6);
  let bookAddresses;

  beforeEach("setup", async () => {
    initialDeposit = wei("1000");
    stblInitialDeposit = getStableAmount("1000");
    bookAddresses = [];
    for (let i = 0; i < 3; i++) {
      // NOTE(review): the fabric is approved for `initialDeposit` (18-decimal wei)
      // rather than `stblInitialDeposit` — confirm this is intended if the stable
      // token uses different decimals.
      await stbl.approve(policyBookFabric.address, 0);
      await stbl.approve(policyBookFabric.address, initialDeposit);
      const policyBookAddr = (
        await policyBookFabric.create(
          contracts[i],
          ContractType.CONTRACT,
          "TestBook",
          "TB",
          initialDeposit,
          zeroAddress
        )
      ).logs[0].args.at;
      bookAddresses.push(policyBookAddr);
    }
  });

  it("should return valid if inside range", async () => {
    // Index 0 is the leverage pool; slice it off before comparing.
    const result1 = await policyBookRegistry.list(0, 4);
    const result2 = await policyBookRegistry.listByType(ContractType.CONTRACT, 0, 4);
    const result3 = await policyBookRegistry.listByType(ContractType.STABLECOIN, 0, 4);
    assert.deepEqual(result1.slice(1, 4), bookAddresses);
    assert.deepEqual(result1.slice(1, 4), result2);
    assert.deepEqual(result3, []);
  });

  it("should return valid longer than range", async () => {
    const result1 = await policyBookRegistry.list(1, 3);
    const result2 = await policyBookRegistry.listByType(ContractType.CONTRACT, 0, 3);
    const result3 = await policyBookRegistry.listByType(ContractType.STABLECOIN, 0, 3);
    assert.deepEqual(result1, bookAddresses);
    assert.deepEqual(result1, result2);
    assert.deepEqual(result3, []);
  });

  // FIX: this test duplicated the previous test's title, making the two
  // indistinguishable in reports; renamed to describe the out-of-range
  // offset it actually exercises.
  it("should return valid if offset is out of range", async () => {
    const result1 = await policyBookRegistry.list(3, 10);
    const result2 = await policyBookRegistry.listByType(ContractType.CONTRACT, 3, 10);
    const result3 = await policyBookRegistry.listByType(ContractType.STABLECOIN, 3, 10);
    assert.deepEqual(result1.slice(1, 10), []);
    assert.deepEqual(result1.slice(1, 10), result2);
    assert.deepEqual(result3, []);
  });
});
});
|
jreece1567/go_swagger_client
|
models_secure/wishlist_event_occurrence.go
|
<filename>models_secure/wishlist_event_occurrence.go
package models_secure
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
strfmt "github.com/go-openapi/strfmt"
"github.com/go-openapi/errors"
)
/*WishlistEventOccurrence holds the occurrence window of a wishlist event:
the date-times at which the event starts and ends.

swagger:model wishlistEventOccurrence
*/
type WishlistEventOccurrence struct {

	/* Date and time the event ends.
	 */
	FinishesAt strfmt.DateTime `json:"finishes_at,omitempty"`

	/* Date and time the event starts.
	 */
	StartsAt strfmt.DateTime `json:"starts_at,omitempty"`
}
// Validate validates this wishlist event occurrence.
//
// Generated stub: the model currently declares no validation rules, so res
// always stays empty and the method returns nil.
func (m *WishlistEventOccurrence) Validate(formats strfmt.Registry) error {
	var res []error

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
|
fancylou/o2oa
|
x_organization_assemble_control_alpha/jest/personAttribute.js
|
// Shared state for the personAttribute list views:
//   root                   - base URL of the personattribute JAX-RS service
//   list_action/_parameter - the last list request, replayed by personAttribute_list_reload()
//   first/last             - ids of the first/last row on the current page ('(0)' = none)
//   count                  - page size
personAttribute_parameter = {
  root : common_parameter.host + '/x_organization_assemble_control/jaxrs/personattribute',
  list_action : null,
  list_action_parameter : null,
  first : '(0)',
  last : '(0)',
  count : 20
};
// Re-run the most recent list request; if none was made yet, load the first page.
function personAttribute_list_reload() {
  var action = personAttribute_parameter.list_action;
  if (!action) {
    personAttribute_list_next('(0)');
    return;
  }
  action.call(window, personAttribute_parameter.list_action_parameter);
}
// Load the page of attributes that follows row `id` ('(0)' = from the start)
// and render it into #content. Records itself as the current list action so
// personAttribute_list_reload() can replay it.
function personAttribute_list_next(id) {
  var id = ( id ? id : personAttribute_parameter.last);
  personAttribute_parameter.list_action = personAttribute_list_next;
  personAttribute_parameter.list_action_parameter = id;
  $.ajax({
    type : 'get',
    dataType : 'json',
    url : personAttribute_parameter.root + '/list/' + id + '/next/' + personAttribute_parameter.count,
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  }).done(function(data) {
    if (data.type == 'success') {
      if (data.data.length > 0) {
        // Remember the page bounds for prev/next navigation.
        personAttribute_parameter.first = data.data[0].id;
        personAttribute_parameter.last = data.data[data.data.length - 1].id;
      } else {
        // Past the end: reset the lower bound so paging restarts from the top.
        personAttribute_parameter.first = '(0)';
      }
      $('#content').html(personAttribute_list_grid(data.data));
      $('#total', '#content').html(data.count);
      personAttribute_list_init();
    } else {
      failure(data);
    }
  });
}
// Load the page of attributes preceding row `id` ('(0)' = from the start);
// mirror of personAttribute_list_next.
function personAttribute_list_prev(id) {
  var id = ( id ? id : personAttribute_parameter.first);
  personAttribute_parameter.list_action = personAttribute_list_prev;
  personAttribute_parameter.list_action_parameter = id;
  $.ajax({
    type : 'get',
    dataType : 'json',
    url : personAttribute_parameter.root + '/list/' + id + '/prev/' + personAttribute_parameter.count,
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  }).done(function(data) {
    if (data.type == 'success') {
      if (data.data.length > 0) {
        // Remember the page bounds for prev/next navigation.
        personAttribute_parameter.first = data.data[0].id;
        personAttribute_parameter.last = data.data[data.data.length - 1].id;
      } else {
        // Before the beginning: reset the upper bound so paging restarts.
        personAttribute_parameter.last = '(0)';
      }
      $('#content').html(personAttribute_list_grid(data.data));
      $('#total', '#content').html(data.count);
      personAttribute_list_init();
    } else {
      failure(data);
    }
  });
}
// Render a page of person attributes as an HTML table string, including the
// prev/next pager row wired up later by personAttribute_list_init().
function personAttribute_list_grid(items) {
  var parts = ['<table border="1" width="100%">'];
  parts.push('<tr><td colspan="5"> <a href="#" id="prev">prev</a> <a href="#" id="next">next</a> <span id="total">0</span></td></tr>');
  parts.push('<tr><th>rank</th><th>id</th><th>name</th><th>person</th><th>operate</th></tr>');
  items.forEach(function(item) {
    parts.push('<tr>');
    parts.push('<td>' + item.rank + '</td>');
    parts.push('<td>' + item.id + '</td>');
    parts.push('<td>' + item.name + '</td>');
    parts.push('<td>' + item.person + '</td>');
    parts.push('<td>');
    parts.push('<a href="#" onclick="personAttribute_edit(\'' + item.id + '\')">edit</a> ');
    parts.push('<a href="#" onclick="personAttribute_delete(\'' + item.id + '\')">delete</a>');
    parts.push('</td>');
    parts.push('</tr>');
  });
  parts.push('</table>');
  return parts.join('');
}
// Attach click handlers to the pager links rendered by personAttribute_list_grid.
function personAttribute_list_init() {
  var scope = '#content';
  $('#prev', scope).click(function() {
    personAttribute_list_prev();
  });
  $('#next', scope).click(function() {
    personAttribute_list_next();
  });
}
// Render the "create person attribute" form into #content and hook up the
// post link to personAttribute_post().
function personAttribute_create() {
  var str = '<table border="1" width="100%">';
  str += '<tr><td colspan="2"><a href="#" id="post">post</a></td></tr>';
  str += '<tr><td>name:</td><td><input type="text" id="name" style="width:95%"/></td></tr>';
  str += '<tr><td>person:</td><td><input type="text" id="person" style="width:95%"/></td></tr>';
  // FIX: label read "unqiue:"; corrected spelling (the input id was already "unique").
  str += '<tr><td>unique:</td><td><input type="text" id="unique" style="width:95%"/></td></tr>';
  str += '<tr><td>attributeList:</td><td><textarea id="attributeList" style="width:95%;height:200px"/></td></tr>';
  str += '</table>';
  $('#content').html(str);
  $('#post', '#content').click(function() {
    personAttribute_post();
  });
}
// Create a new person attribute from the form rendered by
// personAttribute_create(), then refresh the list on success.
function personAttribute_post() {
  $.ajax({
    type : 'post',
    dataType : 'json',
    url : personAttribute_parameter.root,
    contentType : 'application/json; charset=utf-8',
    data : JSON.stringify({
      name : $('#name', '#content').val(),
      person : $('#person', '#content').val(),
      unique : $('#unique', '#content').val(),
      // splitValue turns the delimited textarea text into an array.
      attributeList : splitValue($('#attributeList', '#content').val())
    }),
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  }).done(function(data) {
    if (data.type == 'success') {
      personAttribute_list_reload();
    } else {
      failure(data);
    }
  });
}
// Render the edit form for one person attribute, load its current values
// from the service, and hook up the put link to personAttribute_put().
function personAttribute_edit(id) {
  var str = '<table border="1" width="100%">';
  str += '<tr><td colspan="2"><a href="#" id="put">put</a></td></tr>';
  str += '<tr><td>id:</td><td id="id"></td></tr>';
  str += '<tr><td>sequence:</td><td id="sequence"></td></tr>';
  str += '<tr><td>name:</td><td><input type="text" id="name" style="width:95%"/></td></tr>';
  str += '<tr><td>person:</td><td><input type="text" id="person" style="width:95%"/></td></tr>';
  // FIX: label read "unqiue:"; corrected spelling (the input id was already "unique").
  str += '<tr><td>unique:</td><td><input type="text" id="unique" style="width:95%"/></td></tr>';
  str += '<tr><td>attributeList:</td><td><textarea id="attributeList" style="width:95%;height:200px"/></td></tr>';
  str += '</table>';
  $('#content').html(str);
  $('#put', '#content').click(function() {
    personAttribute_put(id);
  });
  $.ajax({
    type : 'get',
    dataType : 'json',
    url : personAttribute_parameter.root + '/' + id,
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  }).done(function(data) {
    if (data.type == 'success') {
      $('#id', '#content').html(data.data.id);
      $('#sequence', '#content').html(data.data.sequence);
      $('#name', '#content').val(data.data.name);
      $('#person', '#content').val(data.data.person);
      $('#unique', '#content').val(data.data.unique);
      // FIX: was `.val(data.data.attributeList).join(',')` — that called join()
      // on the jQuery object returned by val() (a TypeError) instead of joining
      // the array before setting the field. This is the inverse of the
      // splitValue() call used when saving in personAttribute_put().
      $('#attributeList', '#content').val((data.data.attributeList || []).join(','));
    } else {
      failure(data);
    }
  });
}
// Update the person attribute `id` with the current form values, then
// refresh the list on success.
function personAttribute_put(id) {
  $.ajax({
    type : 'put',
    dataType : 'json',
    url : personAttribute_parameter.root + '/' + id,
    contentType : 'application/json; charset=utf-8',
    data : JSON.stringify({
      name : $('#name', '#content').val(),
      person : $('#person', '#content').val(),
      unique : $('#unique', '#content').val(),
      // splitValue turns the delimited textarea text into an array.
      attributeList : splitValue($('#attributeList', '#content').val())
    }),
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  }).done(function(data) {
    if (data.type == 'success') {
      personAttribute_list_reload();
    } else {
      failure(data);
    }
  });
}
// Delete the person attribute `id`, then refresh the current list page.
function personAttribute_delete(id) {
  var request = $.ajax({
    type : 'delete',
    dataType : 'json',
    url : personAttribute_parameter.root + '/' + id,
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  });
  request.done(function(data) {
    if (data.type != 'success') {
      failure(data);
      return;
    }
    personAttribute_list_reload();
  });
}
// Render query results (no pager row, no rank column) as an HTML table string.
function personAttribute_query_grid(items) {
  var parts = ['<table border="1" width="100%">'];
  parts.push('<tr><th>id</th><th>name</th><th>person</th><th>operate</th></tr>');
  items.forEach(function(item) {
    parts.push('<tr>');
    parts.push('<td>' + item.id + '</td>');
    parts.push('<td>' + item.name + '</td>');
    parts.push('<td>' + item.person + '</td>');
    parts.push('<td>');
    parts.push('<a href="#" onclick="personAttribute_edit(\'' + item.id + '\')">edit</a> ');
    parts.push('<a href="#" onclick="personAttribute_delete(\'' + item.id + '\')">delete</a>');
    parts.push('</td>');
    parts.push('</tr>');
  });
  parts.push('</table>');
  return parts.join('');
}
// Render a simple query form; the link fetches all attributes of the person
// typed into the input (the link label is Chinese for "fetch the given
// person's attributes").
function personAttribute_query_init() {
  var str = '<table border="1" width="100%">';
  str += '<tr><td>query:</td><td><input type="text" id="query" style="width:95%"/></td></tr>';
  str += '<tr><td colspan="2"><a href="#" id="withPerson">获取指定人员属性.</a></td></tr>';
  str += '</table>';
  $('#content').html(str);
  $('#withPerson', '#content').click(function() {
    personAttribute_query_withPerson($('#query', '#content').val());
  });
}
// List all attributes belonging to person `id` (no paging) and render them
// with personAttribute_query_grid. Registers itself as the reloadable action.
function personAttribute_query_withPerson(id) {
  personAttribute_parameter.list_action = personAttribute_query_withPerson;
  personAttribute_parameter.list_action_parameter = id;
  $.ajax({
    type : 'get',
    dataType : 'json',
    url : personAttribute_parameter.root + '/list/person/' + id,
    xhrFields : {
      'withCredentials' : true
    },
    crossDomain : true
  }).done(function(data) {
    if (data.type == 'success') {
      $('#content').html(personAttribute_query_grid(data.data));
    } else {
      failure(data);
    }
  });
}
|
All8Up/cpf
|
Cpf/Plugins/Graphics/DebugUI/Source/DebugUI.cpp
|
<reponame>All8Up/cpf
//////////////////////////////////////////////////////////////////////////
#include "DebugUI.hpp"
#include "CPF/Graphics/iBlob.hpp"
#include "CPF/Graphics/iDevice.hpp"
#include "CPF/Graphics/iSampler.hpp"
#include "CPF/Graphics/ImageDesc.hpp"
#include "Graphics/DepthStencilBuilder.hpp"
#include "Graphics/RasterizerStateBuilder.hpp"
#include "Graphics/PipelineStateBuilder.hpp"
#include "CPF/Graphics/ResourceBindingDesc.hpp"
#include "CPF/Graphics/Viewport.hpp"
#include "CPF/Graphics/ImageFlags.hpp"
#include "CPF/Graphics/FilterMode.hpp"
#include "CPF/Graphics/SamplerDesc.hpp"
#include "CPF/Graphics/ShaderType.hpp"
#include "CPF/Graphics/WrapMode.hpp"
#include "CPF/Graphics/ParamVisibility.hpp"
#include "CPF/Graphics/iImage.hpp"
#include "CPF/Graphics/PrimitiveTopology.hpp"
#include "CPF/Graphics/ResourceState.hpp"
#include "CPF/Graphics/HeapType.hpp"
#include "CPF/Graphics/ResourceType.hpp"
#include "CPF/Graphics/ResourceData.hpp"
#include "CPF/Graphics/ResourceDesc.hpp"
#include "CPF/Graphics/iCommandBuffer.hpp"
#include "CPF/Graphics/iFence.hpp"
#include "CPF/Graphics/iCommandPool.hpp"
#include "CPF/Graphics/iConstantBuffer.hpp"
#include "CPF/Graphics/Range.hpp"
#include "CPF/Graphics/iVertexBuffer.hpp"
#include "CPF/Graphics/iIndexBuffer.hpp"
#include "CPF/Graphics/iShader.hpp"
#include "CPF/Graphics/iPipeline.hpp"
#include "Graphics/BlendStateBuilder.hpp"
#include "Graphics/InputLayoutBuilder.hpp"
#include "Application/iWindow.hpp"
#include "CPF/Application/OSWindowData.hpp"
#include "imgui/imgui.h"
#include "IO/Stream.hpp"
#include "Resources/ID.hpp"
#include "Resources/iLocator.hpp"
#include "Math/Matrix44v.hpp"
#include "Math/Constants.hpp"
#include "CPF/Logging.hpp"
#include "CPF/Application/ScanCode.hpp"
#include "CPF/Application/KeyModifiers.hpp"
#include "CPF/Application/MouseButton.hpp"
#include "CPF/Application/iMouseDevice.hpp"
#include "CPF/Application/iKeyboardDevice.hpp"
#include "Application/iInputManager.hpp"
#include "CPF/Application/iClipboard.hpp"
#include "CPF/Plugin/iRegistry.hpp"
#include "Std/Memory.hpp"
#include "Plugin/tClassInstance.hpp"
using namespace CPF;
using namespace Graphics;
//////////////////////////////////////////////////////////////////////////
extern "C"
GOM::Result CPF_EXPORT CPF_STDCALL Install(Plugin::iRegistry* registry)
{
	// Plugin entry point: register the DebugUI class factory.
	if (registry == nullptr)
		return GOM::kInvalidParameter;

	registry->Install(kDebugUICID.GetID(), new Plugin::tClassInstance<DebugUI>());
	return GOM::kOK;
}
extern "C"
GOM::Result CPF_EXPORT CPF_STDCALL Remove(Plugin::iRegistry* registry)
{
	// Plugin exit point: unregister the DebugUI class factory.
	if (registry == nullptr)
		return GOM::kInvalidParameter;

	registry->Remove(kDebugUICID.GetID());
	return GOM::kOK;
}
//////////////////////////////////////////////////////////////////////////
// Clipboard device shared with the ImGui get/set clipboard callbacks
// registered in Initialize(), plus a scratch text buffer — presumably used by
// _GetClipboardText to hand text back to ImGui (its body is not in this file).
IntrusivePtr<iClipboard> sClipboard;
char sClipboardText[1024];
// Construct with everything null/zero; real setup happens in Initialize().
DebugUI::DebugUI(Plugin::iRegistry*, iUnknown*)
	: mpDevice(nullptr)
	, mpLocator(nullptr)
	, mWidth(0)
	, mHeight(0)
	, mMouseWheel(0.0f)
{
	// Set up this plugin's logging channel.
	CPF_INIT_LOG(DebugUI);
	CPF_LOG_LEVEL(DebugUI, Info);
}
// Tear down the logging channel created in the constructor.
DebugUI::~DebugUI()
{
	CPF_DROP_LOG(DebugUI);
}
// Bring the debug UI to a usable state: compile the UI shaders, build the
// pipeline and resource bindings, upload the ImGui font atlas, wire window
// input events, and allocate the persistent vertex/index/constant buffers.
// Returns false when any required GPU object could not be created.
bool DebugUI::Initialize(iDevice* device, iInputManager* im, iWindow* window, Resources::iLocator* locator)
{
	CPF_ASSERT(device != nullptr);
	CPF_ASSERT(locator != nullptr);
	mpDevice = device;
	mpLocator = locator;

	// Acquire the default mouse/keyboard devices and the shared clipboard.
	im->GetDevice(iMouseDevice::kDefault.GetID(), iMouseDevice::kIID.GetID(), mpMouse.AsVoidPP());
	im->GetDevice(iKeyboardDevice::kDefault.GetID(), iKeyboardDevice::kIID.GetID(), mpKeyboard.AsVoidPP());
	im->GetDevice(iClipboard::kDefault.GetID(), iClipboard::kIID.GetID(), sClipboard.AsVoidPP());

	// Load the shaders.
	// TODO: Consider moving the debug UI out so we don't need a dependency on resources.
	// TODO: The decision will have to wait till later though, have to know how this ends up dealing with cross api shaders first.
	{
		IntrusivePtr<IO::Stream> vertexShaderHlsl(mpLocator->Open(RESOURCE_ID("shaders/", "ui_vs.hlsl")));
		IntrusivePtr<IO::Stream> pixelShaderHlsl(mpLocator->Open(RESOURCE_ID("shaders/", "ui_ps.hlsl")));
		if (vertexShaderHlsl && pixelShaderHlsl)
		{
			// Compile each HLSL source to byte code, then wrap it in a shader object.
			IntrusivePtr<iBlob> vertexShaderByteCode;
			{
				auto vertexShaderSrc = ReadText(vertexShaderHlsl);
				mpDevice->CompileToByteCode("main", ShaderType::eVertex, vertexShaderSrc.size(), vertexShaderSrc.data(), vertexShaderByteCode.AsTypePP());
			}
			mpDevice->CreateShader(vertexShaderByteCode, mpVertexShader.AsTypePP());

			IntrusivePtr<iBlob> pixelShaderByteCode;
			{
				auto pixelShaderSrc = ReadText(pixelShaderHlsl);
				mpDevice->CompileToByteCode("main", ShaderType::ePixel, pixelShaderSrc.size(), pixelShaderSrc.data(), pixelShaderByteCode.AsTypePP());
			}
			mpDevice->CreateShader(pixelShaderByteCode, mpPixelShader.AsTypePP());
		}
	}
	if (!mpVertexShader || !mpPixelShader)
		return false;

	// Create the projection matrix constant buffer.
	ResourceDesc cbDesc{ ResourceType::eBuffer, HeapType::eUpload, ResourceState::eGenericRead, sizeof(Math::Matrix44fv), 0 };
	mpDevice->CreateConstantBuffer(&cbDesc, nullptr, mpProjectionMatrix.AsTypePP());

	// Create the atlas sampler.
	SamplerDesc samplerDesc
	{
		FilterMode::ePoint,
		FilterMode::ePoint,
		FilterMode::ePoint,
		WrapMode::eBorder,
		WrapMode::eBorder,
		WrapMode::eBorder,
		0.0f,
		Math::kFloatMax,
		0.0f
	};
	mpDevice->CreateSampler(&samplerDesc, mpSampler.AsTypePP());

	// Create the pipeline.
	{
		// Alpha-blended, cull-free, depth-test-free state for 2D UI rendering.
		PipelineStateDesc pipelineDesc = Build<PipelineStateDesc>()
			.VertexShader(mpVertexShader)
			.PixelShader(mpPixelShader)
			.Topology(TopologyType::eTriangle)
			.Rasterizer(Build<RasterizerStateDesc>()
				.CullMode(CullMode::eNone)
				.WindingOrder(WindingOrder::eClockwise)
				.DepthClipping(false)
			)
			.DepthStencil(Build<DepthStencilStateDesc>()
				.DepthTest(false)
				.DepthWriteMask(DepthWriteMask::eZero)
			)
			.InputLayout(Build<InputLayoutDesc>()
				.Element("POSITION", 0, Format::eRG32f, 0, 0, InputClassification::ePerVertex, 0)
				.Element("TEXCOORD", 0, Format::eRG32f, 0, 8, InputClassification::ePerVertex, 0)
				.Element("COLOR", 0, Format::eRGBA8un, 0, 16, InputClassification::ePerVertex, 0)
			)
			.TargetBlend(0, Build<RenderTargetBlendStateDesc>()
				.Blending(true)
				.Op(BlendOp::eAdd)
				.OpAlpha(BlendOp::eAdd)
				.Src(BlendFunc::eSrcAlpha)
				.SrcAlpha(BlendFunc::eOne)
				.Dst(BlendFunc::eInvSrcAlpha)
				.DstAlpha(BlendFunc::eInvSrcAlpha)
			)
			.RenderTargets({ Format::eRGBA8un })
			.DepthStencilFormat(Format::eD32f)
			;

		// Create the binding: constant buffer (vertex stage), sampler and the
		// font-atlas texture (all stages).
		ParamBindingDesc paramBindings[] =
		{
			{ BindingType::eConstantBuffer, 0, 0, ParamFlags::eStatic, ParamVisibility::eVertex },
			{ BindingType::eSampler, 0, 0, ParamFlags::eStatic, ParamVisibility::eVisibilityAll },
			{ BindingType::eTexture, 0, 0, ParamFlags::eStatic, ParamVisibility::eVisibilityAll }
		};
		ResourceBindingDesc bindings =
		{
			3,
			paramBindings
		};
		mpDevice->CreateResourceBinding(&bindings, mpResourceBinding.AsTypePP());

		// Create the actual pipeline object.
		mpDevice->CreatePipeline(&pipelineDesc, mpResourceBinding, mpPipeline.AsTypePP());
	}
	if (!mpResourceBinding || !mpPipeline)
		return false;

	//////////////////////////////////////////////////////////////////////////
	// Map engine scan codes to ImGui's key indices and hook up clipboard I/O.
	ImGuiIO& io = ImGui::GetIO();
	io.KeyMap[ImGuiKey_Tab] = int(ScanCode::eTab);
	io.KeyMap[ImGuiKey_LeftArrow] = int(ScanCode::eLeft);
	io.KeyMap[ImGuiKey_RightArrow] = int(ScanCode::eRight);
	io.KeyMap[ImGuiKey_UpArrow] = int(ScanCode::eUp);
	io.KeyMap[ImGuiKey_DownArrow] = int(ScanCode::eDown);
	io.KeyMap[ImGuiKey_PageUp] = int(ScanCode::ePageUp);
	io.KeyMap[ImGuiKey_PageDown] = int(ScanCode::ePageDown);
	io.KeyMap[ImGuiKey_Home] = int(ScanCode::eHome);
	io.KeyMap[ImGuiKey_End] = int(ScanCode::eEnd);
	io.KeyMap[ImGuiKey_Delete] = int(ScanCode::eDelete);
	io.KeyMap[ImGuiKey_Backspace] = int(ScanCode::eBackspace);
	io.KeyMap[ImGuiKey_Enter] = int(ScanCode::eReturn);
	io.KeyMap[ImGuiKey_Escape] = int(ScanCode::eEscape);
	io.KeyMap[ImGuiKey_A] = int(ScanCode::eA);
	io.KeyMap[ImGuiKey_C] = int(ScanCode::eC);
	io.KeyMap[ImGuiKey_V] = int(ScanCode::eV);
	io.KeyMap[ImGuiKey_X] = int(ScanCode::eX);
	io.KeyMap[ImGuiKey_Y] = int(ScanCode::eY);
	io.KeyMap[ImGuiKey_Z] = int(ScanCode::eZ);
	io.RenderDrawListsFn = nullptr;
	io.SetClipboardTextFn = &DebugUI::_SetClipboardText;
	io.GetClipboardTextFn = &DebugUI::_GetClipboardText;
#ifdef _WIN32
	OSWindowData osData;
	window->GetOSData(&osData);
	io.ImeWindowHandle = osData.mHwnd;
#else
	(void)window;
#endif

	// Build the imgui font atlas.
	{
		unsigned char* pixels;
		int width, height;
		io.Fonts->GetTexDataAsRGBA32(&pixels, &width, &height);
		if (pixels == nullptr)
			return false;
		const size_t dataSize = sizeof(uint32_t) * width * height;

		ImageDesc atlasDesc;
		atlasDesc.mFormat = Format::eRGBA8un;
		atlasDesc.mWidth = width;
		atlasDesc.mHeight = height;
		atlasDesc.mDepth = 1;
		atlasDesc.mMipLevels = 1;
		atlasDesc.mSamples = SampleDesc{ 1, 0 };
		atlasDesc.mState = ResourceState::eCopyDest;
		atlasDesc.mFlags = ImageFlags::eNone;

		// Create the target texture.
		mpDevice->CreateImage2D(HeapType::eDefault, &atlasDesc, nullptr, mpUIAtlas.AsTypePP());

		// Create the staging buffer.
		ResourceDesc stagingDesc{ ResourceType::eBuffer, HeapType::eUpload, ResourceState::eGenericRead, int32_t(dataSize), 0 };
		IntrusivePtr<iResource> staging;
		mpDevice->CreateResource(&stagingDesc, staging.AsTypePP());

		// Copy the image to the staging buffer.
		void* buffer;
		staging->Map(&buffer, nullptr);
		Std::MemCpy(buffer, pixels, sizeof(uint32_t) * width * height);
		staging->Unmap(nullptr);

		// Create a temporary command buffer and a fence to issue the copy from staging buffer to the image.
		IntrusivePtr<iCommandPool> tempPool;
		mpDevice->CreateCommandPool(tempPool.AsTypePP());
		IntrusivePtr<iCommandBuffer> tempCommands;
		mpDevice->CreateCommandBuffer(tempPool, CommandBufferType::ePrimary, tempCommands.AsTypePP());
		IntrusivePtr<iFence> tempFence;
		mpDevice->CreateFence(0, tempFence.AsTypePP());

		tempCommands->Reset(tempPool);
		tempCommands->Begin(nullptr);
		Graphics::ResourceData data;
		data.mpData = pixels;
		data.mPitch = width * sizeof(int32_t);
		data.mSlicePitch = data.mPitch * height;
		IntrusivePtr<iResource> targetResource;
		mpUIAtlas->QueryInterface(iResource::kIID.GetID(), targetResource.AsVoidPP());
		tempCommands->UpdateSubResource(staging, targetResource, &data);
		tempCommands->End();

		// Submit and block until the upload completes.
		iCommandBuffer* commandBuffers[] = { tempCommands };
		mpDevice->Submit(1, commandBuffers);
		mpDevice->Signal(tempFence, 1);
		tempFence->WaitFor(1);
	}
	if (!mpUIAtlas)
		return false;

	// Attach window events.
	window->GetEmitter()->On<iWindow::OnMouseWheel>(Bind(&DebugUI::_OnMouseWheel, this, Placeholders::_1, Placeholders::_2));
	window->GetEmitter()->On<iWindow::OnButtonDown>(Bind(&DebugUI::_OnMouseDown, this, Placeholders::_1, Placeholders::_2, Placeholders::_3));
	window->GetEmitter()->On<iWindow::OnKeyDown>(Bind(&DebugUI::_OnKeyDown, this, Placeholders::_1));
	window->GetEmitter()->On<iWindow::OnKeyUp>(Bind(&DebugUI::_OnKeyUp, this, Placeholders::_1));

	// Create large buffers.  These persist across frames and are refilled by
	// EndFrame each frame.
	ResourceDesc vbDesc{ ResourceType::eBuffer, HeapType::eUpload, ResourceState::eGenericRead, kVertexBufferSize, 1 };
	mpDevice->CreateVertexBuffer(&vbDesc, sizeof(ImDrawVert), mpVertexBuffer.AsTypePP());
	ResourceDesc ibDesc{ ResourceType::eBuffer, HeapType::eUpload, ResourceState::eGenericRead, kIndexBufferSize, 0 };
	mpDevice->CreateIndexBuffer(&ibDesc, Format::eR32u, mpIndexBuffer.AsTypePP());

	return true;
}
// Release all GPU objects and the shared clipboard reference.
// NOTE(review): members are released in this specific order and the device
// pointer itself (mpDevice) is intentionally not touched here.
void DebugUI::Shutdown()
{
	mpVertexShader.Assign(nullptr);
	mpPixelShader.Assign(nullptr);
	mpResourceBinding.Assign(nullptr);
	mpPipeline.Assign(nullptr);
	mpUIAtlas.Assign(nullptr);
	mpSampler.Assign(nullptr);
	mpVertexBuffer.Assign(nullptr);
	mpIndexBuffer.Assign(nullptr);
	mpProjectionMatrix.Assign(nullptr);
	sClipboard.Assign(nullptr);
}
// Feed ImGui the per-frame state (display size, time step, mouse state),
// refresh the projection constant buffer, and open a new ImGui frame.
// `commands` is currently unused.
void DebugUI::BeginFrame(iCommandBuffer* commands, float deltaTime)
{
	(void)commands;
	ImGuiIO& io = ImGui::GetIO();
	io.DisplaySize = ImVec2(float(mWidth), float(mHeight));
	io.DisplayFramebufferScale = ImVec2(1.0f, 1.0f);
	io.DeltaTime = deltaTime;

	// Orthographic projection mapping pixel coordinates to clip space (y flipped).
	Math::Matrix44fv projection = Math::Matrix44fv
	(
		{ 2.0f / io.DisplaySize.x, 0.0f, 0.0f, 0.0f },
		{ 0.0f, 2.0f / -io.DisplaySize.y, 0.0f, 0.0f },
		{ 0.0f, 0.0f, 0.5f, 0.0f },
		{ -1.0f, 1.0f, 0.5f, 1.0f }
	);
	mpProjectionMatrix->Update(0, sizeof(Math::Matrix44fv), &projection);

	//
	int32_t mx, my;
	mpMouse->GetPosition(&mx, &my);
	// Report an off-screen position when the cursor is outside the window.
	if (mx<0 || mx>mWidth || my<0 || my>mHeight)
		io.MousePos = ImVec2(-1.0f, -1.0f);
	else
		io.MousePos = ImVec2(static_cast<float>(mx), static_cast<float>(my));

	MouseButton button;
	mpMouse->GetButtonState(&button);
	// Combine edge-latched presses (from _OnMouseDown) with the live button
	// state so clicks shorter than a frame are not lost, then clear the latches.
	io.MouseDown[0] = mMousePressed[0] || ((button & MouseButton::eLeft) != MouseButton::eNone);
	io.MouseDown[1] = mMousePressed[1] || ((button & MouseButton::eRight) != MouseButton::eNone);
	io.MouseDown[2] = mMousePressed[2] || ((button & MouseButton::eMiddle) != MouseButton::eNone);
	mMousePressed[0] = mMousePressed[1] = mMousePressed[2] = false;
	// Hand over the wheel delta accumulated since the last frame.
	io.MouseWheel = mMouseWheel;
	mMouseWheel = 0.0f;

	// Start the frame
	ImGui::NewFrame();

	// ImGui::ShowTestWindow();

	//////////////////////////////////////////////////////////////////////////
	/* Input testing
	Begin("Debug Info");
	ImGui::Text("%d", int32_t(button));
	ImGui::ColorButton(io.MouseDown[0] ? ImVec4(1.0f, 0.0f, 0.0f, 1.0f) : ImVec4(0.2f, 0.2f, 0.2f, 1.0f));
	ImGui::ColorButton(io.MouseDown[1] ? ImVec4(1.0f, 0.0f, 0.0f, 1.0f) : ImVec4(0.2f, 0.2f, 0.2f, 1.0f));
	ImGui::ColorButton(io.MouseDown[2] ? ImVec4(1.0f, 0.0f, 0.0f, 1.0f) : ImVec4(0.2f, 0.2f, 0.2f, 1.0f));
	End();
	*/
}
// Translate ImGui's draw data into command-buffer work: upload this frame's
// vertices/indices into the persistent big buffers, then replay each draw
// command with its scissor rectangle.
void DebugUI::EndFrame(iCommandBuffer* commands)
{
	ImGuiIO& io = ImGui::GetIO();
	ImGui::Render();
	ImDrawData* drawData = ImGui::GetDrawData();
	Viewport viewport{
		0.0f, 0.0f,
		float(mWidth), float(mHeight),
		0.0f, 1.0f
	};
	commands->SetViewports(1, &viewport);
	drawData->ScaleClipRects(io.DisplayFramebufferScale);
	// Bind the UI pipeline, geometry buffers, projection, sampler and atlas.
	commands->SetResourceBinding(mpResourceBinding);
	commands->SetPipeline(mpPipeline);
	commands->SetVertexBuffers(0, 1, mpVertexBuffer.AsTypePP());
	commands->SetIndexBuffer(mpIndexBuffer);
	commands->SetTopology(PrimitiveTopology::eTriangleList);
	commands->SetConstantBuffer(0, mpProjectionMatrix);
	commands->SetSampler(1, mpSampler);
	commands->SetImage(2, mpUIAtlas);
	{
		// Append every command list's geometry into the mapped buffers.
		// NOTE(review): no overflow check against kVertexBufferSize /
		// kIndexBufferSize — assumed large enough for the UI; confirm.
		Range vertRange{ 0, 0 };
		ImDrawVert* vertices = nullptr;
		reinterpret_cast<ImDrawVert*>(mpVertexBuffer->Map(reinterpret_cast<void**>(&vertices), nullptr));
		Range indexRange{ 0, 0 };
		ImDrawIdx* indices = nullptr;
		reinterpret_cast<ImDrawIdx*>(mpIndexBuffer->Map(reinterpret_cast<void**>(&indices), nullptr));
		for (int n = 0; n < drawData->CmdListsCount; ++n)
		{
			const ImDrawList* cmd_list = drawData->CmdLists[n];
			::memcpy(vertices, &cmd_list->VtxBuffer.front(), cmd_list->VtxBuffer.size() * sizeof(ImDrawVert));
			::memcpy(indices, &cmd_list->IdxBuffer.front(), cmd_list->IdxBuffer.size() * sizeof(ImDrawIdx));
			vertices += cmd_list->VtxBuffer.Size;
			vertRange.mEnd += cmd_list->VtxBuffer.Size * sizeof(ImDrawVert);
			indices += cmd_list->IdxBuffer.Size;
			indexRange.mEnd += cmd_list->IdxBuffer.Size * sizeof(ImDrawIdx);
		}
		mpVertexBuffer->Unmap(&vertRange);
		mpIndexBuffer->Unmap(&indexRange);
	}
	// Replay the draw commands, tracking running offsets into the big buffers.
	size_t indexOffset = 0;
	size_t vertexOffset = 0;
	for (int n = 0; n < drawData->CmdListsCount; ++n)
	{
		const ImDrawList* cmd_list = drawData->CmdLists[n];
		for (const ImDrawCmd* pcmd = cmd_list->CmdBuffer.begin(); pcmd != cmd_list->CmdBuffer.end(); pcmd++)
		{
			if (pcmd->UserCallback)
			{
				pcmd->UserCallback(cmd_list, pcmd);
			}
			else
			{
				// ClipRect holds (x1, y1, x2, y2); fields are passed as
				// (x, z, y, w) — assumes Math::Rectanglei is constructed as
				// (left, right, top, bottom); confirm against its definition.
				Math::Rectanglei scissor
				{
					static_cast<int>(pcmd->ClipRect.x),
					static_cast<int>(pcmd->ClipRect.z),
					static_cast<int>(pcmd->ClipRect.y),
					static_cast<int>(pcmd->ClipRect.w)
				};
				commands->SetScissorRects(1, &scissor);
				commands->DrawIndexedInstanced(pcmd->ElemCount, 1, int32_t(indexOffset), int32_t(vertexOffset), 0);
			}
			indexOffset += pcmd->ElemCount;
		}
		vertexOffset += cmd_list->VtxBuffer.Size;
	}
}
// Forward to ImGui::PushItemWidth, converting to ImGui's float width.
void DebugUI::PushItemWidth(int32_t width)
{
	ImGui::PushItemWidth(static_cast<float>(width));
}
// Forward to ImGui::PopItemWidth, undoing the matching PushItemWidth.
void DebugUI::PopItemWidth()
{
	ImGui::PopItemWidth();
}
// Draw a horizontal separator line in the current window.
void DebugUI::Separator()
{
	ImGui::Separator();
}
// Open an ImGui window; `isOpen` receives the close-button state, `flags`
// are passed straight through as ImGuiWindowFlags.
void DebugUI::Begin(const char* name, bool* isOpen, uint32_t flags)
{
	ImGui::Begin(name, isOpen, flags);
}
// Close the window opened by the matching Begin().
void DebugUI::End()
{
	ImGui::End();
}
// printf-style text output, forwarded to ImGui::TextV via a va_list.
void DebugUI::Text(const char* fmt, ...)
{
	va_list vargs;
	va_start(vargs, fmt);
	ImGui::TextV(fmt, vargs);
	va_end(vargs);
}
// printf-style text output tinted with the given RGBA color.
void DebugUI::TextColored(const Math::Vector4fv& color, const char* fmt, ...)
{
	va_list vargs;
	va_start(vargs, fmt);
	ImGui::TextColoredV(ImVec4(color.x, color.y, color.z, color.w), fmt, vargs);
	va_end(vargs);
}
// Draw a push button of the requested pixel size; true when clicked this frame.
bool DebugUI::Button(const char* label, const Math::Vector2i size)
{
	return ImGui::Button(label, ImVec2(static_cast<float>(size.x), static_cast<float>(size.y)));
}
// Draw a compact button; true when clicked this frame.
bool DebugUI::SmallButton(const char* label)
{
	return ImGui::SmallButton(label);
}
bool DebugUI::CheckBox(const char* label, bool* flag)
{
return ImGui::Checkbox(label, flag);
}
bool DebugUI::CheckBoxFlags(const char* label, uint32_t* flags, uint32_t flags_value)
{
return ImGui::CheckboxFlags(label, flags, flags_value);
}
// Integer slider over [vmin, vmax]; 'fmt' controls the value display.
// Round-trips through a plain int because ImGui::SliderInt expects int*.
bool DebugUI::Slider(const char* label, int32_t* value, int vmin, int vmax, const char* fmt)
{
    int scratch = *value;
    const bool changed = ImGui::SliderInt(label, &scratch, vmin, vmax, fmt);
    *value = scratch;
    return changed;
}
// Plots 'count' samples from 'values' as a histogram of the given pixel size.
void DebugUI::Histogram(const char* label, const float* values, int32_t count, int32_t offset, const char* overlay, float scaleMin, float scaleMax, const Math::Vector2i size, int32_t stride)
{
    const ImVec2 plotSize(static_cast<float>(size.x), static_cast<float>(size.y));
    ImGui::PlotHistogram(label, values, count, offset, overlay, scaleMin, scaleMax, plotSize, stride);
}
// Scrolling list of selectable items. The bool ImGui returns is discarded;
// callers observe selection changes through '*selectedItem'.
void DebugUI::ListBox(const char* label, int32_t* selectedItem, const char** items, int32_t itemCount, int32_t itemHeight)
{
    ImGui::ListBox(label, selectedItem, items, itemCount, itemHeight);
}
// Records the UI surface dimensions used when building frames.
void DebugUI::SetWindowSize(int32_t width, int32_t height)
{
    mWidth = width;
    mHeight = height;
}
// Registers a per-frame debug callback together with its user context.
void DebugUI::Add(DebugUICall call, void* context)
{
    const DebugCallPair entry{ call, context };
    mDebugCalls.push_back(entry);
}
// Unregisters the first callback/context pair matching the arguments.
// A no-op when no matching registration exists.
void DebugUI::Remove(DebugUICall call, void* context)
{
    const DebugCallPair target{ call, context };
    for (auto it = mDebugCalls.begin(); it != mDebugCalls.end(); ++it)
    {
        if (*it == target)
        {
            mDebugCalls.erase(it);
            return;
        }
    }
}
void DebugUI::Execute()
{
for (const auto& it : mDebugCalls)
{
(*it.first)(this, it.second);
}
}
// Latches the wheel direction (+1 / -1) for the next frame; a delta of
// zero leaves the stored value untouched.
void DebugUI::_OnMouseWheel(int32_t, int32_t y)
{
    if (y != 0)
        mMouseWheel = (y > 0) ? 1 : -1;
}
// Latches pressed state for left/right/middle buttons (indices 0/1/2),
// matching ImGui's MousePressed layout. 'button' is a bit mask.
void DebugUI::_OnMouseDown(MouseButton button, int32_t, int32_t)
{
    const MouseButton masks[] = { MouseButton::eLeft, MouseButton::eRight, MouseButton::eMiddle };
    for (int i = 0; i < 3; ++i)
    {
        if ((button & masks[i]) == masks[i])
            mMousePressed[i] = true;
    }
}
// Key-press handler: forwards to _HandleKey with down=true.
void DebugUI::_OnKeyDown(ScanCode code)
{
    _HandleKey(true, code);
}
// Key-release handler: forwards to _HandleKey with down=false.
void DebugUI::_OnKeyUp(ScanCode code)
{
    _HandleKey(false, code);
}
// Mirrors one key state change, plus the current modifier state, into
// ImGui's IO structure.
// NOTE(review): the scan code indexes io.KeysDown without a bounds check —
// confirm ScanCode values always fit that array.
void DebugUI::_HandleKey(bool down, ScanCode code)
{
    KeyModifier mods;
    mpKeyboard->GetModifiers(&mods);

    ImGuiIO& io = ImGui::GetIO();
    io.KeysDown[int(code)] = down;
    io.KeyShift = (mods & KeyModifier::eShift) != KeyModifier::eNone;
    io.KeyCtrl  = (mods & KeyModifier::eControl) != KeyModifier::eNone;
    io.KeyAlt   = (mods & KeyModifier::eAlt) != KeyModifier::eNone;
    io.KeySuper = (mods & KeyModifier::eOS) != KeyModifier::eNone;
}
// Fetches the platform clipboard contents into the static sClipboardText
// buffer and returns it.
// NOTE(review): the capacity passed is a hard-coded 1024 — confirm
// GetClipboardText truncates safely for longer clipboard contents.
const char* DebugUI::_GetClipboardText()
{
    int32_t length = 1024;
    sClipboard->GetClipboardText(&length, sClipboardText);
    return sClipboardText;
}
// Replaces the platform clipboard contents with 'text'.
void DebugUI::_SetClipboardText(const char* text)
{
    sClipboard->SetClipboardText(text);
}
|
bisnupriyasahu/cmssw
|
CondTools/CTPPS/plugins/WriteCTPPSBeamParameters.cc
|
// -*- C++ -*-
//
// Class: WriteCTPPSBeamParameters
//
// Description: Test analyzer for CTPPS beam parameters condition data
//
// Simple analyzer that writes one CTTPSBeamParameters record into a sql
// database file, as a test of offline conditions implementation.
// Another analyzer is then used to retrieve these conditions.
//
// Original Author: <NAME>
// Created: Wed, 21 Nov 2018 17:35:07 GMT
//
//==================================================================================
// system include files
#include <memory>
// user include files
#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/Framework/interface/Frameworkfwd.h"
#include "FWCore/Framework/interface/one/EDAnalyzer.h"
#include "FWCore/Framework/interface/ESHandle.h"
#include "FWCore/Framework/interface/EventSetup.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "CondCore/DBOutputService/interface/PoolDBOutputService.h"
#include "CondFormats/CTPPSReadoutObjects/interface/CTPPSBeamParameters.h"
#include "CondFormats/DataRecord/interface/CTPPSBeamParametersRcd.h"
#include <cstdint>
// Test analyzer that writes one CTPPSBeamParameters record into the
// conditions database (or an sqlite file) via PoolDBOutputService.
class WriteCTPPSBeamParameters : public edm::one::EDAnalyzer<>
{
public:
  // The ParameterSet is accepted for framework compatibility but unused.
  WriteCTPPSBeamParameters(const edm::ParameterSet&) {}
  ~WriteCTPPSBeamParameters() override = default;
private:
  // Called once per event; performs the actual conditions write.
  void analyze(const edm::Event&, const edm::EventSetup&) override;
};
//---------------------------------------------------------------------------------------
// Reads the CTPPSBeamParameters payload from the EventSetup and writes it
// to the conditions database with the current luminosity block as IOV.
void WriteCTPPSBeamParameters::analyze(const edm::Event& iEvent, const edm::EventSetup& iSetup)
{
  // Fetch the beam-parameters payload from the EventSetup.
  edm::ESHandle<CTPPSBeamParameters> handle;
  iSetup.get<CTPPSBeamParametersRcd>().get(handle);
  const CTPPSBeamParameters* payload = handle.product();

  // Use the luminosity-block id ("lumiid") as the IOV. A timestamp-based
  // IOV would instead use iEvent.time().value().
  const edm::LuminosityBlock& lumiBlock = iEvent.getLuminosityBlock();
  const edm::LuminosityBlockID lumiId(lumiBlock.run(), lumiBlock.id().luminosityBlock());
  const cond::Time_t ilumi = static_cast<cond::Time_t>(lumiId.value());

  edm::LogInfo("WriteCTPPSBeamParameters::analyze")
      << "cond::Time_t ilumi = " << ilumi << " = "
      << boost::posix_time::to_iso_extended_string(cond::time::to_boost(ilumi)) << "\n";

  // Write to the database or sqlite file configured for the service.
  edm::Service<cond::service::PoolDBOutputService> poolDbService;
  if (!poolDbService.isAvailable())
  {
    throw std::runtime_error("PoolDBService required.");
  }
  poolDbService->writeOne(payload, ilumi, "CTPPSBeamParametersRcd");
}
//define this as a plug-in
DEFINE_FWK_MODULE(WriteCTPPSBeamParameters);
|
hhcqit/yunjianxie
|
04.coding/api/src/main/java/com/clinicalmall/kuaixiu/swagger/configuration/WebMvcConfiguration.java
|
package com.clinicalmall.kuaixiu.swagger.configuration;
import org.springframework.web.servlet.config.annotation.DefaultServletHandlerConfigurer;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
/**
 * Spring MVC configuration that enables the servlet container's default
 * servlet, so unmatched requests (e.g. static resources) are handled by the
 * container rather than failing in the DispatcherServlet.
 *
 * NOTE(review): this class extends WebMvcConfigurationSupport but carries no
 * {@code @Configuration} annotation here — confirm it is registered through
 * some other mechanism (an annotated subclass, {@code @Import}, etc.).
 */
public class WebMvcConfiguration extends WebMvcConfigurationSupport {
    @Override
    public void configureDefaultServletHandling(DefaultServletHandlerConfigurer configurer) {
        // Forward unhandled requests to the container's default servlet.
        configurer.enable();
    }
}
|
Isotope-js/core
|
packages/prototope/lib/utils/sizing/min-width.js
|
// Prototope sizing utilities for the CSS min-width property.
// (Compiled output — the sourceMappingURL comment must remain the last line.)
import { createUtil } from "../util";
// Utility applying `min-width: 0`.
const minW0 = createUtil({ minWidth: "0" });
// Utility applying `min-width: 100%`.
const minWFull = createUtil({ minWidth: "100%" });
export { minW0, minWFull };
//# sourceMappingURL=min-width.js.map
|
arielmorelli/server_core
|
model/resource.py
|
# encoding: utf-8
# Resource, ResourceTransformation, Hyperlink, Representation
from nose.tools import set_trace
from . import (
Base,
get_one,
get_one_or_create,
)
from ..config import Configuration
from constants import (
DataSourceConstants,
IdentifierConstants,
LinkRelations,
MediaTypes,
)
from edition import Edition
from licensing import (
LicensePool,
LicensePoolDeliveryMechanism,
)
from ..util.http import HTTP
from ..util.string_helpers import native_string
from io import BytesIO
import datetime
import json
import logging
from hashlib import md5
import os
from PIL import Image
import re
import requests
from sqlalchemy import (
Binary,
Column,
DateTime,
Float,
ForeignKey,
Integer,
Unicode,
UniqueConstraint,
)
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import (
backref,
relationship,
)
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import or_
import time
import traceback
import urllib
import urlparse
class Resource(Base):
    """An external resource that may be mirrored locally.
    E.g: a cover image, an epub, a description.
    """
    __tablename__ = 'resources'
    # How many votes is the initial quality estimate worth?
    ESTIMATED_QUALITY_WEIGHT = 5
    # The point at which a generic geometric image is better
    # than a lousy cover we got from the Internet.
    MINIMUM_IMAGE_QUALITY = 0.25
    id = Column(Integer, primary_key=True)
    # A URI that uniquely identifies this resource. Most of the time
    # this will be an HTTP URL, which is why we're calling it 'url',
    # but it may also be a made-up URI.
    url = Column(Unicode, index=True)
    # Many Editions may choose this resource (as opposed to other
    # resources linked to them with rel="image") as their cover image.
    cover_editions = relationship("Edition", backref="cover", foreign_keys=[Edition.cover_id])
    # Many Works may use this resource (as opposed to other resources
    # linked to them with rel="description") as their summary.
    # NOTE(review): imported inside the class body rather than at module
    # level, presumably to avoid a circular import -- confirm.
    from work import Work
    summary_works = relationship("Work", backref="summary", foreign_keys=[Work.summary_id])
    # Many LicensePools (but probably one at most) may use this
    # resource in a delivery mechanism.
    licensepooldeliverymechanisms = relationship(
        "LicensePoolDeliveryMechanism", backref="resource",
        foreign_keys=[LicensePoolDeliveryMechanism.resource_id]
    )
    links = relationship("Hyperlink", backref="resource")
    # The DataSource that is the controlling authority for this Resource.
    data_source_id = Column(Integer, ForeignKey('datasources.id'), index=True)
    # An archived Representation of this Resource.
    representation_id = Column(
        Integer, ForeignKey('representations.id'), index=True)
    # The rights status of this Resource.
    rights_status_id = Column(Integer, ForeignKey('rightsstatus.id'))
    # An optional explanation of the rights status.
    rights_explanation = Column(Unicode)
    # A Resource may be transformed into many derivatives.
    transformations = relationship(
        'ResourceTransformation',
        primaryjoin="ResourceTransformation.original_id==Resource.id",
        foreign_keys=id,
        lazy="joined",
        backref=backref('original', uselist=False),
        uselist=True,
    )
    # A derivative resource may have one original.
    derived_through = relationship(
        'ResourceTransformation',
        primaryjoin="ResourceTransformation.derivative_id==Resource.id",
        foreign_keys=id,
        backref=backref('derivative', uselist=False),
        lazy="joined",
        uselist=False,
    )
    # A calculated value for the quality of this resource, based on an
    # algorithmic treatment of its content.
    estimated_quality = Column(Float)
    # The average of human-entered values for the quality of this
    # resource.
    voted_quality = Column(Float, default=float(0))
    # How many votes contributed to the voted_quality value. This lets
    # us scale new votes proportionately while keeping only two pieces
    # of information.
    votes_for_quality = Column(Integer, default=0)
    # A combination of the calculated quality value and the
    # human-entered quality value.
    quality = Column(Float, index=True)
    # URL must be unique.
    __table_args__ = (
        UniqueConstraint('url'),
    )
    @property
    def final_url(self):
        """URL to the final, mirrored version of this resource, suitable
        for serving to the client.
        :return: A URL, or None if the resource has no mirrored
        representation.
        """
        if not self.representation:
            return None
        if not self.representation.mirror_url:
            return None
        return self.representation.mirror_url
    def as_delivery_mechanism_for(self, licensepool):
        """If this Resource is used in a LicensePoolDeliveryMechanism for the
        given LicensePool, return that LicensePoolDeliveryMechanism.

        Implicitly returns None when no delivery mechanism uses this
        Resource.
        """
        for lpdm in licensepool.delivery_mechanisms:
            if lpdm.resource == self:
                return lpdm
    def set_fetched_content(self, media_type, content, content_path):
        """Simulate a successful HTTP request for a representation
        of this resource.
        This is used when the content of the representation is obtained
        through some other means.
        """
        _db = Session.object_session(self)
        # Exactly one of `content` (inline data) and `content_path`
        # (a file on disk) must be provided.
        if not (content or content_path):
            raise ValueError(
                "One of content and content_path must be specified.")
        if content and content_path:
            raise ValueError(
                "Only one of content and content_path may be specified.")
        representation, is_new = get_one_or_create(
            _db, Representation, url=self.url, media_type=media_type)
        self.representation = representation
        representation.set_fetched_content(content, content_path)
    def set_estimated_quality(self, estimated_quality):
        """Update the estimated quality."""
        self.estimated_quality = estimated_quality
        self.update_quality()
    def add_quality_votes(self, quality, weight=1):
        """Record someone's vote as to the quality of this resource."""
        # Treat NULL columns as zero so the running average works.
        self.voted_quality = self.voted_quality or 0
        self.votes_for_quality = self.votes_for_quality or 0
        # Recover the vote total, fold in the new weighted vote, and
        # re-derive the average.
        total_quality = self.voted_quality * self.votes_for_quality
        total_quality += (quality * weight)
        self.votes_for_quality += weight
        self.voted_quality = total_quality / float(self.votes_for_quality)
        self.update_quality()
    def reject(self):
        """Reject a Resource by making its voted_quality negative.
        If the Resource is a cover, this rejection will render it unusable to
        all Editions and Identifiers. Even if the cover is later `approved`
        a rejection impacts the overall weight of the `vote_quality`.
        """
        if not self.voted_quality:
            self.add_quality_votes(-1)
            return
        if self.voted_quality < 0:
            # This Resource has already been rejected.
            return
        # Humans have voted positively on this Resource, and now it's
        # being rejected regardless.
        logging.warn("Rejecting Resource with positive votes: %r", self)
        # Make the voted_quality negative without impacting the weight
        # of existing votes so the value can be restored relatively
        # painlessly if necessary.
        self.voted_quality = -self.voted_quality
        # However, because `votes_for_quality` is incremented, a
        # rejection will impact the weight of all `voted_quality` votes
        # even if the Resource is later approved.
        self.votes_for_quality += 1
        self.update_quality()
    def approve(self):
        """Approve a rejected Resource by making its human-generated
        voted_quality positive while taking its rejection into account.
        """
        if self.voted_quality < 0:
            # This Resource has been rejected. Reset its value to be
            # positive.
            if self.voted_quality == -1 and self.votes_for_quality == 1:
                # We're undoing a single rejection.
                self.voted_quality = 0
            else:
                # An existing positive voted_quality was made negative.
                self.voted_quality = abs(self.voted_quality)
            self.votes_for_quality += 1
            self.update_quality()
            return
        self.add_quality_votes(1)
    def update_quality(self):
        """Combine computer-generated `estimated_quality` with
        human-generated `voted_quality` to form overall `quality`.
        """
        # The estimate counts as ESTIMATED_QUALITY_WEIGHT human votes.
        estimated_weight = self.ESTIMATED_QUALITY_WEIGHT
        votes_for_quality = self.votes_for_quality or 0
        total_weight = estimated_weight + votes_for_quality
        voted_quality = (self.voted_quality or 0) * votes_for_quality
        total_quality = (((self.estimated_quality or 0) * self.ESTIMATED_QUALITY_WEIGHT) +
                         voted_quality)
        if voted_quality < 0 and total_quality > 0:
            # If `voted_quality` is negative, the Resource has been
            # rejected by a human and should no longer be available.
            #
            # This human-generated negativity must be passed to the final
            # Resource.quality value.
            total_quality = -(total_quality)
        self.quality = total_quality / float(total_weight)
    @classmethod
    def image_type_priority(cls, media_type):
        """Where does the given image media type rank on our list of
        preferences?
        :return: A lower number is better. None means it's not an
        image type or we don't care about it at all.
        """
        if media_type in Representation.IMAGE_MEDIA_TYPES:
            return Representation.IMAGE_MEDIA_TYPES.index(media_type)
        return None
    @classmethod
    def best_covers_among(cls, resources):
        """Choose the best covers from a list of Resources.

        :return: A list of the Resources tied for best (possibly empty).
        """
        champions = []
        champion_key = None
        for r in resources:
            rep = r.representation
            if not rep:
                # A Resource with no Representation is not usable, period
                continue
            media_priority = cls.image_type_priority(rep.media_type)
            if media_priority is None:
                media_priority = float('inf')
            # This method will set the quality if it hasn't been set before.
            r.quality_as_thumbnail_image
            # Now we can use it.
            quality = r.quality
            if not quality >= cls.MINIMUM_IMAGE_QUALITY:
                # A Resource below the minimum quality threshold is not
                # usable, period.
                continue
            # In order, our criteria are: whether we
            # mirrored the representation (which means we directly
            # control it), image quality, and media type suitability.
            #
            # We invert media type suitability because it's given to us
            # as a priority (where smaller is better), but we want to compare
            # it as a quantity (where larger is better).
            compare_key = (rep.mirror_url is not None, quality, -media_priority)
            if not champion_key or (compare_key > champion_key):
                # A new champion.
                champions = [r]
                champion_key = compare_key
            elif compare_key == champion_key:
                # This image is equally good as the existing champion.
                champions.append(r)
        return champions
    @property
    def quality_as_thumbnail_image(self):
        """Determine this image's suitability for use as a thumbnail image.

        Side effect: stores the computed value via set_estimated_quality().
        """
        rep = self.representation
        if not rep:
            return 0
        quality = 1
        # If the size of the image is known, that might affect
        # the quality.
        quality = quality * rep.thumbnail_size_quality_penalty
        # Scale the estimated quality by the source of the image.
        source_name = self.data_source.name
        if source_name==DataSourceConstants.GUTENBERG_COVER_GENERATOR:
            quality = quality * 0.60
        elif source_name==DataSourceConstants.GUTENBERG:
            quality = quality * 0.50
        elif source_name==DataSourceConstants.OPEN_LIBRARY:
            quality = quality * 0.25
        elif source_name in DataSourceConstants.COVER_IMAGE_PRIORITY:
            # Covers from the data sources listed in
            # COVER_IMAGE_PRIORITY (e.g. the metadata wrangler
            # and the administrative interface) are given priority
            # over all others, relative to their position in
            # COVER_IMAGE_PRIORITY.
            i = DataSourceConstants.COVER_IMAGE_PRIORITY.index(source_name)
            quality = quality * (i+2)
        self.set_estimated_quality(quality)
        return quality
    def add_derivative(self, derivative_resource, settings=None):
        """Record that `derivative_resource` was derived from this Resource.

        :param settings: Optional dict of transformation settings.
        :return: The (possibly pre-existing) ResourceTransformation.
        """
        _db = Session.object_session(self)
        transformation, ignore = get_one_or_create(
            _db, ResourceTransformation, derivative_id=derivative_resource.id)
        transformation.original_id = self.id
        transformation.settings = settings or {}
        return transformation
class ResourceTransformation(Base):
    """A record that a resource is a derivative of another resource,
    and the settings that were used to transform the original into it.
    """
    __tablename__ = 'resourcetransformations'
    # The derivative resource. A resource can only be derived from one other resource.
    derivative_id = Column(
        Integer, ForeignKey('resources.id'), index=True, primary_key=True)
    # The original resource that was transformed into the derivative.
    original_id = Column(
        Integer, ForeignKey('resources.id'), index=True)
    # The settings used for the transformation.
    # NOTE(review): the `{}` default is a single dict created at class
    # definition time; SQLAlchemy copies scalar defaults on insert, but
    # confirm MutableDict never ends up aliasing the shared instance.
    settings = Column(MutableDict.as_mutable(JSON), default={})
class Hyperlink(Base, LinkRelations):
    """A link between an Identifier and a Resource."""
    __tablename__ = 'hyperlinks'

    id = Column(Integer, primary_key=True)
    # A Hyperlink is always associated with some Identifier.
    identifier_id = Column(
        Integer, ForeignKey('identifiers.id'), index=True, nullable=False)
    # The DataSource through which this link was discovered.
    data_source_id = Column(
        Integer, ForeignKey('datasources.id'), index=True, nullable=False)
    # The link relation between the Identifier and the Resource.
    rel = Column(Unicode, index=True, nullable=False)
    # The Resource on the other end of the link.
    resource_id = Column(
        Integer, ForeignKey('resources.id'), index=True, nullable=False)

    @classmethod
    def unmirrored(cls, collection):
        """Find all Hyperlinks associated with an item in the
        given Collection that could be mirrored but aren't.
        TODO: We don't cover the case where an image was mirrored but no
        thumbnail was created of it. (We do cover the case where the thumbnail
        was created but not mirrored.)
        """
        from identifier import Identifier
        _db = Session.object_session(collection)
        qu = _db.query(Hyperlink).join(
            Hyperlink.identifier
        ).join(
            Identifier.licensed_through
        ).outerjoin(
            Hyperlink.resource
        ).outerjoin(
            Resource.representation
        )
        qu = qu.filter(LicensePool.collection_id==collection.id)
        qu = qu.filter(Hyperlink.rel.in_(Hyperlink.MIRRORED))
        qu = qu.filter(Hyperlink.data_source==collection.data_source)
        # "Not mirrored" means either no Representation at all, or a
        # Representation with no mirror URL.
        qu = qu.filter(
            or_(
                Representation.id==None,
                Representation.mirror_url==None,
            )
        )
        # Without this ordering, the query does a table scan looking for
        # items that match. With the ordering, they're all at the front.
        qu = qu.order_by(Representation.mirror_url.asc().nullsfirst(),
                         Representation.id.asc().nullsfirst())
        return qu

    @classmethod
    def generic_uri(cls, data_source, identifier, rel, content=None):
        """Create a generic URI for the other end of this hyperlink.
        This is useful for resources that are obtained through means
        other than fetching a single URL via HTTP. It lets us get a
        URI that's most likely unique, so we can create a Resource
        object without violating the uniqueness constraint.
        If the output of this method isn't unique in your situation
        (because the data source provides more than one link with a
        given link relation for a given identifier), you'll need some
        other way of coming up with generic URIs.
        """
        l = [identifier.urn, urllib.quote(data_source.name), urllib.quote(rel)]
        if content:
            # Mix a hash of the content into the URI so different
            # content yields different URIs.
            m = md5()
            if isinstance(content, unicode):
                content = content.encode("utf8")
            m.update(content)
            l.append(m.hexdigest())
        return ":".join(l)

    @classmethod
    def _default_filename(cls, rel):
        """Return a default filename stem for a resource with the given
        link relation, or None for unrecognized relations.
        """
        # (Fixed: the first parameter of this classmethod was previously
        # named `self`, against PEP 8 convention.)
        if rel == cls.OPEN_ACCESS_DOWNLOAD:
            return 'content'
        elif rel == cls.IMAGE:
            return 'cover'
        elif rel == cls.THUMBNAIL_IMAGE:
            return 'cover-thumbnail'

    @property
    def default_filename(self):
        """Default filename stem for this link's resource."""
        return self._default_filename(self.rel)
class Representation(Base, MediaTypes):
    """A cached document obtained from (and possibly mirrored to) the Web
    at large.
    Sometimes this is a DataSource's representation of a specific
    book.
    Sometimes it's associated with a database Resource (which has a
    well-defined relationship to one specific book).
    Sometimes it's just a web page that we need a cached local copy
    of.
    """
    __tablename__ = 'representations'
    id = Column(Integer, primary_key=True)
    # URL from which the representation was fetched.
    url = Column(Unicode, index=True)
    # The media type of the representation.
    media_type = Column(Unicode)
    resource = relationship("Resource", backref="representation", uselist=False)
    ### Records of things we tried to do with this representation.
    # When the representation was last fetched from `url`.
    fetched_at = Column(DateTime, index=True)
    # A textual description of the error encountered the last time
    # we tried to fetch the representation
    fetch_exception = Column(Unicode, index=True)
    # A URL under our control to which this representation has been
    # mirrored.
    mirror_url = Column(Unicode, index=True)
    # When the representation was last pushed to `mirror_url`.
    mirrored_at = Column(DateTime, index=True)
    # An exception that happened while pushing this representation
    # to `mirror_url.
    mirror_exception = Column(Unicode, index=True)
    # If this image is a scaled-down version of some other image,
    # `scaled_at` is the time it was last generated.
    scaled_at = Column(DateTime, index=True)
    # If this image is a scaled-down version of some other image,
    # this is the exception that happened the last time we tried
    # to scale it down.
    scale_exception = Column(Unicode, index=True)
    ### End records of things we tried to do with this representation.
    # An image Representation may be a thumbnail version of another
    # Representation.
    thumbnail_of_id = Column(
        Integer, ForeignKey('representations.id'), index=True)
    thumbnails = relationship(
        "Representation",
        backref=backref("thumbnail_of", remote_side = [id]),
        lazy="joined", post_update=True)
    # The HTTP status code from the last fetch.
    status_code = Column(Integer)
    # A textual representation of the HTTP headers sent along with the
    # representation.
    headers = Column(Unicode)
    # The Location header from the last representation.
    location = Column(Unicode)
    # The Last-Modified header from the last representation.
    last_modified = Column(Unicode)
    # The Etag header from the last representation.
    etag = Column(Unicode)
    # The size of the representation, in bytes.
    file_size = Column(Integer)
    # If this representation is an image, the height of the image.
    image_height = Column(Integer, index=True)
    # If this representation is an image, the width of the image.
    image_width = Column(Integer, index=True)
    # The content of the representation itself.
    content = Column(Binary)
    # Instead of being stored in the database, the content of the
    # representation may be stored on a local file relative to the
    # data root.
    local_content_path = Column(Unicode)
    # A Representation may be a CachedMARCFile.
    marc_file = relationship(
        "CachedMARCFile", backref="representation",
        cascade="all, delete-orphan",
    )
    # At any given time, we will have a single representation for a
    # given URL and media type.
    __table_args__ = (
        UniqueConstraint('url', 'media_type'),
    )
    # A User-Agent to use when acting like a web browser.
    # BROWSER_USER_AGENT = "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36 (Simplified)"
    BROWSER_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0"
    @property
    def age(self):
        # Seconds elapsed since the last fetch. A representation that has
        # never been fetched reports a sentinel age of 1,000,000 seconds
        # (~11.5 days) so it always looks stale.
        if not self.fetched_at:
            return 1000000
        return (datetime.datetime.utcnow() - self.fetched_at).total_seconds()
    @property
    def has_content(self):
        # True if the last fetch succeeded (HTTP 200, no recorded exception)
        # and the body is still held, either inline in `content` or on disk
        # at `local_content_path`.
        # NOTE(review): `local_content_path` is described above as relative
        # to the data root but is passed to os.path.exists unchanged --
        # confirm it is absolute in practice.
        if self.content and self.status_code == 200 and self.fetch_exception is None:
            return True
        if self.local_content_path and os.path.exists(self.local_content_path) and self.fetch_exception is None:
            return True
        return False
@property
def public_url(self):
"""Find the best URL to publish when referencing this Representation
in a public space.
:return: a bytestring
"""
url = None
if self.mirror_url:
url = self.mirror_url
elif self.url:
url = self.url
elif self.resource:
# This really shouldn't happen.
url = self.resource.url
return native_string(url)
    @property
    def is_usable(self):
        """Returns True if the Representation has some data or received
        a status code that's not in the 5xx series.
        """
        # Operator precedence note: the expression groups as
        # content OR local_path OR (status_code AND status_code not 5xx).
        # NOTE(review): `self.local_path` is not defined in the portion of
        # this class visible here (other code uses `local_content_path`);
        # confirm a `local_path` property exists further down the class.
        if not self.fetch_exception and (
            self.content or self.local_path or self.status_code
            and self.status_code // 100 != 5
        ):
            return True
        return False
@classmethod
def is_media_type(cls, s):
"""Return true if the given string looks like a media type."""
if not s:
return False
s = s.lower()
return any(s.startswith(x) for x in [
'application/',
'audio/',
'example/',
'image/',
'message/',
'model/',
'multipart/',
'text/',
'video/'
])
@classmethod
def guess_url_media_type_from_path(cls, url):
"""Guess a likely media type from the URL's path component."""
if not url:
return None
path = urlparse.urlparse(url).path
return cls.guess_media_type(path)
@classmethod
def guess_media_type(cls, filename):
"""Guess a likely media type from a filename."""
if not filename:
return None
filename = filename.lower()
for extension, media_type in cls.MEDIA_TYPE_FOR_EXTENSION.items():
if filename.endswith(extension):
return media_type
return None
def is_fresher_than(self, max_age):
# Convert a max_age timedelta to a number of seconds.
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
if not self.is_usable:
return False
return (max_age is None or max_age > self.age)
    @classmethod
    def get(cls, _db, url, do_get=None, extra_request_headers=None,
            accept=None, max_age=None, pause_before=0, allow_redirects=True,
            presumed_media_type=None, debug=True, response_reviewer=None,
            exception_handler=None, url_normalizer=None):
        """Retrieve a representation from the cache if possible.
        If not possible, retrieve it from the web and store it in the
        cache.
        :param _db: A database connection.
        :param url: The URL to use as the target of any HTTP request.
        :param do_get: A function that takes arguments (url, headers)
            and retrieves a representation over the network.
        :param accept: A value for the Accept HTTP header.
        :param extra_request_headers: Any additional HTTP headers to
            include with the request.
        :param max_age: A timedelta object representing the maximum
            time to consider a cached representation fresh. (We ignore the
            caching directives from web servers because they're usually
            far too conservative for our purposes.)
        :param pause_before: A number of seconds to pause before sending
            the HTTP request. This is for use in situations where
            HTTP requests are subject to throttling.
        :param allow_redirects: Not currently used. (TODO: this seems like
            a problem!)
        :param presumed_media_type: If the response does not contain a
            Content-Type header, or if the specified Content-Type is
            too generic to use, the representation will be presumed to be
            of this media type.
        :param debug: If True, progress reports on the HTTP request will
            be logged.
        :param response_reviewer: A function that takes a 3-tuple
            (status_code, headers, content) and raises an exception if
            the response should not be treated as cacheable.
        :param exception_handler: A function that takes a 3-tuple
            (Representation, Exception, traceback) and handles
            an exceptional condition that occured during the HTTP request.
        :param url_normalizer: A function that takes the URL to be used in
            the HTTP request, and returns the URL to use when storing
            the corresponding Representation in the database. This can be
            used to strip irrelevant or sensitive information from
            URLs to increase the chances of a cache hit.
        :return: A 2-tuple (representation, obtained_from_cache)
        """
        representation = None
        do_get = do_get or cls.simple_http_get
        exception_handler = exception_handler or cls.record_exception
        # TODO: We allow representations of the same URL in different
        # media types, but we don't have a good solution here for
        # doing content negotiation (letting the caller ask for a
        # specific set of media types and matching against what we
        # have cached). Fortunately this isn't an issue with any of
        # the data sources we currently use, so for now we can treat
        # different representations of a URL as interchangeable.
        if url_normalizer:
            normalized_url = url_normalizer(url)
        else:
            normalized_url = url
        a = dict(url=normalized_url)
        if accept:
            a['media_type'] = accept
        representation = get_one(_db, Representation, 'interchangeable', **a)
        usable_representation = fresh_representation = False
        if representation:
            # Do we already have a usable representation?
            usable_representation = representation.is_usable
            # Assuming we have a usable representation, is it fresh?
            fresh_representation = representation.is_fresher_than(max_age)
        # `debug` may be True (log at DEBUG), False (don't log), or an
        # explicit logging level.
        if debug is True:
            debug_level = logging.DEBUG
        elif debug is False:
            debug_level = None
        else:
            debug_level = debug
        if fresh_representation:
            if debug_level is not None:
                logging.info("Cached %s", url)
            return representation, True
        # We have a representation that is either not fresh or not usable.
        # We must make an HTTP request.
        if debug_level is not None:
            logging.log(debug_level, "Fetching %s", url)
        headers = {}
        if extra_request_headers:
            headers.update(extra_request_headers)
        if accept:
            headers['Accept'] = accept
        if usable_representation:
            # We have a representation but it's not fresh. We will
            # be making a conditional HTTP request to see if there's
            # a new version.
            if representation.last_modified:
                headers['If-Modified-Since'] = representation.last_modified
            if representation.etag:
                headers['If-None-Match'] = representation.etag
        fetched_at = datetime.datetime.utcnow()
        if pause_before:
            time.sleep(pause_before)
        media_type = None
        fetch_exception = None
        exception_traceback = None
        try:
            # NOTE: `headers` is rebound here -- after this call it holds
            # the *response* headers, not the request headers built above.
            status_code, headers, content = do_get(url, headers)
            if response_reviewer:
                # An optional function passed to raise errors if the
                # post response isn't worth caching.
                response_reviewer((status_code, headers, content))
            # (`exception` is assigned but never read below.)
            exception = None
            media_type = cls._best_media_type(url, headers, presumed_media_type)
            if isinstance(content, unicode):
                content = content.encode("utf8")
        except Exception as e:
            # This indicates there was a problem with making the HTTP
            # request, not that the HTTP request returned an error
            # condition.
            fetch_exception = e
            logging.error("Error making HTTP request to %s", url, exc_info=fetch_exception)
            exception_traceback = traceback.format_exc()
            status_code = None
            headers = None
            content = None
            media_type = None
        # At this point we can create/fetch a Representation object if
        # we don't have one already, or if the URL or media type we
        # actually got from the server differs from what we thought
        # we had.
        if (not usable_representation
            or media_type != representation.media_type
            or normalized_url != representation.url):
            representation, is_new = get_one_or_create(
                _db, Representation, url=normalized_url,
                media_type=unicode(media_type)
            )
        if fetch_exception:
            exception_handler(
                representation, fetch_exception, exception_traceback
            )
        representation.fetched_at = fetched_at
        if status_code == 304:
            # The representation hasn't changed since we last checked.
            # Set its fetched_at property and return the cached
            # version as though it were new.
            representation.fetched_at = fetched_at
            representation.status_code = status_code
            return representation, False
        if status_code:
            status_code_series = status_code // 100
        else:
            status_code_series = None
        # 404/410 are cached like successes: "definitively gone" is
        # useful information in itself.
        if status_code_series in (2,3) or status_code in (404, 410):
            # We have a new, good representation. Update the
            # Representation object and return it as fresh.
            representation.status_code = status_code
            representation.content = content
            representation.media_type = media_type
            for header, field in (
                    ('etag', 'etag'),
                    ('last-modified', 'last_modified'),
                    ('location', 'location')):
                if header in headers:
                    value = headers[header]
                else:
                    value = None
                setattr(representation, field, value)
            representation.headers = cls.headers_to_string(headers)
            representation.content = content
            representation.update_image_size()
            return representation, False
        # Okay, things didn't go so well.
        date_string = fetched_at.strftime("%Y-%m-%d %H:%M:%S")
        representation.fetch_exception = representation.fetch_exception or (
            "Most recent fetch attempt (at %s) got status code %s" % (
                date_string, status_code))
        if usable_representation:
            # If we have a usable (but stale) representation, we'd
            # rather return the cached data than destroy the information.
            return representation, True
        # We didn't have a usable representation before, and we still don't.
        # At this point we're just logging an error.
        representation.status_code = status_code
        representation.headers = cls.headers_to_string(headers)
        representation.content = content
        return representation, False
@classmethod
def _best_media_type(cls, url, headers, default):
"""Determine the most likely media type for the given HTTP headers.
Almost all the time, this is the value of the content-type
header, if present. However, if the content-type header has a
really generic value like "application/octet-stream" (as often
happens with binary files hosted on Github), we'll privilege
the default value. If there's no default value, we'll try to
derive one from the URL extension.
"""
default = default or cls.guess_url_media_type_from_path(url)
if not headers or not 'content-type' in headers:
return default
headers_type = headers['content-type'].lower()
clean = cls._clean_media_type(headers_type)
if clean in Representation.GENERIC_MEDIA_TYPES and default:
return default
return headers_type
    @classmethod
    def reraise_exception(cls, representation, exception, traceback):
        """Deal with a fetch exception by re-raising it.

        Drop-in alternative to `record_exception` for callers that want
        fetch errors to propagate instead of being stored on the
        representation. `representation` and `traceback` are ignored.
        """
        raise exception
    @classmethod
    def record_exception(cls, representation, exception, traceback):
        """Deal with a fetch exception by recording it
        and moving on.

        Note that the formatted `traceback` (not the exception object)
        is what gets stored in `representation.fetch_exception`.
        """
        representation.fetch_exception = traceback
    @classmethod
    def post(cls, _db, url, data, max_age=None, response_reviewer=None,
             **kwargs):
        """Finds or creates POST request as a Representation.

        :param _db: A database session.
        :param url: The URL to POST to.
        :param data: The request body to send with the POST.
        :param max_age: Passed through to `get` -- maximum age of a
            cached Representation before a new request is made.
        :param response_reviewer: Passed through to `get`.
        :return: Whatever `get` returns -- a 2-tuple
            (representation, is_new).
        """
        # Default to a plain HTTP POST, but let the caller override the
        # requester via the 'do_get' keyword, mirroring `get`'s API.
        original_do_get = kwargs.pop('do_get', cls.simple_http_post)
        def do_post(url, headers, **kwargs):
            # Inject the POST body into every request `get` makes.
            kwargs.update({'data' : data})
            return original_do_get(url, headers, **kwargs)
        return cls.get(
            _db, url, do_get=do_post, max_age=max_age,
            response_reviewer=response_reviewer, **kwargs
        )
@property
def mirrorable_media_type(self):
"""Does this Representation look like the kind of thing we
create mirrors of?
Basically, images and books.
"""
return any(
self.media_type in x for x in
(Representation.BOOK_MEDIA_TYPES,
Representation.IMAGE_MEDIA_TYPES)
)
def update_image_size(self):
"""Make sure .image_height and .image_width are up to date.
Clears .image_height and .image_width if the representation
is not an image.
"""
if self.media_type and self.media_type.startswith('image/'):
image = self.as_image()
if image:
self.image_width, self.image_height = image.size
return
self.image_width = self.image_height = None
@classmethod
def normalize_content_path(cls, content_path, base=None):
if not content_path:
return None
base = base or Configuration.data_directory()
if content_path.startswith(base):
content_path = content_path[len(base):]
if content_path.startswith('/'):
content_path = content_path[1:]
return content_path
@property
def unicode_content(self):
"""Attempt to convert the content into Unicode.
If all attempts fail, we will return None rather than raise an exception.
"""
content = None
for encoding in ('utf-8', 'windows-1252'):
try:
content = self.content.decode(encoding)
break
except UnicodeDecodeError, e:
pass
return content
    def set_fetched_content(self, content, content_path=None):
        """Simulate a successful HTTP request for this representation.

        This is used when the content of the representation is obtained
        through some other means.

        :param content: The content itself. A unicode string is encoded
            to UTF-8 bytes before being stored.
        :param content_path: Optional filesystem path where the content
            lives; normalized relative to the data directory.
        """
        if isinstance(content, unicode):
            content = content.encode("utf8")
        self.content = content
        self.local_content_path = self.normalize_content_path(content_path)
        # Pretend this was just fetched successfully over HTTP.
        self.status_code = 200
        self.fetched_at = datetime.datetime.utcnow()
        self.fetch_exception = None
        # Keep the cached image dimensions in sync with the new content.
        self.update_image_size()
def set_as_mirrored(self, mirror_url):
"""Record the fact that the representation has been mirrored
to the given URL.
This should only be called upon successful completion of the
mirror operation.
"""
self.mirror_url = mirror_url
self.mirrored_at = datetime.datetime.utcnow()
self.mirror_exception = None
@classmethod
def headers_to_string(cls, d):
if d is None:
return None
return json.dumps(dict(d))
@classmethod
def simple_http_get(cls, url, headers, **kwargs):
"""The most simple HTTP-based GET."""
if not 'allow_redirects' in kwargs:
kwargs['allow_redirects'] = True
response = HTTP.get_with_timeout(url, headers=headers, **kwargs)
return response.status_code, response.headers, response.content
@classmethod
def simple_http_post(cls, url, headers, **kwargs):
"""The most simple HTTP-based POST."""
data = kwargs.get('data')
if 'data' in kwargs:
del kwargs['data']
response = HTTP.post_with_timeout(url, data, headers=headers, **kwargs)
return response.status_code, response.headers, response.content
    @classmethod
    def http_get_no_timeout(cls, url, headers, **kwargs):
        """HTTP-based GET that disables the default request timeout."""
        return Representation.simple_http_get(url, headers, timeout=None, **kwargs)
    @classmethod
    def http_get_no_redirect(cls, url, headers, **kwargs):
        """HTTP-based GET with no redirects.

        Useful when the redirect target itself is the interesting datum.
        """
        return cls.simple_http_get(url, headers, allow_redirects=False, **kwargs)
    @classmethod
    def browser_http_get(cls, url, headers, **kwargs):
        """GET the representation that would be displayed to a web browser.

        Overrides the User-Agent header with a browser-like value;
        copies the headers dict first so the caller's dict is untouched.
        """
        headers = dict(headers)
        headers['User-Agent'] = cls.BROWSER_USER_AGENT
        return cls.simple_http_get(url, headers, **kwargs)
    @classmethod
    def cautious_http_get(cls, url, headers, **kwargs):
        """Examine the URL we're about to GET, possibly going so far as to
        perform a HEAD request, to avoid making a request (or
        following a redirect) to a site known to cause problems.

        The motivating case is that unglue.it contains gutenberg.org
        links that appear to be direct links to EPUBs, but 1) they're
        not direct links to EPUBs, and 2) automated requests to
        gutenberg.org quickly result in IP bans. So we don't make those
        requests.

        Recognized keyword arguments (all optional, removed from kwargs
        before the real GET): 'do_not_access', 'check_for_redirect',
        'do_get', 'cautious_head_client'.

        :return: A 3-tuple (status_code, headers, content). When the
            request is declined, this is a synthetic 417 response.
        """
        do_not_access = kwargs.pop(
            'do_not_access', cls.AVOID_WHEN_CAUTIOUS_DOMAINS
        )
        check_for_redirect = kwargs.pop(
            'check_for_redirect', cls.EXERCISE_CAUTION_DOMAINS
        )
        do_get = kwargs.pop('do_get', cls.simple_http_get)
        head_client = kwargs.pop('cautious_head_client', requests.head)
        if cls.get_would_be_useful(
            url, headers, do_not_access, check_for_redirect,
            head_client
        ):
            # Go ahead and make the GET request.
            return do_get(url, headers, **kwargs)
        else:
            logging.info(
                "Declining to make non-useful HTTP request to %s", url
            )
            # 417 Expectation Failed - "... if the server is a proxy,
            # the server has unambiguous evidence that the request
            # could not be met by the next-hop server."
            #
            # Not quite accurate, but I think it's the closest match
            # to "the HTTP client decided to not even make your
            # request".
            return (
                417,
                {"content-type" :
                 "application/vnd.librarysimplified-did-not-make-request"},
                "Cautiously decided not to make a GET request to %s" % url
            )
# Sites known to host both free books and redirects to a domain in
# AVOID_WHEN_CAUTIOUS_DOMAINS.
EXERCISE_CAUTION_DOMAINS = ['unglue.it']
# Sites that cause problems for us if we make automated
# HTTP requests to them while trying to find free books.
AVOID_WHEN_CAUTIOUS_DOMAINS = ['gutenberg.org', 'books.google.com']
    @classmethod
    def get_would_be_useful(
        cls, url, headers, do_not_access=None, check_for_redirect=None,
        head_client=None
    ):
        """Determine whether making a GET request to a given URL is likely to
        have a useful result.

        :param URL: URL under consideration.
        :param headers: Headers that would be sent with the GET request.
        :param do_not_access: Domains to which GET requests are not useful.
        :param check_for_redirect: Domains to which we should make a HEAD
            request, in case they redirect to a `do_not_access` domain.
        :param head_client: Function for making the HEAD request, if
            one becomes necessary. Should return requests.Response or a mock.
        :return: True if the GET request should be made, False otherwise.
        """
        do_not_access = do_not_access or cls.AVOID_WHEN_CAUTIOUS_DOMAINS
        check_for_redirect = check_for_redirect or cls.EXERCISE_CAUTION_DOMAINS
        head_client = head_client or requests.head
        def has_domain(domain, check_against):
            """Is the given `domain` in `check_against`,
            or maybe a subdomain of one of the domains in `check_against`?
            """
            return any(domain == x or domain.endswith('.' + x)
                       for x in check_against)
        netloc = urlparse.urlparse(url).netloc
        if has_domain(netloc, do_not_access):
            # The link points directly to a domain we don't want to
            # access.
            return False
        if not has_domain(netloc, check_for_redirect):
            # We trust this domain not to redirect to a domain we don't
            # want to access.
            return True
        # We might be fine, or we might get redirected to a domain we
        # don't want to access. Make a HEAD request to see what
        # happens.
        head_response = head_client(url, headers=headers)
        if head_response.status_code // 100 != 3:
            # It's not a redirect. Go ahead and make the GET request.
            return True
        # Yes, it's a redirect. Does it redirect to a
        # domain we don't want to access?
        location = head_response.headers.get('location', '')
        netloc = urlparse.urlparse(location).netloc
        return not has_domain(netloc, do_not_access)
    @property
    def is_image(self):
        """Does this representation's media type mark it as an image?

        Note: when media_type is unset, this returns the falsy
        media_type value itself (None or ''), not False.
        """
        return self.media_type and self.media_type.startswith("image/")
@property
def local_path(self):
"""Return the full local path to the representation on disk."""
if not self.local_content_path:
return None
return os.path.join(Configuration.data_directory(),
self.local_content_path)
    @property
    def clean_media_type(self):
        """The most basic version of this representation's media type.

        No profiles or anything -- parameters after ';' are stripped.
        """
        return self._clean_media_type(self.media_type)
    @property
    def url_extension(self):
        """The file extension in this representation's original url.

        Prefers a known book/image extension (possibly with a version
        suffix or extra extension); falls back to any trailing
        dot-delimited token; returns None if there is no extension.
        """
        url_path = urlparse.urlparse(self.url).path
        # Known extensions can be followed by a version number (.epub3)
        # or an additional extension (.epub.noimages)
        known_extensions = "|".join(self.FILE_EXTENSIONS.values())
        known_extension_re = re.compile("\.(%s)\d?\.?[\w\d]*$" % known_extensions, re.I)
        known_match = known_extension_re.search(url_path)
        if known_match:
            return known_match.group()
        else:
            # No known extension; accept any trailing ".something".
            any_extension_re = re.compile("\.[\w\d]*$", re.I)
            any_match = any_extension_re.search(url_path)
            if any_match:
                return any_match.group()
        return None
    def extension(self, destination_type=None):
        """Try to come up with a good file extension for this representation.

        :param destination_type: If given, the media type the file will
            be delivered as; its extension wins unconditionally.
        """
        if destination_type:
            return self._extension(destination_type)
        # We'd like to use url_extension because it has some extra
        # features for preserving information present in the original
        # URL. But if we're going to be changing the media type of the
        # resource when mirroring it, the original URL is irrelevant
        # and we need to use an extension associated with the
        # outward-facing media type.
        internal = self.clean_media_type
        external = self._clean_media_type(self.external_media_type)
        if internal != external:
            # External media type overrides any information that might
            # be present in the URL.
            return self._extension(external)
        # If there is information in the URL, use it.
        extension = self.url_extension
        if extension:
            return extension
        # Take a guess based on the internal media type.
        return self._extension(internal)
@classmethod
def _clean_media_type(cls, media_type):
if not media_type:
return media_type
if ';' in media_type:
media_type = media_type[:media_type.index(';')].strip()
return media_type
    @classmethod
    def _extension(cls, media_type):
        """Return the file extension (with leading dot) associated with
        `media_type`, or the empty string if none is known.
        """
        value = cls.FILE_EXTENSIONS.get(media_type, '')
        if not value:
            return value
        return '.' + value
    def default_filename(self, link=None, destination_type=None):
        """Try to come up with a good filename for this representation.

        :param link: Optional object whose `default_filename` is used
            as a fallback when the URL path yields no filename.
        :param destination_type: Media type the file will be delivered
            as; its extension replaces the one implied by this
            representation's own media type/URL.
        """
        scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
        path_parts = path.split("/")
        filename = None
        if path_parts:
            filename = path_parts[-1]
        if not filename and link:
            filename = link.default_filename
        if not filename:
            # This is the absolute last-ditch filename solution, and
            # it's basically only used when we try to mirror the root
            # URL of a domain.
            filename = 'resource'
        default_extension = self.extension()
        extension = self.extension(destination_type)
        # Swap the native extension for the destination type's, or
        # append an extension when the filename lacks the right one.
        if default_extension and default_extension != extension and filename.endswith(default_extension):
            filename = filename[:-len(default_extension)] + extension
        elif extension and not filename.endswith(extension):
            filename += extension
        return filename
    @property
    def external_media_type(self):
        """The media type to use when mirroring this representation
        externally. By default, the representation's own media type.
        """
        return self.media_type
    def external_content(self):
        """Return a filehandle to the representation's contents, as they
        should be mirrored externally, and the media type to be used
        when mirroring.

        By default this is simply `content_fh()`.
        """
        return self.content_fh()
def content_fh(self):
"""Return an open filehandle to the representation's contents.
This works whether the representation is kept in the database
or in a file on disk.
"""
if self.content:
return BytesIO(self.content)
elif self.local_path:
if not os.path.exists(self.local_path):
raise ValueError("%s does not exist." % self.local_path)
return open(self.local_path, 'rb')
return None
    def as_image(self):
        """Load this Representation's contents as a PIL image.

        :raise ValueError: If the representation is not an image, or
            has neither in-database content nor a local file.
        :return: A PIL Image, or None if no filehandle could be opened
            or the representation is an SVG (which is skipped here).
        """
        if not self.is_image:
            raise ValueError(
                "Cannot load non-image representation as image: type %s."
                % self.media_type)
        if not self.content and not self.local_path:
            raise ValueError("Image representation has no content.")
        fh = self.content_fh()
        if not fh or self.clean_media_type == self.SVG_MEDIA_TYPE:
            return None
        return Image.open(fh)
pil_format_for_media_type = {
"image/gif": "gif",
"image/png": "png",
"image/jpeg": "jpeg",
}
    def scale(self, max_height, max_width,
              destination_url, destination_media_type, force=False):
        """Return a Representation that's a scaled-down version of this
        Representation, creating it if necessary.

        :param max_height: Maximum height of the thumbnail, in pixels.
        :param max_width: Maximum width of the thumbnail, in pixels.
        :param destination_url: The URL the scaled-down resource will
            (eventually) be uploaded to.
        :param destination_media_type: Media type for the thumbnail;
            must be a key of `pil_format_for_media_type`.
        :param force: If True, re-generate the thumbnail even when a
            preexisting one is found.
        :raise ValueError: If `destination_media_type` is unsupported.
        :return: A 2-tuple (Representation, is_new). On any scaling
            failure this is (self, False), with the failure recorded
            in `self.scale_exception`.
        """
        _db = Session.object_session(self)
        if not destination_media_type in self.pil_format_for_media_type:
            raise ValueError("Unsupported destination media type: %s" % destination_media_type)
        pil_format = self.pil_format_for_media_type[destination_media_type]
        # Make sure we actually have an image to scale.
        image = None
        try:
            image = self.as_image()
        except Exception, e:
            self.scale_exception = traceback.format_exc()
            self.scaled_at = None
            # This most likely indicates an error during the fetch
            # phase.
            self.fetch_exception = "Error found while scaling: %s" % (
                self.scale_exception)
            logging.error("Error found while scaling %r", self, exc_info=e)
        if not image:
            return self, False
        # Now that we've loaded the image, take the opportunity to set
        # the image size of the original representation.
        self.image_width, self.image_height = image.size
        # If the image is already a thumbnail-size bitmap, don't bother.
        if (self.clean_media_type != Representation.SVG_MEDIA_TYPE
            and self.image_height <= max_height
            and self.image_width <= max_width):
            self.thumbnails = []
            return self, False
        # Do we already have a representation for the given URL?
        thumbnail, is_new = get_one_or_create(
            _db, Representation, url=destination_url,
            media_type=destination_media_type
        )
        if thumbnail not in self.thumbnails:
            thumbnail.thumbnail_of = self
        if not is_new and not force:
            # We found a preexisting thumbnail and we're allowed to
            # use it.
            return thumbnail, is_new
        # At this point we have a parent Representation (self), we
        # have a Representation that will contain a thumbnail
        # (thumbnail), and we know we need to actually thumbnail the
        # parent into the thumbnail.
        #
        # Because the representation of this image is being
        # changed, it will need to be mirrored later on.
        now = datetime.datetime.utcnow()
        thumbnail.mirrored_at = None
        thumbnail.mirror_exception = None
        # Image.thumbnail() resizes in place, preserving aspect ratio.
        args = [(max_width, max_height),
                Image.ANTIALIAS]
        try:
            image.thumbnail(*args)
        except IOError, e:
            # I'm not sure why, but sometimes just trying
            # it again works.
            original_exception = traceback.format_exc()
            try:
                image.thumbnail(*args)
            except IOError, e:
                self.scale_exception = original_exception
                self.scaled_at = None
                return self, False
        # Save the thumbnail image to the database under
        # thumbnail.content.
        output = BytesIO()
        # JPEG (and GIF palettes) need RGB mode before saving.
        if image.mode != 'RGB':
            image = image.convert('RGB')
        try:
            image.save(output, pil_format)
        except Exception, e:
            self.scale_exception = traceback.format_exc()
            self.scaled_at = None
            # This most likely indicates a problem during the fetch phase,
            # Set fetch_exception so we'll retry the fetch.
            self.fetch_exception = "Error found while scaling: %s" % (self.scale_exception)
            return self, False
        thumbnail.content = output.getvalue()
        thumbnail.image_width, thumbnail.image_height = image.size
        output.close()
        thumbnail.scale_exception = None
        thumbnail.scaled_at = now
        return thumbnail, True
    @property
    def thumbnail_size_quality_penalty(self):
        """Quality penalty for this representation's own dimensions.

        See `_thumbnail_size_quality_penalty` for the scoring rules.
        """
        return self._thumbnail_size_quality_penalty(
            self.image_width, self.image_height
        )
    @classmethod
    def _thumbnail_size_quality_penalty(cls, width, height):
        """Measure a cover image's deviation from the ideal aspect ratio, and
        by its deviation (in the "too small" direction only) from the
        ideal thumbnail resolution.

        :param width: Image width in pixels, or None if unknown.
        :param height: Image height in pixels, or None if unknown.
        :return: A multiplier in (0, 1]; 1 means no penalty.
        """
        quotient = 1
        if not width or not height:
            # In the absence of any information, assume the cover is
            # just dandy.
            #
            # This is obviously less than ideal, but this code is used
            # pretty rarely now that we no longer have hundreds of
            # covers competing for the privilege of representing a
            # public domain book, so I'm not too concerned about it.
            #
            # Look at it this way: this escape hatch only causes a
            # problem if we compare an image whose size we know
            # against an image whose size we don't know.
            #
            # In the circulation manager, we never know what size an
            # image is, and we must always trust that the cover
            # (e.g. Overdrive and the metadata wrangler) give us
            # "thumbnail" images that are approximately the right
            # size. So we always use this escape hatch.
            #
            # In the metadata wrangler and content server, we always
            # have access to the covers themselves, so we always have
            # size information and we never use this escape hatch.
            return quotient
        # Penalize an image for deviation from the ideal aspect ratio.
        aspect_ratio = width / float(height)
        ideal = IdentifierConstants.IDEAL_COVER_ASPECT_RATIO
        if aspect_ratio > ideal:
            deviation = ideal / aspect_ratio
        else:
            deviation = aspect_ratio/ideal
        if deviation != 1:
            quotient *= deviation
        # Penalize an image for not being wide enough.
        # (shortfall is negative only when the image is too small --
        # oversized images are not penalized.)
        width_shortfall = (
            float(width - IdentifierConstants.IDEAL_IMAGE_WIDTH) / IdentifierConstants.IDEAL_IMAGE_WIDTH)
        if width_shortfall < 0:
            quotient *= (1+width_shortfall)
        # Penalize an image for not being tall enough.
        height_shortfall = (
            float(height - IdentifierConstants.IDEAL_IMAGE_HEIGHT) / IdentifierConstants.IDEAL_IMAGE_HEIGHT)
        if height_shortfall < 0:
            quotient *= (1+height_shortfall)
        return quotient
@property
def best_thumbnail(self):
"""Find the best thumbnail among all the thumbnails associated with
this Representation.
Basically, we prefer a thumbnail that has been mirrored.
"""
champion = None
for thumbnail in self.thumbnails:
if thumbnail.mirror_url:
champion = thumbnail
break
elif not champion:
champion = thumbnail
return champion
|
sedurCode/nodOSC
|
firmware/hal/src/photon/wiced/network/NetX_Duo/WICED/wiced_network.h
|
<reponame>sedurCode/nodOSC
/*
 * Broadcom Proprietary and Confidential. Copyright 2016 Broadcom
 * All Rights Reserved.
 *
 * This is UNPUBLISHED PROPRIETARY SOURCE CODE of Broadcom Corporation;
 * the contents of this file may not be disclosed to third parties, copied
 * or duplicated in any form, in whole or in part, without the prior
 * written permission of Broadcom Corporation.
 */
/* WICED <-> NetX Duo glue: socket wrapper types, link-check macros,
 * and stack/pool sizing constants for the network layer. */
#pragma once
#include "nx_api.h"
#include "tx_port.h" /* Needed by nx_dhcp.h that follows */
#include "netx_applications/dhcp/nxd_dhcp_client.h"
#include "netx_applications/auto_ip/nx_auto_ip.h"
#include "wiced_result.h"
#include "tls_types.h"
#include "linked_list.h"
#include "wwd_network_constants.h"
#include "dtls_types.h"
#ifdef __cplusplus
extern "C"
{
#endif
/******************************************************
 * Macros
 ******************************************************/
/* Map an interface id to its NX_IP instance; "&3" keeps the index
 * inside the 4-entry wiced_ip_handle array declared below. */
#define IP_HANDLE(interface) (*wiced_ip_handle[( interface )&3])
/* Early-return WICED_NOTUP from the calling function when the
 * interface's (or socket's) underlying link is down. */
#define WICED_LINK_CHECK( interface ) { if ( !wiced_network_interface_is_up( &IP_HANDLE(interface) ) ){ return WICED_NOTUP; }}
#define WICED_LINK_CHECK_TCP_SOCKET( socket_in ) { if ( (socket_in)->socket.nx_tcp_socket_ip_ptr->nx_ip_driver_link_up == 0 ){ return WICED_NOTUP; }}
#define WICED_LINK_CHECK_UDP_SOCKET( socket_in ) { if ( (socket_in)->socket.nx_udp_socket_ip_ptr->nx_ip_driver_link_up == 0 ){ return WICED_NOTUP; }}
/******************************************************
 * Constants
 ******************************************************/
#define WICED_MAXIMUM_NUMBER_OF_SOCKETS_WITH_CALLBACKS (NX_MAX_LISTEN_REQUESTS)
#define WICED_MAXIMUM_NUMBER_OF_SERVER_SOCKETS (WICED_MAXIMUM_NUMBER_OF_SOCKETS_WITH_CALLBACKS)
#define SIZE_OF_ARP_ENTRY sizeof(NX_ARP)
/* Larger IP thread stack in DEBUG builds -- presumably to absorb
 * extra diagnostic overhead (confirm against the IP thread usage). */
#ifdef DEBUG
#define IP_STACK_SIZE (3*1024)
#else
#define IP_STACK_SIZE (2*1024)
#endif
#define ARP_CACHE_SIZE (6 * SIZE_OF_ARP_ENTRY)
#define DHCP_STACK_SIZE (1280)
/* Port 0 asks the stack to choose an ephemeral port. */
#define WICED_ANY_PORT (0)
#define WICED_NETWORK_MTU_SIZE (WICED_LINK_MTU)
/* Sentinel stored in socket_magic_number to tell a WICED socket from a
 * native NetX socket -- see the NOTE above the struct definitions. */
#define WICED_SOCKET_MAGIC_NUMBER (0xfeedbead)
/* Effective MSS: the smaller of our configured MSS and the MSS
 * negotiated with the peer at connect time. */
#define WICED_MAXIMUM_SEGMENT_SIZE(socket) MIN(socket->socket.nx_tcp_socket_mss, socket->socket.nx_tcp_socket_connect_mss)
/******************************************************
 * Enumerations
 ******************************************************/
/******************************************************
 * Type Definitions
 ******************************************************/
typedef NX_PACKET wiced_packet_t;
/* Lifecycle states tracked for a WICED socket. */
typedef enum
{
    WICED_SOCKET_CLOSED,
    WICED_SOCKET_CLOSING,
    WICED_SOCKET_CONNECTING,
    WICED_SOCKET_CONNECTED,
    WICED_SOCKET_DATA_PENDING,
    WICED_SOCKET_LISTEN,
    WICED_SOCKET_ERROR
} wiced_socket_state_t;
typedef struct wiced_packet_pool_s
{
    NX_PACKET_POOL pool;
} wiced_packet_pool_t;
/******************************************************
 * Structures
 ******************************************************/
/* These should be in wiced_tcpip.h but are needed by wiced_tcp_socket_t, which would cause a circular include chain */
typedef struct wiced_tcp_socket_struct wiced_tcp_socket_t;
typedef struct wiced_udp_socket_struct wiced_udp_socket_t;
typedef wiced_result_t (*wiced_tcp_socket_callback_t)( wiced_tcp_socket_t* socket, void* arg );
typedef wiced_result_t (*wiced_udp_socket_callback_t)( wiced_udp_socket_t* socket, void* arg );
/* NOTE: Don't change the order or the fields within this wiced_tcp_socket_t and wiced_udp_socket_t.
 * Socket must always be the first field.
 * WICED TCP/IP layer uses socket magic number to differentiate between a native NX socket or a WICED socket.
 * This allows access to WICED socket object from a NX callback without having to store its pointer globally.
 */
struct wiced_tcp_socket_struct
{
    NX_TCP_SOCKET socket;                /* must stay first -- see NOTE above */
    uint32_t socket_magic_number;        /* WICED_SOCKET_MAGIC_NUMBER when valid */
    wiced_tls_context_t* tls_context;
    wiced_bool_t context_malloced;       /* whether tls_context must be freed */
    struct
    {
        wiced_tcp_socket_callback_t disconnect;
        wiced_tcp_socket_callback_t receive;
        wiced_tcp_socket_callback_t connect;
    } callbacks;
    void* callback_arg;                  /* opaque arg handed to the callbacks */
};
struct wiced_udp_socket_struct
{
    NX_UDP_SOCKET socket;                /* must stay first -- see NOTE above */
    uint32_t socket_magic_number;        /* WICED_SOCKET_MAGIC_NUMBER when valid */
    wiced_dtls_context_t* dtls_context;
    wiced_bool_t context_malloced;       /* whether dtls_context must be freed */
    wiced_udp_socket_callback_t receive_callback;
    void* callback_arg;
};
typedef struct
{
    linked_list_t socket_list;           /* list of wiced_tcp_server_socket_t */
    int interface;
    uint16_t port;
    wiced_tls_identity_t* tls_identity;
} wiced_tcp_server_t;
typedef struct
{
    linked_list_node_t socket_node;      /* hook into wiced_tcp_server_t.socket_list */
    wiced_tcp_socket_t socket;
} wiced_tcp_server_socket_t;
/******************************************************
 * Global Variables
 ******************************************************/
/*
 * Note: These objects are for internal use only!
 */
extern NX_IP* wiced_ip_handle [4];
extern NX_PACKET_POOL wiced_packet_pools[2]; /* 0=TX, 1=RX */
/******************************************************
 * Function Declarations
 ******************************************************/
extern wiced_bool_t wiced_network_interface_is_up( NX_IP* ip_handle );
#ifdef __cplusplus
} /*extern "C" */
#endif
|
greenelab/adage-frontend
|
src/components/details/index.js
|
import React from 'react';
import { isValidElement } from 'react';
import { Fragment } from 'react';
import PropTypes from 'prop-types';
import Linkify from 'react-linkify';
import HorizontalLine from '../../components/horizontal-line';
import './index.css';
import { isNumber } from '../../util/types';
import { isObject } from '../../util/types';
import { isEmpty } from '../../util/types';
import { isString } from '../../util/types';
import { isArray } from '../../util/types';
// table showing all key/value pairs of an object/item
const Details = ({ data = {} }) => {
if (!isObject(data) || isEmpty(data))
return <></>;
return (
<>
{Object.keys(data).map((key, index, array) => (
<Fragment key={index}>
<div className='detail_row'>
<span className='nowrap weight_medium'>{key}</span>
<span>
<Linkify>{format(data[key])}</Linkify>
</span>
</div>
{index < array.length - 1 && <HorizontalLine />}
</Fragment>
))}
</>
);
};
// `data` is deliberately loose; non-object input simply renders nothing
Details.propTypes = {
  data: PropTypes.any
};
export default Details;
const format = (value) => {
if (isNumber(value) || isString(value)) {
value = String(value);
value = value
.split('\n')
.map((line, index) => <div key={index}>{line}</div>);
} else if (isArray(value))
value = value.length;
else if (!isValidElement(value))
value = '-';
return value;
};
|
cmzmasek/cxio
|
src/main/java/org/cxio/core/interfaces/AspectFragmentWriter.java
|
package org.cxio.core.interfaces;
import java.io.IOException;
import java.util.List;
import org.cxio.filters.AspectKeyFilter;
import org.cxio.util.JsonWriter;
// NOTE(review): this interface extends Comparable<AspectFragmentReader>,
// i.e. writers are ordered against *readers*. That looks like a
// copy/paste slip (Comparable<AspectFragmentWriter> would be expected);
// confirm against call sites before changing it.
public interface AspectFragmentWriter extends Comparable<AspectFragmentReader> {
    /** Returns the name of the aspect this writer serializes. */
    public String getAspectName();
    /** Registers a filter controlling which aspect keys get written. */
    public void addAspectKeyFilter(final AspectKeyFilter filter);
    /** Writes a list of aspect elements using the given JSON writer. */
    public void write(final List<AspectElement> aspects, final JsonWriter json_writer) throws IOException;
    /** Writes a single aspect element using the given JSON writer. */
    public void writeElement(final AspectElement element, final JsonWriter json_writer) throws IOException;
}
|
Jeanmilost/Visual-Mercutio
|
Visual Mercutio/zWinUtil32/PSS_ActivityEventViewerReminderCtrl.h
|
/****************************************************************************
* ==> PSS_ActivityEventViewerReminderCtrl ---------------------------------*
****************************************************************************
* Description : Provides an event activity viewer reminder control *
* Developer : Processsoft *
****************************************************************************/
#ifndef PSS_ActivityEventViewerReminderCtrlH
#define PSS_ActivityEventViewerReminderCtrlH
// change the definition of AFX_EXT... to make it import
#undef AFX_EXT_CLASS
#undef AFX_EXT_API
#undef AFX_EXT_DATA
#define AFX_EXT_CLASS AFX_CLASS_IMPORT
#define AFX_EXT_API AFX_API_IMPORT
#define AFX_EXT_DATA AFX_DATA_IMPORT
// processsoft
#include "zBaseLib\PSS_ListCtrl.h"
#include "zEvent\PSS_EventManager.h"
#include "zEvent\PSS_ActivityReminderEventManager.h"
#ifdef _ZWINUTIL32EXPORT
// put the values back to make AFX_EXT_CLASS export again
#undef AFX_EXT_CLASS
#undef AFX_EXT_API
#undef AFX_EXT_DATA
#define AFX_EXT_CLASS AFX_CLASS_EXPORT
#define AFX_EXT_API AFX_API_EXPORT
#define AFX_EXT_DATA AFX_DATA_EXPORT
#endif
/**
* Event activity viewer reminder control
*@author <NAME>, <NAME>
*/
class AFX_EXT_CLASS PSS_ActivityEventViewerReminderCtrl : public PSS_ListCtrl
{
    public:
        /**
        * Constructor
        *@param pReminderManager - reminder manager, can be NULL
        */
        PSS_ActivityEventViewerReminderCtrl(PSS_ActivityReminderEventManager* pReminderManager = NULL);
        virtual ~PSS_ActivityEventViewerReminderCtrl();
        /**
        * Put the event in the event controller
        *@param activityEvent - the activity reminder event
        *@param eventCtrl - the event controller
        *@return the activity reminder event, to allow operator to be chained with other controllers, e.g a >> b >> c
        */
        // NOTE: only declared for Windows builds (guarded by _WIN32)
        #ifdef _WIN32
            AFX_EXT_API friend PSS_ActivityReminderEvent& operator >> (PSS_ActivityReminderEvent&              activityEvent,
                                                                       PSS_ActivityEventViewerReminderCtrl& eventCtrl);
        #endif
        /**
        * Initializes the control
        *@param pReminderManager - reminder manager, can be NULL
        */
        virtual void Initialize(PSS_ActivityReminderEventManager* pReminderManager);
        /**
        * Gets the selected item
        *@return the selected item, NULL if no item selected or on error
        */
        PSS_ActivityReminderEvent* GetSelectedItem() const;
        /**
        * Refreshes the control
        */
        virtual void Refresh();
    protected:
        // MFC message map: reacts to new activity events posted to the control
        //{{AFX_MSG(PSS_ActivityEventViewerReminderCtrl)
        afx_msg LRESULT OnNewActivityEvent(WPARAM wParam, LPARAM lParam);
        //}}AFX_MSG
        DECLARE_MESSAGE_MAP()
    private:
        PSS_ActivityReminderEventManager* m_pReminderManager;
        BOOL                              m_ColumnsHasBeenBuilt;
        /**
        * Copy constructor
        *@param other - other object to copy from
        */
        // NOTE: private to forbid copying of the control
        PSS_ActivityEventViewerReminderCtrl(const PSS_ActivityEventViewerReminderCtrl& other);
        /**
        * Copy operator
        *@param other - other object to copy from
        *@return copy of itself
        */
        // NOTE: private to forbid assignment of the control
        const PSS_ActivityEventViewerReminderCtrl& operator = (const PSS_ActivityEventViewerReminderCtrl& other);
        /**
        * Checks if columns were built
        *@return TRUE if columns were built, otherwise FALSE
        */
        inline BOOL ColumnsHasBeenBuilt() const;
        /**
        * Builds columns
        *@return TRUE on success, otherwise FALSE
        */
        BOOL BuildColumns();
};
//---------------------------------------------------------------------------
// PSS_ActivityEventViewerReminderCtrl
//---------------------------------------------------------------------------
// Inline accessor: whether the list control's columns were already built
BOOL PSS_ActivityEventViewerReminderCtrl::ColumnsHasBeenBuilt() const
{
    return m_ColumnsHasBeenBuilt;
}
//---------------------------------------------------------------------------
#endif
|
blahed/frank
|
lib/frank/publish/shell_scp.rb
|
module Frank
  module Publish
    #TODO
    # Publishes a generated site by shelling out to the system `scp` binary.
    class ShellSCP
      # Recursively copy the contents of +local_dir+ to +remote_dir+ on
      # the remote host described by +options+ (:host, :port, :username,
      # :password). Returns the result of Kernel#system.
      #
      # NOTE(review): :password is interpolated into the user@host string,
      # but the scp command line has no password syntax, so password auth
      # likely never worked here -- confirm before relying on it.
      # NOTE(review): option values are interpolated unquoted into a shell
      # command; command injection risk if they come from untrusted input.
      def self.shell_copy(local_dir, remote_dir, options)
        host = []
        command = ["scp "]
        # Non-default port, then recursive copy of the directory contents.
        command << "-P #{options[:port]} " if options[:port]
        command << "-r #{local_dir}/* "
        host << "#{options[:username]}" if options[:username]
        host << ":#{options[:password]}" if options[:password]
        host << "@#{options[:host]}:#{remote_dir}"
        shell_command = "#{command.join('')}#{host.join('')}"
        system(shell_command)
      end
    end
  end
end
|
njazz/imgui-mvc-wrap
|
lib/IUImplementation.hpp
|
//
// IUImplementation.hpp
// nseq
//
// Created by Alex on 28/03/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
#ifndef IUImplementation_hpp
#define IUImplementation_hpp
#include <stdio.h>
#include "imgui.h"
//struct GLFWwindow;
#include "glcorearb.h"
#define GL_ARB_shader_objects
// GL3W/GLFW
#include "gl3w.h" // This example is using gl3w to access OpenGL functions (because it is small). You may use glew/glad/glLoadGen/etc. whatever already works for you.
#include "glfw3.h"
#ifdef _WIN32
#undef APIENTRY
#define GLFW_EXPOSE_NATIVE_WIN32
#define GLFW_EXPOSE_NATIVE_WGL
#include <GLFW/glfw3native.h>
#endif
#include <map>
// ImGui backend wrapper bundling the GLFW input plumbing and the
// OpenGL3 render objects for one window/context pair.
class IUImplementation {
    // GLFW data
    double g_Time = 0.0f;
    // bool g_MouseJustPressed[3] = { false, false, false };
    GLFWcursor* g_MouseCursors[ImGuiMouseCursor_COUNT] = { 0 };
    // OpenGL3 data
    // GLSL version header prepended to the shaders ("#version 150")
    char g_GlslVersion[32] = {'#','v','e','r','s','i','o','n',' ','1','5','0','\0'};
    GLuint g_FontTexture = 0;
    int g_ShaderHandle = 0, g_VertHandle = 0, g_FragHandle = 0;
    // shader uniform/attribute locations resolved at device-object creation
    int g_AttribLocationTex = 0, g_AttribLocationProjMtx = 0;
    int g_AttribLocationPosition = 0, g_AttribLocationUV = 0, g_AttribLocationColor = 0;
    unsigned int g_VboHandle = 0, g_ElementsHandle = 0;
    //
    // Per-window mouse button state shared by the static GLFW callbacks.
    static std::map<GLFWwindow*, bool[3]> g_mousePressed;
    // ---
    void _renderDrawData(ImDrawData* draw_data);
    // Static trampolines registered as GLFW input callbacks.
    static void _mouseButtonCallback(GLFWwindow*, int button, int action, int /*mods*/);
    static void _scrollCallback(GLFWwindow*, double xoffset, double yoffset);
    static void _keyCallback(GLFWwindow*, int key, int, int action, int mods);
    static void _charCallback(GLFWwindow*, unsigned int c);
    bool _createFontsTexture();
    bool _createDeviceObjects();
    void _invalidateDeviceObjects();
    void _installCallbacks(GLFWwindow* window);
public:
    IUImplementation(){};
    GLFWwindow* glWindow = NULL;
    ImGuiContext* context = NULL;
    bool init(GLFWwindow* window, bool install_callbacks, const char* glsl_version = NULL);
    void shutdown();
    void newFrame();
    void renderDrawData(ImDrawData* draw_data);
    // Use if you want to reset your rendering device without losing ImGui state.
    void invalidateDeviceObjects();
    bool createDeviceObjects();
    //
    void switchContext();//GLFWwindow* window, ImGuiContext* ctx);
    // todo
    float zoomValue = 2.0;
};
#endif /* IUImplementation_hpp */
|
dolphingarlic/sketch-frontend
|
src/main/java/sketch/compiler/dataflow/recursionCtrl/AdvancedRControl.java
|
package sketch.compiler.dataflow.recursionCtrl;
import java.util.HashMap;
import java.util.Map;
import java.util.Stack;
import sketch.compiler.ast.core.FEReplacer;
import sketch.compiler.ast.core.Function;
import sketch.compiler.ast.core.Package;
import sketch.compiler.ast.core.Program;
import sketch.compiler.ast.core.exprs.ExprBinary;
import sketch.compiler.ast.core.exprs.ExprFunCall;
import sketch.compiler.ast.core.stmts.Statement;
import sketch.compiler.ast.core.stmts.StmtFor;
import sketch.compiler.ast.core.stmts.StmtIfThen;
import sketch.util.wrapper.ScRichString;
/**
*
*
*
*
* @author asolar
*
*/
/**
 * Visitor that assigns each function a "weight" equal to the number of
 * binary expressions appearing in its body, and tracks the maximum weight
 * seen across all visited functions.
 *
 * @author asolar
 */
class WeightFunctions extends FEReplacer{
    String currFun = null;  // name of the function currently being visited
    int w = 0;              // running binary-expression count for currFun
    Map<String, Integer> funWeight = new HashMap<String, Integer>();  // function name -> weight
    int maxWeight = 0;      // largest weight recorded so far
    public Object visitExprBinary(ExprBinary exp)
    {
        // Each binary expression contributes one unit of weight.
        ++w;
        return super.visitExprBinary(exp);
    }
    public Object visitFunction(Function func)
    {
        // Reset the counter, visit the body, then record the final count.
        currFun = func.getName();
        w = 0;
        Object obj = super.visitFunction(func);
        funWeight.put(currFun, w);
        if(w > maxWeight) maxWeight= w;
        return obj;
    }
}
/**
 * Recursion-control policy used during inlining. Inlining of a call is
 * permitted only while (a) the callee's recursion depth stays below
 * MAX_INLINE and (b) the cumulative branching factor — the product of the
 * per-block counts of non-terminal calls along the current inlining path —
 * stays at or below branchingTheshold.
 */
public class AdvancedRControl extends RecursionControl {
    public String debugMsg = null;          // name of the last function pushed; used by debugMsg()
    Stack<Integer> bfStack;                 // cumulative branching-factor products, one entry per open block
    int branchingTheshold;                  // [sic] upper bound on the cumulative branching factor
    private int MAX_INLINE;                 // maximum recursion depth per function
    Map<String, FunInfo> funmap;            // per-function info, keyed by resolved function name
    WeightFunctions funWeighter = new WeightFunctions();  // computes per-function weights (currently informational)
    int FACTOR = 0;                         // derived from max weight; only used by commented-out scaling code
    final boolean ignoreStatics;            // when true, static functions don't count toward the branching factor
    /**
     * For each function, we must keep the following information: <BR>
     * - Current recursion depth <BR>
     * - Does it make subcalls <BR>
     * Note that the second field is a static property, where as the first one is
     * a dynamic property.
     * This information is kept by the FunInfo class.
     */
    private static class FunInfo{
        int rdepth;                 // current recursion depth (dynamic)
        final boolean isTerminal;   // true iff the function makes no calls
        final boolean isStatic;
        FunInfo(boolean isTerminal, boolean isStatic) {
            this.isTerminal = isTerminal;
            this.isStatic = isStatic;
            rdepth = 0;
        }
        public String toString(){
            return "(" + rdepth + "," + (isTerminal? "T" : "NT") + ")";
        }
    }
    /**
     * This class populates the funmap with initial values, setting the isTerminal field for all functions.
     * @author asolar
     */
    private class PopFunMap extends FEReplacer{
        String currentFun;   // resolved name of the function being visited
        int currentCalls;    // number of calls seen in the current function body
        PopFunMap(){
            funmap = new HashMap<String, FunInfo>();
        }
        public Object visitPackage(Package spec) {
            return super.visitPackage(spec);
        }
        public Object visitFunction(Function func){
            // A sketch with a specification is registered under both its own
            // name and the spec's name, sharing one FunInfo (shared rdepth).
            String altName = null;
            if(func.getSpecification() != null){
                altName = func.getName();
                Function tmp = nres.getFun(func.getSpecification());
                if(tmp == null){
                    throw new RuntimeException("The function " + func.getSpecification() + " does not exist.\n\t" + func);
                }
                func = tmp;
            }
            currentFun = nres.getFunName(func.getName());
            currentCalls = 0;
            Object obj = super.visitFunction(func);
            FunInfo fin = new FunInfo(currentCalls == 0, func.isStatic());
            funmap.put(currentFun, fin);
            if(altName != null){
                funmap.put(nres.getFunName(altName), fin);
            }
            return obj;
        }
        public Object visitExprFunCall(ExprFunCall exp)
        {
            currentCalls++;
            return super.visitExprFunCall(exp);
        }
    }
    /**
     * This visitor will be called on a statement (generally a block),
     * and after the visiting is done, <BR>
     * - <code>forbiddenCalls</code> will be true if any calls within the block can be guaranteed to fail testCall. <BR>
     * - <code>bfactor</code> will have the minimum number of calls which must be made by the block.
     */
    private class CheckBFandCalls extends FEReplacer{
        int bfactor = 0;               // count of non-terminal calls that must execute
        boolean forbiddenCalls = false; // true if some call already exceeds MAX_INLINE
        String callsContained = "";    // debug trace of calls seen, with their depths
        public Object visitStmtIfThen(StmtIfThen stmt)
        {
            //We don't want to look into If Statements. We don't know if they'll execute.
            return stmt;
        }
        public Object visitStmtFor(StmtFor stmt)
        {
            //We don't want to look into Loops either. We don't know if they'll execute.
            return stmt;
        }
        public Object visitExprFunCall(ExprFunCall exp)
        {
            String func = nres.getFunName(exp.getName());
            assert null != funmap.get(func) : "unknown function '" + func +
                "'; known functions: " + new ScRichString(", ").join(funmap.keySet());
            FunInfo fi = funmap.get(func);
            // Static functions may be exempted from the branching factor.
            if (!(fi.isStatic && ignoreStatics)) {
                if (!(fi.isTerminal)) {
                    ++bfactor; // += funWeighter.funWeight.get(exp.getName());
                }
            }
            if( ! testCall(exp) ){
                forbiddenCalls = true;
            }
            // System.out.println("Finished testing bf =" + bfactor);
            callsContained += exp.getName() +"(" + fi.rdepth + ")"+ ", ";
            return exp;
        }
    }
    public AdvancedRControl(int branchingThreshold, int maxInline, boolean ignoreStatics,
            Program prog)
    {
        this.branchingTheshold = branchingThreshold;
        bfStack = new Stack<Integer>();
        bfStack.push(1);  // multiplicative identity for the cumulative product
        prog.accept(new PopFunMap());
        prog.accept(funWeighter);
        /*for(Map.Entry<String, Integer> en : funWeighter.funWeight.entrySet()){
            System.out.println(en);
        }*/
        FACTOR = (funWeighter.maxWeight * 2 ) / 3;
        MAX_INLINE = maxInline;
        this.ignoreStatics = ignoreStatics;
    }
    /**
     * Tests whether entering a block with branching factor bf keeps the
     * cumulative product within branchingTheshold; if so, pushes the new
     * product and returns true, otherwise returns false (nothing pushed).
     */
    private boolean bfactorTest(int bf){
        int p = bfStack.peek();
        if(p == 0){
            assert bf == 0;
        }
        p = p*bf;
        if( p > branchingTheshold){
            return false;
        }else{
            bfStack.push(p);
            return true;
        }
    }
    public void doneWithBlock(Statement stmt) {
        // Pops the entry pushed by bfactorTest when the block was entered.
        bfStack.pop();
    }
    /**
     * This field is here solely for debugging reasons.
     */
    int tt = 0; //DEBUGGING INFO
    @Override
    public int inlineLevel(ExprFunCall fun) {
        FunInfo fi = funmap.get(nres.getFunName(fun.getName()));
        return fi.rdepth;
    }
    @Override
    public void popFunCall(ExprFunCall fun) {
        // Mirror of pushFunCall: unwind the call stack and recursion depth.
        strack.popCall(fun);
        FunInfo fi = funmap.get(nres.getFunName(fun.getName()));
        fi.rdepth--;
        --tt;
    }
    public String callStack(){
        return strack.getSstring();
    }
    public void pushFunCall(ExprFunCall fc, Function fun) {
        strack.pushCall(fc);
        FunInfo fi = funmap.get(nres.getFunName(fc.getName()));
        if( tracing && ! fi.isTerminal ){
            for(int i=0; i<tt; ++i) System.out.print(" "); //DEBUGGING INFO
            System.out.println(fc.getName() + " " + this.bfStack.peek() /*+ " " + fc.hashCode() + " " + fc*/); //DEBUGGING INFO
        }
        debugMsg = fc.getName();
        ++tt;
        fi.rdepth++;
    }
    public boolean leaveCallsBehind(){
        return false;
    }
    public String debugMsg(){
        return "Function " + debugMsg + " was not inlined enough. Increase inlining with --inlineamnt flag.";
    }
    public boolean testBlock(Statement stmt) {
        /* First, we check if the block is legal. I.e. if it has any
         * function calls that will surpass their max iteration depth.
         */
        CheckBFandCalls check = new CheckBFandCalls();
        check.setNres(nres);
        stmt.accept(check);
        if( ! check.forbiddenCalls ){
            /*
             * If it is, then we check the branching factor. That's the number
             * of non-terminal calls made by the block.
             */
            int bfactor = check.bfactor;
            /*if(bfactor > 0){
                bfactor = bfactor;
            }
            bfactor = bfactor / FACTOR;*/
            if(bfactor < 1) bfactor = 1;  // blocks with no calls still multiply by 1
            /*
             * Then we test the cummulative branching factor. This is the
             * product of all the elements in bfStack * bfactor.
             * If it is larger than
             * a threshold, we return false. Otherwise, we push bfactor into the
             * bfStack and return true.
             *
             */
            boolean recurse = bfactorTest(bfactor);
            if(tracing && !recurse) System.out.println("BRANCHING FACTOR EXCEEDED " + (bfactor*bfStack.peek()) + ">=" + this.branchingTheshold + " " + bfactor + " prevented " + check.callsContained);
            return recurse;
        }
        if(tracing){
            System.out.println("ITERATION DEPTH EXCEEDED prevented " + check.callsContained);
        }
        return false;
    }
    public boolean testCall(ExprFunCall fc) {
        // A call may be inlined only while its recursion depth is below MAX_INLINE.
        FunInfo fi = funmap.get(nres.getFunName(fc.getName()));
        /*
        if(tracing){
            System.out.print("testing call " + fc.getName() + " fi.rdepth = " + fi.rdepth);
        }
        */
        if( fi.rdepth < MAX_INLINE ){
            if(tracing){
                // System.out.println(" succeed");
            }
            return true;
        }else{
            if(tracing){
                // System.out.println(" fail");
            }
            return false;
        }
    }
}
|
manifest/flax-extra
|
src/flax_extra/combinator/__init__.py
|
r"""Combinator functions compose other functions."""
# Re-export everything from redex.combinator and add the local
# concatenate combinator; __all__ fixes the public API surface.
from redex.combinator import *
from flax_extra.combinator._concatenate import concatenate, Concatenate
# NOTE: the order below is preserved as-is (it may drive generated docs).
__all__ = [
    # Local additions.
    "concatenate",
    "Concatenate",
    # Redex.
    "add",
    "branch",
    "Combinator",
    "div",
    "drop",
    "Drop",
    "dup",
    "Dup",
    "fold",
    "foldl",
    "Foldl",
    "identity",
    "Identity",
    "mul",
    "parallel",
    "Parallel",
    "residual",
    "select",
    "Select",
    "serial",
    "Serial",
    "sub",
]
|
HKMUD/NT6
|
nitan/kungfu/skill/driving.c
|
// driving.c
// LPC MUD skill object for the "driving" technic skill.
inherit SKILL;
// Skill category; "technic" distinguishes it from combat skills.
string type() { return "technic"; }
// Hook called when the skill levels up; no special effect for driving.
void skill_improved(object me)
{}
// Anyone may learn this skill (always returns success).
int valid_learn(object me)
{
    return 1;
}
|
ramgopal99/centipede
|
src/lib/centipede/Crawler/Generic/__init__.py
|
from .Hashmap import Hashmap
|
Bleyddyn/malpi
|
train/hparam_plot.py
|
import pickle
import argparse
from collections import defaultdict
import numpy as np
from hyperopt import hp, STATUS_OK, Trials
from hyperopt.pyll_utils import expr_to_config
import hyperopt
import matplotlib.pyplot as plt
from matplotlib import style
import skopt
style.use('fivethirtyeight')
def plotRegressionHparam( losses, values, name, logx=False, logy=False ):
    """Scatter-plot a continuous hyperparameter against the losses.

    Args:
        losses: loss value per trial.
        values: the hyperparameter value per trial (same length as losses).
        name: plot title.
        logx, logy: select log scaling on the respective axis.
    """
    plt.figure(1, figsize=(10, 10), dpi=80)
    # Pick the plotting function that matches the requested axis scaling.
    if logx and logy:
        draw = plt.loglog
    elif logx:
        draw = plt.semilogx
    elif logy:
        draw = plt.semilogy
    else:
        draw = plt.plot
    draw(values, losses, '.')
    plt.title(name)
    plt.ylabel('Loss')
    plt.xlabel('value')
    plt.show()
def plotCategoricalHparam( category_dict, name ):
    """Bar-chart the mean loss per category with std-dev error bars.

    Args:
        category_dict: mapping of category label -> list of losses.
        name: plot title.
    """
    labels = category_dict.keys()
    N = len(labels)
    # Mean and spread of the losses for each category, in label order.
    means = [np.mean(category_dict[label]) for label in labels]
    std = [np.std(category_dict[label]) for label in labels]
    ind = np.arange(N)  # the x locations for the groups
    width = 0.35        # the width of the bars
    fig, ax = plt.subplots()
    ax.bar(ind, means, width, color='r', yerr=std)
    # add some text for labels, title and axes ticks
    ax.set_yscale('log')
    ax.set_ylabel('val acc')
    ax.set_title(name)
    ax.set_xticks(ind + (width / 2.0))
    ax.set_xticklabels(labels)
    plt.show()
def autolabel(rects):
    """
    Attach a text label above each bar displaying its height.

    Each label is drawn on the Axes that owns the bar (``rect.axes``).
    The original implementation referenced a module-level ``ax`` that is
    never defined, so calling it raised NameError; using the rectangle's
    own Axes fixes that without changing the signature.
    """
    for rect in rects:
        height = rect.get_height()
        rect.axes.text(rect.get_x() + rect.get_width()/2., 1.05*height,
                       '%d' % int(height),
                       ha='center', va='bottom')
def plotTrials(filename):
    """Load a pickled hyperopt Trials object and plot loss vs. each hyperparameter.

    NOTE(review): this function is Python-2 only as written (dict.iteritems,
    basestring); it would need minor changes to run under Python 3, including
    opening the pickle in binary mode ('rb').
    """
    max_batch = 128
    # This should be included in the trials object
    # Search space must match the one used for the original run so that
    # space_eval can map raw indices back to concrete values.
    space = { 'learning_rate': hp.loguniform('learning_rate', -9, -4 ),
        'l2_reg': hp.loguniform('l2_reg', -10, -3 ),
        'batch_size': hp.quniform('batch_size', 5, max_batch, 1),
        'dropouts': hp.choice('dropouts', ["low","mid","high","up","down"]),
        'optimizer': hp.choice('optimizer', ["RMSProp", "Adagrad", "Adadelta", "Adam"]),
        'epochs': 40 }
    #conditions = ()
    #hps = {}
    #expr_to_config(space, conditions, hps)
    #print( "{}".format( hps ) )
    #dropouts = space['dropouts']
    #print( "{}".format( dropouts ) )
    with open(filename,'r') as f:
        trials = pickle.load(f)
    #print( "trials.argmin: {}".format( trials.argmin ) )
    #print( "eval: {}".format( hyperopt.space_eval( space, trials.argmin ) ) )
    losses = []
    hparams = {}  # name -> list of values, or name -> {category: [losses]}
    for trial in trials.trials:
        # >>> trial.keys()
        # ['refresh_time', 'book_time', 'misc', 'exp_key', 'owner', 'state', 'version', 'result', 'tid', 'spec']
        # trial['misc'] = {'tid': 0, 'idxs': {'optimizer': [0], 'learning_rate': [0], 'batch_size': [0], 'timesteps': [0], 'dropouts': [0], 'l2_reg': [0]}, 'cmd': ('domain_attachment', 'FMinIter_Domain'), 'vals': {'optimizer': [0], 'learning_rate': [0.00021416260507967771], 'batch_size': [6.0], 'timesteps': [18.0], 'dropouts': [0], 'l2_reg': [0.005126253249084938]}, 'workdir': None}
        # trial['exp_key']
        # trial['owner']
        # trial['state'] = 2
        # trial['version'] = 0
        # trial['tid'] = 0
        # trial['spec']
        # trial['result'].keys() = ['status', 'loss', 'val', 'history']
        loss = trial['result']['val']
        losses.append(loss)
        # hyperopt stores each value wrapped in a one-element list; unwrap in place.
        argvals = trial['misc']['vals']
        for key, val in argvals.iteritems():
            argvals[key] = val[0]
        hparam1 = hyperopt.space_eval( space, argvals )
        # print( 'trial1: {}'.format( hparam1 ) )
        # trial1: {'epochs': 40, 'optimizer': 'RMSProp', 'learning_rate': 0.00021416260507967771, 'dropouts': 'low', 'batch_size': 6.0, 'l2_reg': 0.005126253249084938}
        # String-valued params are categorical: bucket losses per category.
        # Numeric params accumulate a flat list parallel to `losses`.
        for key, val in hparam1.iteritems():
            if isinstance(val, basestring):
                if key not in hparams:
                    hparams[key] = { val: [] }
                if val not in hparams[key]:
                    hparams[key][val] = []
                hparams[key][val].append(loss)
            else:
                if key not in hparams:
                    hparams[key] = []
                hparams[key].append(val)
    #print( "final: {}".format( hparams ) )
    plotCategoricalHparam( hparams['dropouts'], 'dropouts' )
    plotCategoricalHparam( hparams['optimizer'], 'optimizer' )
    plotRegressionHparam( losses, hparams['batch_size'], 'Batch Size' )
    plotRegressionHparam( losses, hparams['learning_rate'], 'Learning Rate', logx=True )
    plotRegressionHparam( losses, hparams['l2_reg'], 'L2 Regularization', logx=True )
def plotCurrent(filename):
    """Plot the accuracy trace of the current hyperopt run with a quadratic fit.

    Reads one float per line from ``filename``, newest last, stopping at the
    most recent line beginning with '#' (the marker written at the start of a
    run). Saves the figure to hparam_current.png and shows it.
    """
    acc = []
    # Only read in the lines back to the marker at the beginning of the current run
    for line in reversed(open(filename).readlines()):
        if line.startswith("#"):
            break
        acc.insert(0, float(line))
    print( "Sorted: \n{}".format( sorted(acc) ) )
    x = range(len(acc))
    # Quadratic trend line. (The original computed np.polyfit twice and
    # discarded the first result; the redundant call has been removed.)
    p2 = np.poly1d(np.polyfit(x, acc, 2))
    xp = np.linspace(0, len(acc), 100)
    plt.plot(x, acc, '-', xp, p2(xp), '--')
    plt.savefig( 'hparam_current.png')
    plt.show()
def plotSkopt(filename):
    """Plot losses vs. hyperparameters from a pickled skopt result.

    The stored search space (per the original notes) is, column-wise:
    0: l2_reg, 1: dropouts (categorical), 2: learning_rate,
    3: batch_size, 4: optimizer (categorical).
    """
    #dict_keys(['x', 'fun', 'func_vals', 'x_iters', 'models', 'space', 'random_state', 'specs'])
    #data['space']
    #Space([Real(low=1e-10, high=0.001, prior='log-uniform', transform='normalize'),
    #Categorical(categories=('low', 'mid', 'high', 'up', 'down'), prior=None),
    #Real(low=1e-09, high=0.0001, prior='log-uniform', transform='normalize'),
    #Integer(low=5, high=128),
    #Categorical(categories=('RMSProp', 'Adagrad', 'Adadelta', 'Adam'), prior=None)])
    #l2_reg, dropouts, learning_rate, batch_size, optimizer = args
    data = skopt.load(filename)
    print( "Best loss: {}".format( data['fun'] ) )
    print( " Values: {}".format( data['x'] ) )
    values = np.array(data['x_iters'])  # shape (n_runs, 5), mixed str/number -> dtype object or str
    losses = data['func_vals']          # one loss per run
    #print( "Losses: {} {} {}".format( np.min(losses), np.mean(losses), np.max(losses) ) )
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement and behaves identically here.
    l2 = np.array(values[:,0]).astype(float)
    lr = np.array(values[:,2]).astype(float)
    batch = np.array(values[:,3]).astype(float)
    #print( "L2 Reg: {} {} {}".format( np.min(l2), np.mean(l2), np.max(l2) ) )
    plotRegressionHparam( losses, l2, "L2 Reg", logx=True, logy=True )
    plotRegressionHparam( losses, lr, "Learning Rate", logx=True, logy=True )
    plotRegressionHparam( losses, batch, "Batch Size", logx=False, logy=True )
    def makeCatDict(values_idx):
        # Bucket losses by the categorical value found in column values_idx.
        catDict = defaultdict(list)
        for idx, run in enumerate(values):
            opt = run[values_idx]
            loss = losses[idx]
            catDict[opt].append(loss)
        return catDict
    plotCategoricalHparam( makeCatDict(4), "Optimizers" )
    plotCategoricalHparam( makeCatDict(1), "Dropout" )
def runTests(args):
    """Placeholder for self-tests (invoked via --test_only); currently does nothing."""
    pass
def getOptions():
    """Build the CLI parser and return the parsed arguments namespace."""
    p = argparse.ArgumentParser(
        description='Plot results from a hyperparameter optimization run.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('--trial', help='Plot results from a pickled hyperopt Trials object.')
    p.add_argument('--current', help='Plot results from the current hyperopt run.')
    p.add_argument('--skopt', help='Plot results from the given pickled skopt run.')
    p.add_argument('--test_only', action="store_true", default=False,
                   help='run tests, then exit')
    return p.parse_args()
if __name__ == "__main__":
    # Dispatch on whichever result-source flag was supplied; --test_only
    # short-circuits everything else.
    args = getOptions()
    if args.test_only:
        runTests(args)
        exit()
    if args.trial is not None:
        plotTrials(args.trial)
    elif args.current is not None:
        plotCurrent(args.current)
    elif args.skopt is not None:
        plotSkopt(args.skopt)
|
cquiroz/scalajs-react-semantic-u
|
facade/src/test/scala/react/semanticui/elements/placeholder/PlaceholderHeaderSuite.scala
|
package react.semanticui.elements.placeholder
import japgolly.scalajs.react.test._
import japgolly.scalajs.react.vdom.html_<^._
import react.common.syntax.vdom._
/** Render tests for the Semantic UI PlaceholderHeader facade: verifies the
  * emitted DOM both empty and with a child element.
  */
class PlaceholderHeaderSuite extends munit.FunSuite {
  test("render") {
    // An empty header renders just the wrapper div with the "header" class.
    val paragraph = PlaceholderHeader()
    ReactTestUtils.withNewBodyElement { mountNode =>
      paragraph.renderIntoDOM(mountNode)
      assertEquals(mountNode.innerHTML, """<div class="header"></div>""")
    }
  }
  test("renderChild") {
    // Children are rendered inside the wrapper div.
    val paragraph = PlaceholderHeader(<.div("abc"))
    ReactTestUtils.withNewBodyElement { mountNode =>
      paragraph.renderIntoDOM(mountNode)
      assertEquals(mountNode.innerHTML, """<div class="header"><div>abc</div></div>""")
    }
  }
}
|
18279811184/youcham-jxsswt
|
youcham-admin/src/main/java/io/youcham/modules/ins/service/impl/InsSuggestionsServiceImpl.java
|
<gh_stars>1-10
package io.youcham.modules.ins.service.impl;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import java.util.Map;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.plugins.Page;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import io.youcham.common.utils.PageUtils;
import io.youcham.common.utils.Query;
import io.youcham.modules.ins.dao.InsSuggestionsDao;
import io.youcham.modules.ins.entity.InsInformEntity;
import io.youcham.modules.ins.entity.InsSuggestionsEntity;
import io.youcham.modules.ins.service.InsSuggestionsService;
@Service("insSuggestionsService")
public class InsSuggestionsServiceImpl extends ServiceImpl<InsSuggestionsDao, InsSuggestionsEntity> implements InsSuggestionsService {
    /**
     * Pages suggestion records, optionally filtering by a fuzzy title match
     * ("sugtitle") and an exact status ("statu"); blank params are ignored.
     */
    @Override
    public PageUtils queryPage(Map<String, Object> params) {
        String sugtitle = (String)params.get("sugtitle");
        // status filter
        String statu = (String)params.get("statu");
        Page<InsSuggestionsEntity> page = this.selectPage(
                new Query<InsSuggestionsEntity>(params).getPage(),
                new EntityWrapper<InsSuggestionsEntity>()
                .like(StringUtils.isNotBlank(sugtitle),"sug_title", sugtitle)
                .eq(StringUtils.isNotBlank(statu),"statu", statu)
        );
        /*  new Query<InsInformEntity>(params).getPage(),
                new EntityWrapper<InsInformEntity>()
                .like(StringUtils.isNotBlank(informtitle),"inform_title", informtitle)
                .orderBy("inform_Order")*/
        return new PageUtils(page);
    }
}
|
syntheticgio/fda-hive
|
vlib/slib/std/filesorted.cpp
|
<reponame>syntheticgio/fda-hive
/*
* ::718604!
*
* Copyright(C) November 20, 2014 U.S. Food and Drug Administration
* Authors: Dr. <NAME> (1), Dr. <NAME> (2), et al
* Affiliation: Food and Drug Administration (1), George Washington University (2)
*
* All rights Reserved.
*
* The MIT License (MIT)
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include <slib/std/file.hpp>
using namespace slib;
// Opens a sorted text file of "<id> ..." lines and caches its bounds:
// firstGI (first id), lastGI/lastPos (last id and its line offset) and the
// total size. backuplength is both the scratch-line-buffer size and the
// step used when scanning. Returns this on success, 0 if the file can't
// be opened. NOTE(review): fscanf/fgets results are unchecked, so an empty
// or malformed file leaves firstGI/lastGI unset — confirm callers guard this.
sFileSorted * sFileSorted::init( const char * flnm, idx lbackuplength)
{
    backuplength=lbackuplength;
    sStr Buf; char * buf=Buf.resize(backuplength);
    // open the file
    fl=fopen(flnm,"rb");if(!fl)return 0;
    // read the first gi
    fscanf(fl,"%" DEC, &firstGI);
    // position to the end and get the size
    fseek(fl,0,SEEK_END);
    size=ftell(fl);
    // position to somewhere before the end
    if(size<backuplength)backuplength=size;
    fseek(fl,(long)(size-backuplength), SEEK_SET);
    fgets(buf, (int)backuplength, fl) ; // skip a string (likely partial after the seek)
    idx pos;
    while ( !feof(fl) ){ // read until it is readeable
        pos=ftell(fl);
        if( fscanf(fl,"%" DEC, &lastGI) >0)lastPos=pos;
        fgets(buf, (int)backuplength, fl ) ;
    }
    return this;
}
// Binary-searches the sorted id file for gi. On success, seeks the stream
// to the start of the matching line and returns its offset; returns -1 when
// gi is out of range or absent.
//
// Fix: the original passed sizeof(buf) to fgets, but buf is a char*, so
// fgets read at most sizeof(char*)-1 (7) bytes per call instead of a whole
// line; lines longer than that were consumed in fragments and the id parse
// after each partial seek could land mid-number. Use the actual buffer
// length (backuplength) instead, matching init().
idx sFileSorted::search( idx gi)
{
    sStr Buf; char * buf=Buf.resize(backuplength);
    // Fast paths on the cached bounds.
    if(gi<firstGI)
        return (idx )(-1);
    else if(gi==firstGI)
        {fseek(fl,0,SEEK_SET);return 0;}
    else if(gi==lastGI)
        {fseek(fl,(long)lastPos,SEEK_SET);return lastPos;}
    else if(gi>lastGI)
        return (idx )(-1);
    idx i,giCur=(idx )(-1);
    idx pos=sNotIdx, posS=0;
    idx posE=size;
    idx giS=firstGI;
    idx giE=lastGI;
    idx guesspos;
    // Bisection over byte offsets: jump to the midpoint, skip the partial
    // line, then parse the id at the next full line.
    for ( i=0; posS!=posE ; ++i ){
        //guesspos= (idx) (posS+1.*( gi - giS)*(posE-posS)/ (giE-giS) );
        guesspos= (idx) (1.*posS+posE)/2;
        fseek(fl,(long)guesspos, SEEK_SET);
        fgets(buf,(int)backuplength,fl); // skip to the next line boundary
        pos=ftell(fl);
        if( fscanf(fl,"%" DEC, &giCur) <1 )
            break;
        if(gi == giCur)
            {fseek(fl,(long)pos,SEEK_SET);return pos;}
        else if(gi<giCur)
            {if(posE==pos) break;posE=pos;giE=giCur;}
        else if(gi>giCur)
            {if(posS==pos) break;posS=pos;giS=giCur;}
    }
    // Linear fallback from the lower bound for ids in the final bracket.
    fseek(fl,(long)posS, SEEK_SET);
    for( giCur=giS; giCur<gi ; ){ // read until it is readeable
        pos=ftell(fl);
        if( fscanf(fl,"%" DEC, &giCur )<1) break;
        fgets(buf,(int)backuplength,fl);
        if(giCur==gi)
            {fseek(fl,(long)pos,SEEK_SET);return pos;}
    }
    return -1;
}
// Binary-searches the file for a line whose leading token (terminated by
// any character in separ) case-insensitively equals acc. Seeks to and
// returns the offset of the matching line, or -1 when not found.
//
// Fixes: (1) sizeof(buf)/sizeof(accCur) were sizeof a char*, so fgets read
// at most 7 bytes per call instead of backuplength — use the real buffer
// length as init() does. (2) the final linear scan had no EOF/short-read
// guard, so a key greater than every entry spun forever re-reading the
// last bytes; break when fgets fails.
idx sFileSorted::search( const char * acc,const char * separ)
{
    sStr Buf; char * buf=Buf.resize(backuplength);
    sStr Acc; char * accCur=Acc.resize(backuplength);
    idx i;
    idx pos=sNotIdx, posS=0;
    idx posE=size;
    //idx giS=firstGI;
    //idx giE=lastGI;
    idx guesspos;
    idx res=-1;
    char * p;
    // Bisection over byte offsets, comparing the key at each midpoint line.
    for ( i=0; posS!=posE ; ++i ){
        //guesspos= (idx) (posS+1.*( gi - giS)*(posE-posS)/ (giE-giS) );
        guesspos= (idx) ((1.*posS+posE)/2);
        fseek(fl,(long)guesspos, SEEK_SET);
        fgets(buf,(int)backuplength,fl); // skip to the next line boundary
        pos=ftell(fl);
        fgets(accCur,(int)backuplength, fl);
        p=strpbrk(accCur,separ); if(p)*p=0; // truncate at the first separator
        res=strcasecmp(acc,accCur);
        if(!res)
            {fseek(fl,(long)pos,SEEK_SET);return pos;}
        else if(res<0)
            {if(posE==pos) break;posE=pos;}
        else //if(res>0)
            {if(posS==pos) break;posS=pos;}
    }
    // Linear fallback from the lower bound.
    fseek(fl,(long)posS, SEEK_SET);
    for( ; res<0 ; ){ // read until it is readeable
        pos=ftell(fl);
        if( !fgets(accCur,(int)backuplength, fl) )
            break; // EOF: key absent
        p=strpbrk(accCur,separ); if(p)*p=0;
        res=strcasecmp(accCur,acc);
        if(!res)
            {fseek(fl,(long)pos,SEEK_SET);return pos;}
    }
    return (idx)(-1);
}
// Given the offset pos of some line carrying id gi, rewinds in steps of
// backuplength until a smaller id is seen, then scans forward to the FIRST
// line whose id equals gi and returns its offset (-1 if a larger id is hit
// while rewinding, which means gi's run doesn't start here).
//
// Fix: the original passed sizeof(buf) (sizeof a char*, i.e. 8) to fgets,
// truncating every line read to 7 bytes; use the real buffer length
// (backuplength), consistent with init().
idx sFileSorted::searchReverse( idx pos, idx gi)
{
    idx giCur=gi;
    sStr Buf; char * buf=Buf.resize(backuplength);
    if(pos==0)return pos; // already at the first line
    // backup until the gi is less than the one we look for
    while( gi==giCur ){
        // position
        if(pos>backuplength)pos-=backuplength;else pos=0;
        fseek(fl,(long)pos,SEEK_SET);
        fgets(buf,(int)backuplength,fl); // skip the (possibly partial) line
        pos=ftell(fl);
        // read
        fscanf (fl, "%" DEC, &giCur);
        fgets(buf,(int)backuplength,fl);
        // compare
        if( giCur>gi)
            return (idx)(-1);
    }
    //fseek(fl,pos,SEEK_SET);
    for( ; giCur<gi ; ){ // read until we get the first line with the gi we want
        pos=ftell(fl);
        if( fscanf(fl,"%" DEC, &giCur )<1) break;
        fgets(buf,(int)backuplength,fl);
        if(giCur==gi)
            {fseek(fl,(long)pos,SEEK_SET);return pos;}
    }
    return (idx )(-1);
}
|
jasl-lab/cybros_portal
|
db/migrate/20200424065534_add_view_hr_report_to_role.rb
|
# Adds three boolean permission flags to roles for the HR report feature:
# admin, viewer and writer. Columns default to NULL (no default given).
class AddViewHrReportToRole < ActiveRecord::Migration[6.0]
  def change
    add_column :roles, :hr_report_admin, :boolean
    add_column :roles, :hr_report_viewer, :boolean
    add_column :roles, :hr_report_writer, :boolean
  end
end
|
jaclu/hub3
|
ikuzo/service/x/imageproxy/request_test.go
|
<reponame>jaclu/hub3
package imageproxy
import (
"testing"
"github.com/google/go-cmp/cmp"
)
const (
	// imgURL is the sample source image used across the request tests.
	imgURL = "http://example.com/123.jpg"
	// testCacheKey is the cache key derived from imgURL — presumably its
	// base64 encoding; confirm against the encoder NewRequest uses.
	testCacheKey = "aHR0cDovL2V4YW1wbGUuY29tLzEyMy5qcGc="
)
func TestNewRequest(t *testing.T) {
type args struct {
key string
options []RequestOption
}
tests := []struct {
name string
args args
want *Request
wantErr bool
}{
{
"raw http request",
args{key: imgURL},
&Request{
CacheKey: testCacheKey,
SourceURL: imgURL,
},
false,
},
{
"filename too long",
args{key: "https://service.archief.nl/iipsrv?IIIF=%2F4d%2F68%2F78%2F49%2Ffe%2F2c%2F4f%2F85%2F8b%2F7a%2F97%2F88%2F78%2Fe0%2F2f%2F1d%2F7bb1c07c-14e9-4064-92a0-ffb9fd0132f8.jp2%2Ffull%2F213%2C%2F0%2Fdefault.jpg"},
&Request{
CacheKey: "xxhash64-3a748e637be3be2a",
SourceURL: "https://service.archief.nl/iipsrv?IIIF=%2F4d%2F68%2F78%2F49%2Ffe%2F2c%2F4f%2F85%2F8b%2F7a%2F97%2F88%2F78%2Fe0%2F2f%2F1d%2F7bb1c07c-14e9-4064-92a0-ffb9fd0132f8.jp2%2Ffull%2F213%2C%2F0%2Fdefault.jpg",
},
false,
},
{
"encoded request",
args{key: testCacheKey},
&Request{
CacheKey: testCacheKey,
SourceURL: imgURL,
},
false,
},
{
"raw http request with params",
args{
key: imgURL,
options: []RequestOption{
SetRawQueryString("size=200"),
SetEnableTransform(true),
},
},
&Request{
CacheKey: "<KEY>
SourceURL: "http://example.com/123.jpg?size=200",
RawQueryString: "size=200",
EnableTransform: true,
},
false,
},
{
"raw http request with params (ADLIB)",
args{
key: "http://rabk.adlibhosting.com/wwwopacx/wwwopac.ashx",
options: []RequestOption{
SetRawQueryString(`command=getcontent&server=images&value=\kerncollectie\3781.jpg`),
SetEnableTransform(true),
},
},
&Request{
CacheKey: `<KEY>` +
`<KEY>`,
SourceURL: `http://rabk.adlibhosting.com/wwwopacx/wwwopac.ashx?command=getcontent&server=images&value=\kerncollectie\3781.jpg`,
RawQueryString: `command=getcontent&server=images&value=\kerncollectie\3781.jpg`,
EnableTransform: true,
},
false,
},
{
"raw",
args{
key: imgURL,
options: []RequestOption{
SetTransform("raw"),
SetEnableTransform(true),
},
},
&Request{
CacheKey: testCacheKey,
SourceURL: imgURL,
TransformOptions: "raw",
EnableTransform: true,
},
false,
},
{
"thumbnail",
args{
key: imgURL,
options: []RequestOption{
SetTransform("500,smartcrop"),
SetEnableTransform(true),
},
},
&Request{
CacheKey: testCacheKey + "_500,smartcrop_tn.jpg",
SourceURL: imgURL,
TransformOptions: "500,smartcrop",
thumbnailOpts: "500",
SubPath: "_500,smartcrop_tn.jpg",
EnableTransform: true,
},
false,
},
{
"deepzoom dzi",
args{
key: imgURL + ".dzi",
options: []RequestOption{
SetTransform("deepzoom"),
SetEnableTransform(true),
},
},
&Request{
CacheKey: testCacheKey + ".dzi",
SourceURL: imgURL,
TransformOptions: "deepzoom",
SubPath: ".dzi",
EnableTransform: true,
},
false,
},
{
"deepzoom tiles",
args{
key: imgURL + "_files/9/0_0.jpeg",
options: []RequestOption{
SetTransform("deepzoom"),
SetEnableTransform(true),
},
},
&Request{
CacheKey: testCacheKey + "_files/9/0_0.jpeg",
SourceURL: imgURL,
TransformOptions: "deepzoom",
SubPath: "_files/9/0_0.jpeg",
EnableTransform: true,
},
false,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
got, err := NewRequest(tt.args.key, tt.args.options...)
if (err != nil) != tt.wantErr {
t.Errorf("NewRequest() error = %v, wantErr %v", err, tt.wantErr)
return
}
if diff := cmp.Diff(tt.want, got, cmp.AllowUnexported(Request{})); diff != "" {
t.Errorf("NewRequest() %s = mismatch (-want +got):\n%s", tt.name, diff)
}
})
}
}
|
webguru001/Python-Django-Web
|
Francisco_Trujillo/Assignments/Cointoss/cointoss.py
|
import random
heads = 0
tails = 0
toss= random.random()
num = input("Enter number of Tosses: ")
num+=1
for num in range (1, num):
toss= random.random()
if toss >=0.50:
heads+=1
else:
tails+=1
print 'Attempt'+ str(num) + " : Throwing a coin... It's a head! ... Got "+ str(heads) + "head(s) so far and "+str(tails)+ " tail(s) so far "
print "Attempt "+ str(heads+tails) + " : Throwing a coin... It's a head! ... Got "+ str(heads) + "head(s) so far and "+str(tails)+ " tail(s) so far. Ending the program, thank you!"
|
michael-topchiev/hypershift
|
vendor/github.com/prometheus/client_golang/api/client.go
|
// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package api provides clients for the HTTP APIs.
package api
import (
"bytes"
"context"
"net"
"net/http"
"net/url"
"path"
"strings"
"time"
)
// DefaultRoundTripper is used if no RoundTripper is set in Config.
// It mirrors http.DefaultTransport's dial/TLS timeouts but — NOTE(review) —
// sets no overall request timeout; callers bound requests via context.
var DefaultRoundTripper http.RoundTripper = &http.Transport{
	Proxy: http.ProxyFromEnvironment,
	DialContext: (&net.Dialer{
		Timeout:   30 * time.Second,
		KeepAlive: 30 * time.Second,
	}).DialContext,
	TLSHandshakeTimeout: 10 * time.Second,
}
// Config defines configuration parameters for a new client.
type Config struct {
	// The address of the Prometheus to connect to.
	Address string

	// RoundTripper is used by the Client to drive HTTP requests. If not
	// provided, DefaultRoundTripper will be used.
	RoundTripper http.RoundTripper
}
// roundTripper returns the configured RoundTripper, falling back to
// DefaultRoundTripper when none was provided.
func (cfg *Config) roundTripper() http.RoundTripper {
	if rt := cfg.RoundTripper; rt != nil {
		return rt
	}
	return DefaultRoundTripper
}
// Client is the interface for an API client.
// URL expands an endpoint template (":arg" placeholders) against the base
// address; Do executes a request and returns the response with its fully
// read body.
type Client interface {
	URL(ep string, args map[string]string) *url.URL
	Do(context.Context, *http.Request) (*http.Response, []byte, error)
}
// NewClient returns a new Client.
//
// It is safe to use the returned Client from multiple goroutines.
func NewClient(cfg Config) (Client, error) {
	endpoint, err := url.Parse(cfg.Address)
	if err != nil {
		return nil, err
	}
	// Normalize the base path so URL() can join cleanly.
	endpoint.Path = strings.TrimRight(endpoint.Path, "/")

	hc := &httpClient{
		endpoint: endpoint,
		client:   http.Client{Transport: cfg.roundTripper()},
	}
	return hc, nil
}
// httpClient is the concrete Client backed by a base endpoint URL and an
// http.Client.
type httpClient struct {
	endpoint *url.URL
	client   http.Client
}

// URL joins ep onto the client's base path and substitutes each ":name"
// placeholder with its value from args, returning a copy of the endpoint
// URL with the resulting path.
func (c *httpClient) URL(ep string, args map[string]string) *url.URL {
	joined := path.Join(c.endpoint.Path, ep)
	for name, val := range args {
		joined = strings.Replace(joined, ":"+name, val, -1)
	}

	resolved := *c.endpoint // shallow copy; only Path is modified
	resolved.Path = joined
	return &resolved
}
// Do executes req, reading the body concurrently so the read can be
// abandoned when ctx is cancelled. It returns the response, the fully read
// body, and the first error encountered. On cancellation the body is closed
// to unblock the reader and ctx.Err() is reported.
// NOTE(review): the nil-ctx guard below only skips WithContext; the select
// still calls ctx.Done(), so passing a nil ctx would panic — confirm all
// callers supply a context.
func (c *httpClient) Do(ctx context.Context, req *http.Request) (*http.Response, []byte, error) {
	if ctx != nil {
		req = req.WithContext(ctx)
	}
	resp, err := c.client.Do(req)
	defer func() {
		if resp != nil {
			resp.Body.Close()
		}
	}()

	if err != nil {
		return nil, nil, err
	}

	// Read the body in a goroutine so cancellation can interrupt the read.
	var body []byte
	done := make(chan struct{})
	go func() {
		var buf bytes.Buffer
		_, err = buf.ReadFrom(resp.Body)
		body = buf.Bytes()
		close(done)
	}()

	select {
	case <-ctx.Done():
		// Close the body to force the reader goroutine to finish, then
		// surface the context error (unless Close itself failed).
		<-done
		err = resp.Body.Close()
		if err == nil {
			err = ctx.Err()
		}
	case <-done:
	}

	return resp, body, err
}
|
opendesk/schema
|
src/winnow/tests/json_encode_tests.py
|
import unittest
import decimal
from winnow.utils import json_loads, json_dumps, to_decimal, from_decimal
class TestJsonEncoding(unittest.TestCase):
    """Round-trip checks for winnow's JSON float handling."""

    def test_roundtrip_floats(self):
        """Decimal strings survive json_loads -> to_decimal / json_dumps intact."""
        for text in ("2.2", "2.2639520464"):
            parsed = json_loads(text)
            self.assertTrue(isinstance(parsed, float))
            converted = to_decimal(parsed)
            self.assertTrue(isinstance(converted, decimal.Decimal))
            self.assertEqual(decimal.Decimal(text), converted)
            # Dump/parse round trip reproduces the original literal exactly.
            self.assertEqual(text, json_dumps(json_loads(text)))
|
chenlulujing/talkback
|
src/main/java/com/android/talkback/speechrules/RulePager.java
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.android.talkback.speechrules;
import com.google.android.marvin.talkback.TalkBackService;
import android.content.Context;
import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat;
import android.view.accessibility.AccessibilityEvent;
import com.android.talkback.R;
import com.android.utils.AccessibilityNodeInfoUtils;
import com.android.utils.Role;
/**
* Rule for processing ViewPagers, providing different feedback for pagers with multiple pages
* and those with only single pages.
*/
/**
 * Rule for processing ViewPagers, providing different feedback for pagers with multiple pages
 * and those with only single pages.
 */
public class RulePager extends RuleDefault {
    @Override
    public boolean accept(AccessibilityNodeInfoCompat node, AccessibilityEvent event) {
        // This rule applies only to nodes with the pager role.
        return Role.getRole(node) == Role.ROLE_PAGER;
    }
    @Override
    public CharSequence getHintText(Context context, AccessibilityNodeInfoCompat node) {
        // Multi-page pagers get the swipe hint, except on TV (no touch
        // swipe) where the default hint is used; single-page pagers get a
        // dedicated message.
        if (hasMultiplePages(node)) {
            TalkBackService talkBack = TalkBackService.getInstance();
            if (talkBack == null || talkBack.isDeviceTelevision()) {
                return super.getHintText(context, node);
            } else {
                return context.getString(R.string.template_hint_pager);
            }
        } else {
            return context.getString(R.string.template_hint_pager_single_page);
        }
    }
    /** A pager has multiple pages iff it supports scrolling in either direction. */
    private static boolean hasMultiplePages(AccessibilityNodeInfoCompat node) {
        return node != null && AccessibilityNodeInfoUtils.supportsAnyAction(node,
                AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD,
                AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD);
    }
}
|
maroozm/AliPhysics
|
OADB/AliOADBTriggerAnalysis.h
|
#ifndef AliOADBTriggerAnalysis_H
#define AliOADBTriggerAnalysis_H
/* Copyright(c) 1998-2007, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
//-------------------------------------------------------------------------
// OADB container for filling scheme information (BX ids, name ...)
// Author: <NAME>, CERN
// Current support and development: <NAME>, PNPI
//-------------------------------------------------------------------------
#include "TNamed.h"
//
// OADB (Offline Analysis DataBase) container for trigger-analysis
// configuration: detector cut parameters, pileup-rejection parameters and
// trigger thresholds. Pure data holder: trivial inline getters/setters only.
//
class AliOADBTriggerAnalysis : public TNamed {
public :
  AliOADBTriggerAnalysis(TString name="default");
  virtual ~AliOADBTriggerAnalysis();
  // Getters
  Float_t GetZDCCutRefSumCorr()     { return fZDCCutRefSumCorr;     }
  Float_t GetZDCCutRefDeltaCorr()   { return fZDCCutRefDeltaCorr;   }
  Float_t GetZDCCutSigmaSumCorr()   { return fZDCCutSigmaSumCorr;   }
  Float_t GetZDCCutSigmaDeltaCorr() { return fZDCCutSigmaDeltaCorr; }
  Float_t GetZDCCutZNATimeCorrMax() { return fZDCCutZNATimeCorrMax; }
  Float_t GetZDCCutZNATimeCorrMin() { return fZDCCutZNATimeCorrMin; }
  Float_t GetZDCCutZNCTimeCorrMax() { return fZDCCutZNCTimeCorrMax; }
  Float_t GetZDCCutZNCTimeCorrMin() { return fZDCCutZNCTimeCorrMin; }
  Float_t GetSPDClsVsTklA()         { return fSPDClsVsTklA; }
  Float_t GetSPDClsVsTklB()         { return fSPDClsVsTklB; }
  Float_t GetV0C012vsTklA()         { return fV0C012vsTklA; }
  Float_t GetV0C012vsTklB()         { return fV0C012vsTklB; }
  Float_t GetV0MOnVsOfA()           { return fV0MOnVsOfA; }
  Float_t GetV0MOnVsOfB()           { return fV0MOnVsOfB; }
  Float_t GetSPDOnVsOfA()           { return fSPDOnVsOfA; }
  Float_t GetSPDOnVsOfB()           { return fSPDOnVsOfB; }
  Int_t   GetVtxMinContributors()   { return fVtxMinContributors; }
  Float_t GetVtxMinZdist()          { return fVtxMinZdist; }
  Float_t GetVtxNSigmaZdist()       { return fVtxNSigmaZdist; }
  Float_t GetVtxNSigmaDiamXY()      { return fVtxNSigmaDiamXY; }
  Float_t GetVtxNSigmaDiamZ()       { return fVtxNSigmaDiamZ; }
  Float_t GetV0CasymA()             { return fV0CasymA; }
  Float_t GetV0CasymB()             { return fV0CasymB; }
  Int_t   GetNBCsPast()             { return fNBCsPast; }
  Int_t   GetNBCsFuture()           { return fNBCsFuture; }
  Int_t   GetVIRBBAflags()          { return fVIRBBAflags; }
  Int_t   GetVIRBBCflags()          { return fVIRBBCflags; }
  Int_t   GetVIRBGAflags()          { return fVIRBGAflags; }
  Int_t   GetVIRBGCflags()          { return fVIRBGCflags; }
  Int_t   GetVHMBBAflags()          { return fVHMBBAflags; }
  Int_t   GetVHMBBCflags()          { return fVHMBBCflags; }
  Int_t   GetVHMBGAflags()          { return fVHMBGAflags; }
  Int_t   GetVHMBGCflags()          { return fVHMBGCflags; }
  Int_t   GetV0MOnThreshold()       { return fV0MOnThreshold; }
  Float_t GetV0MOfThreshold()       { return fV0MOfThreshold; }
  // NOTE(review): "Threshhold" spelling is historical; kept so existing callers compile.
  Int_t   GetSPDGFOThreshhold()     { return fSPDGFOThreshold; }
  Int_t   GetSH1OuterThreshold()    { return fSH1OuterThreshold; }
  Int_t   GetSH2OuterThreshold()    { return fSH2OuterThreshold; }
  Int_t   GetTklThreshold()         { return fTklThreshold; }
  Float_t GetFMDLowThreshold()      { return fFMDLowCut; }
  Float_t GetFMDHitThreshold()      { return fFMDHitCut; }
  Float_t GetTRDptHSE()             { return fTRDptHSE; }
  UChar_t GetTRDpidHSE()            { return fTRDpidHSE; }
  Float_t GetTRDptHQU()             { return fTRDptHQU; }
  UChar_t GetTRDpidHQU()            { return fTRDpidHQU; }
  Float_t GetTRDptHEE()             { return fTRDptHEE; }
  UChar_t GetTRDpidHEE()            { return fTRDpidHEE; }
  UChar_t GetTRDminSectorHEE()      { return fTRDminSectorHEE; }
  UChar_t GetTRDmaxSectorHEE()      { return fTRDmaxSectorHEE; }
  Float_t GetTRDptHJT()             { return fTRDptHJT; }
  UChar_t GetTRDnHJT()              { return fTRDnHJT; }
  // Setters
  void SetSPDClsVsTklA(Float_t val)       { fSPDClsVsTklA = val; }
  void SetSPDClsVsTklB(Float_t val)       { fSPDClsVsTklB = val; }
  void SetV0C012vsTklA(Float_t val)       { fV0C012vsTklA = val; }
  void SetV0C012vsTklB(Float_t val)       { fV0C012vsTklB = val; }
  void SetV0MOnVsOfA(Float_t val)         { fV0MOnVsOfA = val; }
  void SetV0MOnVsOfB(Float_t val)         { fV0MOnVsOfB = val; }
  void SetSPDOnVsOfA(Float_t val)         { fSPDOnVsOfA = val; }
  void SetSPDOnVsOfB(Float_t val)         { fSPDOnVsOfB = val; }
  void SetVtxMinContributors(Int_t val)   { fVtxMinContributors = val; }
  void SetVtxMinZdist(Float_t val)        { fVtxMinZdist = val; }
  void SetVtxNSigmaZdist(Float_t val)     { fVtxNSigmaZdist = val; }
  void SetVtxNSigmaDiamXY(Float_t val)    { fVtxNSigmaDiamXY = val; }
  void SetVtxNSigmaDiamZ(Float_t val)     { fVtxNSigmaDiamZ = val; }
  void SetV0CasymA(Float_t val)           { fV0CasymA = val; }
  void SetV0CasymB(Float_t val)           { fV0CasymB = val; }
  void SetNBCsPast(Int_t val)             { fNBCsPast = val; }
  void SetNBCsFuture(Int_t val)           { fNBCsFuture = val; }
  void SetVIRBBAflags(Int_t val)          { fVIRBBAflags = val; }
  void SetVIRBBCflags(Int_t val)          { fVIRBBCflags = val; }
  void SetVIRBGAflags(Int_t val)          { fVIRBGAflags = val; }
  void SetVIRBGCflags(Int_t val)          { fVIRBGCflags = val; }
  void SetVHMBBAflags(Int_t val)          { fVHMBBAflags = val; }
  void SetVHMBBCflags(Int_t val)          { fVHMBBCflags = val; }
  void SetVHMBGAflags(Int_t val)          { fVHMBGAflags = val; }
  void SetVHMBGCflags(Int_t val)          { fVHMBGCflags = val; }
  void SetV0MOnThreshold(Int_t val)       { fV0MOnThreshold = val; }
  void SetV0MOfThreshold(Float_t val)     { fV0MOfThreshold = val; }
  // NOTE(review): same historical "Threshhold" spelling as the getter.
  void SetSPDGFOThreshhold(Int_t val)     { fSPDGFOThreshold = val; }
  void SetSH1OuterThreshold(Int_t val)    { fSH1OuterThreshold = val; }
  void SetSH2OuterThreshold(Int_t val)    { fSH2OuterThreshold = val; }
  void SetTklThreshold(Int_t val)         { fTklThreshold = val; }
  // Sets the four corrected ZDC timing-cut parameters in one call.
  void SetZDCCorrParameters(Float_t sumCorr, Float_t deltaCorr, Float_t sigmaSumCorr, Float_t sigmaDeltaCorr){
    fZDCCutRefSumCorr     = sumCorr;
    fZDCCutRefDeltaCorr   = deltaCorr;
    fZDCCutSigmaSumCorr   = sigmaSumCorr;
    fZDCCutSigmaDeltaCorr = sigmaDeltaCorr;
  }
  // Sets the corrected ZNA/ZNC time windows (min/max per side).
  void SetZNCorrParameters(Float_t znaTimeCorrMin, Float_t znaTimeCorrMax, Float_t zncTimeCorrMin, Float_t zncTimeCorrMax){
    fZDCCutZNATimeCorrMin = znaTimeCorrMin;
    fZDCCutZNATimeCorrMax = znaTimeCorrMax;
    fZDCCutZNCTimeCorrMin = zncTimeCorrMin;
    fZDCCutZNCTimeCorrMax = zncTimeCorrMax;
  }
  // Sets all TRD trigger thresholds (pt/PID per trigger class, HEE sector
  // range, HJT track count) in one call.
  void SetTRDTriggerParameters(Float_t ptHSE, UChar_t pidHSE, Float_t ptHQU, UChar_t pidHQU, Float_t ptHEE, UChar_t pidHEE, UChar_t minSectorHEE, UChar_t maxSectorHEE, Float_t ptHJT, UChar_t nHJT) {
    fTRDptHSE = ptHSE; fTRDpidHSE = pidHSE;
    fTRDptHQU = ptHQU; fTRDpidHQU = pidHQU;
    fTRDptHEE = ptHEE; fTRDpidHEE = pidHEE;
    fTRDminSectorHEE = minSectorHEE; fTRDmaxSectorHEE = maxSectorHEE;
    fTRDptHJT = ptHJT; fTRDnHJT = nHJT;
  }
  void SetFMDThreshold(Float_t low, Float_t hit) { fFMDLowCut = low; fFMDHitCut = hit; }
  virtual Bool_t IsFolder() const { return kTRUE; }
  void Browse(TBrowser *b);
  virtual void Print(Option_t* option = "") const;
protected:
  Float_t fFMDLowCut;            //
  Float_t fFMDHitCut;            //
  // NOTE(review): the four uncorrected ZDC members below have no getters in
  // this header — presumably kept for ROOT I/O backward compatibility; confirm
  // against the .cxx before removing.
  Float_t fZDCCutRefSum;         // ZDC time cut configuration
  Float_t fZDCCutRefDelta;       // ZDC time cut configuration
  Float_t fZDCCutSigmaSum;       // ZDC time cut configuration
  Float_t fZDCCutSigmaDelta;     // ZDC time cut configuration
  Float_t fZDCCutRefSumCorr;     // Corrected ZDC time cut configuration
  Float_t fZDCCutRefDeltaCorr;   // Corrected ZDC time cut configuration
  Float_t fZDCCutSigmaSumCorr;   // Corrected ZDC time cut configuration
  Float_t fZDCCutSigmaDeltaCorr; // Corrected ZDC time cut configuration
  Float_t fZDCCutZNATimeCorrMin; // Corrected ZNA minimum time cut configuration
  Float_t fZDCCutZNATimeCorrMax; // Corrected ZNA maximum time cut configuration
  Float_t fZDCCutZNCTimeCorrMin; // Corrected ZNC minimum time cut configuration
  Float_t fZDCCutZNCTimeCorrMax; // Corrected ZNC maximum time cut configuration
  Float_t fSPDClsVsTklA;         // constant for the linear cut in SPD clusters vs tracklets
  Float_t fSPDClsVsTklB;         // slope for the linear cut in SPD clusters vs tracklets
  Float_t fV0C012vsTklA;         // constant for the linear cut in V0C012 vs tracklets
  Float_t fV0C012vsTklB;         // slope for the linear cut in V0C012 vs tracklets
  Float_t fV0MOnVsOfA;           // constant for the linear pileup cut in Online vs Offline V0M
  Float_t fV0MOnVsOfB;           // slope for the linear pileup cut in Online vs Offline V0M
  Float_t fSPDOnVsOfA;           // constant for the linear pileup cut in Online vs Offline SPD
  Float_t fSPDOnVsOfB;           // slope for the linear pileup cut in Online vs Offline SPD
  Int_t   fVtxMinContributors;   // SPD vertex pileup cut: minimum number of contributors
  Float_t fVtxMinZdist;          // SPD vertex pileup cut: minimum z-vertex distance
  Float_t fVtxNSigmaZdist;       // SPD vertex pileup cut: n sigma distrance
  Float_t fVtxNSigmaDiamXY;      // SPD vertex pileup cut: n sigma xy diam
  Float_t fVtxNSigmaDiamZ;       // SPD vertex pileup cut: n sigma z diam
  Float_t fV0CasymA;             // constant for the linear cut on V0C012 vs V0C3 asymmetry
  Float_t fV0CasymB;             // slope for the linear cut on V0C012 vs V0C3 asymmetry
  Int_t   fNBCsPast;             // VIR past-future protection: number of past BCs (BC%4=0)
  Int_t   fNBCsFuture;           // VIR past-future protection: number of future BCs (BC%4=0)
  Int_t   fVIRBBAflags;          // VIR past-future protection: min number of BBA flags in VIR definition
  Int_t   fVIRBBCflags;          // VIR past-future protection: min number of BBC flags in VIR definition
  Int_t   fVIRBGAflags;          // VIR past-future protection: min number of BGA flags in VIR definition
  Int_t   fVIRBGCflags;          // VIR past-future protection: min number of BGC flags in VIR definition
  Int_t   fVHMBBAflags;          // VHM trigger: min number of BBA flags (read out from OCDB)
  Int_t   fVHMBBCflags;          // VHM trigger: min number of BBC flags (read out from OCDB)
  Int_t   fVHMBGAflags;          // VHM trigger: min number of BGA flags (read out from OCDB)
  Int_t   fVHMBGCflags;          // VHM trigger: min number of BGC flags (read out from OCDB)
  Int_t   fV0MOnThreshold;       // V0M HM trigger: min V0M threshold (read out from OCDB)
  Float_t fV0MOfThreshold;       // V0M HM offline: min V0M threshold
  Int_t   fSPDGFOThreshold;      // SPD GFO trigger: min number of outer chips
  Int_t   fSH1OuterThreshold;    // SPD 0SH1 trigger: min number of outer chips
  Int_t   fSH2OuterThreshold;    // SPD 0SH2 trigger: min number of outer chips
  Int_t   fTklThreshold;         // Offline cut on number of tracklets (for high-multiplicity SPD trigger)
  Float_t fTRDptHSE;             // pt threshold for HSE trigger
  UChar_t fTRDpidHSE;            // PID threshold for HSE trigger
  Float_t fTRDptHQU;             // pt threshold for HQU trigger
  UChar_t fTRDpidHQU;            // PID threshold for HQU trigger
  Float_t fTRDptHEE;             // pt threshold for HEE trigger
  UChar_t fTRDpidHEE;            // PID threshold for HEE trigger
  UChar_t fTRDminSectorHEE;      // min sector for HEE trigger
  UChar_t fTRDmaxSectorHEE;      // max sector for HEE trigger
  Float_t fTRDptHJT;             // pt threshold for HJT trigger
  UChar_t fTRDnHJT;              // no of track threshold for HJT trigger
  // Copying is disabled (declared but not implemented).
  AliOADBTriggerAnalysis(const AliOADBTriggerAnalysis& cont);            // not implemented
  AliOADBTriggerAnalysis& operator=(const AliOADBTriggerAnalysis& cont); // not implemented
  ClassDef(AliOADBTriggerAnalysis, 6);
};
#endif
|
RevansChen/online-judge
|
Codewars/7kyu/powers-of-3/Python/test.py
|
# Python - 3.6.0
# Codewars kata test fixture: `Test` and `largestPower` are injected into the
# environment by the kata runner, so no imports are needed here.
Test.it('Example test cases')
Test.assert_equals(largestPower(3), 0)
Test.assert_equals(largestPower(4), 1)
|
gregmolnar/write_xlsx
|
lib/write_xlsx/gradient.rb
|
<reponame>gregmolnar/write_xlsx<filename>lib/write_xlsx/gradient.rb
module Writexlsx
  #
  # Mixin that validates and normalises gradient-fill properties
  # (colors, positions, angle and type) supplied in a user args hash.
  #
  module Gradient
    #
    # Build a normalised gradient hash from +args+.
    #
    # Returns nil when +args+ is falsy in the Excel sense (ptrue?).
    # Raises RuntimeError for a missing/short colors array, mismatched or
    # out-of-range positions, an out-of-range angle, or an unknown type.
    #
    def gradient_properties(args)
      return unless ptrue?(args)

      gradient = {}
      # Map user-facing type names onto the DrawingML gradient types.
      types = {
        'linear'      => 'linear',
        'radial'      => 'circle',
        'rectangular' => 'rect',
        'path'        => 'shape'
      }

      # Check the colors array exists and is valid.
      raise "Gradient must include colors array" unless ptrue?(args[:colors])

      # Check the colors array has the right number of entries.
      raise "Gradient colors array must include at least 2 values" if args[:colors].size < 2

      gradient[:colors] = args[:colors]

      if ptrue?(args[:positions])
        # Check the positions array has the right number of entries.
        raise "Gradient positions not equal to numbers of colors" unless args[:positions].size == args[:colors].size

        # Check the positions are in the correct range.
        args[:positions].each do |pos|
          if pos < 0 || pos > 100
            # Fixed: message previously lacked the closing quote around the value.
            raise "Gradient position '#{pos}' must be in range 0 <= pos <= 100"
          end
        end

        gradient[:positions] = args[:positions]
      else
        # Use the default gradient positions.
        case args[:colors].size
        when 2
          gradient[:positions] = [0, 100]
        when 3
          gradient[:positions] = [0, 50, 100]
        when 4
          gradient[:positions] = [0, 33, 66, 100]
        else
          raise "Must specify gradient positions"
        end
      end

      # Set the gradient angle.
      if args[:angle]
        angle = args[:angle]
        if angle < 0 || angle > 359.9
          # Fixed: closing quote added and the message now names "angle"
          # (it previously said "pos", copied from the positions check).
          raise "Gradient angle '#{angle}' must be in range 0 <= angle < 360"
        end
        gradient[:angle] = angle
      else
        # Default matches Excel's default gradient angle.
        gradient[:angle] = 90
      end

      # Set the gradient type.
      if args[:type]
        type = args[:type]
        unless types[type]
          # Fixed typo: "Unknow" -> "Unknown".
          raise "Unknown gradient type '#{type}'"
        end
        gradient[:type] = types[type]
      else
        gradient[:type] = 'linear'
      end

      gradient
    end
  end
end
|
keryhu/xdidian-projection
|
user-account/src/main/java/com/xdidian/keryhu/user_account/stream/SignupConsumer.java
|
<reponame>keryhu/xdidian-projection
package com.xdidian.keryhu.user_account.stream;
import com.xdidian.keryhu.domain.SignupDto;
import com.xdidian.keryhu.user_account.domain.User;
import com.xdidian.keryhu.user_account.service.ConverterUtil;
import com.xdidian.keryhu.user_account.service.UserService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
/**
 * @Description : Consumes the detailed sign-up message published after a
 *                property-management-company user registers, and persists
 *                the new user account. (Comment translated from Chinese.)
 * @date : 2016-06-18 21:25:40
 * @author : keryHu <EMAIL>
 */
@EnableBinding(SignupInputChannel.class)
@Slf4j
public class SignupConsumer {
    @Autowired
    private ConverterUtil converterUtil;

    @Autowired
    private UserService userService;

    /**
     * Handles an incoming SignupDto from the sign-up input channel:
     * converts it to a User entity and saves it.
     */
    @StreamListener(SignupInputChannel.NAME)
    public void saveProperty(SignupDto dto) {
        // Runtime log message intentionally left in its original language.
        log.info("user-account 已经收到了用户注册信息,具体为 : " + dto);
        User user = converterUtil.signupDtoToUser.apply(dto);
        // Persist to the database.
        userService.save(user);
    }
}
|
pervasync/myfaces-trinidad
|
trinidad-impl/src/main/java/org/apache/myfaces/trinidadinternal/agent/parse/DeviceComponentNode.java
|
<filename>trinidad-impl/src/main/java/org/apache/myfaces/trinidadinternal/agent/parse/DeviceComponentNode.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.trinidadinternal.agent.parse;
/**
 * Object that holds information about the device component node in capabilities file.
 *
 * A component is one facet of a device (browser, platform, hardware, push,
 * mms) together with the capability-include nodes it references by URI or
 * by reference id.
 */
class DeviceComponentNode
{
  /**
   * The User Agent component of device
   */
  public static final Object TYPE_BROWSER = "browser";
  /**
   * The software/os platform of the device
   */
  public static final Object TYPE_PLATFORM = "platform";
  /**
   * The hardware platform of the device
   */
  public static final Object TYPE_HARDWARE = "hardware";
  /**
   * The push component of the device
   */
  public static final Object TYPE_PUSH = "push";
  /**
   * the mms component of the device
   */
  public static final Object TYPE_MMS = "mms";
  /**
   * unknown component
   */
  private static final Object TYPE_UNKNOWN = "unknown";

  public DeviceComponentNode(String type,
                             IncludeNode[] nodesWithRef,
                             IncludeNode[] nodesWithSrc)
  {
    _type = _getType(type);
    _nodesWithRef = nodesWithRef;
    _nodesWithSrc = nodesWithSrc;
  }

  /** Include nodes referenced by URI (src attribute). */
  IncludeNode[] __getIncludesByUri()
  {
    return _nodesWithSrc;
  }

  /** Include nodes referenced by id (ref attribute). */
  IncludeNode[] __getIncludesByRef()
  {
    return _nodesWithRef;
  }

  /** Canonical component type: one of the TYPE_* constants. */
  Object __getType()
  {
    return _type;
  }

  /**
   * Maps a type string onto the canonical TYPE_* constant, falling back to
   * TYPE_UNKNOWN for null or unrecognized values.
   *
   * Cleanup: the previous version interned the string and paired each
   * equals() check with a redundant identity (==) check; equals() alone is
   * sufficient and the intern() call was dead weight.
   */
  private Object _getType(String type)
  {
    if (type == null)
      return TYPE_UNKNOWN;

    if (TYPE_BROWSER.equals(type))
      return TYPE_BROWSER;
    if (TYPE_PLATFORM.equals(type))
      return TYPE_PLATFORM;
    if (TYPE_HARDWARE.equals(type))
      return TYPE_HARDWARE;
    if (TYPE_MMS.equals(type))
      return TYPE_MMS;
    if (TYPE_PUSH.equals(type))
      return TYPE_PUSH;

    return TYPE_UNKNOWN;
  }

  private Object _type;
  private IncludeNode[] _nodesWithRef;
  private IncludeNode[] _nodesWithSrc;
}
|
SpokeU/miner
|
src/main/java/app/miner/models/StepConfiguration.java
|
package app.miner.models;
import org.javalite.activejdbc.Model;
import org.javalite.activejdbc.annotations.BelongsTo;
import org.javalite.activejdbc.annotations.Table;
import java.util.Map;
@Table("step_configurations")
@BelongsTo(parent = Step.class, foreignKeyName = "step_id")
public class StepConfiguration extends Model {
    // Column names of the step_configurations table.
    public enum fields{name, value, step_id}

    /** Returns the configuration entry's name (the "name" column). */
    public String getName() {
        return getString("name");
    }

    /** Returns the raw configuration value (the "value" column). */
    public Object getValue() {
        return get("value");
    }

    /**
     * Saves one StepConfiguration row per entry of the provided map,
     * all linked to the given step.
     *
     * @param stepId id of the owning Step row
     * @param map configuration name -&gt; value pairs to persist
     */
    public static void saveConfig(Long stepId, Map<String, String> map) {
        map.forEach((name, value) -> {
            StepConfiguration stepConfig = new StepConfiguration();
            stepConfig.set("name", name);
            stepConfig.set("value", value);
            stepConfig.set("step_id", stepId);
            stepConfig.saveIt();
        });
    }
}
|
megahertz0/android_thunder
|
dex_src/com/google/zxing/c/v.java
|
<reponame>megahertz0/android_thunder<gh_stars>10-100
package com.google.zxing.c;
// compiled from: UPCEANExtension5Support.java
// Decompiled, obfuscator-renamed class; field/class names are not meaningful.
final class v {
    // Static lookup table used by the 5-digit UPC/EAN extension decoder —
    // presumably the check-digit/parity weight table; TODO confirm against
    // the original UPCEANExtension5Support source.
    static final int[] a;
    // Scratch counter buffer (4 slots) reused across decode calls.
    final int[] b;
    // Accumulates decoded digits.
    final StringBuilder c;

    v() {
        this.b = new int[4];
        this.c = new StringBuilder();
    }

    static {
        a = new int[]{24, 20, 18, 17, 12, 6, 3, 10, 9, 5};
    }
}
|
mr337/mymove
|
pkg/services/storage_in_transit/patch_storage_in_transit.go
|
<reponame>mr337/mymove
package storageintransit
import (
"time"
"github.com/gobuffalo/pop"
"github.com/gobuffalo/validate"
"github.com/gofrs/uuid"
"github.com/transcom/mymove/pkg/auth"
"github.com/transcom/mymove/pkg/gen/apimessages"
"github.com/transcom/mymove/pkg/handlers"
"github.com/transcom/mymove/pkg/models"
"github.com/transcom/mymove/pkg/services"
)
// patchStorageInTransit implements services.StorageInTransitPatcher using a
// Pop database connection.
type patchStorageInTransit struct {
	db *pop.Connection
}
// patchStorageInTransitWithPayload merges the fields of the incoming API
// payload into the StorageInTransit model. Nil pointer fields leave the
// corresponding model values untouched, except Notes, WarehousePhone,
// WarehouseEmail, ActualStartDate and OutDate, which are always overwritten
// (a nil payload value clears them).
func patchStorageInTransitWithPayload(storageInTransit *models.StorageInTransit, payload *apimessages.StorageInTransit) {
	// Guard against a nil Location: the previous code dereferenced the
	// pointer unconditionally and panicked on a partial PATCH payload,
	// even though every other field here is nil-checked.
	if payload.Location != nil {
		if *payload.Location == "ORIGIN" {
			storageInTransit.Location = models.StorageInTransitLocationORIGIN
		} else {
			storageInTransit.Location = models.StorageInTransitLocationDESTINATION
		}
	}
	if payload.EstimatedStartDate != nil {
		storageInTransit.EstimatedStartDate = *(*time.Time)(payload.EstimatedStartDate)
	}
	storageInTransit.Notes = handlers.FmtStringPtrNonEmpty(payload.Notes)
	if payload.WarehouseID != nil {
		storageInTransit.WarehouseID = *payload.WarehouseID
	}
	if payload.WarehouseName != nil {
		storageInTransit.WarehouseName = *payload.WarehouseName
	}
	if payload.WarehouseAddress != nil {
		updateAddressWithPayload(&storageInTransit.WarehouseAddress, payload.WarehouseAddress)
	}
	storageInTransit.WarehousePhone = handlers.FmtStringPtrNonEmpty(payload.WarehousePhone)
	storageInTransit.WarehouseEmail = handlers.FmtStringPtrNonEmpty(payload.WarehouseEmail)
	storageInTransit.ActualStartDate = (*time.Time)(payload.ActualStartDate)
	storageInTransit.OutDate = (*time.Time)(payload.OutDate)
}
// PatchStorageInTransit edits an existing storage in transit and returns the
// updated object along with any validation errors collected from the saves.
// Both TSP and Office users may call this; TSP access is verified against
// shipment ownership.
func (p *patchStorageInTransit) PatchStorageInTransit(payload apimessages.StorageInTransit, shipmentID uuid.UUID, storageInTransitID uuid.UUID, session *auth.Session) (*models.StorageInTransit, *validate.Errors, error) {
	returnVerrs := validate.NewErrors()

	// Both TSPs and Office users can do this. TSPs can edit based on whether or not its their shipment.
	isAuthorized, err := authorizeStorageInTransitHTTPRequest(p.db, session, shipmentID, true)
	if err != nil {
		return nil, returnVerrs, err
	}
	if !isAuthorized {
		return nil, returnVerrs, models.ErrFetchForbidden
	}

	storageInTransit, err := models.FetchStorageInTransitByID(p.db, storageInTransitID)
	if err != nil {
		return nil, returnVerrs, err
	}

	patchStorageInTransitWithPayload(storageInTransit, &payload)

	verrs, err := models.SaveStorageInTransitAndAddress(p.db, storageInTransit)
	if err != nil || verrs.HasAny() {
		returnVerrs.Append(verrs)
		return nil, returnVerrs, err
	}

	// BUG FIX: payload.OutDate may be nil on a partial PATCH; the previous
	// code dereferenced it unconditionally for TSP users and panicked.
	if session.IsTspUser() && payload.OutDate != nil {
		verrs, err = storageInTransit.SaveActualDeliveryDateAsOutDate(p.db, session, *(*time.Time)(payload.OutDate))
		if err != nil || verrs.HasAny() {
			returnVerrs.Append(verrs)
			return storageInTransit, returnVerrs, err
		}
	}

	return storageInTransit, returnVerrs, err
}
// NewStorageInTransitPatcher returns a services.StorageInTransitPatcher
// backed by the given Pop database connection.
func NewStorageInTransitPatcher(db *pop.Connection) services.StorageInTransitPatcher {
	return &patchStorageInTransit{db: db}
}
|
vadim8kiselev/social-matchmaker
|
src/main/java/com/kiselev/matchmaker/view/serialize/SerializeView.java
|
<reponame>vadim8kiselev/social-matchmaker<gh_stars>0
package com.kiselev.matchmaker.view.serialize;
import com.kiselev.matchmaker.api.model.Entity;
import java.io.File;
import java.util.List;
/**
 * View abstraction for serializing lists of social-network entities to files.
 */
public interface SerializeView {
    /** Serializes the given entities to the file at {@code filePath}. */
    <Pojo extends Entity> void serialize(List<Pojo> entities, String filePath);

    /** Serializes the given entities to a generated file and returns it. */
    <Pojo extends Entity> File serialize(List<Pojo> entities);
}
|
crazyvalse/algorithms
|
src/104-knapsack/wanquan/01.test.js
|
<filename>src/104-knapsack/wanquan/01.test.js
const knapsack = require('./01')

// Unbounded ("complete") knapsack test cases.
// Comments translated from the original Chinese.
test('1', () => {
  // values of the items (in the safe)
  let values = [5, 10, 20]
  // sizes (weights) of the items
  let weights = [3, 2, 2]
  // knapsack capacity (original note: "enumerate all ways with capacity <= 16" — stale)
  let capacity = 5
  expect(knapsack(capacity, weights, values)).toBe(40)
})

test('2', () => {
  // values of the items (in the safe)
  let values = [4]
  // sizes (weights) of the items
  let weights = [3]
  // knapsack capacity
  let capacity = 16
  expect(knapsack(capacity, weights, values)).toBe(4)
})

test('3', () => {
  // knapsack capacity
  let capacity = 10
  // sizes (weights) of the items
  let weights = [2, 3, 4, 7]
  // values of the items (in the safe)
  let values = [1, 3, 5, 9]
  expect(knapsack(capacity, weights, values)).toBe(12)
})
|
qaz4042/beBetter
|
3-common/src/main/java/bebetter/basejpa/constant/CacheNamesBase.java
|
<filename>3-common/src/main/java/bebetter/basejpa/constant/CacheNamesBase.java
package bebetter.basejpa.constant;
/**
 * Central registry of cache-name constants, so cache names stay consistent
 * wherever they are referenced across modules.
 */
public class CacheNamesBase {
    public static final String menuList = "menuList";
    public static final String menu = "menu";
    public static final String bannerList = "bannerList";
    public static final String role = "role";
    public static final String admin = "admin";
    public static final String param = "param";
}
|
jorilallo/rich-markdown-editor-emotion
|
lib/plugins/EditList.js
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _slateEditList = require("slate-edit-list");
var _slateEditList2 = _interopRequireDefault(_slateEditList);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = (0, _slateEditList2.default)({
types: ["ordered-list", "bulleted-list", "todo-list"],
typeItem: "list-item",
typeDefault: "paragraph"
});
|
GullapalliAkhil/java-client
|
src/test/java/io/appium/java_client/events/listeners/ContextListener2.java
|
<gh_stars>0
package io.appium.java_client.events.listeners;
import io.appium.java_client.events.api.mobile.ContextEventListener;
import org.openqa.seleniumone.WebDriver;
/**
 * Test listener that records context-switch events into the shared
 * {@code messages} list inherited from {@link TestListener}.
 */
public class ContextListener2 extends TestListener implements ContextEventListener {
    @Override public void beforeSwitchingToContext(WebDriver driver, String context) {
        messages.add("Externally defined listener: Attempt to change current context to " + context);
    }

    @Override public void afterSwitchingToContext(WebDriver driver, String context) {
        messages.add("Externally defined listener: The previous context has been changed to " + context);
    }

    // Registers this instance in the SingleListeners registry, keyed by class.
    @Override protected void add() {
        SingleListeners.listeners.put(ContextListener2.class, this);
    }
}
|
liutang123/incubator-druid
|
server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.initialization.jetty;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import org.apache.druid.java.util.common.ISE;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.handler.RequestLogHandler;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.FilterMapping;
import org.eclipse.jetty.servlet.ServletContextHandler;
import javax.ws.rs.HttpMethod;
import java.util.Arrays;
import java.util.Set;
/**
 * Helpers for assembling Druid's embedded Jetty server: gzip wrapping,
 * extension-provided servlet filters, and request logging.
 */
public class JettyServerInitUtils
{
  private static final String[] GZIP_METHODS = new String[]{HttpMethod.GET, HttpMethod.POST};

  /** Wraps {@code handler} in a {@link GzipHandler} configured with Druid's defaults. */
  public static GzipHandler wrapWithDefaultGzipHandler(final Handler handler, int inflateBufferSize, int compressionLevel)
  {
    final GzipHandler gzip = new GzipHandler();
    gzip.setMinGzipSize(0);
    gzip.setIncludedMethods(GZIP_METHODS);
    gzip.setInflateBufferSize(inflateBufferSize);
    gzip.setCompressionLevel(compressionLevel);
    // We don't actually have any precomputed .gz resources, and checking for them inside jars is expensive.
    gzip.setCheckGzExists(false);
    gzip.setHandler(handler);
    return gzip;
  }

  /**
   * Registers every extension-supplied {@link ServletFilterHolder} bound in the
   * injector onto the given servlet context.
   */
  public static void addExtensionFilters(ServletContextHandler handler, Injector injector)
  {
    final Set<ServletFilterHolder> extensionFilters =
        injector.getInstance(Key.get(new TypeLiteral<Set<ServletFilterHolder>>(){}));

    for (ServletFilterHolder filterSpec : extensionFilters) {
      // Check the Filter first to guard against people who don't read the docs and return the Class even
      // when they have an instance.
      final FilterHolder jettyHolder;
      if (filterSpec.getFilter() != null) {
        jettyHolder = new FilterHolder(filterSpec.getFilter());
      } else if (filterSpec.getFilterClass() != null) {
        jettyHolder = new FilterHolder(filterSpec.getFilterClass());
      } else {
        throw new ISE(
            "Filter[%s] for paths[%s] didn't have a Filter!?",
            filterSpec,
            Arrays.toString(filterSpec.getPaths())
        );
      }

      if (filterSpec.getInitParameters() != null) {
        jettyHolder.setInitParameters(filterSpec.getInitParameters());
      }

      final FilterMapping mapping = new FilterMapping();
      mapping.setFilterName(jettyHolder.getName());
      mapping.setPathSpecs(filterSpec.getPaths());
      mapping.setDispatcherTypes(filterSpec.getDispatcherType());

      handler.getServletHandler().addFilter(jettyHolder, mapping);
    }
  }

  /** Builds the Jetty request-log handler backed by {@link JettyRequestLog}. */
  public static Handler getJettyRequestLogHandler()
  {
    // Ref: http://www.eclipse.org/jetty/documentation/9.2.6.v20141205/configuring-jetty-request-logs.html
    final RequestLogHandler logHandler = new RequestLogHandler();
    logHandler.setRequestLog(new JettyRequestLog());
    return logHandler;
  }
}
|
Asap7772/rail-rl-franka-eval
|
experiments/murtaza/vae/torque_control/train_vae_torque_reacher_debug.py
|
from sklearn.model_selection import train_test_split
import railrl.misc.hyperparameter as hyp
from railrl.launchers.launcher_util import run_experiment
from railrl.misc.ml_util import PiecewiseLinearSchedule
from railrl.torch.vae.conv_vae import ConvVAE
from railrl.torch.vae.vae_trainer import ConvVAETrainer
import numpy as np
def experiment(variant):
    """Train a ConvVAE (with a state-prediction debug head) on pre-collected
    Sawyer torque-control images and states.

    variant keys used: beta, representation_size, num_epochs, save_period,
    optional beta_schedule_kwargs, plus conv_vae_kwargs / algo_kwargs dicts.
    """
    from railrl.core import logger
    import railrl.torch.pytorch_util as ptu
    beta = variant["beta"]
    representation_size = variant["representation_size"]
    # this has both states and images so can't use generate vae dataset
    # NOTE(review): hard-coded local dataset paths — assumes these .npy files exist.
    X = np.load('/home/murtaza/vae_data/sawyer_torque_control_ou_imgs_zoomed_out10000.npy')
    Y = np.load('/home/murtaza/vae_data/sawyer_torque_control_ou_states_zoomed_out10000.npy')
    # Keep the first 7 state columns and everything from column 14 on,
    # dropping columns 7-13 — presumably a velocity block; TODO confirm.
    Y = np.concatenate((Y[:, :7], Y[:, 14:]), axis=1)
    # 90/10 train/test split of paired images and states.
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=.1)
    info = dict()
    logger.save_extra_data(info)
    logger.get_snapshot_dir()
    if 'beta_schedule_kwargs' in variant:
        beta_schedule = PiecewiseLinearSchedule(**variant['beta_schedule_kwargs'])
    else:
        beta_schedule = None
    # state_sim_debug=True adds a state-prediction (MSE) objective alongside the VAE loss.
    m = ConvVAE(representation_size, input_channels=3, state_sim_debug=True, state_size = Y.shape[1], **variant['conv_vae_kwargs'])
    if ptu.gpu_enabled():
        m.cuda()
    t = ConvVAETrainer((X_train, Y_train), (X_test, Y_test), m, beta=beta,
                       beta_schedule=beta_schedule, state_sim_debug=True, **variant['algo_kwargs'])
    save_period = variant['save_period']
    for epoch in range(variant['num_epochs']):
        # Dump reconstructions/samples every save_period epochs.
        should_save_imgs = (epoch % save_period == 0)
        t.train_epoch(epoch)
        t.test_epoch(epoch, save_reconstruction=should_save_imgs,
                     save_scatterplot=should_save_imgs)
        if should_save_imgs:
            t.dump_samples(epoch)
if __name__ == "__main__":
    # Sweep configuration: one seed per variant, run locally, GPU enabled.
    n_seeds = 1
    mode = 'local'
    exp_prefix = 'sawyer_torque_vae_with_mse_loss_sweep'
    use_gpu = True
    # Base hyperparameters; entries in search_space below override these.
    variant = dict(
        beta=5,
        num_epochs=500,
        get_data_kwargs=dict(
            N=10000,
            use_cached=True,
        ),
        algo_kwargs=dict(
            mse_weight=.1,
        ),
        conv_vae_kwargs=dict(
            min_variance=None,
        ),
        save_period=1,
    )
    # Grid over latent size, MSE-loss weight and beta (2 * 4 * 4 variants).
    search_space = {
        'representation_size': [16, 32],
        'algo_kwargs.mse_weight':[10, 1, .1, .01],
        'beta':[4, 5, 6, 10]
    }
    sweeper = hyp.DeterministicHyperparameterSweeper(
        search_space, default_parameters=variant,
    )
    for _ in range(n_seeds):
        for exp_id, variant in enumerate(sweeper.iterate_hyperparameters()):
            # Snapshots saved every 20 epochs ('gap' snapshot mode).
            run_experiment(
                experiment,
                exp_prefix=exp_prefix,
                mode=mode,
                variant=variant,
                use_gpu=use_gpu,
                snapshot_mode='gap',
                snapshot_gap=20,
            )
|
ScholliYT/RoboCom2018
|
DataDebugger/PCCommunication/src/nxt/object/ShutdownHook.java
|
package nxt.object;
import nxt.connection.PCCommunicationManager;
/**
 * JVM shutdown hook that closes the given {@link PCCommunicationManager}
 * (if still open) when the process exits.
 */
public class ShutdownHook extends Thread {
    private final PCCommunicationManager man;

    private ShutdownHook(PCCommunicationManager man) {
        this.man = man;
    }

    /** Runs at JVM shutdown: closes the manager if it is still available. */
    @Override
    public void run() {
        if (man != null && man.isAvailable()) {
            man.close();
        }
    }

    /**
     * Registers a shutdown hook for the given manager.
     *
     * Fix: registration now happens here instead of inside the constructor —
     * the previous version passed {@code this} to
     * {@code Runtime.addShutdownHook} before construction finished
     * (unsafe publication of a partially constructed object).
     */
    public static void addShutdownHook(PCCommunicationManager man) {
        ShutdownHook hook = new ShutdownHook(man);
        Runtime.getRuntime().addShutdownHook(hook);
    }
}
|
hadoopeco/jok-boot
|
jok-common-v2/src/main/java/com/jokls/jok/util/SyncMap.java
|
<reponame>hadoopeco/jok-boot<filename>jok-common-v2/src/main/java/com/jokls/jok/util/SyncMap.java<gh_stars>0
package com.jokls.jok.util;
import java.util.HashMap;
/**
* Copyright (C) 2019
* All rights reserved
*
* @author: marik.wei
* @mail: <EMAIL>
* Date: 2019/6/25 11:22
*/
/**
 * HashMap whose get/put/remove are synchronized on the map instance.
 *
 * NOTE(review): other inherited operations (putAll, clear, containsKey,
 * iteration) are NOT synchronized here; consider
 * Collections.synchronizedMap or ConcurrentHashMap for full coverage.
 */
public class SyncMap<K, V> extends HashMap<K, V> {
    private static final long serialVersionUID = 1L;

    @Override
    public synchronized V get(Object key) { return super.get(key); }

    @Override
    public synchronized V put(K key, V value) { return super.put(key, value); }

    // BUG FIX: previously delegated to super.get(key), so the entry was
    // never removed from the map.
    @Override
    public synchronized V remove(Object key) { return super.remove(key); }
}
|
JaneMandy/CS
|
org/w3c/dom/css/CSSStyleSheet.java
|
package org.w3c.dom.css;
import org.w3c.dom.DOMException;
import org.w3c.dom.stylesheets.StyleSheet;
/**
 * The CSSStyleSheet interface from the W3C DOM Level 2 Style specification:
 * a style sheet whose CSS rules can be inspected and edited.
 */
public interface CSSStyleSheet extends StyleSheet {
    /** Returns the CSS rule (e.g. an @import rule) that owns this sheet, or null. */
    CSSRule getOwnerRule();

    /** Returns the list of all CSS rules contained in this sheet. */
    CSSRuleList getCssRules();

    /**
     * Inserts a new rule, given as CSS text, at the given index.
     *
     * @return the index at which the rule was inserted
     * @throws DOMException if the rule cannot be inserted at that point
     */
    int insertRule(String var1, int var2) throws DOMException;

    /**
     * Deletes the rule at the given index.
     *
     * @throws DOMException if the index does not correspond to a rule
     */
    void deleteRule(int var1) throws DOMException;
}
|
forestfsl/APIKit
|
APIKit/Classes/MagicWorldAPI.h
|
<reponame>forestfsl/APIKit
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface MagicWorldAPI : NSObject

// Notification names posted by the SDK for the corresponding events.
// NOTE(review): these externs are declared inside the @interface block, which
// is unconventional (they are global either way); naming is also inconsistent
// (krequestLogin..., kuserID vs. kActivate..., kLogin...).
extern NSString *const kActivateForNotification;
extern NSString *const krequestLoginForNotification;
extern NSString *const kLoginCheckForNotification;
extern NSString *const kLogoutForNotification;
extern NSString *const kPurchaseForNotification;

// Presumably dictionary keys for user identity/session payloads — confirm
// against the implementation.
extern NSString *const kuserID;
extern NSString *const kusername;
extern NSString *const ktoken;

// Presumably dictionary keys describing a purchase (order, goods, price) and
// the player context (server, role) plus a free-form extension — confirm.
extern NSString *const kCpOrderId;
extern NSString *const kGoodsId;
extern NSString *const kGoodsPrice;
extern NSString *const kServerId;
extern NSString *const kRoleId;
extern NSString *const kRoleName;
extern NSString *const kExtend;

// Shared singleton accessor.
+ (instancetype)board_sharedAPI_storyInstance;

// Activates the SDK with the given app credentials.
+ (void)activeWithAppID:(NSString *)appID
          unique_appKey:(NSString *)appKey;

+ (void)requestLogin;
+ (void)requestAuthUserInfo:(NSDictionary *)userInfo;
+ (void)requestLogout;

// Starts a purchase flow; params presumably use the purchase-related keys
// declared above — TODO confirm.
+ (void)sendPurchaseRequest:(NSDictionary *)params;

+ (NSDictionary *)requestVersionInfo;
@end
NS_ASSUME_NONNULL_END
|
skurmedel/cornelis
|
include/cornelis/Scene.hpp
|
#pragma once
#include <memory>
#include <cornelis/Camera.hpp>
#include <cornelis/Objects.hpp>
namespace cornelis {
// A scene: a camera plus collections of surfaces, held behind a pimpl
// (State) so this header stays light. Move-only.
class Scene {
  public:
    Scene();
    ~Scene();

    // Movable but not copyable: the Scene exclusively owns its State.
    Scene(Scene &&) = default;
    auto operator=(Scene &&) -> Scene & = default;
    Scene(Scene const &) = delete;
    auto operator=(Scene const &) -> Scene & = delete;

    // Sets/returns the camera used for this scene.
    auto setCamera(PerspectiveCameraPtr camera) -> void;
    auto camera() const noexcept -> PerspectiveCameraPtr;

    // Mutable and read-only access to the sphere surfaces.
    auto spheres() noexcept -> SurfaceBag<SphereSurface> &;
    auto spheres() const noexcept -> SurfaceBag<SphereSurface> const &;

  private:
    struct State;                // hidden implementation (pimpl)
    std::unique_ptr<State> me_;
};
} // namespace cornelis
|
Parveen3300/Reans
|
cms/migrations/0001_initial.py
|
# Generated by Django 3.2.8 on 2021-11-15 05:11
import ckeditor.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.2.8. Applied migrations should not be
    # hand-edited; add a new migration for any schema change.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('catalogue', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='WebsiteCompanyLogo',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('is_active', models.BooleanField(default=True)),
                ('meta_title', models.CharField(blank=True, max_length=250, null=True, verbose_name='Meta Title')),
                ('meta_description', models.TextField(blank=True, null=True, verbose_name='Meta Description')),
                ('keywords', models.CharField(blank=True, max_length=250, null=True, verbose_name='Keyword')),
                # NOTE(review): verbose_name 'Home Name' on a logo title looks
                # copy-pasted from another model — confirm intended.
                ('logo_title', models.CharField(max_length=30, verbose_name='Home Name')),
                ('logo_image', models.ImageField(upload_to='company/logo_images', verbose_name='Logo Image (small)')),
                ('logo_image_large', models.ImageField(upload_to='company/logo_images_large', verbose_name='Logo Image (large)')),
                ('created_by', models.ForeignKey(blank=True, db_column='created_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_websitecompanylogos', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(blank=True, db_column='updated_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_websitecompanylogos', to=settings.AUTH_USER_MODEL, verbose_name='Updated By')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ProductBanner',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('is_active', models.BooleanField(default=True)),
                ('meta_title', models.CharField(blank=True, max_length=250, null=True, verbose_name='Meta Title')),
                ('meta_description', models.TextField(blank=True, null=True, verbose_name='Meta Description')),
                ('keywords', models.CharField(blank=True, max_length=250, null=True, verbose_name='Keyword')),
                ('banner_title', models.CharField(max_length=30, verbose_name='Banner Name')),
                ('banner_image', models.ImageField(upload_to='product/banner_images', verbose_name='Banner Image (small)')),
                ('banner_image_large', models.ImageField(upload_to='product/banner_images_large', verbose_name='Banner Image (large)')),
                ('created_by', models.ForeignKey(blank=True, db_column='created_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_productbanners', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(blank=True, db_column='updated_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_productbanners', to=settings.AUTH_USER_MODEL, verbose_name='Updated By')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='HomePageBanner',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('is_active', models.BooleanField(default=True)),
                ('meta_title', models.CharField(blank=True, max_length=250, null=True, verbose_name='Meta Title')),
                ('meta_description', models.TextField(blank=True, null=True, verbose_name='Meta Description')),
                ('keywords', models.CharField(blank=True, max_length=250, null=True, verbose_name='Keyword')),
                ('banner_title', models.CharField(max_length=30, verbose_name='Home Name')),
                # NOTE(review): home-page banners upload to the 'product/...'
                # directories — looks copy-pasted from ProductBanner; confirm.
                ('banner_image', models.ImageField(upload_to='product/banner_images', verbose_name='Home Image (small)')),
                ('banner_image_large', models.ImageField(upload_to='product/banner_images_large', verbose_name='Home Image (large)')),
                ('created_by', models.ForeignKey(blank=True, db_column='created_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_homepagebanners', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(blank=True, db_column='updated_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_homepagebanners', to=settings.AUTH_USER_MODEL, verbose_name='Updated By')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='DealOfTheDayProduct',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('is_active', models.BooleanField(default=True)),
                ('meta_title', models.CharField(blank=True, max_length=250, null=True, verbose_name='Meta Title')),
                ('meta_description', models.TextField(blank=True, null=True, verbose_name='Meta Description')),
                ('keywords', models.CharField(blank=True, max_length=250, null=True, verbose_name='Keyword')),
                ('title', models.CharField(max_length=30, verbose_name='Home Name')),
                ('image', models.ImageField(blank=True, null=True, upload_to='company/images', verbose_name='Logo Image')),
                ('short_description', models.TextField()),
                ('content', ckeditor.fields.RichTextField(blank=True, null=True)),
                ('created_by', models.ForeignKey(blank=True, db_column='created_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_dealofthedayproducts', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('product', models.ForeignKey(blank=True, limit_choices_to={'is_active': '1'}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='deal_product', to='catalogue.product', verbose_name='Deal Products')),
                ('updated_by', models.ForeignKey(blank=True, db_column='updated_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_dealofthedayproducts', to=settings.AUTH_USER_MODEL, verbose_name='Updated By')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='CompanyBanner',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('is_active', models.BooleanField(default=True)),
                ('meta_title', models.CharField(blank=True, max_length=250, null=True, verbose_name='Meta Title')),
                ('meta_description', models.TextField(blank=True, null=True, verbose_name='Meta Description')),
                ('keywords', models.CharField(blank=True, max_length=250, null=True, verbose_name='Keyword')),
                ('banner_title', models.CharField(max_length=30, verbose_name='Banner Name')),
                ('banner_image', models.ImageField(upload_to='company/banner_images', verbose_name='Banner Image (small)')),
                ('banner_image_large', models.ImageField(upload_to='company/banner_images_large', verbose_name='Banner Image (large)')),
                ('created_by', models.ForeignKey(blank=True, db_column='created_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_companybanners', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(blank=True, db_column='updated_by', limit_choices_to=models.Q(('is_staff', 0), ('is_superuser', 0), _negated=True), null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_companybanners', to=settings.AUTH_USER_MODEL, verbose_name='Updated By')),
            ],
            # The only model here with explicit meta; the others use defaults.
            options={
                'verbose_name': 'Company Banner',
                'verbose_name_plural': 'Company Banner',
                'db_table': 'company_banner',
            },
        ),
    ]
|
AlmondDust/Assignment3-Final-AIFO
|
aifo_simulation/java-code/src/test/java/ch/ethz/systems/netbench/ext/bare/BareRunTest.java
|
package ch.ethz.systems.netbench.ext.bare;
import ch.ethz.systems.netbench.core.run.MainFromProperties;
import ch.ethz.systems.netbench.testutility.TestLogReader;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.IOException;
import java.util.Map;
import static org.junit.Assert.*;
/**
 * End-to-end regression test: runs the simulator via MainFromProperties on the
 * two-node "bare" example configuration and checks the flow-completion and
 * port-utilization logs written under temp/test_n2_bare.
 */
@RunWith(MockitoJUnitRunner.class)
public class BareRunTest {

    @Test
    public void testBareRun() throws IOException {

        // Perform run (run folder: temp/test_n2)
        String[] args = new String[]{"example/runs/test_n2_bare.properties"};
        MainFromProperties.main(args);

        // Fetch log mappings
        Map<Pair<Integer, Integer>, TestLogReader.PortUtilizationTuple> portQueueStateTupleMap = TestLogReader.getPortUtilizationMapping("temp/test_n2_bare");
        Map<Long, TestLogReader.FlowCompletionTuple> flowCompletionTupleMap = TestLogReader.getFlowCompletionMapping("temp/test_n2_bare");

        // Flow completion: flow 0 is still running when the run ends at
        // t=50000, having sent 55200 of its 1,000,000,000 bytes.
        TestLogReader.FlowCompletionTuple tuple = flowCompletionTupleMap.get(0L);
        assertEquals(50000, tuple.getEndTime());
        assertFalse(tuple.isCompleted());
        assertEquals(50000, tuple.getDuration());
        assertEquals(55200, tuple.getSentBytes());
        assertEquals(1000000000L, tuple.getTotalSizeBytes());

        // Sent one pa
        // Port utilization
        // 0 -> 1: fully utilized (100%) for the whole 50000ns run
        assertEquals(100.0, portQueueStateTupleMap.get(new ImmutablePair<>(0, 1)).getUtilizationPercentage(), 1e-6);
        assertEquals(50000, portQueueStateTupleMap.get(new ImmutablePair<>(0, 1)).getUtilizationNs());

        // 1 -> 0: assertions were already disabled in the original code.
        //assertEquals((5*48) / 9479.0 * 100.0, portQueueStateTupleMap.get(new Pair<>(1, 0)).getUtilizationPercentage(), 1e-6);
        //assertEquals((5*48), portQueueStateTupleMap.get(new Pair<>(1, 0)).getUtilizationNs());
    }
}
|
drausin/libri
|
libri/librarian/server/peer/testing.go
|
package peer
import (
"fmt"
"math/rand"
"net"
"testing"
"time"
cerrors "github.com/drausin/libri/libri/common/errors"
"github.com/drausin/libri/libri/common/id"
"github.com/drausin/libri/libri/librarian/server/storage"
"github.com/stretchr/testify/assert"
)
// NewTestPeer generates a new peer suitable for testing using a random number generator for the
// ID and an index.
func NewTestPeer(rng *rand.Rand, idx int) Peer {
	return New(
		id.NewPseudoRandom(rng),
		fmt.Sprintf("test-peer-%d", idx+1), // peer names are 1-based
		NewTestPublicAddr(idx),             // loopback address, port 20100+idx
	)
}
// NewTestPublicAddr creates a new net.TCPAddr given a particular peer index.
// The address is always loopback (127.0.0.1) with port 20100+idx.
func NewTestPublicAddr(idx int) *net.TCPAddr {
	// %d rather than %v: the port expression is an int, so be explicit.
	address, err := net.ResolveTCPAddr("tcp", fmt.Sprintf("127.0.0.1:%d", 20100+idx))
	cerrors.MaybePanic(err) // resolution of a literal loopback addr should never fail
	return address
}
// NewTestPeers generates n new peers suitable for testing use with random IDs and incrementing
// values of other fields.
func NewTestPeers(rng *rand.Rand, n int) []Peer {
	peers := make([]Peer, n)
	for i := range peers {
		peers[i] = NewTestPeer(rng, i)
	}
	return peers
}
// NewTestStoredPeer generates a new storage.Peer suitable for testing using a random number
// generator for the ID and an index.
func NewTestStoredPeer(rng *rand.Rand, idx int) *storage.Peer {
	// Deterministic per-index timestamp: idx seconds after the Unix epoch.
	now := time.Unix(int64(idx), 0).UTC()
	return &storage.Peer{
		Id:   id.NewPseudoRandom(rng).Bytes(),
		Name: fmt.Sprintf("peer-%d", idx+1), // names are 1-based, like NewTestPeer
		PublicAddress: &storage.Address{
			Ip:   "192.168.1.1",
			Port: uint32(20100 + idx), // same port scheme as NewTestPublicAddr
		},
		// One successful response recorded at 'now'; no requests yet.
		QueryOutcomes: &storage.QueryOutcomes{
			Responses: &storage.QueryTypeOutcomes{
				Earliest: now.Unix(),
				Latest:   now.Unix(),
				NQueries: 1,
				NErrors:  0,
			},
			Requests: &storage.QueryTypeOutcomes{}, // everything will be zero
		},
	}
}
// AssertPeersEqual checks that the stored and non-stored representations of a peer are equal.
func AssertPeersEqual(t *testing.T, sp *storage.Peer, p Peer) {
	assert.Equal(t, sp.Id, p.ID().Bytes())
	addr := p.(*peer).Address()
	assert.Equal(t, sp.PublicAddress.Ip, addr.IP.String())
	assert.Equal(t, sp.PublicAddress.Port, uint32(addr.Port))
}
|
CESNET/Nemea-Framework
|
unirec/unirec2csv.h
|
<gh_stars>10-100
/**
* \file unirec2csv.h
* \brief Definition of UniRec API to create CSV-like representation of UniRec data
* \author <NAME> <<EMAIL>>
* \date 2019
*/
/*
* Copyright (C) 2019 CESNET
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef _UNIREC2CSV_H_
#define _UNIREC2CSV_H_
#include "unirec.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* \defgroup unirec2csv CSV representation
*
* Functions to convert UniRec template and data into CSV-like representation
*
* \code{.c}
* urcsv_t *csv = urcsv_init(tmplt, ',');
*
* char *str = urcsv_header(csv);
* fprintf(stderr, "%s\n", str);
* free(str);
*
* str = urcsv_record(csv, rec);
* fprintf(stderr, "%s\n", str);
* free(str);
*
* urcsv_free(&csv);
* \endcode
* @{
*/
/**
* Internal structure used by urcsv_init(), urcsv_free(), urcsv_header(), urcsv_record()
*/
typedef struct urcsv_s {
   /**
    * UniRec template associated with this conversion
    */
   ur_template_t *tmplt;
   /**
    * Internal string buffer, allocated to buffer_size bytes
    */
   char *buffer;
   /**
    * Internal position in the buffer to write next string
    */
   char *curpos;
   /**
    * Current size of allocated memory for buffer
    */
   uint32_t buffer_size;
   /**
    * Current free bytes in the buffer
    * (presumably buffer_size - (curpos - buffer) — confirm in implementation)
    */
   uint32_t free_space;
   /**
    * Delimiter that is put between columns
    */
   char delimiter;
} urcsv_t;
/**
* Constructor for #urcsv_t
*
* The function initializes struct for urcsv_header() and urcsv_record().
*
* \param[in] tmplt UniRec template that will be used to access fields of the records
* \param[in] delimiter Delimiter that will be used to separate columns of output
* \return Pointer to newly allocated and initialized structure
*/
urcsv_t *urcsv_init(ur_template_t *tmplt, char delimiter);
/**
* Destructor for #urcsv_t
*
 * The function deallocates internal memory and urcsv; the pointer is set to NULL.
* \param[in,out] urcsv Address of pointer to structure allocated by urcsv_init(), it will be set to NULL.
*/
void urcsv_free(urcsv_t **urcsv);
/**
* Create a header line
*
 * The function creates a text representation of the header according to the template.
* \param[in,out] urcsv Pointer to structure allocated by urcsv_init().
* \return Pointer to string, caller must free it
*/
char *urcsv_header(urcsv_t *urcsv);
/**
* Create a record line
*
 * The function creates a text representation of a UniRec record.
* \param[in,out] urcsv Pointer to structure allocated by urcsv_init().
* \param[in] rec Pointer to data - UniRec message
* \return Pointer to string, caller must free it
*/
char *urcsv_record(urcsv_t *urcsv, const void *rec);
/**
* Convert value of UniRec field to its string representation.
*
* \param[out] dst Pointer to memory where to store result (pointer is not moved)
* \param[in] size Size of available memory for result
* \param[in] rec UniRec record - value of the field is taken
* \param[in] id UniRec field id
* \param[in] tmplt UniRec template
*
* \return Number of written bytes. If 0, there was not enough space and caller must increase the memory size.
*/
int urcsv_field(char *dst, uint32_t size, const void *rec, ur_field_type_t id, ur_template_t *tmplt);
/**
* @}
*//* unirec2csv */
#ifdef __cplusplus
} // extern "C"
#endif
#endif
|
danielfct/master-thesis
|
usmanager/manager/manager-services/src/main/java/pt/unl/fct/miei/usmanagement/manager/dtos/kafka/ContainerDTO.java
|
package pt.unl.fct.miei.usmanagement.manager.dtos.kafka;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import pt.unl.fct.miei.usmanagement.manager.containers.ContainerPortMapping;
import pt.unl.fct.miei.usmanagement.manager.containers.ContainerTypeEnum;
import pt.unl.fct.miei.usmanagement.manager.hosts.Coordinates;
import pt.unl.fct.miei.usmanagement.manager.regions.RegionEnum;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Kafka transfer object describing a container. Identity (equals/hashCode)
 * is based solely on the container id.
 */
@JsonIdentityInfo(generator = ObjectIdGenerators.UUIDGenerator.class, scope = ContainerDTO.class)
@AllArgsConstructor
@NoArgsConstructor
@Getter
@Setter
public class ContainerDTO {

	private String id;
	private ContainerTypeEnum type;
	private long created;
	private String name;
	private String image;
	private String command;
	private String network;
	private String publicIpAddress;
	private String privateIpAddress;
	private Set<String> mounts;
	private Set<ContainerPortMapping> ports;
	private Map<String, String> labels;
	private RegionEnum region;
	private Coordinates coordinates;
	private String state;
	private Set<ContainerRuleDTO> containerRules;
	private Set<ContainerSimulatedMetricDTO> simulatedContainerMetrics;

	public ContainerDTO(String id) {
		this.id = id;
	}

	// Id-based hash; a DTO with a null id hashes to 0.
	@Override
	public int hashCode() {
		return Objects.hashCode(getId());
	}

	// Id-based equality; a DTO with a null id equals only itself.
	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (!(o instanceof ContainerDTO)) {
			return false;
		}
		ContainerDTO other = (ContainerDTO) o;
		return id != null && id.equals(other.getId());
	}

	// Rules/metrics are rendered as id sets to avoid recursive toString calls.
	// NOTE(review): the 'state' field is not included here — confirm intended.
	@Override
	public String toString() {
		return "ContainerDTO{" +
			"id='" + id + '\'' +
			", type=" + type +
			", created=" + created +
			", name='" + name + '\'' +
			", image='" + image + '\'' +
			", command='" + command + '\'' +
			", network='" + network + '\'' +
			", publicIpAddress='" + publicIpAddress + '\'' +
			", privateIpAddress='" + privateIpAddress + '\'' +
			", mounts=" + mounts +
			", ports=" + ports +
			", labels=" + labels +
			", region=" + region +
			", coordinates=" + coordinates +
			", containerRules=" + (containerRules == null ? "null" : containerRules.stream().map(ContainerRuleDTO::getId).collect(Collectors.toSet())) +
			", simulatedContainerMetrics=" + (simulatedContainerMetrics == null ? "null" : simulatedContainerMetrics.stream()
			.map(ContainerSimulatedMetricDTO::getId).collect(Collectors.toSet())) +
			'}';
	}
}
|
ernestoresende/ernestoresende.com
|
src/components/common/BlogItem/index.js
|
import React from 'react'
import PropTypes from 'prop-types'
import * as S from './styled'
const BlogItem = ({ slug, timeToRead, title, description, fixedHoverStyle }) => {
return (
<S.PostContainer to={`${slug}`} title={title} data-fixed-hover={fixedHoverStyle}>
<div>
<S.TimeToRead>{timeToRead} minutes read</S.TimeToRead>
<S.Title>{title}</S.Title>
<S.Description>{description}</S.Description>
</div>
</S.PostContainer>
)
}
// Runtime prop validation; fixedHoverStyle is the only optional prop and is
// passed through to the styled container as `data-fixed-hover`.
BlogItem.propTypes = {
  slug: PropTypes.string.isRequired,
  title: PropTypes.string.isRequired,
  timeToRead: PropTypes.number.isRequired,
  description: PropTypes.string.isRequired,
  fixedHoverStyle: PropTypes.bool,
}

export default BlogItem
|
vncoelho/optmarket
|
OptFrame/Heuristics/VNS/MOVNS.hpp
|
// OptFrame - Optimization Framework
// Copyright (C) 2009, 2010, 2011
// http://optframe.sourceforge.net/
//
// This file is part of the OptFrame optimization framework. This framework
// is free software; you can redistribute it and/or modify it under the
// terms of the GNU Lesser General Public License v3 as published by the
// Free Software Foundation.
// This framework is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License v3 for more details.
// You should have received a copy of the GNU Lesser General Public License v3
// along with this library; see the file COPYING. If not, write to the Free
// Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
// USA.
#ifndef MULTIOBJECTIVEVNS_HPP_
#define MULTIOBJECTIVEVNS_HPP_
#include <algorithm>
#include "../../MultiObjSearch.hpp"
#include "../../Evaluator.hpp"
#include "../../Population.hpp"
#include "../../NSSeq.hpp"
#include "../../ParetoDominance.hpp"
#include "../../ParetoDominanceWeak.hpp"
namespace optframe
{
// Multi-Objective Variable Neighborhood Search (MOVNS).
//
// Maintains an archive D of mutually non-dominated solutions. Each iteration
// picks a (preferably unvisited) archive member, shakes it with one random
// applicable move from a random neighborhood, then explores that neighborhood
// of the shaken solution, adding every non-dominated solution found to D.
template<class R, class ADS = OPTFRAME_DEFAULT_ADS>
class MOVNS: public MultiObjSearch<R, ADS>
{
	typedef vector<Evaluation*> FitnessValues;

private:
	// NOTE(review): the constructor's initializer list names these in a
	// different order; C++ initializes in declaration order, which is
	// harmless here because each initializer reads only constructor
	// parameters (but -Wreorder will warn).
	vector<NSSeq<R, ADS>*> neighbors;
	vector<Evaluator<R, ADS>*> v_e;
	ParetoDominance<R, ADS> pDominance;
	ParetoDominanceWeak<R, ADS> pDominanceWeak;
	RandGen& rg;
	Pareto<R, ADS> pfMethod;

public:
	MOVNS(vector<Evaluator<R, ADS>*> _v_e, vector<NSSeq<R, ADS>*> _neighbors, RandGen& _rg) :
		v_e(_v_e), neighbors(_neighbors), rg(_rg), pDominance(ParetoDominance<R, ADS>(_v_e)), pDominanceWeak(ParetoDominanceWeak<R, ADS>(_v_e))
	{
	}

	virtual ~MOVNS()
	{
	}

	// Runs the search until 'timelimit' elapses; the resulting non-dominated
	// set replaces p_0. e_pop and target_f are currently unused.
	virtual void exec(Population<R, ADS>& p_0, FitnessValues& e_pop, double timelimit, double target_f)
	{
		Timer tnow;
		cout << "exec: MOVNS" << endl;

		// Seed the archive D with the non-dominated members of p_0.
		Population<R, ADS> D;
		for (int ind = 0; ind < p_0.size(); ind++)
		{
			Solution<R, ADS>& s = p_0.at(ind).clone();
			if (!pfMethod.addSolution(pDominance, pDominanceWeak, D, s))
				delete &s;
		}
		cout << "Number of Inicial Non-Dominated solutions = " << D.size() << endl;

		vector<bool> visited;
		for (int ind = 0; ind < D.size(); ind++)
			visited.push_back(false);

		while (tnow.now() < timelimit)
		{
			cout << "Conjunto eficiente size = " << D.size() << endl;

			// Pick an archive member, retrying once if already visited.
			int ind = rg.rand(D.size());
			if (visited[ind] == true)
				ind = rg.rand(D.size());
			visited[ind] = true;

			// Shake: one random applicable move from a random neighborhood.
			int neigh = rg.rand(neighbors.size());
			Move<R, ADS>* move = &(neighbors[neigh]->move(D.at(ind)));
			while (!(move->canBeApplied(D.at(ind))))
			{
				delete move;
				move = &(neighbors[neigh]->move(D.at(ind)));
			}
			Solution<R, ADS>& s1 = D.at(ind).clone();
			Move<R, ADS>& mov_rev = move->apply(s1);
			delete &mov_rev;
			delete move; // fix: the shaking move was previously leaked

			NSIterator<R, ADS>& it = neighbors[neigh]->getIterator(s1.getR());
			it.first(); // first neighbor
			// Check whether there is any neighbor to generate.
			if (it.isDone())
			{
				// fix: this statement was corrupted in the source ("delete &it;"
				// had been HTML-entity-mangled into an invisible character).
				delete &it;
			}
			else
			{
				// Explore the whole neighborhood of s1 (this 'move' shadows
				// the shaking move above on purpose).
				Move<R, ADS>* move = geraMovimentoValido(it, s1);
				while ((!it.isDone()) && (move->canBeApplied(s1)))
				{
					Solution<R, ADS>& s2 = s1.clone();
					Move<R, ADS>& mov_rev = move->apply(s2);
					delete &mov_rev;
					delete move;

					bool added = pfMethod.addSolution(pDominance, pDominanceWeak, D, s2);
					if (added)
						cout << "Sol ADCIONADA NA POOL" << endl;
					delete &s2;

					it.next();
					if (!it.isDone())
						move = geraMovimentoValido(it, s1);
				}
				// fix: release the iterator here unconditionally. The original
				// ran a second "if (it.isDone()) delete &it;" after BOTH
				// branches, calling isDone() on an already-deleted iterator
				// (use-after-free / double delete) when the neighborhood was
				// empty.
				delete &it;
			}
			delete &s1;

			// fix: keep the visited bookkeeping aligned with the archive.
			// addSolution may grow (or shrink) D, but visited was never
			// resized, so visited[ind] above could index out of bounds
			// (undefined behavior) once the archive grew.
			visited.resize(D.size(), false);

			// If every member has been visited, clear all marks.
			int verifica = -1;
			for (int v = 0; v < D.size(); v++)
			{
				if (visited[v] == false)
				{
					v = D.size();
					verifica = 0;
				}
			}
			if (verifica == -1)
				for (int v = 0; v < D.size(); v++)
					visited[v] = false;
		}

		p_0 = D;
	}

	// Returns the first applicable move at or after the iterator's current
	// position, or NULL when the iterator is exhausted. Rejected moves are
	// deleted and the iterator advanced past them.
	Move<R, ADS>* geraMovimentoValido(NSIterator<R, ADS>& it, Solution<R, ADS>& s)
	{
		Move<R, ADS>* move = NULL;
		if (it.isDone())
			return NULL;
		else
			move = &it.current();

		while (!move->canBeApplied(s))
		{
			delete move;
			it.next();
			if (!it.isDone())
				move = &it.current();
			else
			{
				return NULL;
			}
		}

		return move;
	}
};
}
#endif /*MULTIOBJECTIVEVNS_HPP_*/
|
proteanblank/ags
|
Common/util/stream.cpp
|
//=============================================================================
//
// Adventure Game Studio (AGS)
//
// Copyright (C) 1999-2011 <NAME> and 2011-20xx others
// The full list of copyright holders can be found in the Copyright.txt
// file, which is part of this source code distribution.
//
// The AGS source code is provided under the Artistic License 2.0.
// A copy of this license can be found in the file License.txt and at
// http://www.opensource.org/licenses/artistic-license-2.0.php
//
//=============================================================================
#include "util/stream.h"
#include <algorithm>
namespace AGS
{
namespace Common
{
size_t Stream::WriteByteCount(uint8_t b, size_t count)
{
if (!CanWrite())
return 0;
size_t size = 0;
for (; count > 0; --count, ++size)
{
if (WriteByte(b) < 0)
break;
}
return size;
}
// Copies up to 'length' bytes from 'in' to 'out' in 4 KB chunks.
// Stops early when the source is exhausted (Read returns 0) or the
// destination refuses data (Write returns 0). Returns the number of bytes
// actually written to 'out'.
soff_t CopyStream(Stream *in, Stream *out, soff_t length)
{
    char buf[4096];
    soff_t wrote_num = 0;
    while (length > 0)
    {
        size_t to_read = (size_t)std::min((soff_t)sizeof(buf), length);
        size_t was_read = in->Read(buf, to_read);
        if (was_read == 0)
            return wrote_num; // source exhausted (or read error)
        length -= was_read;
        size_t to_write = was_read;
        // A single Write may be partial; loop until this chunk is flushed.
        // (Cleanup: removed the stray empty statements after these blocks.)
        while (to_write > 0)
        {
            size_t wrote = out->Write(buf + was_read - to_write, to_write);
            if (wrote == 0)
                return wrote_num; // destination cannot accept more data
            to_write -= wrote;
            wrote_num += wrote;
        }
    }
    return wrote_num;
}
} // namespace Common
} // namespace AGS
|
dentou/Hive2Hive
|
org.hive2hive.core/src/main/java/org/hive2hive/core/processes/share/ShareFolderNotificationMessageFactory.java
|
package org.hive2hive.core.processes.share;
import net.tomp2p.peers.PeerAddress;
import org.hive2hive.core.model.FolderIndex;
import org.hive2hive.core.model.UserPermission;
import org.hive2hive.core.network.messages.direct.BaseDirectMessage;
import org.hive2hive.core.network.userprofiletask.UserProfileTask;
import org.hive2hive.core.processes.notify.BaseNotificationMessageFactory;
import org.hive2hive.core.security.IH2HEncryption;
/**
 * Builds the notifications sent out when a folder is shared with an
 * additional user: no direct messages to the sharer's own clients, and a
 * {@link ShareFolderUserProfileTask} for the profiles of the other users.
 */
public class ShareFolderNotificationMessageFactory extends BaseNotificationMessageFactory {

	// Index node of the folder being shared.
	private final FolderIndex fileNode;
	// Permission entry for the user the folder was just shared with.
	private final UserPermission addedSharer;

	public ShareFolderNotificationMessageFactory(IH2HEncryption encryption, FolderIndex fileNode, UserPermission addedSharer) {
		super(encryption);
		this.fileNode = fileNode;
		this.addedSharer = addedSharer;
	}

	@Override
	public BaseDirectMessage createPrivateNotificationMessage(PeerAddress receiver) {
		// own clients must not be notified
		return null;
	}

	@Override
	public UserProfileTask createUserProfileTask(String sender) {
		return new ShareFolderUserProfileTask(sender, generateProtectionKeys(), fileNode, addedSharer);
	}
}
|
Barchid/CAR-TP01
|
src/main/java/ftp/controls/FtpMkdControl.java
|
package ftp.controls;
import java.io.IOException;
import java.nio.file.AccessDeniedException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Paths;
import ftp.FtpCommand;
import ftp.FtpReply;
import ftp.SessionStore;
/**
* @author <NAME>
*
* Controller used for the FTP command "MKD [directory path]"
*/
public class FtpMkdControl extends FtpControl {
	/**
	 * @param store the store of the client's connection
	 */
	public FtpMkdControl(SessionStore store) {
		super(store);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see ftp.controls.FtpControl#handle(ftp.FtpCommand)
	 */
	@Override
	public FtpReply handle(FtpCommand command) throws IOException {
		// Reject unauthenticated sessions.
		if (!this.store.isLoggedIn()) {
			return new FtpReply(5, 3, 0, "Please log in with USER and PASS first.");
		}
		String dirPath = command.getArg();
		if (dirPath == null) {
			return new FtpReply(5, 0, 1, "Syntax error");
		}
		// Absolute paths ('/...') resolve against the FTP root; relative
		// paths against the session's current directory.
		String parentPath = dirPath.charAt(0) == '/' ? this.store.getRootDirectory() : this.store.getCurrentDirectory();
		if (dirPath.charAt(0) == '/') {
			dirPath = dirPath.substring(1);
		}
		// in windows implementation, the path can contain a \, so we have to kill it
		if (dirPath.length() != 0 && dirPath.charAt(0) == '\\') {
			dirPath = dirPath.substring(1);
		}
		try {
			// Creates any missing intermediate directories as well.
			Files.createDirectories(Paths.get(parentPath, dirPath));
		} catch (FileAlreadyExistsException e) {
			// NOTE(review): createDirectories throws this only when a path
			// element exists and is NOT a directory, so the isDirectory()
			// branch below may be unreachable — confirm intended.
			if (Files.isDirectory(Paths.get(parentPath, dirPath))) {
				return new FtpReply(5, 5, 0, "Directory already exists.");
			} else {
				return new FtpReply(5, 5, 0, "File with same name already exists.");
			}
		} catch (InvalidPathException ex) {
			return new FtpReply(5, 0, 1, "Syntax error : invalid path");
		} catch (SecurityException ex) {
			// NOTE(review): filesystem permission failures surface as
			// AccessDeniedException (an IOException) and propagate to the
			// caller; this only catches SecurityManager denials — confirm.
			return new FtpReply(5, 5, 0, "Permission denied");
		}
		return new FtpReply(2, 5, 7, dirPath + " created successfully");
	}
}
|
razsilev/TelerikAcademy_Homework
|
JavaScript/JS_Part_One/Homeworks/Js_part_One_Homeworks/11_Strings/scripts/09-extract-emails.js
|
<filename>JavaScript/JS_Part_One/Homeworks/Js_part_One_Homeworks/11_Strings/scripts/09-extract-emails.js
/*
* 9. Write a function for extracting all email addresses
* from given text. All substrings that match the format
* <identifier>@<host>…<domain> should be recognized as emails.
* Return the emails as array of strings.
*/
/*
 * Extracts every substring of the form <identifier>@<host>.<domain>
 * (word characters only, 2-4 letter TLD) from the given text.
 * Returns an array of matches, or null when the text contains none
 * (String.prototype.match with the 'g' flag returns null on no match).
 */
function extractEmails(text) {
    return text.match(/\b\w+@\w+[.]\w{2,4}\b/g);
}
// Demo driver (jsConsole is an external helper from the exercise framework).
// NOTE(review): the addresses in this copy were redacted to the literal
// placeholder '<EMAIL>', which does not match the email pattern — with this
// text extractEmails returns null and the loop below would throw; the
// original presumably contained real addresses.
var text = 'same mails <EMAIL> anather one <EMAIL>.' +
    'invalid invalid invalid <EMAIL> mail';
var mails = extractEmails(text);
jsConsole.writeLine('text: ' + text + '<br />');
jsConsole.writeLine('mails from text:');
// Print each extracted address on its own line.
for (var i = 0; i < mails.length; i += 1) {
    jsConsole.writeLine(mails[i]);
}
|
AsahiOS/gate
|
usr/src/uts/common/sys/1394/targets/dcam1394/dcam_frame.h
|
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License, Version 1.0 only
* (the "License"). You may not use this file except in compliance
* with the License.
*
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
* or http://www.opensolaris.org/os/licensing.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright 2005 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
#ifndef _SYS_1394_TARGETS_DCAM1394_FRAME_H
#define _SYS_1394_TARGETS_DCAM1394_FRAME_H
#pragma ident "%Z%%M% %I% %E% SMI"
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Frame-reception entry points of the dcam1394 camera driver.
 * NOTE(review): descriptions below are inferred from the names only —
 * confirm against dcam_frame.c before relying on them.
 */
/* Presumably backs the frame-receive-start ioctl for the given instance. */
int dcam1394_ioctl_frame_rcv_start(dcam_state_t *softc_p);
/*
 * Prepare frame reception for the given video mode and frame rate,
 * with a ring buffer of ring_buff_num_frames frames.
 */
int dcam_frame_rcv_init(dcam_state_t *softc_p, int vid_mode,
    int frame_rate, int ring_buff_num_frames);
/* Tear down what dcam_frame_rcv_init() set up. */
int dcam_frame_rcv_fini(dcam_state_t *softc_p);
/* Start receiving frames. */
int dcam_frame_rcv_start(dcam_state_t *softc_p);
/* Stop receiving frames. */
int dcam_frame_rcv_stop(dcam_state_t *softc_p);
/* IXL completion callback invoked when a frame transfer finishes. */
void dcam_frame_is_done(void *ssp, ixl1394_callback_t *ixlp);
#ifdef __cplusplus
}
#endif
#endif /* _SYS_1394_TARGETS_DCAM1394_FRAME_H */
|
leeokdkpvv5c/UIUC-data-miningv
|
clivia-assembly-base/clivia-httpClient-assembly-base/src/main/java/org/palading/clivia/httpClient/ContentType.java
|
<reponame>leeokdkpvv5c/UIUC-data-miningv<gh_stars>10-100
package org.palading.clivia.httpClient;
/**
* @author palading_cr
* @title ContentType
* @project clivia-gateway
*/
/** MIME content-type string constants used when building HTTP requests. */
public class ContentType {
    /** Form submission encoded as key=value pairs in the request body. */
    public static final String APPLICATION_FORM_URLENCODED = "application/x-www-form-urlencoded";
    /** JSON request/response bodies. */
    public static final String APPLICATION_JSON = "application/json";
    /** Multipart bodies, typically used for file uploads. */
    public static final String MULTIPART_FORM_DATA = "multipart/form-data";
}
|
bbrinx/newsfeed
|
src/server/rss/fiveThirtyEight/index.js
|
const Parser = require('./parser')
const parser = new Parser()

// Every FiveThirtyEight feed shares the same URL shape; only the section
// slug differs. Map each exported getter name to its slug so the seven
// near-identical functions are generated instead of hand-written.
const SECTIONS = {
  getAll: 'all',
  getFeatures: 'features',
  getPolitics: 'politics',
  getSports: 'sports',
  getEconomics: 'economics',
  getScience: 'science',
  getLife: 'life',
}

// Fetch one RSS feed and wrap the parsed articles with the publisher name.
const send = async (url) => {
  const articles = await parser.parseRss(url)
  return { publisher: 'FiveThirtyEight', articles }
}

// Build the public API: one zero-argument getter per section, each
// returning a promise for { publisher, articles }.
const api = {}
Object.keys(SECTIONS).forEach(function (name) {
  api[name] = function () {
    return send('https://fivethirtyeight.com/' + SECTIONS[name] + '/feed/')
  }
})

module.exports = api
|
xavluiz/xdb
|
src/main/java/com/baddata/exception/BaddataException.java
|
<reponame>xavluiz/xdb
/**
* Copyright (c) 2016 by Baddata.
* All rights reserved.
*/
package com.baddata.exception;
/**
 * Base checked exception type for Baddata application errors.
 */
public class BaddataException extends Exception {

    // Serialization version for this Exception subclass.
    private static final long serialVersionUID = 1L;

    /**
     * Creates an exception carrying a descriptive message.
     *
     * @param message description of the failure
     */
    public BaddataException(String message) {
        super(message);
    }

    /**
     * Creates an exception carrying a message and the underlying cause.
     *
     * @param message description of the failure
     * @param cause the exception that triggered this one
     */
    public BaddataException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
Celebrate-future/jzy3d-api
|
jzy3d-core/src/main/java/org/jzy3d/plot2d/primitives/LineSerie2d.java
|
package org.jzy3d.plot2d.primitives;
import java.util.List;
import org.jzy3d.colors.Color;
import org.jzy3d.maths.Coord2d;
import org.jzy3d.maths.Coord3d;
import org.jzy3d.plot3d.primitives.ConcurrentLineStrip;
import org.jzy3d.plot3d.primitives.Point;
/**
 * A 2d serie rendered as a continuous line, backed by a
 * {@link ConcurrentLineStrip}. Every 2d coordinate is lifted to 3d with
 * z = 0; the serie color is stored as the strip's wireframe color.
 */
public class LineSerie2d implements Serie2d {
    // Underlying 3d line strip holding every appended point.
    protected ConcurrentLineStrip line;
    // Serie name returned by getName().
    protected String name;

    /** Creates an empty line serie with the given name. */
    public LineSerie2d(String name) {
        this.name = name;
        this.line = new ConcurrentLineStrip();
    }

    /** Appends the point (x, y) at z = 0. */
    @Override
    public void add(float x, float y) {
        line.add(new Point(new Coord3d(x, y, 0)));
    }

    /** Appends the point (x, y) at z = 0 (double overload). */
    @Override
    public void add(double x, double y) {
        line.add(new Point(new Coord3d(x, y, 0)));
    }

    /** Appends the 2d coordinate c at z = 0. */
    @Override
    public void add(Coord2d c) {
        line.add(new Point(new Coord3d(c.x, c.y, 0)));
    }

    /** Appends the 2d coordinate c at z = 0 with a per-point color. */
    @Override
    public void add(Coord2d c, Color color) {
        line.add(new Point(new Coord3d(c.x, c.y, 0), color));
    }

    /** Appends the point (x, y) at z = 0 with a per-point color. */
    @Override
    public void add(float x, float y, Color color) {
        line.add(new Point(new Coord3d(x, y, 0), color));
    }

    /** Appends the point (x, y) at z = 0 with a per-point color (double overload). */
    @Override
    public void add(double x, double y, Color color) {
        line.add(new Point(new Coord3d(x, y, 0), color));
    }

    /** Appends every coordinate of c, in order, at z = 0. */
    @Override
    public void add(List<Coord2d> c) {
        for (Coord2d c2 : c) {
            line.add(new Point(new Coord3d(c2.x, c2.y, 0)));
        }
    }

    /** Sets the serie color (stored as the strip's wireframe color). */
    @Override
    public void setColor(Color color) {
        line.setWireframeColor(color);
    }

    /** Returns the serie color (the strip's wireframe color). */
    @Override
    public Color getColor() {
        return line.getWireframeColor();
    }

    /** Returns the serie name given at construction. */
    @Override
    public String getName() {
        return name;
    }

    /** Exposes the underlying drawable line strip. */
    @Override
    public ConcurrentLineStrip getDrawable() {
        return line;
    }

    /** Removes every point from the serie. */
    @Override
    public void clear() {
        line.clear();
    }

    /** Sets the line width (stored as the strip's wireframe width). */
    @Override
    public void setWidth(int width) {
        line.setWireframeWidth(width);
    }
}
|
LoveWFan/MPlayer
|
labase/src/main/java/ma/bay/com/labase/common/glide/BlurTransform.java
|
package ma.bay.com.labase.common.glide;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ColorMatrix;
import android.graphics.ColorMatrixColorFilter;
import android.graphics.Paint;
import androidx.annotation.NonNull;
import com.bumptech.glide.load.engine.bitmap_recycle.BitmapPool;
import com.bumptech.glide.load.resource.bitmap.BitmapTransformation;
import java.security.MessageDigest;
/**
 * Glide bitmap transformation that brightens an image and then blurs it via
 * {@link FastBlur}, typically used for blurred-background artwork.
 */
public class BlurTransform extends BitmapTransformation {

    private static final String ID = BlurTransform.class.getSimpleName();
    private static final byte[] ID_BYTES = ID.getBytes(CHARSET);

    /**
     * Brightens {@code source} by +20 per RGB channel, then blurs it.
     *
     * @param source     bitmap to transform; may be null
     * @param bitmapPool pool used to obtain the intermediate bitmap
     * @return the blurred bitmap, or null when {@code source} is null
     */
    public Bitmap blur(Bitmap source, BitmapPool bitmapPool) {
        if (source == null) {
            return null;
        }
        float scaleFactor = 1f;
        float radius = 10;
        Bitmap overlay = bitmapPool.get((int) (source.getWidth() / scaleFactor), (int) (source.getHeight() / scaleFactor),
                Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(overlay);
        canvas.scale(scaleFactor, scaleFactor);
        // Brighten by +20 on each RGB channel. ColorMatrix.set() replaces
        // the entire matrix, so the setSaturation(1.0f) call the original
        // code made just before it had no effect and was removed.
        ColorMatrix cMatrix = new ColorMatrix();
        int brightness = 20;
        cMatrix.set(new float[]{1, 0, 0, 0, brightness, 0, 1, 0, 0, brightness, 0, 0, 1, 0, brightness, 0, 0, 0, 1, 0});
        Paint paint = new Paint();
        paint.setFlags(Paint.FILTER_BITMAP_FLAG);
        paint.setColorFilter(new ColorMatrixColorFilter(cMatrix));
        canvas.drawBitmap(source, 0, 0, paint);
        overlay = FastBlur.doBlur(overlay, (int) radius, true);
        return overlay;
    }

    @Override
    protected Bitmap transform(@NonNull BitmapPool bitmapPool, @NonNull Bitmap bitmap, int i, int i1) {
        return blur(bitmap, bitmapPool);
    }

    @Override
    public void updateDiskCacheKey(MessageDigest messageDigest) {
        messageDigest.update(ID_BYTES);
    }

    // Glide's Transformation contract requires equals/hashCode so cached
    // results can be matched and reused; all instances are interchangeable.
    @Override
    public boolean equals(Object o) {
        return o instanceof BlurTransform;
    }

    @Override
    public int hashCode() {
        return ID.hashCode();
    }
}
|
BenLloydPearson/nylas-java
|
src/main/java/com/nylas/UnsupportedQuery.java
|
<reponame>BenLloydPearson/nylas-java
package com.nylas;
/*
 * Placeholder query for collections that do not support query parameters.
 * Currently does nothing, but required for RestfulCollection implementation for consistency sake
 */
class UnsupportedQuery extends RestfulQuery<UnsupportedQuery> {
}
|
wedusk101/sig
|
src/sig/gs_quat.cpp
|
<reponame>wedusk101/sig<filename>src/sig/gs_quat.cpp
/*=======================================================================
Copyright (c) 2018-2019 <NAME>.
This software is distributed under the Apache License, Version 2.0.
All copies must contain the full copyright notice licence.txt located
at the base folder of the distribution.
=======================================================================*/
# include <sig/gs_quat.h>
# include <sig/gs_euler.h>
# include <sig/gs_string.h>
# include <sig/gs_random.h>
//============================== Static Data ====================================
const GsQuat GsQuat::null ( 1.0f, 0, 0, 0 ); // w=cos(0)=1, (0,0,0)=axis with len sin(0)
//============================ public members ====================================
void GsQuat::normalize ()
{
float f = sqrtf( norm2() );
if ( f==0 ) return;
w/=f; x/=f; y/=f; z/=f;
if ( w<0 ) { w=-w; x=-x; y=-y; z=-z; }
}
// from "Effective Sampling and Distance Metrics for 3D Rigid Body Path Planning"
// Sets this quaternion to a random rotation built from one uniform radius
// sample s and two uniform angles t1, t2.
void GsQuat::setrandom ()
{
	float s = gs_random();          // uniform sample, presumably in [0,1] — see gs_random
	float s1 = sqrtf ( 1-s );
	float s2 = sqrtf ( s );
	float t1 = gs2pi * gs_random(); // two uniform angles in [0,2pi)
	float t2 = gs2pi * gs_random();
	w = cosf(t2) * s2;
	x = sinf(t1) * s1;
	y = cosf(t1) * s1;
	z = sinf(t2) * s2;
}
// Forwards to the global angle() so the method below can call it without
// clashing with member names.
inline float GSVECANGLE ( const GsVec& v1, const GsVec& v2 ) { return ::angle(v1,v2); }

// Sets this quaternion to the rotation taking the direction of v1 into the
// direction of v2.
void GsQuat::set ( const GsVec& v1, const GsVec& v2 )
{
	/* alternative test, probably faster, implementation:
	   instead of calling acos in angle, do:
	   v1.normalize();
	   v2.normalize();
	   r = cross(v1,v2);
	   s = sqrt ( 2 * ( 1+dot(v1,v2) ) );
	   q = ( 2s, r/s ); */
	float ang = GSVECANGLE(v1,v2);
	// Near-opposite vectors: cross(v1,v2) degenerates to null, so fall
	// back to an arbitrary fixed axis (X) for the pi rotation.
	if ( GS_NEXT(ang,gspi,gstiny) )
		set ( GsVec::i, gspi );
	else
		set ( cross(v1,v2), ang );
}
// Sets this quaternion from a rotation axis (not required to be unit
// length) and an angle in radians. A null axis or zero angle yields the
// identity quaternion.
void GsQuat::set ( const GsVec& axis, float radians )
{
	float f;
	// normalize axis:
	x=axis.x; y=axis.y; z=axis.z;
	f = x*x + y*y + z*z;
	if ( f==0||radians==0 ) { *this=null; return; }
	if ( f>0 ) { f=sqrtf(f); x/=f; y/=f; z/=f; }
	// set the quaternion as ( cos(a/2), sin(a/2)*axis ):
	radians/=2;
	f = sinf ( radians );
	x*=f; y*=f; z*=f;
	w = cosf ( radians );
}
// Sets this quaternion from an axis-angle vector: the direction is the
// rotation axis and the length is the rotation angle in radians.
void GsQuat::set ( const GsVec& axisangle )
{
	float ang;
	// normalize axis and extract the angle (the vector length):
	x=axisangle.x; y=axisangle.y; z=axisangle.z;
	ang = x*x + y*y + z*z;
	if ( ang>0 )
	{	ang = sqrtf ( ang );
		x/=ang; y/=ang; z/=ang;
	}
	// set the quaternion as ( cos(a/2), sin(a/2)*axis ):
	ang/=2;
	w = cosf ( ang );
	ang = sinf ( ang );
	x*=ang; y*=ang; z*=ang;
}
// Extracts the unit rotation axis and the angle in radians, normalized to
// the (-pi,pi] range by flipping the axis when needed.
void GsQuat::get ( GsVec& axis, float& radians ) const
{
	// if GsQuat==(1,0,0,0), the axis will be null, so we
	// set the axis to (1,0,0) (GsVec::i); the angle will be 0.
	// this is also done in GsQuat::axis()
	axis.set ( x, y, z );
	float n = axis.norm();
	if ( n==0 ) { axis=GsVec::i; radians=0; return; }
	axis/=n;
	radians = w==1.0f? 0 : 2.0f * acosf ( w ); // w==1 shortcut avoids acosf noise
	if ( radians>gspi ) { axis=-axis; radians-=gs2pi; }
}
// Encodes the rotation as a single axis-angle vector: axis direction
// rescaled so its length equals the rotation angle in radians.
void GsQuat::get ( GsVec& axisangle ) const
{
	axisangle.set ( x, y, z );
	axisangle.len ( 2.0f * acosf ( w ) ); // rescale to length == angle
}
// Sets this quaternion from the rotation encoded in matrix m.
void GsQuat::set ( const GsMat& m )
{
	mat2quat(m,*this);
}

// Writes the equivalent rotation matrix into m and returns it.
GsMat& GsQuat::get ( GsMat& m ) const
{
	quat2mat(*this,m);
	return m;
}
// Rotates vector v by this quaternion, computing q * (0,v) * conj(q) and
// returning the vector part of the result.
GsVec GsQuat::apply ( const GsVec &v ) const
{
	// compare with: return (1-w*w)p + 2(dot(v,p))v + dot(2w,cross(v,p))
	GsQuat qv ( 0, v.x, v.y, v.z ); // pure quaternion built from v
	qv = (*this) * qv * conjugate();
	return GsVec ( qv.x, qv.y, qv.z );
}
//=================================== Friend Functions ===================================
// Hamilton product of two quaternions (composition of the two rotations).
GsQuat operator * ( const GsQuat &q1, const GsQuat &q2 )
{
	GsQuat q;
	// the following is the same as:
	// ( w1*w2-dot(v1,v2), w1*v1 + w2*v2 + cross (v1,v2) )
	q.w = (q1.w*q2.w) - (q1.x*q2.x + q1.y*q2.y + q1.z*q2.z); // w1*w2-dot(v1,v2)
	q.x = q1.y*q2.z - q1.z*q2.y; // cross (q1.v,q2.v)
	q.y = q1.z*q2.x - q1.x*q2.z;
	q.z = q1.x*q2.y - q1.y*q2.x;
	q.x += (q1.x*q2.w) + (q2.x*q1.w); // += w1*v1 + w2*v2
	q.y += (q1.y*q2.w) + (q2.y*q1.w);
	q.z += (q1.z*q2.w) + (q2.z*q1.w);
	return q;
}
// Exact component-wise equality test (no epsilon tolerance).
bool operator == ( const GsQuat &q1, const GsQuat &q2 )
{
	if ( q1.w!=q2.w ) return false;
	if ( q1.x!=q2.x ) return false;
	if ( q1.y!=q2.y ) return false;
	return q1.z==q2.z;
}
// Inequality test: simply the negation of operator==, instead of the
// original duplicated component comparisons with a redundant ?: expression.
bool operator != ( const GsQuat &q1, const GsQuat &q2 )
{
	return !(q1==q2);
}
//=================================== Global Functions ===================================
// Swaps the contents of q1 and q2 component by component.
void swap ( GsQuat &q1, GsQuat &q2 )
{
	// NOTE(review): tmp looks unused but is presumably referenced inside
	// the GS_SWAP macro — confirm against its definition before removing.
	float tmp;
	GS_SWAP(q1.w,q2.w);
	GS_SWAP(q1.x,q2.x);
	GS_SWAP(q1.y,q2.y);
	GS_SWAP(q1.z,q2.z);
}
// Spherical linear interpolation between two quaternions stored as raw
// float[4] arrays, writing the (renormalized) result into q.
// NOTE(review): when the inputs point in opposite hemispheres, q1const is
// negated IN PLACE and never restored — the caller's array is left with
// flipped signs (same rotation, opposite representative). Confirm callers
// tolerate this.
void gslerp ( const float* q1const, const float* q2, float t, float* q )
{
	float* q1 = (float*) q1const; // q1 is actually a "mutable const"
	float dot = q1[0]*q2[0] + q1[1]*q2[1] + q1[2]*q2[2] + q1[3]*q2[3];
	if ( dot < 0 )
	{	// the quaternions are pointing in opposite directions, so
		// use the equivalent alternative representation for q1
		q1[0]=-q1[0]; q1[1]=-q1[1]; q1[2]=-q1[2]; q1[3]=-q1[3];
		dot = -dot;
	}
	// interpolation factors
	float r, s;
	// decide according to an epsilon (30fps motions are of E-6 order)
	// this IS needed for baked motions in order to avoid dealing with the E-6 values in floats
	if ( 1.0f-dot < 0.01f )
	{	// the quaternions are nearly parallel, just use linear interpolation
		r = 1-t;
		s = t;
	}
	else
	{	// calculate spherical linear interpolation factors
		float a = acosf(dot);
		float g = 1.0f / sinf(a);
		r = sinf ( (1-t)*a ) * g;
		s = sinf ( t*a ) * g;
	}
	// set the interpolated quaternion
	q[0] = r*q1[0] + s*q2[0];
	q[1] = r*q1[1] + s*q2[1];
	q[2] = r*q1[2] + s*q2[2];
	q[3] = r*q1[3] + s*q2[3];
	// check if we should normalize it, typically f will be inside [0.99,1.01],
	// here we choose the level to renormalize the result:
	float f = q[0]*q[0] + q[1]*q[1] + q[2]*q[2] + q[3]*q[3];
	if ( (f>0.999f&&f<1.001f) || f==0 ) return;
	f = sqrtf(f);
	q[0]/=f; q[1]/=f; q[2]/=f; q[3]/=f;
	// keep the canonical sign convention (first component non-negative):
	if ( q[0]<0 ) { q[0]=-q[0]; q[1]=-q[1]; q[2]=-q[2]; q[3]=-q[3]; }
}
// Prints q in "axis <x> <y> <z> ang <degrees>" form, matching the default
// format accepted by operator>> below.
GsOutput& operator<< ( GsOutput& out, const GsQuat& q )
{
	GsVec axis; float ang; q.get(axis,ang);
	return out << "axis " << axis << " ang " << GS_TODEG(ang);
}
// Reads a quaternion from in. Default format: an optional keyword (e.g.
// "axis") followed by the axis vector, an optional "ang" keyword, and the
// angle in degrees. If the leading keyword is "xzy", three Euler angles in
// degrees are read instead and converted through a rotation matrix.
GsInput& operator>> ( GsInput& in, GsQuat& q )
{
	enum Format { AA, XZY } fmt;
	GsVec vec;
	float ang;
	fmt = AA; // axis-angle is the default
	if ( in.check()==GsInput::String )
	{	in.get(); // "axis" or another supported keyword
		if ( in.ltoken()=="xzy" ) fmt=XZY;
	}
	switch (fmt)
	{	case AA: {	in >> vec;
				if ( in.check()==GsInput::String ) in.get(); // "ang"
				in >> ang;
				q.set ( vec, GS_TORAD(ang) );
			} break;
		case XZY: {	GsMat m;
				in>>vec; // get euler angles here
				gs_rot_xzy ( m, GS_TORAD(vec.x), GS_TORAD(vec.y), GS_TORAD(vec.z), 'L' ); // build rot mat
				// for a generic order, use instead:
				// enum gsEulerOrder { gsXYZ=123, gsXZY=132, gsYXZ=213, gsYZX=231, gsZXY=312, gsZYX=321 };
				// void gs_rot ( gsEulerOrder order, GsMat& m, float rx, float ry, float rz, char fmt );
				mat2quat ( m, q );
			} break;
		default: q = GsQuat::null;
	}
	return in;
}
//=============================== swing-twist conversions ======================================
// Builds q = Qswing(sx,sy) * Qtwist(tw): the swing given in axis-angle
// form (sx,sy,0), followed on the right by a twist of tw about Z.
void st2quat ( float sx, float sy, float tw, GsQuat& q )
{
	q.set ( GsVec(sx,sy,0.0f) ); // swing component in axis-angle
	if ( tw==0 ) return;         // no twist to compose
	GsQuat twist ( GsVec::k, tw );
	q = q * twist;
}
// Builds q = Qtwist(tw) * Qswing(sx,sy): a twist of tw about Z composed on
// the left of the swing given in axis-angle form (sx,sy,0).
void ts2quat ( float tw, float sx, float sy, GsQuat& q )
{
	q.set ( GsVec(sx,sy,0.0f) ); // swing component in axis-angle
	if ( tw==0 ) return;         // no twist to compose
	GsQuat twist ( GsVec::k, tw );
	q = twist * q;
}
// Decomposes q into swing (sx,sy) followed by twist tw about Z, i.e. the
// inverse of st2quat. Returns false at the swing singularity, where the
// decomposition is undefined.
bool quat2st ( const GsQuat& q, float& sx, float& sy, float& tw )
{
	// Decompose q into swing-twist (from Paolo's thesis).
	// First test if the swing is in the singularity:
	if ( GS_NEXTZ(q.z,gstiny) && GS_NEXTZ(q.w,gstiny) ) { sx=sy=gspi; tw=0; return false; }
	// Get q in double and normalize q to always have qw>0 :
	double qw, qx, qy, qz;
	if ( q.w<0 )
	{ qw=-q.w; qx=-q.x; qy=-q.y; qz=-q.z; }
	else
	{ qw=q.w; qx=q.x; qy=q.y; qz=q.z; }
	// Get the twist t:
	double t = 2.0 * atan2(qz,qw);
	// Swing half-angle bet and its sinc factor (guarded near zero):
	double bet = atan2( sqrt(qx*qx+qy*qy), sqrt(qz*qz+qw*qw) );
	double gam = t/2.0;
	double sinc = GS_NEXTZ(bet,gstiny)? 1.0 : sin(bet)/bet;
	double singam = sin(gam);
	double cosgam = cos(gam);
	// Rotate (qx,qy) back by gam and rescale to the swing axis-angle:
	sx = float( (2.0/sinc) * (cosgam*qx - singam*qy) );
	sy = float( (2.0/sinc) * (singam*qx + cosgam*qy) );
	tw = float( t );
	return true;
}
// Decomposes q into twist tw about Z followed by swing (sx,sy), i.e. the
// inverse of ts2quat. Returns false at the swing singularity, where the
// decomposition is undefined.
bool quat2ts ( const GsQuat& q, float& tw, float& sx, float& sy )
{
	// First test if the swing is in the singularity:
	if ( GS_NEXTZ(q.z,gstiny) && GS_NEXTZ(q.w,gstiny) ) { sx=sy=gspi; tw=0; return false; }
	// Decompose q into twist-swing
	// by solving the equation Qtwist(t*2) * Qswing(s*2) = q
	// note: (x,y) is the normalized swing axis (x*x+y*y=1)
	// ( Ct 0 0 St ) * ( Cs xSs ySs 0 ) = ( qw qx qy qz )
	// ( CtCs xSsCt-yStSs xStSs+ySsCt StCs ) = ( qw qx qy qz ) (1)
	// From (1) CtCs / StCs = qw/qz => Ct/St = qw/qz => tan(t) = qz/qw (2)
	// The swing rotation/2 s comes from:
	// From (1) (CtCs)^2 + (StCs)^2 = qw^2 + qz^2 => Cs = sqrt ( qw^2 + qz^2 ) (3)
	// From (1) (xSsCt-yStSs)^2 + (xStSs+ySsCt)^2 = qx^2 + qy^2 => Ss = sqrt ( qx^2 + qy^2 ) (4)
	// From (1) : |SsCt -StSs| |x| = |qx|
	//            |StSs +SsCt| |y|   |qy| (5)
	// Normalize to qw>=0 so the twist stays in the principal range:
	double qw, qx, qy, qz;
	if ( q.w<0 )
	{ qw=-q.w; qx=-q.x; qy=-q.y; qz=-q.z; }
	else
	{ qw=q.w; qx=q.x; qy=q.y; qz=q.z; }
	double t = atan2 ( qz, qw ); // from (2)
	double s = atan2( sqrt(qx*qx+qy*qy), sqrt(qz*qz+qw*qw) ); // from (3) and (4)
	double x=0, y=0, sins=sin(s);
	if ( !GS_NEXTZ(sins,gstiny) )
	{	double sint = sin(t);
		double cost = cos(t);
		// by solving the linear system in (5):
		y = (-qx*sint + qy*cost)/sins;
		x = ( qx*cost + qy*sint)/sins;
	}
	// t and s are half-angles; scale back to full rotations:
	tw = float(2.0*t);
	sx = float(x*2.0*s);
	sy = float(y*2.0*s);
	return true;
}
// Converts the rotation in matrix m into quaternion q, branching on the
// trace / largest diagonal element for numerical stability.
void mat2quat ( const GsMat& m, GsQuat& q ) // PerfNote: this implementation can be optimized
{
	# define E(i) m(i)
	# define M(i,j) m.cget(i,j)
	# define Q(i) q.e[i+1] // Q(0)=x, Q(1)=y, Q(2)=z
	float s;
	float tr = E(0) + E(5) + E(10); // trace of the rotation block
	if ( tr>0 )
	{	// dominant trace: recover w first, then the vector part
		s = sqrtf ( 1.0f + tr );
		q.w = s / 2.0f;
		s = 0.5f / s;
		q.x = (E(6) - E(9)) * s;
		q.y = (E(8) - E(2)) * s;
		q.z = (E(1) - E(4)) * s;
	}
	else
	{	// pick the largest diagonal element i; recover component Q(i) first
		int i = M(1,1)>M(0,0)? 1:0;
		if ( M(2,2)> M(i,i) ) i=2;
		int j = (i+1)%3;
		int k = (j+1)%3;
		s = sqrtf ( (M(i,i) - (M(j,j)+M(k,k))) + 1.0f );
		Q(i) = s * 0.5f;
		if ( s!=0 ) // s should never be equal to 0 if matrix is orthogonal
			s = 0.5f / s;
		q.w = (M(j,k) - M(k,j)) * s;
		Q(j) = (M(i,j) + M(j,i)) * s;
		Q(k) = (M(i,k) + M(k,i)) * s;
	}
	// original code above was for column-major mat, fix result here:
	q.x=-q.x; q.y=-q.y; q.z=-q.z; // invert (for unit quat)
	# undef E
	# undef M
	# undef Q
}
// Writes the 3x3 rotation matrix equivalent to q into the nine reference
// arguments, grouped as (x1,y1,z1), (x2,y2,z2), (x3,y3,z3).
// NOTE: x2, y2 and z2 double as scratch storage for the doubled quaternion
// components before receiving their final values — the assignment order
// below is load-bearing.
void quat2mat ( const GsQuat& q,
				float& x1, float& y1, float& z1,
				float& x2, float& y2, float& z2,
				float& x3, float& y3, float& z3 )
{
	x2 = q.x+q.x; // temporarily 2x
	float x2x = x2*q.x;
	float x2y = x2*q.y;
	float x2z = x2*q.z;
	float x2w = x2*q.w;
	y2 = q.y+q.y; // temporarily 2y
	float y2y = y2*q.y;
	float y2z = y2*q.z;
	float y2w = y2*q.w;
	z2 = q.z+q.z; // temporarily 2z
	float z2z = z2*q.z;
	float z2w = z2*q.w;
	// final values (the scratch x2/y2/z2 are overwritten here):
	x1 = 1.0f - y2y - z2z; y1 = x2y - z2w;          z1 = x2z + y2w;
	x2 = x2y + z2w;        y2 = 1.0f - x2x - z2z;   z2 = y2z - x2w;
	x3 = x2z - y2w;        y3 = y2z + x2w;          z3 = 1.0f - x2x - y2y;
}
// Converts q into a full 4x4 matrix: the rotation block goes into elements
// (0,1,2), (4,5,6), (8,9,10); translation is zero and m[15]=1.
void quat2mat ( const GsQuat& q, GsMat& m )
{
	quat2mat ( q, m[0], m[1], m[2],
				  m[4], m[5], m[6],
				  m[8], m[9], m[10] );
	m[3] = m[7] = m[11] = m[12] = m[13] = m[14] = 0.0f;
	m[15] = 1.0f;
}
// Builds in m the 4x4 transformation combining rotation q with
// translation t.
void compose ( const GsQuat& q, const GsVec& t, GsMat& m )
{
	quat2mat ( q, m[0], m[1], m[2],
				  m[4], m[5], m[6],
				  m[8], m[9], m[10] );
	m.setrans ( t );           // fill the translation entries
	m.setl4 ( 0, 0, 0, 1.0f ); // complete the homogeneous last line
}
//================================== End of File =======================================
|
amelieykw/Mentoring
|
frontend/helpers/workerClient.js
|
<filename>frontend/helpers/workerClient.js
import socketClient from './socketClient';
import dataProcesser from './dataProcesser';
import { DEFAULT_DATA, URL } from '../config';
import fetchWithHeaders from '../utils/fetchWithHeaders';

// Accumulated chart state, rebuilt incrementally as updates arrive.
let chartData;
// Open websocket connection; unset while stopped.
let ws;

// Merge a new payload into the accumulated chart data and, when there is
// something to show, post it back to the main thread.
// (Renamed from the misspelled "recivedMessageFromSocket".)
function receivedMessageFromSocket(newData) {
  chartData = dataProcesser(newData, chartData, DEFAULT_DATA);
  if (chartData) postMessage(chartData);
}

console.log('worker on connection');

// Worker protocol: 'start' seeds the chart from the REST endpoint and then
// subscribes to live socket updates; 'close' tears the socket down.
onmessage = ({ data }) => {
  switch (data) {
    case 'start':
      fetchWithHeaders(URL.PRICES)
        .then((result) => {
          receivedMessageFromSocket(result);
        });
      ws = socketClient((newData) => receivedMessageFromSocket(newData));
      break;
    case 'close':
      if (ws) {
        ws.close();
        ws = null;
      }
      break;
    default:
      break;
  }
};
|
EliahKagan/old-practice-snapshot
|
main/kruskalmstrsub/kruskalmstrsub-2.c
|
<gh_stars>0
#ifdef _MSC_VER
#define _CRT_SECURE_NO_WARNINGS
#endif
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
/* Zero-initialized allocation that aborts the process instead of ever
 * returning NULL, so callers never need to check. */
static inline void *xcalloc(const size_t count, const size_t size)
{
    void *const p = calloc(count, size);
    if (p == NULL)
        abort();
    return p;
}
/* Disjoint-set union (union-find) forest with union by rank and path
 * compression. An element whose parent is itself is a set representative. */
struct dsu {
    int *parents;      /* parent link per element; roots point to themselves */
    int *ranks;        /* rank (tree-height bound) per root */
    int element_count; /* number of elements; set to -1 by dsu_free */
};
/* Create a disjoint-set forest of element_count singleton sets. */
static struct dsu dsu_make_sets(const int element_count)
{
    struct dsu d;
    d.parents = xcalloc(element_count, sizeof *d.parents);
    d.ranks = xcalloc(element_count, sizeof *d.ranks);
    d.element_count = element_count;

    /* Every element starts as the root of its own one-element set. */
    for (int i = 0; i < element_count; ++i)
        d.parents[i] = i;

    return d;
}
/* Release the arrays owned by *dp and poison the struct so accidental
 * reuse is easy to spot while debugging. */
static void dsu_free(struct dsu *const dp)
{
    free(dp->parents);
    dp->parents = NULL;

    free(dp->ranks);
    dp->ranks = NULL;

    dp->element_count = -1; /* sentinel: to make debugging easier */
}
/* Return the representative (root) of the set containing i, compressing
 * the traversed path so later lookups are near O(1). */
static int dsu_detail_find_set(const struct dsu *const dp, int i)
{
    /* First pass: walk up to the root. */
    int root = i;
    while (dp->parents[root] != root)
        root = dp->parents[root];

    /* Second pass: point every visited node directly at the root. */
    while (dp->parents[i] != root) {
        const int next = dp->parents[i];
        dp->parents[i] = root;
        i = next;
    }
    return root;
}
/* Unite the sets containing elements i and j. Returns true when a merge
 * happened, false when they were already in the same set. */
static bool dsu_union(const struct dsu *const dp, int i, int j)
{
    // Find the ancestors and stop if they are already the same.
    i = dsu_detail_find_set(dp, i);
    j = dsu_detail_find_set(dp, j);
    if (i == j) return false;

    // Unite by rank: attach the lower-ranked root under the higher one so
    // the tree stays shallow; equal ranks bump the kept root's rank.
    if (dp->ranks[i] < dp->ranks[j]) {
        dp->parents[i] = j;
    } else {
        if (dp->ranks[i] == dp->ranks[j]) ++dp->ranks[i];
        dp->parents[j] = i;
    }
    return true;
}
/* One weighted edge of the input graph (vertices are 1-based; see
 * kruskal() below). */
struct edge {
    int u;      /* first endpoint */
    int v;      /* second endpoint */
    int weight; /* edge weight */
};
/* Read one "u v weight" triple from stdin into *ep. The scanf result is
 * deliberately discarded (cast to void): input is assumed well-formed. */
static inline void edge_read(struct edge *const ep)
{
    (void)scanf("%d%d%d", &ep->u, &ep->v, &ep->weight);
}
/* qsort comparator: ascending by weight. Written with explicit
 * comparisons (not subtraction) to avoid integer overflow. */
static int edge_compare(const void *const p, const void *const q)
{
    const int a = ((const struct edge *)p)->weight;
    const int b = ((const struct edge *)q)->weight;

    if (a < b)
        return -1;
    if (a > b)
        return +1;
    return 0;
}
/* Allocate and read edge_count edges from stdin; the caller owns (and
 * must free) the returned array. */
static struct edge *read_all_edges(const int edge_count)
{
    struct edge *const edges = xcalloc(edge_count, sizeof *edges);
    for (int i = 0; i < edge_count; ++i) edge_read(&edges[i]);
    return edges;
}
/* Kruskal's algorithm: walk the weight-sorted edges, uniting components
 * and accumulating the weight of every edge that joins two previously
 * separate components. Returns the total MST (or forest) weight. */
static int kruskal(const int vertex_count, const int edge_count,
    const struct edge *const sorted_edges)
{
    int total_weight = 0;
    struct dsu d = dsu_make_sets(vertex_count + 1); // 1-based indexing
    const struct edge *const ep_end = sorted_edges + edge_count;
    for (const struct edge *ep = sorted_edges; ep != ep_end; ++ep)
        if (dsu_union(&d, ep->u, ep->v)) total_weight += ep->weight;
    dsu_free(&d);
    return total_weight;
}
/* Read "vertex_count edge_count" followed by edge_count "u v w" triples,
 * sort the edges by weight, and print the minimum spanning tree weight. */
int main(void)
{
    int vertex_count = 0, edge_count = 0;
    (void)scanf("%d%d", &vertex_count, &edge_count); /* input assumed well-formed */
    struct edge *edges = read_all_edges(edge_count);
    qsort(edges, edge_count, sizeof *edges, edge_compare);
    const int total_weight = kruskal(vertex_count, edge_count, edges);
    free(edges);
    edges = NULL;
    printf("%d\n", total_weight);
}
|
mrnetsun/korea
|
scripts/editor/scripts/language/nn-NO/list.js
|
// Install the Norwegian (nn-NO) labels and button captions into the
// list-properties dialog.
function loadTxt()
{
    // Labels in the order the txtLang elements appear in the document.
    var labels = [
        "Tall & bokstaver",
        "Bildepunkt",
        "Start - nr.",
        "Venstre marg",
        "Bruk bilde",
        "Venstre marg"
    ];
    var txtLang = document.getElementsByName("txtLang");
    for (var i = 0; i < labels.length; i += 1) {
        txtLang[i].innerHTML = labels[i];
    }
    document.getElementById("btnCancel").value = "Avbryt";
    document.getElementById("btnApply").value = "Oppdater";
    document.getElementById("btnOk").value = " Ok ";
}
// Translate a known English UI message to Norwegian; unknown messages map
// to the empty string.
function getTxt(s)
{
    if (s === "Please select a list.") {
        return "Velg en type.";
    }
    return "";
}
// Write the localized dialog title element into the document.
function writeTitle()
{
    document.write("<title>Punktoppstilling</title>")
}
|
zoho/zohocrm-scala-sdk-2.0
|
src/main/scala/com/zoho/crm/api/fields/Currency.scala
|
<filename>src/main/scala/com/zoho/crm/api/fields/Currency.scala<gh_stars>0
package com.zoho.crm.api.fields
import com.zoho.crm.api.util.Model
import scala.collection.mutable.HashMap
/** Metadata model for a currency-type CRM field: its rounding option and
  * decimal precision. Modified keys are tracked in keyModified —
  * presumably so only changed values are serialized; confirm against the
  * SDK's Model/serializer code. */
class Currency extends Model {
  // Rounding behaviour configured for the field; None until set.
  private var roundingOption:Option[String] = None
  // Number of decimal places; None until set.
  private var precision:Option[Int] = None
  // Per-key modification markers consulted by isKeyModified.
  private var keyModified:HashMap[String, Int] = HashMap()

  /** Returns the configured rounding option, if any. */
  def getRoundingOption() :Option[String] ={
    return this.roundingOption
  }

  /** Sets the rounding option and marks the key as modified. */
  def setRoundingOption( roundingOption: Option[String]) ={
    this.roundingOption = roundingOption
    this.keyModified("rounding_option") = 1
  }

  /** Returns the configured precision, if any. */
  def getPrecision() :Option[Int] ={
    return this.precision
  }

  /** Sets the precision and marks the key as modified. */
  def setPrecision( precision: Option[Int]) ={
    this.precision = precision
    this.keyModified("precision") = 1
  }

  /** Returns the modification marker for key, or None when the key was
    * never modified (note the mixed Int/None return, hence Any). */
  def isKeyModified( key: String) :Any ={
    if((( this.keyModified.contains(key))))
    {
      return this.keyModified(key)
    }
    return None
  }

  /** Records a modification marker for key. */
  def setKeyModified( key: String, modification: Int) ={
    this.keyModified(key) = modification
  }}
|
betopp/pathetix
|
programs/oksh-6.9/src/lex.c
|
/* $OpenBSD: lex.c,v 1.78 2018/01/15 14:58:05 jca Exp $ */
/*
* lexical analysis and source input
*/
#include <ctype.h>
#include <errno.h>
#include <libgen.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include "sh.h"
/*
* states while lexing word
*/
#define SINVALID -1 /* invalid state */
#define SBASE 0 /* outside any lexical constructs */
#define SWORD 1 /* implicit quoting for substitute() */
#define SLETPAREN 2 /* inside (( )), implicit quoting */
#define SSQUOTE 3 /* inside '' */
#define SDQUOTE 4 /* inside "" */
#define SBRACE 5 /* inside ${} */
#define SCSPAREN 6 /* inside $() */
#define SBQUOTE 7 /* inside `` */
#define SASPAREN 8 /* inside $(( )) */
#define SHEREDELIM 9 /* parsing <<,<<- delimiter */
#define SHEREDQUOTE 10 /* parsing " in <<,<<- delimiter */
#define SPATTERN 11 /* parsing *(...|...) pattern (*+?@!) */
#define STBRACE 12 /* parsing ${..[#%]..} */
#define SBRACEQ 13 /* inside "${}" */
/* Structure to keep track of the lexing state and the various pieces of info
* needed for each particular state.
*/
typedef struct lex_state Lex_state;
struct lex_state {
int ls_state;
union {
/* $(...) */
struct scsparen_info {
int nparen; /* count open parenthesis */
int csstate; /* XXX remove */
#define ls_scsparen ls_info.u_scsparen
} u_scsparen;
/* $((...)) */
struct sasparen_info {
int nparen; /* count open parenthesis */
int start; /* marks start of $(( in output str */
#define ls_sasparen ls_info.u_sasparen
} u_sasparen;
/* ((...)) */
struct sletparen_info {
int nparen; /* count open parenthesis */
#define ls_sletparen ls_info.u_sletparen
} u_sletparen;
/* `...` */
struct sbquote_info {
int indquotes; /* true if in double quotes: "`...`" */
#define ls_sbquote ls_info.u_sbquote
} u_sbquote;
Lex_state *base; /* used to point to next state block */
} ls_info;
};
/* Bounds of the currently active block of Lex_state entries; when statep
 * reaches end (or falls back to base), PUSH_STATE/POP_STATE switch blocks
 * via push_state_/pop_state_. */
typedef struct State_info State_info;
struct State_info {
	Lex_state *base; /* first entry of the active block */
	Lex_state *end;  /* one past the last entry of the active block */
};
static void readhere(struct ioword *);
static int getsc__(void);
static void getsc_line(Source *);
static int getsc_bn(void);
static char *get_brace_var(XString *, char *);
static int arraysub(char **);
static const char *ungetsc(int);
static void gethere(void);
static Lex_state *push_state_(State_info *, Lex_state *);
static Lex_state *pop_state_(State_info *, Lex_state *);
static char *special_prompt_expand(char *);
static int dopprompt(const char *, int, const char **, int);
int promptlen(const char *cp, const char **spp);
static int backslash_skip;
static int ignore_backslash_newline;
Source *source; /* yyparse/yylex source */
YYSTYPE yylval; /* result from yylex */
struct ioword *heres[HERES], **herep;
char ident[IDENT+1];
char **history; /* saved commands */
char **histptr; /* last history item */
uint32_t histsize; /* history size */
/* optimized getsc_bn() */
/* Fast path: consume the next character straight from the source buffer
 * unless it is NUL or a backslash (or a backslash skip is pending), in
 * which case the full getsc_bn() machinery runs. */
#define getsc()		(*source->str != '\0' && *source->str != '\\' \
			 && !backslash_skip ? *source->str++ : getsc_bn())
/* optimized getsc__() */
#define	getsc_()	((*source->str != '\0') ? *source->str++ : getsc__())

/* Number of Lex_state entries allocated per stack block. */
#define STATE_BSIZE	32

/* Push/pop the lexer state stack; on block over/underflow, delegate to
 * push_state_/pop_state_ to switch to another block of entries. */
#define PUSH_STATE(s)	do { \
			    if (++statep == state_info.end) \
				statep = push_state_(&state_info, statep); \
			    state = statep->ls_state = (s); \
			} while (0)

#define POP_STATE()	do { \
			    if (--statep == state_info.base) \
				statep = pop_state_(&state_info, statep); \
			    state = statep->ls_state; \
			} while (0)
/*
* Lexical analyzer
*
* tokens are not regular expressions, they are LL(1).
* for example, "${var:-${PWD}}", and "$(size $(whence ksh))".
* hence the state stack.
*/
int
yylex(int cf)
{
Lex_state states[STATE_BSIZE], *statep;
State_info state_info;
int c, state;
XString ws; /* expandable output word */
char *wp; /* output word pointer */
char *sp, *dp;
int c2;
Again:
states[0].ls_state = SINVALID;
states[0].ls_info.base = NULL;
statep = &states[1];
state_info.base = states;
state_info.end = &states[STATE_BSIZE];
Xinit(ws, wp, 64, ATEMP);
backslash_skip = 0;
ignore_backslash_newline = 0;
if (cf&ONEWORD)
state = SWORD;
else if (cf&LETEXPR) {
*wp++ = OQUOTE; /* enclose arguments in (double) quotes */
state = SLETPAREN;
statep->ls_sletparen.nparen = 0;
} else { /* normal lexing */
state = (cf & HEREDELIM) ? SHEREDELIM : SBASE;
while ((c = getsc()) == ' ' || c == '\t')
;
if (c == '#') {
ignore_backslash_newline++;
while ((c = getsc()) != '\0' && c != '\n')
;
ignore_backslash_newline--;
}
ungetsc(c);
}
if (source->flags & SF_ALIAS) { /* trailing ' ' in alias definition */
source->flags &= ~SF_ALIAS;
/* In POSIX mode, a trailing space only counts if we are
* parsing a simple command
*/
if (!Flag(FPOSIX) || (cf & CMDWORD))
cf |= ALIAS;
}
/* Initial state: one of SBASE SHEREDELIM SWORD SASPAREN */
statep->ls_state = state;
/* collect non-special or quoted characters to form word */
while (!((c = getsc()) == 0 ||
((state == SBASE || state == SHEREDELIM) && ctype(c, C_LEX1)))) {
Xcheck(ws, wp);
switch (state) {
case SBASE:
if (Flag(FCSHHISTORY) && (source->flags & SF_TTY) &&
c == '!') {
char **replace = NULL;
int get, i;
char match[200] = { 0 }, *str = match;
size_t mlen;
c2 = getsc();
if (c2 == '\0' || c2 == ' ' || c2 == '\t')
;
else if (c2 == '!')
replace = hist_get_newest(0);
else if (isdigit(c2) || c2 == '-' ||
isalpha(c2)) {
get = !isalpha(c2);
*str++ = c2;
do {
if ((c2 = getsc()) == '\0')
break;
if (c2 == '\t' || c2 == ' ' ||
c2 == '\n') {
ungetsc(c2);
break;
}
*str++ = c2;
} while (str < &match[sizeof(match)-1]);
*str = '\0';
if (get) {
int h = findhistrel(match);
if (h >= 0)
replace = &history[h];
} else {
int h = findhist(-1, 0, match, true);
if (h >= 0)
replace = &history[h];
}
}
/*
* XXX ksh history buffer saves un-expanded
* commands. Until the history buffer code is
* changed to contain expanded commands, we
* ignore the bad commands (spinning sucks)
*/
if (replace && **replace == '!')
ungetsc(c2);
else if (replace) {
Source *s;
/* do not strdup replacement via alloc */
s = pushs(SREREAD, source->areap);
s->start = s->str = *replace;
s->next = source;
s->u.freeme = NULL;
source = s;
continue;
} else if (*match != '\0') {
/* restore what followed the '!' */
mlen = strlen(match);
for (i = mlen-1; i >= 0; i--)
ungetsc(match[i]);
} else
ungetsc(c2);
}
if (c == '[' && (cf & (VARASN|ARRAYVAR))) {
*wp = EOS; /* temporary */
if (is_wdvarname(Xstring(ws, wp), false)) {
char *p, *tmp;
if (arraysub(&tmp)) {
*wp++ = CHAR;
*wp++ = c;
for (p = tmp; *p; ) {
Xcheck(ws, wp);
*wp++ = CHAR;
*wp++ = *p++;
}
afree(tmp, ATEMP);
break;
} else {
Source *s;
s = pushs(SREREAD,
source->areap);
s->start = s->str
= s->u.freeme = tmp;
s->next = source;
source = s;
}
}
*wp++ = CHAR;
*wp++ = c;
break;
}
/* FALLTHROUGH */
Sbase1: /* includes *(...|...) pattern (*+?@!) */
if (c == '*' || c == '@' || c == '+' || c == '?' ||
c == '!') {
c2 = getsc();
if (c2 == '(' /*)*/ ) {
*wp++ = OPAT;
*wp++ = c;
PUSH_STATE(SPATTERN);
break;
}
ungetsc(c2);
}
/* FALLTHROUGH */
Sbase2: /* doesn't include *(...|...) pattern (*+?@!) */
switch (c) {
case '\\':
c = getsc();
if (c) /* trailing \ is lost */
*wp++ = QCHAR, *wp++ = c;
break;
case '\'':
if ((cf & HEREDOC) || state == SBRACEQ) {
*wp++ = CHAR, *wp++ = c;
break;
}
*wp++ = OQUOTE;
ignore_backslash_newline++;
PUSH_STATE(SSQUOTE);
break;
case '"':
*wp++ = OQUOTE;
PUSH_STATE(SDQUOTE);
break;
default:
goto Subst;
}
break;
Subst:
switch (c) {
case '\\':
c = getsc();
switch (c) {
case '\\':
case '$': case '`':
*wp++ = QCHAR, *wp++ = c;
break;
case '"':
if ((cf & HEREDOC) == 0) {
*wp++ = QCHAR, *wp++ = c;
break;
}
/* FALLTHROUGH */
default:
if (cf & UNESCAPE) {
*wp++ = QCHAR, *wp++ = c;
break;
}
Xcheck(ws, wp);
if (c) { /* trailing \ is lost */
*wp++ = CHAR, *wp++ = '\\';
*wp++ = CHAR, *wp++ = c;
}
break;
}
break;
case '$':
c = getsc();
if (c == '(') /*)*/ {
c = getsc();
if (c == '(') /*)*/ {
PUSH_STATE(SASPAREN);
statep->ls_sasparen.nparen = 2;
statep->ls_sasparen.start =
Xsavepos(ws, wp);
*wp++ = EXPRSUB;
} else {
ungetsc(c);
PUSH_STATE(SCSPAREN);
statep->ls_scsparen.nparen = 1;
statep->ls_scsparen.csstate = 0;
*wp++ = COMSUB;
}
} else if (c == '{') /*}*/ {
*wp++ = OSUBST;
*wp++ = '{'; /*}*/
wp = get_brace_var(&ws, wp);
c = getsc();
/* allow :# and :% (ksh88 compat) */
if (c == ':') {
*wp++ = CHAR, *wp++ = c;
c = getsc();
}
/* If this is a trim operation,
* treat (,|,) specially in STBRACE.
*/
if (c == '#' || c == '%') {
ungetsc(c);
PUSH_STATE(STBRACE);
} else {
ungetsc(c);
if (state == SDQUOTE ||
state == SBRACEQ)
PUSH_STATE(SBRACEQ);
else
PUSH_STATE(SBRACE);
}
} else if (ctype(c, C_ALPHA)) {
*wp++ = OSUBST;
*wp++ = 'X';
do {
Xcheck(ws, wp);
*wp++ = c;
c = getsc();
} while (ctype(c, C_ALPHA) || digit(c));
*wp++ = '\0';
*wp++ = CSUBST;
*wp++ = 'X';
ungetsc(c);
} else if (ctype(c, C_VAR1) || digit(c)) {
Xcheck(ws, wp);
*wp++ = OSUBST;
*wp++ = 'X';
*wp++ = c;
*wp++ = '\0';
*wp++ = CSUBST;
*wp++ = 'X';
} else {
*wp++ = CHAR, *wp++ = '$';
ungetsc(c);
}
break;
case '`':
PUSH_STATE(SBQUOTE);
*wp++ = COMSUB;
/* Need to know if we are inside double quotes
* since sh/at&t-ksh translate the \" to " in
* "`..\"..`".
*/
statep->ls_sbquote.indquotes = 0;
Lex_state *s = statep;
Lex_state *base = state_info.base;
while (1) {
for (; s != base; s--) {
if (s->ls_state == SDQUOTE) {
statep->ls_sbquote.indquotes = 1;
break;
}
}
if (s != base)
break;
if (!(s = s->ls_info.base))
break;
base = s-- - STATE_BSIZE;
}
break;
default:
*wp++ = CHAR, *wp++ = c;
}
break;
case SSQUOTE:
if (c == '\'') {
POP_STATE();
if (state == SBRACEQ) {
*wp++ = CHAR, *wp++ = c;
break;
}
*wp++ = CQUOTE;
ignore_backslash_newline--;
} else
*wp++ = QCHAR, *wp++ = c;
break;
case SDQUOTE:
if (c == '"') {
POP_STATE();
*wp++ = CQUOTE;
} else
goto Subst;
break;
case SCSPAREN: /* $( .. ) */
/* todo: deal with $(...) quoting properly
* kludge to partly fake quoting inside $(..): doesn't
* really work because nested $(..) or ${..} inside
* double quotes aren't dealt with.
*/
switch (statep->ls_scsparen.csstate) {
case 0: /* normal */
switch (c) {
case '(':
statep->ls_scsparen.nparen++;
break;
case ')':
statep->ls_scsparen.nparen--;
break;
case '\\':
statep->ls_scsparen.csstate = 1;
break;
case '"':
statep->ls_scsparen.csstate = 2;
break;
case '\'':
statep->ls_scsparen.csstate = 4;
ignore_backslash_newline++;
break;
}
break;
case 1: /* backslash in normal mode */
case 3: /* backslash in double quotes */
--statep->ls_scsparen.csstate;
break;
case 2: /* double quotes */
if (c == '"')
statep->ls_scsparen.csstate = 0;
else if (c == '\\')
statep->ls_scsparen.csstate = 3;
break;
case 4: /* single quotes */
if (c == '\'') {
statep->ls_scsparen.csstate = 0;
ignore_backslash_newline--;
}
break;
}
if (statep->ls_scsparen.nparen == 0) {
POP_STATE();
*wp++ = 0; /* end of COMSUB */
} else
*wp++ = c;
break;
case SASPAREN: /* $(( .. )) */
/* todo: deal with $((...); (...)) properly */
/* XXX should nest using existing state machine
* (embed "..", $(...), etc.) */
if (c == '(')
statep->ls_sasparen.nparen++;
else if (c == ')') {
statep->ls_sasparen.nparen--;
if (statep->ls_sasparen.nparen == 1) {
/*(*/
if ((c2 = getsc()) == ')') {
POP_STATE();
*wp++ = 0; /* end of EXPRSUB */
break;
} else {
char *s;
ungetsc(c2);
/* mismatched parenthesis -
* assume we were really
* parsing a $(..) expression
*/
s = Xrestpos(ws, wp,
statep->ls_sasparen.start);
memmove(s + 1, s, wp - s);
*s++ = COMSUB;
*s = '('; /*)*/
wp++;
statep->ls_scsparen.nparen = 1;
statep->ls_scsparen.csstate = 0;
state = statep->ls_state =
SCSPAREN;
}
}
}
*wp++ = c;
break;
case SBRACEQ:
/*{*/
if (c == '}') {
POP_STATE();
*wp++ = CSUBST;
*wp++ = /*{*/ '}';
} else
goto Sbase2;
break;
case SBRACE:
/*{*/
if (c == '}') {
POP_STATE();
*wp++ = CSUBST;
*wp++ = /*{*/ '}';
} else
goto Sbase1;
break;
case STBRACE:
/* Same as SBRACE, except (,|,) treated specially */
/*{*/
if (c == '}') {
POP_STATE();
*wp++ = CSUBST;
*wp++ = /*{*/ '}';
} else if (c == '|') {
*wp++ = SPAT;
} else if (c == '(') {
*wp++ = OPAT;
*wp++ = ' '; /* simile for @ */
PUSH_STATE(SPATTERN);
} else
goto Sbase1;
break;
case SBQUOTE:
if (c == '`') {
*wp++ = 0;
POP_STATE();
} else if (c == '\\') {
switch (c = getsc()) {
case '\\':
case '$': case '`':
*wp++ = c;
break;
case '"':
if (statep->ls_sbquote.indquotes) {
*wp++ = c;
break;
}
/* FALLTHROUGH */
default:
if (c) { /* trailing \ is lost */
*wp++ = '\\';
*wp++ = c;
}
break;
}
} else
*wp++ = c;
break;
case SWORD: /* ONEWORD */
goto Subst;
case SLETPAREN: /* LETEXPR: (( ... )) */
/*(*/
if (c == ')') {
if (statep->ls_sletparen.nparen > 0)
--statep->ls_sletparen.nparen;
/*(*/
else if ((c2 = getsc()) == ')') {
c = 0;
*wp++ = CQUOTE;
goto Done;
} else
ungetsc(c2);
} else if (c == '(')
/* parenthesis inside quotes and backslashes
* are lost, but at&t ksh doesn't count them
* either
*/
++statep->ls_sletparen.nparen;
goto Sbase2;
case SHEREDELIM: /* <<,<<- delimiter */
/* XXX chuck this state (and the next) - use
* the existing states ($ and \`..` should be
* stripped of their specialness after the
* fact).
*/
/* here delimiters need a special case since
* $ and `..` are not to be treated specially
*/
if (c == '\\') {
c = getsc();
if (c) { /* trailing \ is lost */
*wp++ = QCHAR;
*wp++ = c;
}
} else if (c == '\'') {
PUSH_STATE(SSQUOTE);
*wp++ = OQUOTE;
ignore_backslash_newline++;
} else if (c == '"') {
state = statep->ls_state = SHEREDQUOTE;
*wp++ = OQUOTE;
} else {
*wp++ = CHAR;
*wp++ = c;
}
break;
case SHEREDQUOTE: /* " in <<,<<- delimiter */
if (c == '"') {
*wp++ = CQUOTE;
state = statep->ls_state = SHEREDELIM;
} else {
if (c == '\\') {
switch (c = getsc()) {
case '\\': case '"':
case '$': case '`':
break;
default:
if (c) { /* trailing \ lost */
*wp++ = CHAR;
*wp++ = '\\';
}
break;
}
}
*wp++ = CHAR;
*wp++ = c;
}
break;
case SPATTERN: /* in *(...|...) pattern (*+?@!) */
if ( /*(*/ c == ')') {
*wp++ = CPAT;
POP_STATE();
} else if (c == '|') {
*wp++ = SPAT;
} else if (c == '(') {
*wp++ = OPAT;
*wp++ = ' '; /* simile for @ */
PUSH_STATE(SPATTERN);
} else
goto Sbase1;
break;
}
}
Done:
Xcheck(ws, wp);
if (statep != &states[1])
/* XXX figure out what is missing */
yyerror("no closing quote\n");
/* This done to avoid tests for SHEREDELIM wherever SBASE tested */
if (state == SHEREDELIM)
state = SBASE;
dp = Xstring(ws, wp);
if ((c == '<' || c == '>') && state == SBASE &&
((c2 = Xlength(ws, wp)) == 0 ||
(c2 == 2 && dp[0] == CHAR && digit(dp[1])))) {
struct ioword *iop = alloc(sizeof(*iop), ATEMP);
if (c2 == 2)
iop->unit = dp[1] - '0';
else
iop->unit = c == '>'; /* 0 for <, 1 for > */
c2 = getsc();
/* <<, >>, <> are ok, >< is not */
if (c == c2 || (c == '<' && c2 == '>')) {
iop->flag = c == c2 ?
(c == '>' ? IOCAT : IOHERE) : IORDWR;
if (iop->flag == IOHERE) {
if ((c2 = getsc()) == '-')
iop->flag |= IOSKIP;
else
ungetsc(c2);
}
} else if (c2 == '&')
iop->flag = IODUP | (c == '<' ? IORDUP : 0);
else {
iop->flag = c == '>' ? IOWRITE : IOREAD;
if (c == '>' && c2 == '|')
iop->flag |= IOCLOB;
else
ungetsc(c2);
}
iop->name = NULL;
iop->delim = NULL;
iop->heredoc = NULL;
Xfree(ws, wp); /* free word */
yylval.iop = iop;
return REDIR;
}
if (wp == dp && state == SBASE) {
Xfree(ws, wp); /* free word */
/* no word, process LEX1 character */
switch (c) {
default:
return c;
case '|':
case '&':
case ';':
if ((c2 = getsc()) == c)
c = (c == ';') ? BREAK :
(c == '|') ? LOGOR :
(c == '&') ? LOGAND :
YYERRCODE;
else if (c == '|' && c2 == '&')
c = COPROC;
else
ungetsc(c2);
return c;
case '\n':
gethere();
if (cf & CONTIN)
goto Again;
return c;
case '(': /*)*/
if (!Flag(FSH)) {
if ((c2 = getsc()) == '(') /*)*/
/* XXX need to handle ((...); (...)) */
c = MDPAREN;
else
ungetsc(c2);
}
return c;
/*(*/
case ')':
return c;
}
}
*wp++ = EOS; /* terminate word */
yylval.cp = Xclose(ws, wp);
if (state == SWORD || state == SLETPAREN) /* ONEWORD? */
return LWORD;
ungetsc(c); /* unget terminator */
/* copy word to unprefixed string ident */
for (sp = yylval.cp, dp = ident; dp < ident+IDENT && (c = *sp++) == CHAR; )
*dp++ = *sp++;
/* Make sure the ident array stays '\0' padded */
memset(dp, 0, (ident+IDENT) - dp + 1);
if (c != EOS)
*ident = '\0'; /* word is not unquoted */
if (*ident != '\0' && (cf&(KEYWORD|ALIAS))) {
struct tbl *p;
int h = hash(ident);
/* { */
if ((cf & KEYWORD) && (p = ktsearch(&keywords, ident, h)) &&
(!(cf & ESACONLY) || p->val.i == ESAC || p->val.i == '}')) {
afree(yylval.cp, ATEMP);
return p->val.i;
}
if ((cf & ALIAS) && (p = ktsearch(&aliases, ident, h)) &&
(p->flag & ISSET)) {
Source *s;
for (s = source; s->type == SALIAS; s = s->next)
if (s->u.tblp == p)
return LWORD;
/* push alias expansion */
s = pushs(SALIAS, source->areap);
s->start = s->str = p->val.s;
s->u.tblp = p;
s->next = source;
source = s;
afree(yylval.cp, ATEMP);
goto Again;
}
}
return LWORD;
}
/*
 * Read the bodies of all here documents queued on the current input
 * line.  The heres[]/herep queue is filled while redirections are
 * lexed; after every body has been consumed the queue is reset.
 */
static void
gethere(void)
{
	struct ioword **p;

	for (p = heres; p < herep; p++)
		readhere(*p);
	herep = heres;
}
/*
 * read "<<word" text into temp file
 *
 * (Despite the historical comment above, the body is accumulated in
 * memory and stored in iop->heredoc as a NUL-terminated string.)
 * The delimiter word is expanded with evalstr() first; unless IOEVAL
 * is set, \newline joining is disabled while the body is read.  For
 * <<- (IOSKIP) leading tabs on each line are stripped before the
 * delimiter comparison.
 */
static void
readhere(struct ioword *iop)
{
	int c;
	char *volatile eof;
	char *eofp;
	int skiptabs;
	XString xs;
	char *xp;
	int xpos;

	eof = evalstr(iop->delim, 0);

	if (!(iop->flag & IOEVAL))
		ignore_backslash_newline++;

	Xinit(xs, xp, 256, ATEMP);

	for (;;) {
		/* Try to match the delimiter at the start of each line;
		 * matched characters are provisionally copied into the
		 * buffer and rolled back (Xrestpos) on a full match. */
		eofp = eof;
		skiptabs = iop->flag & IOSKIP;
		xpos = Xsavepos(xs, xp);
		while ((c = getsc()) != 0) {
			if (skiptabs) {
				if (c == '\t')
					continue;
				skiptabs = 0;
			}
			if (c != *eofp)
				break;
			Xcheck(xs, xp);
			Xput(xs, xp, c);
			eofp++;
		}
		/* Allow EOF here so commands with out trailing newlines
		 * will work (eg, ksh -c '...', $(...), etc).
		 */
		if (*eofp == '\0' && (c == 0 || c == '\n')) {
			xp = Xrestpos(xs, xp, xpos);
			break;
		}
		/* Not the delimiter: put back the mismatched character and
		 * copy the rest of the line verbatim (including the \n). */
		ungetsc(c);
		while ((c = getsc()) != '\n') {
			if (c == 0)
				yyerror("here document `%s' unclosed\n", eof);
			Xcheck(xs, xp);
			Xput(xs, xp, c);
		}
		Xcheck(xs, xp);
		Xput(xs, xp, c);
	}
	Xput(xs, xp, '\0');
	iop->heredoc = Xclose(xs, xp);

	if (!(iop->flag & IOEVAL))
		ignore_backslash_newline--;
}
/*
 * Report a parse/lex error: unwinds any alias/re-read input sources,
 * discards pending input on the remaining source, prints the printf-
 * style message to shl_out and aborts via errorf() (does not return).
 */
void
yyerror(const char *fmt, ...)
{
	va_list va;

	/* pop aliases and re-reads */
	while (source->type == SALIAS || source->type == SREREAD)
		source = source->next;
	source->str = null;	/* zap pending input */

	error_prefix(true);
	va_start(va, fmt);
	shf_vfprintf(shl_out, fmt, va);
	va_end(va);
	errorf(NULL);
}
/*
 * input for yylex with alias expansion
 *
 * Allocate and zero-initialize a new input Source of the given type
 * from areap.  File/stdin sources get a 256-byte line buffer; other
 * types leave the XString cleared.  The caller links the result into
 * the source stack.
 */
Source *
pushs(int type, Area *areap)
{
	Source *s;

	s = alloc(sizeof(Source), areap);
	s->type = type;
	s->str = null;
	s->start = NULL;
	s->line = 0;
	s->cmd_offset = 0;
	s->errline = 0;
	s->file = NULL;
	s->flags = 0;
	s->next = NULL;
	s->areap = areap;
	if (type == SFILE || type == SSTDIN) {
		char *dummy;

		Xinit(s->xs, dummy, 256, s->areap);
	} else
		memset(&s->xs, 0, sizeof(s->xs));
	return s;
}
/*
 * Fetch the next raw input character from the current Source, refilling
 * line-based sources and popping/advancing word-list, alias and re-read
 * sources as they run dry.  Returns 0 only at true end of input.
 */
static int
getsc__(void)
{
	Source *s = source;
	int c;

	while ((c = *s->str++) == 0) {
		s->str = NULL;	/* return 0 for EOF by default */
		switch (s->type) {
		case SEOF:
			s->str = null;
			return 0;

		case SSTDIN:
		case SFILE:
			/* refill the line buffer from the tty/file */
			getsc_line(s);
			break;

		case SWSTR:
			break;

		case SSTRING:
			break;

		case SWORDS:
			/* advance to the next word of a word vector */
			s->start = s->str = *s->u.strv++;
			s->type = SWORDSEP;
			break;

		case SWORDSEP:
			/* emit a separator between words: a space between
			 * words, a newline after the last one */
			if (*s->u.strv == NULL) {
				s->start = s->str = "\n";
				s->type = SEOF;
			} else {
				s->start = s->str = " ";
				s->type = SWORDS;
			}
			break;

		case SALIAS:
			if (s->flags & SF_ALIASEND) {
				/* pass on an unused SF_ALIAS flag */
				source = s->next;
				source->flags |= s->flags & SF_ALIAS;
				s = source;
			} else if (*s->u.tblp->val.s &&
			    isspace((unsigned char)strchr(s->u.tblp->val.s, 0)[-1])) {
				source = s = s->next;	/* pop source stack */
				/* Note that this alias ended with a space,
				 * enabling alias expansion on the following
				 * word.
				 */
				s->flags |= SF_ALIAS;
			} else {
				/* At this point, we need to keep the current
				 * alias in the source list so recursive
				 * aliases can be detected and we also need
				 * to return the next character.  Do this
				 * by temporarily popping the alias to get
				 * the next character and then put it back
				 * in the source list with the SF_ALIASEND
				 * flag set.
				 */
				source = s->next;	/* pop source stack */
				source->flags |= s->flags & SF_ALIAS;
				c = getsc__();
				if (c) {
					s->flags |= SF_ALIASEND;
					s->ugbuf[0] = c; s->ugbuf[1] = '\0';
					s->start = s->str = s->ugbuf;
					s->next = source;
					source = s;
				} else {
					s = source;
					/* avoid reading eof twice */
					s->str = NULL;
					break;
				}
			}
			continue;

		case SREREAD:
			if (s->start != s->ugbuf)	/* yuck */
				afree(s->u.freeme, ATEMP);
			source = s = s->next;
			continue;
		}
		if (s->str == NULL) {
			s->type = SEOF;
			s->start = s->str = null;
			return '\0';
		}
		if (s->flags & SF_ECHO) {
			/* verbose (-v) mode: echo input as it is read */
			shf_puts(s->str, shl_out);
			shf_flush(shl_out);
		}
	}
	return c;
}
/*
 * Read one line of input into the Source's line buffer, either through
 * the interactive line editor (vi/emacs) or via buffered reads from the
 * underlying shf.  Handles the TMOUT alarm around tty reads, saves
 * non-blank interactive lines to history, and arms the PS2 prompt.
 * On EOF, s->str is left NULL.
 */
static void
getsc_line(Source *s)
{
	char *xp = Xstring(s->xs, xp);
	int interactive = Flag(FTALKING) && s->type == SSTDIN;
	int have_tty = interactive && (s->flags & SF_TTY);

	/* Done here to ensure nothing odd happens when a timeout occurs */
	XcheckN(s->xs, xp, LINE);
	*xp = '\0';
	s->start = s->str = xp;

	if (have_tty && ksh_tmout) {
		ksh_tmout_state = TMOUT_READING;
		alarm(ksh_tmout);
	}
	if (have_tty && (0
#ifdef VI
	    || Flag(FVI)
#endif /* VI */
#ifdef EMACS
	    || Flag(FEMACS) || Flag(FGMACS)
#endif /* EMACS */
	    )) {
		/* line-editing path: x_read does its own prompting */
		int nread;

		nread = x_read(xp, LINE);
		if (nread < 0)	/* read error */
			nread = 0;
		xp[nread] = '\0';
		xp += nread;
	} else {
		if (interactive) {
			pprompt(prompt, 0);
		} else
			s->line++;

		while (1) {
			char *p = shf_getse(xp, Xnleft(s->xs, xp), s->u.shf);

			/* retry reads interrupted by signals, running any
			 * pending traps first */
			if (!p && shf_error(s->u.shf) &&
			    s->u.shf->errno_ == EINTR) {
				shf_clearerr(s->u.shf);
				if (trap)
					runtraps(0);
				continue;
			}
			if (!p || (xp = p, xp[-1] == '\n'))
				break;
			/* double buffer size */
			xp++;	/* move past null so doubling works... */
			XcheckN(s->xs, xp, Xlength(s->xs, xp));
			xp--;	/* ...and move back again */
		}
		/* flush any unwanted input so other programs/builtins
		 * can read it. Not very optimal, but less error prone
		 * than flushing else where, dealing with redirections,
		 * etc..
		 * todo: reduce size of shf buffer (~128?) if SSTDIN
		 */
		if (s->type == SSTDIN)
			shf_flush(s->u.shf);
	}
	/* XXX: temporary kludge to restore source after a
	 * trap may have been executed.
	 */
	source = s;
	if (have_tty && ksh_tmout) {
		ksh_tmout_state = TMOUT_EXECUTING;
		alarm(0);
	}
	s->start = s->str = Xstring(s->xs, xp);
	strip_nuls(Xstring(s->xs, xp), Xlength(s->xs, xp));
	/* Note: if input is all nulls, this is not eof */
	if (Xlength(s->xs, xp) == 0) {	/* EOF */
		if (s->type == SFILE)
			shf_fdclose(s->u.shf);
		s->str = NULL;
	} else if (interactive) {
		char *p = Xstring(s->xs, xp);
		/* skip lines that are only IFS whitespace before deciding
		 * whether to record history */
		if (cur_prompt == PS1)
			while (*p && ctype(*p, C_IFS) && ctype(*p, C_IFSWS))
				p++;
		if (*p) {
			s->line++;
			histsave(s->line, s->str, 1);
		}
	}
	if (interactive)
		set_prompt(PS2);
}
/*
 * Rewrite every "\$" escape in the prompt string as "\p", in place,
 * before variable substitution runs so the '$' is not treated as the
 * start of a parameter expansion (dopprompt later expands 'p' to '$'
 * or '#' depending on the effective uid).  Returns its argument.
 */
static char *
special_prompt_expand(char *str)
{
	char *p = str;

	while ((p = strstr(p, "\\$")) != NULL) {
		p[1] = 'p';
		p += 2;	/* resume scanning after the rewritten escape */
	}
	return str;
}
/*
 * Select the current prompt (PS1 for a new command, PS2 for a
 * continuation line) and, for PS1, perform substitutions on its value.
 * Substitution errors are caught via a temporary E_ERRH environment and
 * fall back to safe_prompt.
 */
void
set_prompt(int to)
{
	char *ps1;
	Area *saved_atemp;

	cur_prompt = to;

	switch (to) {
	case PS1: /* command */
		ps1 = str_save(str_val(global("PS1")), ATEMP);
		saved_atemp = ATEMP;	/* ps1 is freed by substitute() */
		newenv(E_ERRH);
		if (sigsetjmp(genv->jbuf, 0)) {
			prompt = safe_prompt;
			/* Don't print an error - assume it has already
			 * been printed.  Reason is we may have forked
			 * to run a command and the child may be
			 * unwinding its stack through this code as it
			 * exits.
			 */
		} else {
			/* expand \$ before other substitutions are done */
			char *tmp = special_prompt_expand(ps1);
			prompt = str_save(substitute(tmp, 0), saved_atemp);
		}
		quitenv(NULL);
		break;
	case PS2: /* command continuation */
		prompt = str_val(global("PS2"));
		break;
	}
}
/*
 * Expand and optionally print a prompt string.
 *
 * sp        prompt string; backslash escapes (\a \d \D{fmt} \e \h \H \j
 *           \l \n \p \r \s \t \T \@ \A \u \v \V \w \W \! \# \ooo \\
 *           \[ \]) are expanded, and a bare '!' inserts the history
 *           number ("!!" yields a literal '!').
 * ntruncate number of leading display characters to skip (used when
 *           redrawing a partially printed prompt).
 * spp       if non-NULL, receives a pointer to the start of the last
 *           (physical) line of the prompt within sp.
 * doprint   when non-zero the expansion is written to shl_out.
 *
 * Returns the display width of the last line.  If the string starts
 * with <char>'\r', that char becomes a delimiter bracketing sequences
 * that are printed but not counted toward the width (same idea as
 * \[...\]).
 */
static int
dopprompt(const char *sp, int ntruncate, const char **spp, int doprint)
{
	char strbuf[1024], tmpbuf[1024], *p, *str, nbuf[32], delimiter = '\0';
	int len, c, n, totlen = 0, indelimit = 0, counting = 1, delimitthis;
	const char *cp = sp;
	struct tm *tm;
	time_t t;

	/* optional leading "<delimiter>\r" pair selects a non-counting
	 * delimiter character */
	if (*cp && cp[1] == '\r') {
		delimiter = *cp;
		cp += 2;
	}

	while (*cp != 0) {
		delimitthis = 0;
		if (indelimit && *cp != delimiter)
			;
		else if (*cp == '\n' || *cp == '\r') {
			/* new physical line: width restarts at zero */
			totlen = 0;
			sp = cp + 1;
		} else if (*cp == '\t') {
			/* advance to the next 8-column tab stop */
			if (counting)
				totlen = (totlen | 7) + 1;
		} else if (*cp == delimiter) {
			indelimit = !indelimit;
			delimitthis = 1;
		}
		if (*cp == '\\') {
			cp++;
			if (!*cp)
				break;
			/* Expand \h and \$ for both, sh(1) and ksh(1) */
			if (Flag(FSH) && !(*cp == 'h' || *cp == 'p'))
				snprintf(strbuf, sizeof strbuf, "\\%c", *cp);
			else switch (*cp) {
			case 'a':	/* '\' 'a' bell */
				strbuf[0] = '\007';
				strbuf[1] = '\0';
				break;
			case 'd':	/* '\' 'd' Dow Mon DD */
				time(&t);
				tm = localtime(&t);
				strftime(strbuf, sizeof strbuf, "%a %b %d", tm);
				break;
			case 'D': /* '\' 'D' '{' strftime format '}' */
				p = strchr(cp + 2, '}');
				if (cp[1] != '{' || p == NULL) {
					/* malformed \D: emit it literally */
					snprintf(strbuf, sizeof strbuf,
					    "\\%c", *cp);
					break;
				}
				strlcpy(tmpbuf, cp + 2, sizeof tmpbuf);
				p = strchr(tmpbuf, '}');
				if (p)
					*p = '\0';
				time(&t);
				tm = localtime(&t);
				strftime(strbuf, sizeof strbuf, tmpbuf, tm);
				cp = strchr(cp + 2, '}');
				break;
			case 'e':	/* '\' 'e' escape */
				strbuf[0] = '\033';
				strbuf[1] = '\0';
				break;
			case 'h':	/* '\' 'h' shortened hostname */
				gethostname(strbuf, sizeof strbuf);
				p = strchr(strbuf, '.');
				if (p)
					*p = '\0';
				break;
			case 'H':	/* '\' 'H' full hostname */
				gethostname(strbuf, sizeof strbuf);
				break;
			case 'j':	/* '\' 'j' number of jobs */
				snprintf(strbuf, sizeof strbuf, "%d",
				    j_njobs());
				break;
			case 'l':	/* '\' 'l' basename of tty */
				p = ttyname(0);
				if (p)
					p = basename(p);
				if (p)
					strlcpy(strbuf, p, sizeof strbuf);
				break;
			case 'n':	/* '\' 'n' newline */
				strbuf[0] = '\n';
				strbuf[1] = '\0';
				totlen = 0;	/* reset for prompt re-print */
				sp = cp + 1;
				break;
			case 'p':	/* '\' '$' $ or # */
				strbuf[0] = ksheuid ? '$' : '#';
				strbuf[1] = '\0';
				break;
			case 'r':	/* '\' 'r' return */
				strbuf[0] = '\r';
				strbuf[1] = '\0';
				totlen = 0;	/* reset for prompt re-print */
				sp = cp + 1;
				break;
			case 's':	/* '\' 's' basename $0 */
				strlcpy(strbuf, kshname, sizeof strbuf);
				break;
			case 't':	/* '\' 't' 24 hour HH:MM:SS */
				time(&t);
				tm = localtime(&t);
				strftime(strbuf, sizeof strbuf, "%T", tm);
				break;
			case 'T':	/* '\' 'T' 12 hour HH:MM:SS */
				time(&t);
				tm = localtime(&t);
				strftime(strbuf, sizeof strbuf, "%l:%M:%S", tm);
				break;
			case '@':	/* '\' '@' 12 hour am/pm format */
				time(&t);
				tm = localtime(&t);
				strftime(strbuf, sizeof strbuf, "%r", tm);
				break;
			case 'A':	/* '\' 'A' 24 hour HH:MM */
				time(&t);
				tm = localtime(&t);
				strftime(strbuf, sizeof strbuf, "%R", tm);
				break;
			case 'u':	/* '\' 'u' username */
				strlcpy(strbuf, username, sizeof strbuf);
				break;
			case 'v':	/* '\' 'v' version (short) */
				p = strchr(ksh_version, ' ');
				if (p)
					p = strchr(p + 1, ' ');
				if (p) {
					p++;
					strlcpy(strbuf, p, sizeof strbuf);
					p = strchr(strbuf, ' ');
					if (p)
						*p = '\0';
				}
				break;
			case 'V':	/* '\' 'V' version (long) */
				strlcpy(strbuf, ksh_version, sizeof strbuf);
				break;
			case 'w':	/* '\' 'w' cwd */
				p = str_val(global("PWD"));
				n = strlen(str_val(global("HOME")));
				/* a cwd inside $HOME is shown as ~... */
				if (strcmp(p, "/") == 0) {
					strlcpy(strbuf, p, sizeof strbuf);
				} else if (strcmp(p, str_val(global("HOME"))) == 0) {
					strbuf[0] = '~';
					strbuf[1] = '\0';
				} else if (strncmp(p, str_val(global("HOME")), n)
				    == 0 && p[n] == '/') {
					snprintf(strbuf, sizeof strbuf, "~/%s",
					    str_val(global("PWD")) + n + 1);
				} else
					strlcpy(strbuf, p, sizeof strbuf);
				break;
			case 'W':	/* '\' 'W' basename(cwd) */
				p = str_val(global("PWD"));
				if (strcmp(p, str_val(global("HOME"))) == 0) {
					strbuf[0] = '~';
					strbuf[1] = '\0';
				} else
					strlcpy(strbuf, basename(p), sizeof strbuf);
				break;
			case '!':	/* '\' '!' history line number */
				snprintf(strbuf, sizeof strbuf, "%d",
				    source->line + 1);
				break;
			case '#':	/* '\' '#' command line number */
				snprintf(strbuf, sizeof strbuf, "%d",
				    source->line - source->cmd_offset + 1);
				break;
			case '0':	/* '\' '#' '#' ' #' octal numeric handling */
			case '1':
			case '2':
			case '3':
			case '4':
			case '5':
			case '6':
			case '7':
				/* exactly three octal digits required,
				 * otherwise the escape is kept literal */
				if ((cp[1] > '7' || cp[1] < '0') ||
				    (cp[2] > '7' || cp[2] < '0')) {
					snprintf(strbuf, sizeof strbuf,
					    "\\%c", *cp);
					break;
				}
				n = (cp[0] - '0') * 8 * 8 + (cp[1] - '0') * 8 +
				    (cp[2] - '0');
				snprintf(strbuf, sizeof strbuf, "%c", n);
				cp += 2;
				break;
			case '\\':	/* '\' '\' */
				strbuf[0] = '\\';
				strbuf[1] = '\0';
				break;
			case '[':	/* '\' '[' .... stop counting */
				strbuf[0] = '\0';
				counting = 0;
				break;
			case ']':	/* '\' ']' restart counting */
				strbuf[0] = '\0';
				counting = 1;
				break;
			default:
				/* unknown escape: emit it literally */
				snprintf(strbuf, sizeof strbuf, "\\%c", *cp);
				break;
			}
			cp++;

			/* emit the expansion, honouring any remaining
			 * truncation budget */
			str = strbuf;
			len = strlen(str);
			if (ntruncate) {
				if (ntruncate >= len) {
					ntruncate -= len;
					continue;
				}
				str += ntruncate;
				len -= ntruncate;
				ntruncate = 0;
			}
			if (doprint)
				shf_write(str, len, shl_out);
			if (counting && !indelimit && !delimitthis)
				totlen += len;
			continue;
		} else if (*cp != '!')
			c = *cp++;
		else if (*++cp == '!')
			c = *cp++;
		else {
			/* bare '!' inserts the history number */
			shf_snprintf(p = nbuf, sizeof(nbuf), "%d",
			    source->line + 1);
			len = strlen(nbuf);
			if (ntruncate) {
				if (ntruncate >= len) {
					ntruncate -= len;
					continue;
				}
				p += ntruncate;
				len -= ntruncate;
				ntruncate = 0;
			}
			if (doprint)
				shf_write(p, len, shl_out);
			if (counting && !indelimit && !delimitthis)
				totlen += len;
			continue;
		}
		/* plain character */
		if (counting && ntruncate)
			--ntruncate;
		else if (doprint) {
			shf_putc(c, shl_out);
		}
		if (counting && !indelimit && !delimitthis)
			totlen++;
	}
	if (doprint)
		shf_flush(shl_out);
	if (spp)
		*spp = sp;
	return (totlen);
}
/*
 * Print the prompt cp to shl_out, skipping the first ntruncate display
 * characters (used when redrawing after a partial print).
 */
void
pprompt(const char *cp, int ntruncate)
{
	dopprompt(cp, ntruncate, NULL, 1);
}
/*
 * Return the display width of the last line of prompt cp without
 * printing it; if spp is non-NULL it is set to the start of that
 * last line within cp.
 */
int
promptlen(const char *cp, const char **spp)
{
	return dopprompt(cp, 0, spp, 0);
}
/* Read the variable part of a ${...} expression (ie, up to but not including
 * the :[-+?=#%] or close-brace.
 *
 * The accepted forms are an optional leading '#' (length operator)
 * followed by an identifier (optionally with an [array subscript]),
 * a number, or a single special-parameter character (C_VAR1).  The
 * variable text is appended to *wsp at wp, NUL-terminated, and the
 * terminating character is pushed back with ungetsc().  Returns the
 * updated write pointer.
 */
static char *
get_brace_var(XString *wsp, char *wp)
{
	enum parse_state {
		PS_INITIAL, PS_SAW_HASH, PS_IDENT,
		PS_NUMBER, PS_VAR1, PS_END
	}
	state;
	char c;

	state = PS_INITIAL;

	while (1) {
		c = getsc();
		/* State machine to figure out where the variable part ends. */
		switch (state) {
		case PS_INITIAL:
			if (c == '#') {
				state = PS_SAW_HASH;
				break;
			}
			/* FALLTHROUGH */
		case PS_SAW_HASH:
			if (letter(c))
				state = PS_IDENT;
			else if (digit(c))
				state = PS_NUMBER;
			else if (ctype(c, C_VAR1))
				state = PS_VAR1;
			else
				state = PS_END;
			break;
		case PS_IDENT:
			if (!letnum(c)) {
				state = PS_END;
				if (c == '[') {
					/* copy the raw [subscript] text
					 * verbatim into the word */
					char *tmp, *p;

					if (!arraysub(&tmp))
						yyerror("missing ]\n");
					*wp++ = c;
					for (p = tmp; *p; ) {
						Xcheck(*wsp, wp);
						*wp++ = *p++;
					}
					afree(tmp, ATEMP);
					c = getsc();	/* the ] */
				}
			}
			break;
		case PS_NUMBER:
			if (!digit(c))
				state = PS_END;
			break;
		case PS_VAR1:
			state = PS_END;
			break;
		case PS_END: /* keep gcc happy */
			break;
		}
		if (state == PS_END) {
			*wp++ = '\0';	/* end of variable part */
			ungetsc(c);
			break;
		}
		Xcheck(*wsp, wp);
		*wp++ = c;
	}
	return wp;
}
/*
 * Collect the text of an array subscript, assuming the opening '[' has
 * already been consumed.  The characters read (including the final ']')
 * are returned through *strp as a NUL-terminated string.  Returns 1 when
 * the matching ']' was found, 0 when EOF or a newline cut the scan short.
 * (Returned string double null terminated)
 */
static int
arraysub(char **strp)
{
	XString ws;
	char *wp;
	char c;
	int depth = 1;	/* just past the initial [ */

	Xinit(ws, wp, 32, ATEMP);

	for (;;) {
		c = getsc();
		Xcheck(ws, wp);
		*wp++ = c;
		if (c == '[')
			depth++;
		else if (c == ']')
			depth--;
		if (depth <= 0 || c == '\0' || c == '\n')
			break;
	}
	*wp++ = '\0';
	*strp = Xclose(ws, wp);

	return depth == 0;
}
/* Unget a char: handles case when we are already at the start of the buffer
 *
 * Normally just backs the current source's read pointer up one; when
 * that is impossible, a one-character SREREAD source holding c is
 * pushed on the source stack.  Also rewinds the backslash_skip
 * bookkeeping used by getsc_bn().  Returns the new read position.
 */
static const char *
ungetsc(int c)
{
	if (backslash_skip)
		backslash_skip--;
	/* Don't unget eof... */
	if (source->str == null && c == '\0')
		return source->str;
	if (source->str > source->start)
		source->str--;
	else {
		Source *s;

		s = pushs(SREREAD, source->areap);
		s->ugbuf[0] = c; s->ugbuf[1] = '\0';
		s->start = s->str = s->ugbuf;
		s->next = source;
		source = s;
	}
	return source->str;
}
/* Called to get a char that isn't a \newline sequence.
 *
 * \newline pairs are silently removed from the input unless
 * ignore_backslash_newline is set (quoting contexts).  The
 * backslash_skip counter (1 = a lone '\' was returned and its
 * follower pushed back, 2 = that follower is now being re-read)
 * keeps a backslash followed by a pushed-back character from being
 * re-examined as a possible \newline.
 */
static int
getsc_bn(void)
{
	int c, c2;

	if (ignore_backslash_newline)
		return getsc_();

	if (backslash_skip == 1) {
		backslash_skip = 2;
		return getsc_();
	}

	backslash_skip = 0;

	while (1) {
		c = getsc_();
		if (c == '\\') {
			if ((c2 = getsc_()) == '\n')
				/* ignore the \newline; get the next char... */
				continue;
			ungetsc(c2);
			backslash_skip = 1;
		}
		return c;
	}
}
/*
 * Grow the lexer-state stack by one block of STATE_BSIZE entries.
 * Slot 0 of the new block records old_end so pop_state_() can walk
 * back to the previous block; the first usable slot (index 1) is
 * returned and si is updated to describe the new block.
 */
static Lex_state *
push_state_(State_info *si, Lex_state *old_end)
{
	Lex_state *block;

	block = areallocarray(NULL, STATE_BSIZE, sizeof(Lex_state), ATEMP);
	block->ls_info.base = old_end;	/* back-link to previous block */
	si->base = block;
	si->end = block + STATE_BSIZE;
	return block + 1;
}
/*
 * Discard the current lexer-state block and return to the one before
 * it, using the back-link stored in slot 0 (old_end->ls_info.base).
 * Returns the last entry of the restored block.
 */
static Lex_state *
pop_state_(State_info *si, Lex_state *old_end)
{
	Lex_state *dead = si->base;
	Lex_state *prev_end = old_end->ls_info.base;

	si->end = prev_end;
	si->base = prev_end - STATE_BSIZE;
	afree(dead, ATEMP);
	return prev_end - 1;
}
|
Calcaware/SeekReactNative
|
components/UIComponents/Buttons/GreenButton.js
|
<gh_stars>10-100
// @flow
import * as React from "react";
import { Text, TouchableOpacity } from "react-native";
import i18n from "../../../i18n";
import { viewStyles, textStyles } from "../../../styles/uiComponents/buttons/greenButton";
// Props accepted by GreenButton.
type Props = {
  +color?: ?Object, // optional background-color override (falls back to the stylesheet)
  +handlePress: Function, // onPress handler for the button
  +letterSpacing?: number, // letter spacing of the label text
  +text: string, // i18n key of the label; translated and uppercased on render
  +login?: boolean, // when true, use the taller login-button height
  +fontSize?: number, // font size of the label text
  +width?: ?number, // optional fixed width for the button
  +allowFontScaling?: boolean, // forwarded to Text for accessibility font scaling
  +disabled?: boolean // disables touch handling when true
}
const GreenButton = ( {
color,
handlePress,
letterSpacing,
login,
fontSize,
text,
width,
allowFontScaling,
disabled
}: Props ): React.Node => {
let widthStyle = null;
if ( width ) {
widthStyle = { width };
}
return (
<TouchableOpacity
onPress={handlePress}
style={[
viewStyles.greenButton, color
&& { backgroundColor: color },
login && viewStyles.loginHeight,
widthStyle
]}
disabled={disabled}
testID="greenButton"
>
<Text
style={[textStyles.buttonText, { letterSpacing }, { fontSize }]}
allowFontScaling={allowFontScaling}
>
{i18n.t( text ).toLocaleUpperCase()}
</Text>
</TouchableOpacity>
);
};
// Defaults for the optional props. `disabled` now has an explicit default
// (it is optional in Props but previously had no entry here, so it was
// passed to TouchableOpacity as undefined).
GreenButton.defaultProps = {
  fontSize: 18,
  login: false,
  letterSpacing: 1.0,
  color: null,
  width: null,
  allowFontScaling: true,
  disabled: false
};
export default GreenButton;
|
RomaLytar/theatre
|
resources/assets/theatre/_blocks/filter/filter-event.js
|
(function() {
  if (document.querySelector(`[data-event-parent]`)) {
    // Wires a date filter to a list of artist/event nodes: collects the
    // unique dates advertised by the artists, fills the filter dropdown
    // with them, and shows/hides artists when a date is chosen.
    class FilterEvent {
      constructor(item) {
        this.item = item;
        this.unicDateArr = []; // unique date strings, sorted ascending
        this.actorsArr = this.item.querySelectorAll(`[data-event-artist]`);
        // FilterValue is a project-global dropdown helper — assumed to
        // expose a `list` element and fire `filterChanged`; confirm API.
        this.filter = new FilterValue(this.item.querySelector(`[data-filter-item="date"]`));
        this.CONSTANT = window.CONSTANT;
        this.getUnicDates();
        this.addDateItems();
        this.item.addEventListener(`filterChanged`, (e) => {
          this.sortArtistForDates(e.detail.value);
        });
      }

      // Format a date value (anything Date() accepts) as
      // "<day> <localized genitive month name> <year>".
      transformDate(numericDate) {
        const fullDate = new Date(numericDate),
          dateDay = fullDate.getDate(),
          dateMonth = fullDate.getMonth(),
          dateYear = fullDate.getFullYear();

        return `${dateDay} ${this.CONSTANT.MONTH_GENITIVE[dateMonth][this.CONSTANT.LANG]} ${dateYear}`;
      }

      // Gather every artist's comma-separated data-date values, dropping
      // duplicates, then sort chronologically.
      getUnicDates() {
        this.actorsArr.forEach((item) => {
          item.getAttribute(`data-date`).split(`,`).forEach(date => {
            if(this.unicDateArr.find(item => item == date)) return false;
            this.unicDateArr.push(date);
          })
        })
        this.unicDateArr.sort((a, b) => +new Date(a) > +new Date(b) ? 1 : -1);
      }

      // Fill the filter's dropdown list with one link per unique date.
      addDateItems(){
        this.filter.list.insertAdjacentHTML("beforeEnd", this.unicDateArr.map((item) => `<li><a href="${item}">${this.transformDate(item)}</a></li>`).join(``));
      }

      // Show artists whose data-date list mentions the chosen date and
      // hide the rest via the data-hidden attribute.
      sortArtistForDates(choosedDate) {
        this.actorsArr.forEach((item) => {
          if(item.getAttribute(`data-date`).indexOf(choosedDate) != -1){
            item.removeAttribute(`data-hidden`);
          } else {
            item.setAttribute(`data-hidden`, true);
          }
        })
      }
    }

    window.addEventListener(`load`, () => {
      new FilterEvent(document.querySelector(`[data-event-parent]`));
    });
  }
})();
|
storset/vtk
|
src/main/java/vtk/security/web/saml/UserData.java
|
/* Copyright (c) 2010, University of Oslo, Norway
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of the University of Oslo nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package vtk.security.web.saml;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.namespace.QName;
import org.opensaml.common.xml.SAMLConstants;
import org.opensaml.saml2.core.Assertion;
import org.opensaml.saml2.core.Attribute;
import org.opensaml.saml2.core.AttributeStatement;
import org.opensaml.saml2.core.AttributeValue;
import org.opensaml.xml.XMLObject;
import org.opensaml.xml.schema.XSString;
/**
 * Read-only view of the SAML attributes carried by an {@link Assertion}.
 * Attribute values are extracted once in the constructor; only string
 * values in the SAML 2.0 AttributeValue namespace are kept.
 */
public class UserData {

    private Map<String, List<String>> attrs = new HashMap<String, List<String>>();

    /**
     * Builds user data from the attribute statements of the assertion.
     *
     * @param assertion the SAML assertion to read attributes from
     * @throws IllegalArgumentException if the assertion carries neither an
     *         eduPersonPrincipalName nor a uid attribute
     */
    public UserData(Assertion assertion) {
        for (AttributeStatement attrStatement : assertion.getAttributeStatements()) {
            for (Attribute attr : attrStatement.getAttributes()) {
                this.attrs.put(attr.getName(), extractValues(attr));
            }
        }
        if (getUsername() == null) {
            throw new IllegalArgumentException("Assertion does not contain uid attribute");
        }
    }

    /**
     * Returns the username: eduPersonPrincipalName when present, otherwise
     * the uid attribute (with a mapping hack for "webid" users), or
     * {@code null} when neither attribute exists.
     */
    public String getUsername() {
        String principalName = getSimpleAttribute("eduPersonPrincipalName");
        if (principalName != null) {
            return principalName;
        }
        // XXX: Mapping hack for non full qualified webid users
        String uid = getSimpleAttribute("uid");
        if (uid == null) {
            // Neither attribute present. Returning null lets the constructor
            // throw its IllegalArgumentException; previously this path threw
            // a NullPointerException from uid.split().
            return null;
        }
        String[] userAndDomain = uid.split("@");
        if (userAndDomain.length > 1 && userAndDomain[1].contentEquals("webid")) {
            return uid + ".uio.no";
        }
        return uid;
    }

    /** Returns the "cn" (common name) attribute, or {@code null}. */
    public String getCommonName() {
        return getSimpleAttribute("cn");
    }

    /**
     * Returns all values of the named attribute as an unmodifiable list,
     * or {@code null} when the attribute is absent.
     */
    public List<String> getAttribute(String name) {
        List<String> attributes = this.attrs.get(name);
        if (attributes == null) {
            return null;
        }
        return Collections.unmodifiableList(attributes);
    }

    /**
     * Returns the first value of the named attribute, or {@code null}
     * when the attribute is absent or empty.
     */
    public String getSimpleAttribute(String name) {
        List<String> list = this.attrs.get(name);
        if (list == null) {
            return null;
        }
        if (list.isEmpty()) {
            return null;
        }
        return list.get(0);
    }

    /** Returns the set of attribute names as an unmodifiable set. */
    public Set<String> getAttributeNames() {
        return Collections.unmodifiableSet(this.attrs.keySet());
    }

    /**
     * Extracts the string values of a SAML attribute, skipping any value
     * that is not an XSString AttributeValue in the SAML 2.0 namespace.
     */
    private List<String> extractValues(Attribute attribute) {
        List<XMLObject> values = attribute.getAttributeValues();
        List<String> result = new ArrayList<String>();
        for (XMLObject val : values) {
            if (!(val instanceof XSString)) {
                continue;
            }
            XSString s = (XSString) val;
            QName qname = s.getElementQName();
            if (!SAMLConstants.SAML20_NS.equals(qname.getNamespaceURI())) {
                continue;
            }
            if (!AttributeValue.DEFAULT_ELEMENT_LOCAL_NAME.equals(qname.getLocalPart())) {
                continue;
            }
            result.add(s.getValue());
        }
        return result;
    }
}
|
mijaros/apiman-test
|
apiman-it-rest/src/test/java/io/apiman/test/integration/rest/plugins/policies/simpleheader/AbstractSimpleRequestHeaderPolicyIT.java
|
/*
* Copyright 2016 Red Hat Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.test.integration.rest.plugins.policies.simpleheader;
import static io.apiman.test.integration.runner.RestAssuredUtils.givenGateway;
import static io.apiman.test.integration.runner.RestAssuredUtils.when;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertThat;
import io.apiman.test.integration.base.AbstractApiTest;
import io.apiman.test.integration.runner.annotations.entity.Plugin;
import com.jayway.restassured.path.json.JsonPath;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
/**
 * Created by pstanko.
 *
 * Base integration test for the simple-header policy configured to add
 * the {@code X-Request} header to incoming requests. Concrete subclasses
 * supply the gateway URLs for a specific API/plan setup.
 */
@Plugin(artifactId = "apiman-plugins-simple-header-policy")
public abstract class AbstractSimpleRequestHeaderPolicyIT extends AbstractApiTest {

    /** Name of the header the policy is expected to add to requests. */
    protected static final String HEADER_NAME = "X-Request";

    /** Value the policy is expected to give the added header. */
    protected static final String HEADER_VALUE = "This is request";

    /** URL of the (echo) resource reached through the gateway. */
    protected abstract String getResourceURL();

    /** Gateway endpoint of the API under test. */
    protected abstract String getApiEndpoint();

    @Test
    public void shouldAddXRequestHeaderAtRequest() throws Exception {
        // The backend appears to echo request headers in the response body
        // under "headers.*" (presumably an echo service — confirm), so the
        // header added by the policy is visible there.
        when().
            get(getResourceURL()).
        then().
            body("headers." + HEADER_NAME, equalTo(HEADER_VALUE));
    }

    @Test
    public void shouldNotAddXRequestHeaderAtRequest() throws Exception {
        // The policy acts on the request side only: the response itself
        // must not carry the header.
        when().
            get(getResourceURL()).
        then().
            header(HEADER_NAME, isEmptyOrNullString());
    }
}
|
iTitus/PDXTools
|
pdx-tools/src/main/java/io/github/ititus/pdx/stellaris/user/save/Budget.java
|
package io.github.ititus.pdx.stellaris.user.save;
import io.github.ititus.pdx.pdxscript.PdxScriptObject;
/**
 * Budget section of a Stellaris save-game country.
 * Currently a stub: the wrapped {@link PdxScriptObject} is accepted but
 * not yet parsed.
 */
public class Budget {

    /**
     * @param o raw budget script object; income, expenses and balance
     *          parsing is still TODO
     */
    public Budget(PdxScriptObject o) {
        // TODO: income, expenses, balance
    }
}
|
otvorenesudy/otvorenesudy
|
spec/probe_spec_helper.rb
|
<reponame>otvorenesudy/otvorenesudy<filename>spec/probe_spec_helper.rb
require 'spec_helper'
require 'rake'
|
RichardRanft/RakNet
|
DependentExtensions/cat/Platform.hpp
|
<gh_stars>1000+
/*
Copyright (c) 2009-2010 <NAME>. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of LibCat nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CAT_PLATFORM_HPP
#define CAT_PLATFORM_HPP
#include <cat/Config.hpp>
#include <string.h>
namespace cat {
//// Compiler ////
// Mac OS X additional compilation flags
#ifdef __APPLE__
# include <TargetConditionals.h>
#endif
//-----------------------------------------------------------------------------
// Intel C++ Compiler : Interoperates with MSVC and GCC
#if defined(__INTEL_COMPILER) || defined(__ICL) || defined(__ICC) || defined(__ECC)
# define CAT_COMPILER_ICC
# define CAT_FENCE_COMPILER __memory_barrier();
#endif
//-----------------------------------------------------------------------------
// Borland C++ Compiler : Compatible with MSVC syntax
#if defined(__BORLANDC__)
# define CAT_COMPILER_BORLAND
# define CAT_COMPILER_COMPAT_MSVC
# define CAT_INLINE __inline
# define CAT_ASM_EMIT __emit__
//-----------------------------------------------------------------------------
// Digital Mars C++ Compiler (previously known as Symantec C++)
#elif defined(__DMC__) || defined(__SC__) || defined(__SYMANTECC__)
# define CAT_COMPILER_DMARS
# define CAT_COMPILER_COMPAT_MSVC
# define CAT_INLINE __inline
# define CAT_ASM_EMIT __emit__
//-----------------------------------------------------------------------------
// Codeplay VectorC C++ Compiler : Compatible with GCC and MSVC syntax, prefer GCC
#elif defined(__VECTORC__)
# define CAT_COMPILER_CODEPLAY
# define CAT_COMPILER_COMPAT_GCC
//-----------------------------------------------------------------------------
// Pathscale C++ Compiler : Compatible with GCC syntax
#elif defined(__PATHSCALE__)
# define CAT_COMPILER_PATHSCALE
# define CAT_COMPILER_COMPAT_GCC
//-----------------------------------------------------------------------------
// Watcom C++ Compiler : Compatible with GCC and MSVC syntax, prefer GCC
#elif defined(__WATCOMC__)
# define CAT_COMPILER_WATCOM
# define CAT_COMPILER_COMPAT_GCC
//-----------------------------------------------------------------------------
// SUN C++ Compiler : Compatible with GCC syntax
#elif defined(__SUNPRO_CC)
# define CAT_COMPILER_SUN
# define CAT_COMPILER_COMPAT_GCC
//-----------------------------------------------------------------------------
// Metrowerks C++ Compiler : Compatible with MSVC syntax
#elif defined(__MWERKS__)
# define CAT_COMPILER_MWERKS
# define CAT_COMPILER_COMPAT_MSVC
# define CAT_INLINE inline
# define CAT_ASM_BEGIN _asm {
# define CAT_ASM_EMIT __emit__
//-----------------------------------------------------------------------------
// GNU C++ Compiler
// SN Systems ProDG C++ Compiler : Compatible with GCC
#elif defined(__GNUC__) || defined(__APPLE_CC__) || defined(__SNC__)
# define CAT_COMPILER_GCC
# define CAT_COMPILER_COMPAT_GCC
# define CAT_FASTCALL __attribute__ ((fastcall))
//-----------------------------------------------------------------------------
// Microsoft Visual Studio C++ Compiler
#elif defined(_MSC_VER)
# define CAT_COMPILER_MSVC
# define CAT_COMPILER_COMPAT_MSVC
# define CAT_FASTCALL __fastcall
} // namespace cat
# include <cstdlib> // Intrinsics
# include <intrin.h> // Intrinsics
namespace cat {
//-----------------------------------------------------------------------------
// Otherwise unknown compiler
#else
# define CAT_COMPILER_UNKNOWN
# define CAT_ALIGNED(n) /* no way to detect alignment syntax */
# define CAT_PACKED /* no way to detect packing syntax */
# define CAT_INLINE inline
// No way to support inline assembly code here
# define CAT_RESTRICT
#endif
/*
A lot of compilers have similar syntax to MSVC or GCC,
so for simplicity I have those two defined below, and
any deviations are implemented with overrides above.
*/
// MSVC-compatible compilers
#if defined(CAT_COMPILER_COMPAT_MSVC)
#if !defined(CAT_ALIGNED)
# define CAT_ALIGNED(n) __declspec(align(n))
#endif
#if !defined(CAT_PACKED)
# define CAT_PACKED
# define CAT_PRAGMA_PACK
#endif
#if !defined(CAT_INLINE)
# define CAT_INLINE __forceinline
#endif
#if !defined(CAT_ASM_INTEL)
# define CAT_ASM_INTEL
#endif
#if !defined(CAT_ASM_BEGIN)
# define CAT_ASM_BEGIN __asm {
#endif
#if !defined(CAT_ASM_EMIT)
# define CAT_ASM_EMIT _emit
#endif
#if !defined(CAT_ASM_END)
# define CAT_ASM_END }
#endif
#if !defined(CAT_TLS)
# define CAT_TLS __declspec( thread )
#endif
#if !defined(CAT_RESTRICT)
# define CAT_RESTRICT __restrict
#endif
#if !defined(CAT_FENCE_COMPILER)
# if defined(CAT_COMPILER_MSVC)
# pragma intrinsic(_ReadWriteBarrier)
# endif
# define CAT_FENCE_COMPILER _ReadWriteBarrier();
#endif
#if !defined(CAT_DLL_EXPORT)
# define CAT_DLL_EXPORT __declspec(dllexport)
#endif
#if !defined(CAT_DLL_IMPORT)
# define CAT_DLL_IMPORT __declspec(dllimport)
#endif
// GCC-compatible compilers
#elif defined(CAT_COMPILER_COMPAT_GCC)
#if !defined(CAT_ALIGNED)
# define CAT_ALIGNED(n) __attribute__ ((aligned (n)))
#endif
#if !defined(CAT_PACKED)
# define CAT_PACKED __attribute__ ((packed))
#endif
#if !defined(CAT_INLINE)
# define CAT_INLINE inline /* __inline__ __attribute__((always_inline)) */
#endif
#if !defined(CAT_ASM_ATT)
# define CAT_ASM_ATT
#endif
#if !defined(CAT_ASM_BEGIN)
# define CAT_ASM_BEGIN __asm__ __volatile__ (
#endif
#if !defined(CAT_ASM_EMIT)
# define CAT_ASM_EMIT .byte
#endif
#if !defined(CAT_ASM_END)
# define CAT_ASM_END );
#endif
#if !defined(CAT_TLS)
# define CAT_TLS __thread
#endif
#if !defined(CAT_RESTRICT)
# define CAT_RESTRICT __restrict__
#endif
#if !defined(CAT_FENCE_COMPILER)
# define CAT_FENCE_COMPILER CAT_ASM_BEGIN "" ::: "memory" CAT_ASM_END
#endif
#if !defined(CAT_DLL_EXPORT)
# define CAT_DLL_EXPORT __attribute__((dllexport))
#endif
#if !defined(CAT_DLL_IMPORT)
# define CAT_DLL_IMPORT __attribute__((dllimport))
#endif
#endif // CAT_COMPILER_COMPAT_*
//// Debug Flag ////
#if defined(CAT_COMPILER_MSVC)
# if defined(_DEBUG)
# define CAT_DEBUG
# endif
#else
# if !defined(NDEBUG)
# define CAT_DEBUG
# endif
#endif
//// Instruction Set Architecture ////
#if defined(__powerpc__) || defined(__ppc__) || defined(_POWER) || defined(_M_PPC) || \
defined(_M_MPPC) || defined(__POWERPC) || defined(powerpc) || defined(__ppc64__) || \
defined(_PS3) || defined(__PS3__) || defined(SN_TARGET_PS3) || defined(__POWERPC__)
# define CAT_ISA_PPC
#elif defined(__i386__) || defined(i386) || defined(intel) || defined(_M_IX86) || \
defined(__ia64) || defined(__ia64__) || defined(__x86_64) || defined(_M_IA64) || \
defined(_M_X64)
# define CAT_ISA_X86
#elif defined(TARGET_CPU_ARM)
# define CAT_ISA_ARM
#elif defined(__mips__)
# define CAT_ISA_MIPS
#elif defined(__ALPHA__)
# define CAT_ISA_ALPHA
#else
# define CAT_ISA_UNKNOWN
#endif
//// Endianness ////
// Okay -- Technically IA64 and PPC can switch endianness with an MSR bit
// flip, but come on no one does that! ...Right?
// If it's not right, make sure that one of the first two flags are defined.
#if defined(__LITTLE_ENDIAN__)
# define CAT_ENDIAN_LITTLE
#elif defined(__BIG_ENDIAN__)
# define CAT_ENDIAN_BIG
#elif defined(CAT_ISA_X86)
# define CAT_ENDIAN_LITTLE
#elif defined(CAT_ISA_PPC)
# define CAT_ENDIAN_BIG
#else
# define CAT_ENDIAN_UNKNOWN /* Must be detected at runtime */
#endif
//// Word Size ////
#if defined(_LP64) || defined(__LP64__) || defined(__arch64__) || \
defined(_WIN64) || defined(_M_X64) || defined(__ia64) || \
defined(__ia64__) || defined(__x86_64) || defined(_M_IA64) || \
defined(__mips64)
# define CAT_WORD_64
// 64-bit MSVC does not support inline assembly
# if defined(CAT_COMPILER_MSVC)
# undef CAT_ASM_INTEL
# endif
#else // Assuming 32-bit otherwise!
# define CAT_WORD_32
#endif
// __fastcall calling convention is rarely supported, and doesn't make sense for 64-bit targets
#if !defined(CAT_FASTCALL)
# define CAT_FASTCALL
#elif !defined(CAT_ISA_X86) || defined(CAT_WORD_64)
# undef CAT_FASTCALL
# define CAT_FASTCALL
#endif
//// Operating System ////
#if defined(__APPLE__) && defined(TARGET_OS_IPHONE)
# define CAT_OS_IPHONE
# define CAT_OS_APPLE
#elif defined(__APPLE__) && (defined(__MACH__) || defined(__DARWIN__))
# define CAT_OS_OSX
# define CAT_OS_APPLE
#elif defined(__OpenBSD__) || defined(__NetBSD__) || defined(__FreeBSD__)
# define CAT_OS_BSD
#elif defined(__linux__) || defined(__unix__)
# define CAT_OS_LINUX
#elif defined(_WIN32_WCE)
# define CAT_OS_WINDOWS_CE
# define CAT_OS_WINDOWS /* Also defined */
#elif defined(_WIN32)
# define CAT_OS_WINDOWS
#elif defined(_XBOX) || defined(_X360)
# define CAT_OS_XBOX
#elif defined(_PS3) || defined(__PS3__) || defined(SN_TARGET_PS3)
# define CAT_OS_PS3
#elif defined(__OS2__)
# define CAT_OS_OS2
#elif defined(__APPLE__)
# define CAT_OS_APPLE
#else
# define CAT_OS_UNKNOWN
#endif
// Detect CYGWIN environment
#if defined(__CYGWIN__) || defined(__CYGWIN32__)
# define CAT_CYGWIN
#endif
// DLL import/export macros based on OS
#if defined(CAT_OS_WINDOWS) || defined(CAT_CYGWIN)
# if defined(CAT_NEUTER_EXPORT)
# define CAT_EXPORT /* Do not import or export any symbols */
# elif defined(CAT_BUILD_DLL)
# define CAT_EXPORT CAT_DLL_EXPORT /* Implementing a DLL so export this symbol */
# else
# define CAT_EXPORT CAT_DLL_IMPORT /* Using a DLL so import this symbol, faster on Windows */
# endif
#else
# undef CAT_DLL_EXPORT
# undef CAT_DLL_IMPORT
# define CAT_DLL_EXPORT
# define CAT_DLL_IMPORT
# define CAT_EXPORT
#endif
//// Basic types ////
#if defined(CAT_COMPILER_MSVC)
// MSVC does not ship with stdint.h (C99 standard...)
typedef unsigned __int8 u8;
typedef signed __int8 s8;
typedef unsigned __int16 u16;
typedef signed __int16 s16;
typedef unsigned __int32 u32;
typedef signed __int32 s32;
typedef unsigned __int64 u64;
typedef signed __int64 s64;
#else
} // namespace cat
#include <stdint.h>
namespace cat {
// All other compilers use this
typedef uint8_t u8;
typedef int8_t s8;
typedef uint16_t u16;
typedef int16_t s16;
typedef uint32_t u32;
typedef int32_t s32;
typedef uint64_t u64;
typedef int64_t s64;
#endif
#if defined(CAT_COMPILER_GCC) && defined(CAT_WORD_64)
// GCC also adds 128-bit types :D
typedef __uint128_t u128;
typedef __int128_t s128;
#endif
typedef float f32;
typedef double f64;
// Type-punning helper: the same 32 bits viewed either as an IEEE float (f)
// or as an unsigned 32-bit integer (i).
union Float32
{
    float f;
    u32 i;

    Float32(float n) : f(n) {}
    Float32(u32 n) : i(n) {}
};
//// String and buffer macros ////
// Same as strncpy() in all ways except that the result is guaranteed to
// be a nul-terminated C string
#if defined(CAT_COMPILER_MSVC)
# define CAT_STRNCPY(dest, src, size) { strncpy_s(dest, size, src, size); (dest)[(size)-1] = '\0'; }
#else
# define CAT_STRNCPY(dest, src, size) { strncpy(dest, src, size); (dest)[(size)-1] = '\0'; }
#endif
// Because memory clearing is a frequent operation
#define CAT_CLR(dest, size) memset(dest, 0, size)
// Works for arrays, also
#define CAT_OBJCLR(object) memset((void*)&(object), 0, sizeof(object))
// Stringize
#define CAT_STRINGIZE(X) DO_CAT_STRINGIZE(X)
#define DO_CAT_STRINGIZE(X) #X
// Variable-length data trailing a struct: returns a pointer to the first
// byte immediately following the object of type T in memory.
template<typename T> CAT_INLINE u8 *GetTrailingBytes(T *object)
{
    u8 *base = reinterpret_cast<u8*>(object);
    return base + sizeof(T);
}
// Bounds
// Clamps x from below: yields minimum when x is smaller, otherwise x itself.
template<typename T> CAT_INLINE T BoundMin(const T &minimum, const T &x)
{
    return (x < minimum) ? minimum : x;
}
// Clamps x from above: yields maximum when x is larger, otherwise x itself.
template<typename T> CAT_INLINE T BoundMax(const T &maximum, const T &x)
{
    return (x > maximum) ? maximum : x;
}
// Clamps x into the inclusive range [minimum, maximum].
template<typename T> CAT_INLINE T Bound(const T &minimum, const T &maximum, const T &x)
{
    if (x < minimum) return minimum;
    return (x > maximum) ? maximum : x;
}
//// Miscellaneous bitwise macros ////
#define CAT_BITCLRHI8(reg, count) ((u8)((u8)(reg) << (count)) >> (count)) /* sets to zero a number of high bits in a byte */
#define CAT_BITCLRLO8(reg, count) ((u8)((u8)(reg) >> (count)) << (count)) /* sets to zero a number of low bits in a byte */
#define CAT_BITCLRHI16(reg, count) ((u16)((u16)(reg) << (count)) >> (count)) /* sets to zero a number of high bits in a 16-bit word */
#define CAT_BITCLRLO16(reg, count) ((u16)((u16)(reg) >> (count)) << (count)) /* sets to zero a number of low bits in a 16-bit word */
#define CAT_BITCLRHI32(reg, count) ((u32)((u32)(reg) << (count)) >> (count)) /* sets to zero a number of high bits in a 32-bit word */
#define CAT_BITCLRLO32(reg, count) ((u32)((u32)(reg) >> (count)) << (count)) /* sets to zero a number of low bits in a 32-bit word */
//// Integer macros ////
#define CAT_AT_LEAST_2_BITS(n) ( (n) & ((n) - 1) )
#define CAT_LEAST_SIGNIFICANT_BIT(n) ( (n) & (u32)(-(s32)(n)) ) /* 0 -> 0 */
#define CAT_IS_POWER_OF_2(n) ( n && !CAT_AT_LEAST_2_BITS(n) )
// Safely take the average of two numbers without possibility of overflow
#define CAT_SAFE_AVERAGE(A, B) (((A) & (B)) + (((A) ^ (B)) >> 1))
// Bump 'n' to the next unit of 'width'
// 0=CAT_CEIL_UNIT(0, 16), 1=CAT_CEIL_UNIT(1, 16), 1=CAT_CEIL_UNIT(16, 16), 2=CAT_CEIL_UNIT(17, 16)
#define CAT_CEIL_UNIT(n, width) ( ( (n) + (width) - 1 ) / (width) )
// 0=CAT_CEIL(0, 16), 16=CAT_CEIL(1, 16), 16=CAT_CEIL(16, 16), 32=CAT_CEIL(17, 16)
#define CAT_CEIL(n, width) ( CAT_CEIL_UNIT(n, width) * (width) )
//// Rotation macros ////
#define CAT_ROL8(n, r) ( ((u8)(n) << (r)) | ((u8)(n) >> ( 8 - (r))) ) /* only works for u8 */
#define CAT_ROR8(n, r) ( ((u8)(n) >> (r)) | ((u8)(n) << ( 8 - (r))) ) /* only works for u8 */
#define CAT_ROL16(n, r) ( ((u16)(n) << (r)) | ((u16)(n) >> (16 - (r))) ) /* only works for u16 */
#define CAT_ROR16(n, r) ( ((u16)(n) >> (r)) | ((u16)(n) << (16 - (r))) ) /* only works for u16 */
#define CAT_ROL32(n, r) ( ((u32)(n) << (r)) | ((u32)(n) >> (32 - (r))) ) /* only works for u32 */
#define CAT_ROR32(n, r) ( ((u32)(n) >> (r)) | ((u32)(n) << (32 - (r))) ) /* only works for u32 */
#define CAT_ROL64(n, r) ( ((u64)(n) << (r)) | ((u64)(n) >> (64 - (r))) ) /* only works for u64 */
#define CAT_ROR64(n, r) ( ((u64)(n) >> (r)) | ((u64)(n) << (64 - (r))) ) /* only works for u64 */
//// Byte-order swapping ////
#define CAT_BOSWAP16(n) CAT_ROL16(n, 8)
#define CAT_BOSWAP32(n) ( (CAT_ROL32(n, 8) & 0x00ff00ff) | (CAT_ROL32(n, 24) & 0xff00ff00) )
#define CAT_BOSWAP64(n) ( ((u64)CAT_BOSWAP32((u32)n) << 32) | CAT_BOSWAP32((u32)(n >> 32)) )
//// Intrinsics ////
#if defined(CAT_OS_WINDOWS_CE)
#pragma intrinsic(_lrotl)
#pragma intrinsic(_lrotr)
#undef CAT_ROL32
#undef CAT_ROR32
#define CAT_ROL32(n, r) _lrotl(n, r)
#define CAT_ROR32(n, r) _lrotr(n, r)
#elif defined(CAT_COMPILER_MSVC)
#pragma intrinsic(_rotl)
#pragma intrinsic(_rotr)
#pragma intrinsic(_rotl64)
#pragma intrinsic(_rotr64)
#pragma intrinsic(_byteswap_ushort)
#pragma intrinsic(_byteswap_ulong)
#pragma intrinsic(_byteswap_uint64)
#pragma intrinsic(_BitScanForward)
#pragma intrinsic(_BitScanReverse)
#pragma intrinsic(__emulu)
#pragma intrinsic(_InterlockedExchange)
#pragma intrinsic(_interlockedbittestandset)
#pragma intrinsic(_interlockedbittestandreset)
#if defined(CAT_WORD_64)
#pragma intrinsic(__rdtsc)
#pragma intrinsic(_umul128)
#pragma intrinsic(_BitScanForward64)
#pragma intrinsic(_BitScanReverse64)
#pragma intrinsic(_InterlockedCompareExchange128)
#else
#pragma intrinsic(_InterlockedCompareExchange64)
#endif
#undef CAT_ROL32
#undef CAT_ROR32
#undef CAT_ROL64
#undef CAT_ROR64
#undef CAT_BOSWAP16
#undef CAT_BOSWAP32
#undef CAT_BOSWAP64
#define CAT_ROL32(n, r) _rotl(n, r)
#define CAT_ROR32(n, r) _rotr(n, r)
#define CAT_ROL64(n, r) _rotl64(n, r)
#define CAT_ROR64(n, r) _rotr64(n, r)
#define CAT_BOSWAP16(n) _byteswap_ushort(n)
#define CAT_BOSWAP32(n) _byteswap_ulong(n)
#define CAT_BOSWAP64(n) _byteswap_uint64(n)
#endif
} // namespace cat
#endif // CAT_PLATFORM_HPP
|
cameroncooke/XcodeHeaders
|
Frameworks/IBFoundation/IBICLaunchImageSet.h
|
<reponame>cameroncooke/XcodeHeaders<gh_stars>1-10
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import <IBFoundation/IBICSlottedAsset.h>
@interface IBICLaunchImageSet : IBICSlottedAsset
{
}
+ (id)importPriority;
+ (id)contentReferenceTypeName;
+ (id)classNameComponents;
+ (id)catalogItemFileExtension;
+ (id)defaultInstanceForIdioms:(id)arg1 enforceStrictIdioms:(BOOL)arg2;
+ (id)defaultName;
+ (Class)assetRepClass;
- (BOOL)requiresRootNamespace;
- (id)intrinsicallyOrderedChildren;
- (id)children;
- (id)assetRepForStructuredIdentifier:(id)arg1;
- (id)childForIdentifier:(id)arg1;
- (id)assetRepForIdentifier:(id)arg1;
- (id)assetRepForSlot:(id)arg1;
- (id)initializeManifestArchivist;
@end
|
sandeepreddymurthy1/bigdime
|
bigdime-core/src/test/java/io/bigdime/core/channel/ChannelFactoryTest.java
|
<filename>bigdime-core/src/test/java/io/bigdime/core/channel/ChannelFactoryTest.java<gh_stars>0
/**
* Copyright (C) 2015 Stubhub.
*/
package io.bigdime.core.channel;
import java.util.HashMap;
import java.util.Map;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.Assert;
import org.testng.annotations.Test;
import io.bigdime.core.AdaptorConfigurationException;
import io.bigdime.core.InvalidDataTypeConfigurationException;
import io.bigdime.core.InvalidValueConfigurationException;
import io.bigdime.core.RequiredParameterMissingConfigurationException;
import io.bigdime.core.commons.JsonHelper;
import io.bigdime.core.config.AdaptorConfigConstants.ChannelConfigConstants;
import io.bigdime.core.config.ChannelConfig;
// TODO: Find out the best practice for using context configuration: individual classes or a location for XML files.
@Configuration
@ContextConfiguration(classes = { JsonHelper.class, ChannelFactory.class, MemoryChannel.class })
public class ChannelFactoryTest extends AbstractTestNGSpringContextTests {

    @Autowired
    ChannelFactory channelFactory;

    /**
     * Assert that passing a null {@link ChannelConfig} to getChannel throws a
     * NullPointerException.
     *
     * @throws AdaptorConfigurationException
     */
    @Test(expectedExceptions = NullPointerException.class)
    public void testGetChannelWithNullChannelConfig() throws AdaptorConfigurationException {
        // The assertion is never evaluated; getChannel is expected to throw first.
        Assert.assertNotNull(channelFactory.getChannel(getNullConfig()));
    }

    /*
     * Have to use this method; Sonar doesn't like passing null directly to
     * getChannel.
     */
    private ChannelConfig getNullConfig() {
        return null;
    }

    /**
     * Assert that a config without a channel-class set causes getChannel to
     * throw RequiredParameterMissingConfigurationException.
     *
     * @throws AdaptorConfigurationException
     */
    @Test(expectedExceptions = RequiredParameterMissingConfigurationException.class)
    public void testGetChannelWithNullChannelClass() throws AdaptorConfigurationException {
        ChannelConfig channelConfig = new ChannelConfig();
        Assert.assertNotNull(channelFactory.getChannel(channelConfig));
    }

    /**
     * Assert that if a valid channel-class is specified, getChannel method will
     * return a non null value.
     *
     * @throws AdaptorConfigurationException
     */
    @Test
    public void testGetChannel() throws AdaptorConfigurationException {
        ChannelConfig channelConfig = new ChannelConfig();
        channelConfig.setChannelProperties(new HashMap<String, Object>());
        channelConfig.setChannelClass("io.bigdime.core.channel.MemoryChannel");
        Assert.assertNotNull(channelFactory.getChannel(channelConfig));
    }

    /**
     * Set an invalid class in the channel-class and make sure that channel
     * can't be built.
     *
     * @throws Throwable
     */
    @Test(expectedExceptions = ClassNotFoundException.class)
    public void testNegativeWithInvalidChannelClass() throws Throwable {
        try {
            ChannelConfig channelConfig = new ChannelConfig();
            // NOTE(review): assumes ChannelConfig initializes its properties map;
            // otherwise this line would NPE before getChannel is reached -- confirm.
            channelConfig.getChannelProperties().put(ChannelConfigConstants.CONCURRENCY, 1);
            channelConfig.setChannelClass("unit-channel-class-testNegativeWithInvalidChannelClass");
            channelFactory.getChannel(channelConfig);
            Assert.fail("should have thrown a AdaptorConfigurationException");
        } catch (AdaptorConfigurationException e) {
            // Unwrap so the expectedExceptions check sees the ClassNotFoundException cause.
            throw e.getCause();
        }
    }

    /**
     * If the concurrency value is set to 0 or less, throw
     * InvalidValueConfigurationException.
     */
    @Test(expectedExceptions = InvalidValueConfigurationException.class)
    public void testNegativeWithLessThanOneConcurrencyValue() throws AdaptorConfigurationException {
        ChannelConfig channelConfig = Mockito.mock(ChannelConfig.class);
        @SuppressWarnings("unchecked")
        Map<String, Object> properties = Mockito.mock(Map.class);
        Mockito.when(channelConfig.getChannelProperties()).thenReturn(properties);
        Mockito.when(properties.get("concurrency")).thenReturn("0");
        Mockito.when(channelConfig.getChannelClass()).thenReturn("unit-channel-class");
        channelFactory.getChannel(channelConfig);
        Assert.fail("should have thrown a AdaptorConfigurationException");
    }

    /**
     * If the concurrency value is set to a not-a-number value, throw
     * InvalidDataTypeConfigurationException.
     */
    @Test(expectedExceptions = InvalidDataTypeConfigurationException.class)
    public void testNegativeWithNotNumberConcurrencyValue() throws AdaptorConfigurationException {
        ChannelConfig channelConfig = Mockito.mock(ChannelConfig.class);
        @SuppressWarnings("unchecked")
        Map<String, Object> properties = Mockito.mock(Map.class);
        Mockito.when(channelConfig.getChannelProperties()).thenReturn(properties);
        Mockito.when(properties.get("concurrency")).thenReturn("not-a-number");
        Mockito.when(channelConfig.getChannelClass()).thenReturn("unit-channel-class");
        channelFactory.getChannel(channelConfig);
        Assert.fail("should have thrown a AdaptorConfigurationException");
    }
}
|
anthonyAgnone/vSchoolAssignments
|
projects/plot-fullstack/client/src/index.js
|
<reponame>anthonyAgnone/vSchoolAssignments
import React from 'react';
import ReactDOM from 'react-dom';
import { BrowserRouter } from 'react-router-dom';
import App from './App';
import { DragDropContextProvider } from 'react-dnd';
import HTML5Backend from 'react-dnd-html5-backend';
import GameProvider from './components/GameProvider';
import TimeProvider from './components/TimeProvider';
import Reboot from './components/Reboot';
ReactDOM.render(
<DragDropContextProvider backend={HTML5Backend}>
<TimeProvider>
<GameProvider>
<BrowserRouter>
<Reboot>
<App />
</Reboot>
</BrowserRouter>
</GameProvider>
</TimeProvider>
</DragDropContextProvider>,
document.getElementById('root')
);
|
VirtualGamer/SnowEngine
|
Dependencies/opengles/src/org/lwjgl/opengles/NVFramebufferBlit.java
|
<reponame>VirtualGamer/SnowEngine
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengles;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
/**
* Native bindings to the <a href="https://www.khronos.org/registry/gles/extensions/NV/NV_framebuffer_blit.txt">NV_framebuffer_blit</a> extension.
*
* <p>This extension modifies OpenGL ES 2.0 by splitting the framebuffer object binding point into separate DRAW and READ bindings. This allows copying
* directly from one framebuffer to another. In addition, a new high performance blit function is added to facilitate these blits and perform some data
* conversion where allowed.</p>
*
* <p>Requires {@link GLES20 GLES 2.0}.</p>
*/
public class NVFramebufferBlit {
/**
* Accepted by the {@code target} parameter of BindFramebuffer, CheckFramebufferStatus, FramebufferTexture2D, FramebufferRenderbuffer, and
* GetFramebufferAttachmentParameteriv.
*/
public static final int
GL_READ_FRAMEBUFFER_NV = 0x8CA8,
GL_DRAW_FRAMEBUFFER_NV = 0x8CA9;
/** Accepted by the {@code pname} parameters of GetIntegerv and GetFloatv. */
public static final int
GL_DRAW_FRAMEBUFFER_BINDING_NV = 0x8CA6,
GL_READ_FRAMEBUFFER_BINDING_NV = 0x8CAA;
protected NVFramebufferBlit() {
throw new UnsupportedOperationException();
}
static boolean isAvailable(GLESCapabilities caps) {
return checkFunctions(
caps.glBlitFramebufferNV
);
}
// --- [ glBlitFramebufferNV ] ---
public static void glBlitFramebufferNV(int srcX0, int srcY0, int srcX1, int srcY1, int dstX0, int dstY0, int dstX1, int dstY1, int mask, int filter) {
long __functionAddress = GLES.getCapabilities().glBlitFramebufferNV;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callV(__functionAddress, srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter);
}
}
|
BulkSecurityGeneratorProject/Cashcash
|
src/main/webapp/js/entities/cash-transaction/cash-transaction.state.js
|
<reponame>BulkSecurityGeneratorProject/Cashcash
// Registers all UI-Router states for the cash-transaction entity: the paged
// list, the detail view, and the modal dialogs for create / edit / delete /
// file-import / bulk-create flows.
export default function stateConfig($stateProvider) {
    "ngInject";
    $stateProvider
        // List view; paging, sorting, free-text search and account/date
        // filters are all carried in the query string.
        .state('cash-transaction', {
            parent: 'entity',
            url: '/cash-transaction?page&sort&search&accountIdList&startDate&endDate',
            data: {
                authorities: ['ROLE_USER'],
                pageTitle: 'CashTransactions'
            },
            views: {
                'content@': 'cashTransactionComponent'
            },
            params: {
                // squash: true removes the param from the URL while it holds its default.
                page: {
                    value: '1',
                    squash: true
                },
                sort: {
                    value: 'id,asc',
                    squash: true
                },
                search: null
            },
            resolve: {
                // Parses page/sort query params into the pagination state consumed by the list component.
                pagingParams: ['$stateParams', 'PaginationUtil', function ($stateParams, PaginationUtil) {
                    return {
                        page: PaginationUtil.parsePage($stateParams.page),
                        sort: $stateParams.sort,
                        predicate: PaginationUtil.parsePredicate($stateParams.sort),
                        ascending: PaginationUtil.parseAscending($stateParams.sort),
                        search: $stateParams.search
                    };
                }],
                // Raw filter query params (accounts and date range) passed through unmodified.
                filterParams: ['$stateParams', function ($stateParams) {
                    return {
                        accountIdList: $stateParams.accountIdList,
                        startDate: $stateParams.startDate,
                        endDate: $stateParams.endDate
                    };
                }]
            }
        })
        // Detail view for a single transaction, resolved by the {id} URL segment.
        .state('cash-transaction-detail', {
            parent: 'entity',
            url: '/cash-transaction/{id}',
            data: {
                authorities: ['ROLE_USER'],
                pageTitle: 'CashTransaction'
            },
            views: {
                'content@': 'cashTransactionDetailComponent'
            },
            resolve: {
                cashTransaction: ['$stateParams', 'CashTransaction', function ($stateParams, CashTransaction) {
                    return CashTransaction.get({id: $stateParams.id}).$promise;
                }]
            }
        })
        // "New transaction" modal; success reloads the list, dismissal returns to the parent state.
        .state('cash-transaction-new', {
            parent: 'cash-transaction',
            url: '/new',
            data: {
                authorities: ['ROLE_USER']
            },
            onEnter: ['$stateParams', '$state', '$uibModal', function ($stateParams, $state, $uibModal) {
                $uibModal.open({
                    component: 'cashTransactionDialogComponent',
                    backdrop: 'static',
                    size: 'lg'
                }).result.then(function () {
                    $state.go('cash-transaction', null, {reload: true});
                }, function () {
                    $state.go('^');
                });
            }]
        })
        // Edit modal; the entity being edited is resolved before the dialog opens.
        .state('cash-transaction-edit', {
            parent: 'cash-transaction',
            url: '/{id}/edit',
            data: {
                authorities: ['ROLE_USER']
            },
            onEnter: ['$stateParams', '$state', '$uibModal', function ($stateParams, $state, $uibModal) {
                $uibModal.open({
                    component: 'cashTransactionDialogComponent',
                    backdrop: 'static',
                    size: 'lg',
                    resolve: {
                        entity: ['CashTransaction', function (CashTransaction) {
                            return CashTransaction.get({id: $stateParams.id}).$promise;
                        }]
                    }
                }).result.then(function () {
                    $state.go('cash-transaction', null, {reload: true});
                }, function () {
                    $state.go('^');
                });
            }]
        })
        // Delete-confirmation modal for a single transaction.
        .state('cash-transaction-delete', {
            parent: 'cash-transaction',
            url: '/{id}/delete',
            data: {
                authorities: ['ROLE_USER']
            },
            onEnter: ['$stateParams', '$state', '$uibModal', function ($stateParams, $state, $uibModal) {
                $uibModal.open({
                    component: 'cashTransactionDeleteComponent',
                    size: 'md',
                    resolve: {
                        entity: ['CashTransaction', function (CashTransaction) {
                            return CashTransaction.get({id: $stateParams.id}).$promise;
                        }]
                    }
                }).result.then(function () {
                    $state.go('cash-transaction', null, {reload: true});
                }, function () {
                    $state.go('^');
                });
            }]
        })
        // File-upload modal; a successful import forwards the parsed rows to
        // the cash-transaction-newList state for review.
        .state('cash-transaction-importFile', {
            parent: 'cash-transaction',
            url: '/upload',
            data: {
                authorities: ['ROLE_USER']
            },
            onEnter: ['$stateParams', '$state', '$uibModal', function ($stateParams, $state, $uibModal) {
                $uibModal.open({
                    component: 'cashTransactionImportComponent',
                    size: 'md'
                }).result.then(function (result) {
                    if (result) {
                        $state.go('cash-transaction-newList', {cashTransactionList: result}, {reload: true});
                    } else {
                        $state.go('^');
                    }
                }, function () {
                    $state.go('^');
                });
            }]
        })
        // Bulk-review modal for a list of (imported) transactions handed over via state params.
        .state('cash-transaction-newList', {
            parent: 'cash-transaction',
            url: '/newList',
            params: {cashTransactionList: null},
            data: {
                authorities: ['ROLE_USER']
            },
            onEnter: ['$stateParams', '$state', '$uibModal', function ($stateParams, $state, $uibModal) {
                $uibModal.open({
                    component: 'cashTransactionNewListDialogComponent',
                    size: 'lg',
                    backdrop: 'static',
                    keyboard: false,
                    resolve: {
                        cashTransactionList: function () {
                            return $stateParams.cashTransactionList;
                        }
                    }
                }).result.then(function (result) {
                    $state.go('cash-transaction', null, {reload: true});
                }, function () {
                    $state.go('^');
                });
            }]
        });
};
|
penghaiYin/bm-work-2020
|
server/src/main/java/edp/vap/dto/organizationDto/OrganizationPut.java
|
package edp.vap.dto.organizationDto;
import lombok.Data;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
/**
 * Request body (DTO) for updating an organization via PUT.
 *
 * Getters/setters/equals/hashCode are generated by Lombok's {@code @Data}.
 */
@Data
// NOTE(review): @NotNull on a class declaration has no effect for standard
// Bean Validation targets -- confirm a custom validator relies on it.
@NotNull(message = "organization info cannot be null")
public class OrganizationPut {

    /** Primary key of the organization to update; must be a positive id. */
    @Min(value = 1L, message = "Invalid organization id")
    private Long id;

    /** Display name; required, must not be blank. */
    @NotBlank(message = "organization name cannot be EMPTY")
    private String name;

    /** Optional free-text description. */
    private String description;

    /** Optional organization code. */
    private String code;

    /** Id of the parent organization when this is a sub-organization. */
    private Long parentId;

    /** Id of the owning user. */
    private Long userId;

    /** Avatar image reference. */
    private String avatar;

    /** Whether members may create projects under this organization. */
    private Boolean allowCreateProject;

    /** Member permission flag; only 0 or 1 is accepted. */
    @Min(value = 0L, message = "Invalid permission")
    @Max(value = 1L, message = "Invalid permission")
    private Short memberPermission;
}
|
spmallette/groovy
|
src/main/java/org/codehaus/groovy/ast/builder/AstStringCompiler.java
|
<filename>src/main/java/org/codehaus/groovy/ast/builder/AstStringCompiler.java<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.ast.builder;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyCodeSource;
import org.codehaus.groovy.ast.ASTNode;
import org.codehaus.groovy.ast.stmt.BlockStatement;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.control.CompilerConfiguration;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * This class handles converting Strings to ASTNode lists.
 */
public class AstStringCompiler {

    /**
     * Performs the String source to {@link java.util.List} of {@link ASTNode}.
     *
     * @param script
     *      a Groovy script in String form
     * @param compilePhase
     *      the int based CompilePhase to compile it to.
     * @param statementsOnly
     *      when true, the synthetic script class generated for the source is
     *      filtered out of the result, leaving the statement block and any
     *      explicitly declared classes
     * @return {@link java.util.List} of {@link ASTNode}
     */
    public List<ASTNode> compile(String script, CompilePhase compilePhase, boolean statementsOnly) {
        final String scriptClassName = makeScriptClassName();
        GroovyCodeSource codeSource = new GroovyCodeSource(script, scriptClassName + ".groovy", "/groovy/script");
        // NOTE(review): AccessController.doPrivileged is deprecated for removal on
        // recent JDKs -- confirm the minimum supported JDK for this code path.
        CompilationUnit cu = new CompilationUnit(CompilerConfiguration.DEFAULT, codeSource.getCodeSource(),
                AccessController.doPrivileged((PrivilegedAction<GroovyClassLoader>) GroovyClassLoader::new));
        cu.addSource(codeSource.getName(), script);
        cu.compile(compilePhase.getPhaseNumber());

        // collect all the ASTNodes into the result, possibly ignoring the script body if desired;
        // the reduce runs sequentially, so the combiner (o1, o2) -> o1 is never exercised here
        List<ASTNode> result = cu.getAST().getModules().stream().reduce(new LinkedList<>(), (acc, node) -> {
            BlockStatement statementBlock = node.getStatementBlock();
            if (null != statementBlock) {
                acc.add(statementBlock);
            }
            acc.addAll(
                    node.getClasses().stream()
                            .filter(c -> !(statementsOnly && scriptClassName.equals(c.getName())))
                            .collect(Collectors.toList())
            );
            return acc;
        }, (o1, o2) -> o1);

        return result;
    }

    /**
     * Performs the String source to {@link java.util.List} of statement {@link ASTNode}.
     *
     * @param script a Groovy script in String form
     * @return {@link java.util.List} of statement {@link ASTNode}
     * @since 3.0.0
     */
    public List<ASTNode> compile(String script) {
        return this.compile(script, CompilePhase.CONVERSION, true);
    }

    // Builds a unique-enough class name for the transient script being compiled.
    private static String makeScriptClassName() {
        return "Script" + System.nanoTime();
    }
}
|
cdluminate/advorder
|
lib/datasets/__init__.py
|
<gh_stars>1-10
'''
Copyright (C) 2020-2021 <NAME> <<EMAIL>>
Released under the Apache-2.0 License.
'''
from . import fashion
from . import sop
|
markzhai/DBFlow
|
DBFlow-Core/src/main/java/com/raizlabs/android/dbflow/annotation/ModelContainer.java
|
<reponame>markzhai/DBFlow
package com.raizlabs.android.dbflow.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Author: andrewgrosner
 * Description: Will generate a $Container class definition for the Model class. It is required when using
 * ModelContainers, that we mark every contained class with this annotation so we can handle them properly.
 * <p>
 * The annotation is retained only in source ({@link RetentionPolicy#SOURCE}): it is consumed by the
 * annotation processor at compile time and leaves no trace in the compiled class file.
 */
@Retention(RetentionPolicy.SOURCE)
@Target(ElementType.TYPE)
public @interface ModelContainer {
}
|
rajyan/AtCoder
|
ABC/ABC075/C.cpp
|
//#include <cassert>
//#include <cstdio>
//#include <cmath>
//#include <iostream>
//#include <sstream>
//#include <string>
//#include <vector>
//#include <map>
//#include <queue>
//#include <algorithm>
//
//const int MOD = 1000000007, INF = 1111111111;
//using namespace std;
//using lint = long long;
//
//template <class T>
//ostream &operator<<(ostream &os, const vector<T> &vec) {
// for (int i = 0; i < (int)vec.size(); i++) {
// os << vec[i] << (i + 1 == vec.size() ? "" : " ");
// }
// return os;
//}
//
//#ifdef _DEBUG
//template <class Head>
//void dump(const char* str, Head &&h) { cerr << str << " = " << h << "\n"; };
//template <class Head, class... Tail>
//void dump(const char* str, Head &&h, Tail &&... t) {
// while (*str != ',') cerr << *str++; cerr << " = " << h << "\n";
// dump(str + 1, t...);
//}
//#define DMP(...) dump(#__VA_ARGS__, __VA_ARGS__)
//#else
//#define DMP(...) ((void)0)
//#endif
//
//int main() {
//
// cin.tie(nullptr);
// ios::sync_with_stdio(false);
//
// int N, M;
// cin >> N >> M;
//
// vector<vector<int>> edge(N);
// vector<pair<int, int>> memo(M);
// int a, b;
// for (int i = 0; i < M; i++) {
// cin >> a >> b;
// a--, b--;
// edge[a].emplace_back(b);
// edge[b].emplace_back(a);
// memo[i] = { a, b };
// }
//
// auto dfs = [&](auto &&f, int now, int par, int st, vector<int> visited) -> bool {
//
// visited[now] = 1;
//
// bool flag = (now == st);
// for (const auto &e : edge[now]) {
// if (!visited[e] && e != par) flag |= f(f, e, now, st, visited);
// }
// return flag;
// };
//
// int cnt = 0;
// for (int i = 0; i < M; i++) {
// if (dfs(dfs, memo[i].first, memo[i].second, memo[i].second, vector<int>(N))) cnt++;
// }
//
// cout << M - cnt << "\n";
//
// return 0;
//}
|
HanochZhu/vts-browser-unity-plugin
|
externals/browser/externals/browser/externals/libimgproc/imgproc/detail/clahe.hpp
|
<reponame>HanochZhu/vts-browser-unity-plugin
/**
* Copyright (c) 2017 Melown Technologies SE
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
// *****************************************************************************
/*
* ANSI C code from the article
* "Contrast Limited Adaptive Histogram Equalization"
* by <NAME>, <EMAIL>
* in "Graphics Gems IV", Academic Press, 1994
*
*
* These functions implement Contrast Limited Adaptive Histogram Equalization.
* The main routine (CLAHE) expects an input image that is stored contiguously in
* memory; the CLAHE output image overwrites the original input image and has the
* same minimum and maximum values (which must be provided by the user).
* This implementation assumes that the X- and Y image resolutions are an integer
* multiple of the X- and Y sizes of the contextual regions. A check on various other
* error conditions is performed.
*
* #define the symbol BYTE_IMAGE to make this implementation suitable for
* 8-bit images. The maximum number of contextual regions can be redefined
* by changing uiMAX_REG_X and/or uiMAX_REG_Y; the use of more than 256
* contextual regions is not recommended.
*
* The code is ANSI-C and is also C++ compliant.
*
* Author: <NAME>, Computer Vision Research Group,
* Utrecht, The Netherlands (<EMAIL>)
*/
/*
EULA: The Graphics Gems code is copyright-protected. In other words, you cannot
claim the text of the code as your own and resell it. Using the code is permitted
in any program, product, or library, non-commercial or commercial. Giving credit
is not required, though is a nice gesture. The code comes as-is, and if there are
any flaws or problems with any Gems code, nobody involved with Gems - authors,
editors, publishers, or webmasters - are to be held responsible. Basically,
don't be a jerk, and remember that anything free comes with no guarantee.
- http://tog.acm.org/resources/GraphicsGems/ (August 2009)
*/
#include <cstring>
#include <limits>
#include <iostream>
namespace imgproc {
namespace detail {
/*********************** Local prototypes ************************/
static void ClipHistogram (unsigned long*, unsigned int, unsigned long);
template <class kz_pixel_t>
static void MakeHistogram (kz_pixel_t*, unsigned int, unsigned int, unsigned int,
unsigned long*, unsigned int, kz_pixel_t*);
template <class kz_pixel_t>
static void MapHistogram (unsigned long*, kz_pixel_t, kz_pixel_t,
unsigned int, unsigned long);
template <class kz_pixel_t>
static void MakeLut (kz_pixel_t*, kz_pixel_t, kz_pixel_t, unsigned int);
template <class kz_pixel_t>
static void Interpolate (kz_pixel_t*, int, unsigned long*, unsigned long*,
unsigned long*, unsigned long*, unsigned int, unsigned int, kz_pixel_t*);
// *****************************************************************************
/************** Start of actual code **************/
#include <stdlib.h> /* To get prototypes of malloc() and free() */
const static unsigned int uiMAX_REG_X = 1024; /* max. # contextual regions in x-direction */
const static unsigned int uiMAX_REG_Y = 1024; /* max. # contextual regions in y-direction */
/************************** main function CLAHE ******************/
template <class kz_pixel_t>
static int CLAHE (kz_pixel_t* pImage, unsigned int uiXRes, unsigned int uiYRes,
kz_pixel_t Min, kz_pixel_t Max, unsigned int uiNrX, unsigned int uiNrY,
unsigned int uiNrBins, float fCliplimit)
/* pImage - Pointer to the input/output image
 * uiXRes - Image resolution in the X direction
 * uiYRes - Image resolution in the Y direction
 * Min - Minimum greyvalue of input image (also becomes minimum of output image)
 * Max - Maximum greyvalue of input image (also becomes maximum of output image)
 * uiNrX - Number of contextial regions in the X direction (min 2, max uiMAX_REG_X)
 * uiNrY - Number of contextial regions in the Y direction (min 2, max uiMAX_REG_Y)
 * uiNrBins - Number of greybins for histogram ("dynamic range")
 * float fCliplimit - Normalized cliplimit (higher values give more contrast)
 * The number of "effective" greylevels in the output image is set by uiNrBins; selecting
 * a small value (eg. 128) speeds up processing and still produce an output image of
 * good quality. The output image will have the same minimum and maximum value as the input
 * image. A clip limit smaller than 1 results in standard (non-contrast limited) AHE.
 * Returns 0 on success, a negative error code on invalid arguments or allocation failure.
 */
{
    unsigned int uiX, uiY; /* counters */
    unsigned int uiXSize, uiYSize, uiSubX, uiSubY; /* size of context. reg. and subimages */
    unsigned int uiXL, uiXR, uiYU, uiYB; /* auxiliary variables interpolation routine */
    unsigned long ulClipLimit, ulNrPixels;/* clip limit and region pixel count */
    kz_pixel_t* pImPointer; /* pointer to image */
    /* Lookup table used for scaling of input image. One entry is needed for
     * every representable pixel value: MakeLut fills pLUT[Min..Max] inclusive,
     * and Max may equal numeric_limits::max(), so the table needs max() + 1
     * entries. (The previous size of max() overflowed by one element for
     * full-range images.) */
    kz_pixel_t aLUT[ std::numeric_limits<kz_pixel_t>::max() + 1UL ];
    unsigned long* pulHist, *pulMapArray; /* pointer to histogram and mappings*/
    unsigned long* pulLU, *pulLB, *pulRU, *pulRB; /* auxiliary pointers interpolation */
    if (uiNrX > uiMAX_REG_X) return -1; /* # of regions x-direction too large */
    if (uiNrY > uiMAX_REG_Y) return -2; /* # of regions y-direction too large */
    if (uiXRes % uiNrX) return -3; /* x-resolution no multiple of uiNrX */
    if (uiYRes % uiNrY) return -4; /* y-resolution no multiple of uiNrY #TPB FIX */
    if (Min >= Max) return -6; /* minimum equal or larger than maximum */
    if (uiNrX < 2 || uiNrY < 2) return -7;/* at least 4 contextual regions required */
    if (fCliplimit == 1.0) return 0; /* is OK, immediately returns original image. */
    if (uiNrBins == 0) uiNrBins = 128; /* default value when not specified */
    pulMapArray=(unsigned long *)malloc(sizeof(unsigned long)*uiNrX*uiNrY*uiNrBins);
    if (pulMapArray == 0) return -8; /* Not enough memory! (try reducing uiNrBins) */
    uiXSize = uiXRes/uiNrX; uiYSize = uiYRes/uiNrY; /* Actual size of contextual regions */
    ulNrPixels = (unsigned long)uiXSize * (unsigned long)uiYSize;
    if(fCliplimit > 0.0) { /* Calculate actual cliplimit */
        ulClipLimit = (unsigned long) (fCliplimit * (uiXSize * uiYSize) / uiNrBins);
        ulClipLimit = (ulClipLimit < 1UL) ? 1UL : ulClipLimit;
    }
    else ulClipLimit = 1UL<<14; /* Large value, do not clip (AHE) */
    MakeLut<kz_pixel_t>(aLUT, Min, Max, uiNrBins); /* Make lookup table for mapping of greyvalues */
    /* Calculate greylevel mappings for each contextual region */
    for (uiY = 0, pImPointer = pImage; uiY < uiNrY; uiY++) {
        for (uiX = 0; uiX < uiNrX; uiX++, pImPointer += uiXSize) {
            pulHist = &pulMapArray[uiNrBins * (uiY * uiNrX + uiX)];
            MakeHistogram<kz_pixel_t>(pImPointer,uiXRes,uiXSize,uiYSize,pulHist,uiNrBins,aLUT);
            ClipHistogram(pulHist, uiNrBins, ulClipLimit);
            MapHistogram<kz_pixel_t>(pulHist, Min, Max, uiNrBins, ulNrPixels);
        }
        pImPointer += (uiYSize - 1) * uiXRes; /* skip lines, set pointer */
    }
    /* Interpolate greylevel mappings to get CLAHE image. The grid is walked
     * with one extra row/column ((uiNrY+1) x (uiNrX+1) tiles) so that the
     * border half-tiles are handled by the boundary special cases below. */
    for (pImPointer = pImage, uiY = 0; uiY <= uiNrY; uiY++) {
        if (uiY == 0) { /* special case: top row */
            uiSubY = uiYSize >> 1; uiYU = 0; uiYB = 0;
        }
        else {
            if (uiY == uiNrY) { /* special case: bottom row */
                uiSubY = uiYSize >> 1; uiYU = uiNrY-1; uiYB = uiYU;
            }
            else { /* default values */
                uiSubY = uiYSize; uiYU = uiY - 1; uiYB = uiYU + 1;
            }
        }
        for (uiX = 0; uiX <= uiNrX; uiX++) {
            if (uiX == 0) { /* special case: left column */
                uiSubX = uiXSize >> 1; uiXL = 0; uiXR = 0;
            }
            else {
                if (uiX == uiNrX) { /* special case: right column */
                    uiSubX = uiXSize >> 1; uiXL = uiNrX - 1; uiXR = uiXL;
                }
                else { /* default values */
                    uiSubX = uiXSize; uiXL = uiX - 1; uiXR = uiXL + 1;
                }
            }
            pulLU = &pulMapArray[uiNrBins * (uiYU * uiNrX + uiXL)];
            pulRU = &pulMapArray[uiNrBins * (uiYU * uiNrX + uiXR)];
            pulLB = &pulMapArray[uiNrBins * (uiYB * uiNrX + uiXL)];
            pulRB = &pulMapArray[uiNrBins * (uiYB * uiNrX + uiXR)];
            Interpolate<kz_pixel_t>(pImPointer,uiXRes,pulLU,pulRU,pulLB,pulRB,uiSubX,uiSubY,aLUT);
            pImPointer += uiSubX; /* set pointer on next matrix */
        }
        pImPointer += (uiSubY - 1) * uiXRes;
    }
    free(pulMapArray); /* free space for histograms */
    return 0; /* return status OK */
}
inline void ClipHistogram (unsigned long* pulHistogram, unsigned int
uiNrGreylevels, unsigned long ulClipLimit)
/* This function performs clipping of the histogram and redistribution of bins.
 * The histogram is clipped and the number of excess pixels is counted. Afterwards
 * the excess pixels are equally redistributed across the whole histogram (providing
 * the bin count is smaller than the cliplimit).
 */
{
    unsigned long* pulBinPointer, *pulEndPointer, *pulHisto;
    unsigned long ulNrExcess, ulUpper, ulBinIncr, ulStepSize, i;
    unsigned long ulOldNrExcess; // #IAC Modification
    long lBinExcess;
    /* First pass: count how many pixels exceed the clip limit in total.
     * Signed arithmetic is used so that bins below the limit yield a
     * negative excess which is simply skipped. */
    ulNrExcess = 0; pulBinPointer = pulHistogram;
    for (i = 0; i < uiNrGreylevels; i++) { /* calculate total number of excess pixels */
        lBinExcess = (long) pulBinPointer[i] - (long) ulClipLimit;
        if (lBinExcess > 0) ulNrExcess += lBinExcess; /* excess in current bin */
    };
    /* Second part: clip histogram and redistribute excess pixels in each bin */
    ulBinIncr = ulNrExcess / uiNrGreylevels; /* average binincrement */
    ulUpper = ulClipLimit - ulBinIncr; /* Bins larger than ulUpper set to cliplimit */
    for (i = 0; i < uiNrGreylevels; i++) {
        if (pulHistogram[i] > ulClipLimit) pulHistogram[i] = ulClipLimit; /* clip bin */
        else {
            if (pulHistogram[i] > ulUpper) { /* high bin count */
                ulNrExcess -= pulHistogram[i] - ulUpper; pulHistogram[i]=ulClipLimit;
            }
            else { /* low bin count */
                ulNrExcess -= ulBinIncr; pulHistogram[i] += ulBinIncr;
            }
        }
    }
    /* Original redistribution loop, kept for reference: it could spin forever
     * when more excess remained than there was headroom below the clip limit. */
    // while (ulNrExcess) { /* Redistribute remaining excess */
    // pulEndPointer = &pulHistogram[uiNrGreylevels]; pulHisto = pulHistogram;
    //
    // while (ulNrExcess && pulHisto < pulEndPointer) {
    // ulStepSize = uiNrGreylevels / ulNrExcess;
    // if (ulStepSize < 1) ulStepSize = 1; /* stepsize at least 1 */
    // for (pulBinPointer=pulHisto; pulBinPointer < pulEndPointer && ulNrExcess;
    // pulBinPointer += ulStepSize) {
    // if (*pulBinPointer < ulClipLimit) {
    // (*pulBinPointer)++; ulNrExcess--; /* reduce excess */
    // }
    // }
    // pulHisto++; /* restart redistributing on other bin location */
    // }
    //}
    /* ####
    IAC Modification:
    In the original version of the loop below (commented out above) it was possible for an infinite loop to get
    created. If there was more pixels to be redistributed than available space then the
    while loop would never end. This problem has been fixed by stopping the loop when all
    pixels have been redistributed OR when no pixels where redistributed in the previous iteration.
    This change allows very low clipping levels to be used.
    */
    do { /* Redistribute remaining excess */
        pulEndPointer = &pulHistogram[uiNrGreylevels]; pulHisto = pulHistogram;
        ulOldNrExcess = ulNrExcess; /* Store number of excess pixels for test later. */
        while (ulNrExcess && pulHisto < pulEndPointer)
        {
            /* Spread the remaining excess as evenly as possible: visit every
             * ulStepSize-th bin, bumping only bins still below the limit. */
            ulStepSize = uiNrGreylevels / ulNrExcess;
            if (ulStepSize < 1)
                ulStepSize = 1; /* stepsize at least 1 */
            for (pulBinPointer=pulHisto; pulBinPointer < pulEndPointer && ulNrExcess;
                pulBinPointer += ulStepSize)
            {
                if (*pulBinPointer < ulClipLimit)
                {
                    (*pulBinPointer)++; ulNrExcess--; /* reduce excess */
                }
            }
            pulHisto++; /* restart redistributing on other bin location */
        }
    } while ((ulNrExcess) && (ulNrExcess < ulOldNrExcess));
    /* Finish loop when we have no more pixels or we can't redistribute any more pixels */
}
template <class kz_pixel_t>
void MakeHistogram (kz_pixel_t* pImage, unsigned int uiXRes,
unsigned int uiSizeX, unsigned int uiSizeY,
unsigned long* pulHistogram,
unsigned int uiNrGreylevels, kz_pixel_t* pLookupTable)
/* Builds a greylevel histogram of one contextual region. Each pixel's grey
 * value is first mapped through pLookupTable (typically compressing e.g.
 * 0..4095 down to 128 bins) and the matching histogram bin is incremented.
 */
{
    /* Reset every bin before accumulating. */
    for (unsigned int bin = 0; bin < uiNrGreylevels; bin++) {
        pulHistogram[bin] = 0L;
    }
    /* Walk the uiSizeX x uiSizeY submatrix row by row; uiXRes is the
     * stride (full image width), so advancing by it moves one row down. */
    for (unsigned int row = 0; row < uiSizeY; row++) {
        for (unsigned int col = 0; col < uiSizeX; col++) {
            pulHistogram[pLookupTable[pImage[col]]]++;
        }
        pImage += uiXRes;
    }
}
template <class kz_pixel_t>
void MapHistogram (unsigned long* pulHistogram, kz_pixel_t Min, kz_pixel_t Max,
unsigned int uiNrGreylevels, unsigned long ulNrOfPixels)
/* Turns the histogram into an equalization lookup table by accumulating it
 * in place. The cumulative counts are rescaled into the range [Min..Max].
 */
{
    /* Factor that maps a cumulative pixel count onto the output grey range. */
    const float fScale = ((float)(Max - Min)) / ulNrOfPixels;
    const unsigned long ulMin = (unsigned long) Min;
    unsigned long ulSum = 0;
    for (unsigned int i = 0; i < uiNrGreylevels; i++) {
        ulSum += pulHistogram[i];
        pulHistogram[i] = (unsigned long)(ulMin + ulSum * fScale);
        /* Clamp: float rounding can push the topmost bins past Max. */
        if (pulHistogram[i] > Max) {
            pulHistogram[i] = Max;
        }
        //std::cout << i << "->" << pulHistogram[i] << std::endl;
    }
}
template <class kz_pixel_t>
void MakeLut (kz_pixel_t * pLUT, kz_pixel_t Min, kz_pixel_t Max, unsigned int uiNrBins)
/* Builds the lookup table that scales input greyvalues in [Min,Max] down to
 * bin indices in [0,uiNrBins-1], which speeds up histogram clipping.
 */
{
    /* Width of one bin; the +1 guarantees (Max - Min) / BinSize < uiNrBins. */
    const kz_pixel_t BinSize = (kz_pixel_t) (1 + (Max - Min) / uiNrBins);
    //std::cout << "BinSize: " << (int) BinSize << std::endl;
    for (int value = Min; value <= Max; value++) {
        pLUT[value] = (value - Min) / BinSize;
    }
}
template <class kz_pixel_t>
void Interpolate (kz_pixel_t * pImage, int uiXRes, unsigned long * pulMapLU,
unsigned long * pulMapRU, unsigned long * pulMapLB, unsigned long * pulMapRB,
unsigned int uiXSize, unsigned int uiYSize, kz_pixel_t * pLUT)
/* pImage - pointer to input/output image
 * uiXRes - resolution of image in x-direction
 * pulMap* - mappings of greylevels from histograms (LU/RU/LB/RB = left-upper,
 *           right-upper, left-bottom, right-bottom neighbouring tiles)
 * uiXSize - uiXSize of image submatrix
 * uiYSize - uiYSize of image submatrix
 * pLUT - lookup table containing mapping greyvalues to bins
 * This function calculates the new greylevel assignments of pixels within a submatrix
 * of the image with size uiXSize and uiYSize. This is done by a bilinear interpolation
 * between four different mappings in order to eliminate boundary artifacts.
 * It uses a division; since division is often an expensive operation, I added code to
 * perform a logical shift instead when feasible.
 */
{
    const unsigned int uiIncr = uiXRes-uiXSize; /* Pointer increment after processing row */
    kz_pixel_t GreyValue; unsigned int uiNum = uiXSize*uiYSize; /* Normalization factor */
    unsigned int uiXCoef, uiYCoef, uiXInvCoef, uiYInvCoef, uiShift = 0;
    /* The coef/invCoef pairs always sum to uiXSize (resp. uiYSize), so each
     * weighted sum below totals uiNum and dividing by uiNum normalizes it. */
    if (uiNum & (uiNum - 1)) /* If uiNum is not a power of two, use division */
    for (uiYCoef = 0, uiYInvCoef = uiYSize; uiYCoef < uiYSize;
        uiYCoef++, uiYInvCoef--,pImage+=uiIncr) {
        for (uiXCoef = 0, uiXInvCoef = uiXSize; uiXCoef < uiXSize;
            uiXCoef++, uiXInvCoef--) {
            GreyValue = pLUT[*pImage]; /* get histogram bin value */
            *pImage++ = (kz_pixel_t ) ((uiYInvCoef * (uiXInvCoef*pulMapLU[GreyValue]
                + uiXCoef * pulMapRU[GreyValue])
                + uiYCoef * (uiXInvCoef * pulMapLB[GreyValue]
                + uiXCoef * pulMapRB[GreyValue])) / uiNum);
        }
    }
    else { /* avoid the division and use a right shift instead */
        while (uiNum >>= 1) uiShift++; /* Calculate 2log of uiNum */
        for (uiYCoef = 0, uiYInvCoef = uiYSize; uiYCoef < uiYSize;
            uiYCoef++, uiYInvCoef--,pImage+=uiIncr) {
            for (uiXCoef = 0, uiXInvCoef = uiXSize; uiXCoef < uiXSize;
                uiXCoef++, uiXInvCoef--) {
                GreyValue = pLUT[*pImage]; /* get histogram bin value */
                *pImage++ = (kz_pixel_t)((uiYInvCoef* (uiXInvCoef * pulMapLU[GreyValue]
                    + uiXCoef * pulMapRU[GreyValue])
                    + uiYCoef * (uiXInvCoef * pulMapLB[GreyValue]
                    + uiXCoef * pulMapRB[GreyValue])) >> uiShift);
            }
        }
    }
}
// *****************************************************************************
} // namespace detail
} // namespace imgproc
|
Grosskopf/openoffice
|
main/unotools/inc/unotools/startoptions.hxx
|
<reponame>Grosskopf/openoffice
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef INCLUDED_unotools_STARTOPTIONS_HXX
#define INCLUDED_unotools_STARTOPTIONS_HXX
//_________________________________________________________________________________________________________________
// includes
//_________________________________________________________________________________________________________________
#include "unotools/unotoolsdllapi.h"
#include <sal/types.h>
#include <osl/mutex.hxx>
#include <rtl/ustring.hxx>
#include <unotools/options.hxx>
//_________________________________________________________________________________________________________________
// forward declarations
//_________________________________________________________________________________________________________________
/*-************************************************************************************************************//**
    @short          forward declaration of our private data container implementation
    @descr          We use this class as an internal member to keep memory requirements small.
                    The container is created only when it is necessary. A class using this mechanism
                    is faster and smaller than a complete implementation!
*//*-*************************************************************************************************************/
class SvtStartOptions_Impl;
//_________________________________________________________________________________________________________________
// declarations
//_________________________________________________________________________________________________________________
/*-************************************************************************************************************//**
    @short          collect information about startup features
    @descr          -
    @implements     -
    @base           -
    @devstatus      ready to use
*//*-*************************************************************************************************************/
class UNOTOOLS_DLLPUBLIC SvtStartOptions: public utl::detail::Options
{
    //-------------------------------------------------------------------------------------------------------------
    // public methods
    //-------------------------------------------------------------------------------------------------------------
    public:
    //---------------------------------------------------------------------------------------------------------
    // constructor / destructor
    //---------------------------------------------------------------------------------------------------------
    /*-****************************************************************************************************//**
        @short      standard constructor and destructor
        @descr      This will initialize an instance with default values.
                    This class is implemented with a refcount mechanism! Every instance of this class
                    increases the count on creation and decreases it on deletion - but all instances
                    share the same data container, which is implemented as a static member ...
        @seealso    member m_nRefCount
        @seealso    member m_pDataContainer
        @param      -
        @return     -
        @onerror    -
    *//*-*****************************************************************************************************/
    SvtStartOptions();
    virtual ~SvtStartOptions();
    //---------------------------------------------------------------------------------------------------------
    // interface
    //---------------------------------------------------------------------------------------------------------
    /*-****************************************************************************************************//**
        @short      interface methods to get and set value of config key "org.openoffice.Office.Common/Start/..."
        @descr      These options describe internal states to enable/disable features of the installed office.
                    The values are fixed at runtime - and implemented as readonly!
                    IsIntroEnabled() : Setting, if the StarOffice logo is displayed when starting StarOffice.
                                       Default = true
                    EnableIntro()    : Use it to enable/disable the logo at startup.
        @seealso    configuration package "org.openoffice.Office.Common/Start"
    *//*-*****************************************************************************************************/
    sal_Bool IsIntroEnabled ( ) const ;
    void EnableIntro ( sal_Bool bState ) ;
    /*-****************************************************************************************************//**
        @short      returns or sets the connection URL of an office
        @descr      Specifies the URL for a UNO connection.
                    No default is given; the URL has to be entered manually by the admin/user.
                    e.g.: "socket,host=pc1.test.de,port=6001;iiop;"
        @seealso    configuration package "org.openoffice.Office.Common/Start"
    *//*-*****************************************************************************************************/
    ::rtl::OUString GetConnectionURL( ) const ;
    void SetConnectionURL( const ::rtl::OUString& sURL ) ;
    //-------------------------------------------------------------------------------------------------------------
    // private methods
    //-------------------------------------------------------------------------------------------------------------
    private:
    /*-****************************************************************************************************//**
        @short      return a reference to a static mutex
        @descr      This class uses its own static mutex to be threadsafe.
                    The static mutex is created only once and reused on every access.
        @seealso    -
        @param      -
        @return     A reference to a static mutex member.
        @onerror    -
    *//*-*****************************************************************************************************/
    UNOTOOLS_DLLPRIVATE static ::osl::Mutex& GetOwnStaticMutex();
    //-------------------------------------------------------------------------------------------------------------
    // private member
    //-------------------------------------------------------------------------------------------------------------
    private:
    /*  Attention
        Don't initialize these static members in this header!
        a) Doubly defined symbols would be detected ...
        b) and unresolved externals would exist at link time.
        Do it in your source file only.
    */
    static SvtStartOptions_Impl* m_pDataContainer ; /// impl. data container as dynamic pointer for smaller memory requirements!
    static sal_Int32 m_nRefCount ; /// internal ref count mechanism
}; // class SvtStartOptions
#endif // #ifndef INCLUDED_unotools_STARTOPTIONS_HXX
|
HGyllensvard/geofence-manager
|
src/main/java/com/hgyllensvard/geofencemanager/toolbar/ToolbarTitleManager.java
|
package com.hgyllensvard.geofencemanager.toolbar;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.reactivex.Observable;
import io.reactivex.subjects.BehaviorSubject;
@Singleton
public class ToolbarTitleManager {

    /**
     * Holds the most recent toolbar title and replays it to new subscribers.
     * Assigned exactly once in the constructor, hence {@code final}.
     */
    private final BehaviorSubject<ToolbarTitle> toolbarTitleBehaviorSubject;

    @Inject
    public ToolbarTitleManager() {
        toolbarTitleBehaviorSubject = BehaviorSubject.create();
    }

    /**
     * Publishes a new toolbar title to all current and future observers.
     *
     * @param toolbarTitle the title to display
     */
    public void title(ToolbarTitle toolbarTitle) {
        toolbarTitleBehaviorSubject.onNext(toolbarTitle);
    }

    /**
     * Returns the most recently published title, or {@code null} when no
     * title has been published yet (BehaviorSubject.getValue contract).
     */
    public ToolbarTitle title() {
        return toolbarTitleBehaviorSubject.getValue();
    }

    /**
     * Observes title changes; consecutive duplicate titles are suppressed
     * via {@code distinctUntilChanged}.
     */
    public Observable<ToolbarTitle> observeToolbarTitle() {
        return toolbarTitleBehaviorSubject
                .distinctUntilChanged();
    }
}
|
myxzjie/cms
|
cms-boot-web/src/main/java/com/xzjie/cms/model/Topic.java
|
package com.xzjie.cms.model;
import lombok.Data;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import javax.persistence.*;
import java.time.LocalDateTime;
@Data
@Entity
@Table(name = "cms_topic")
// Soft delete: rows are never removed; deletes set state = 0 and all queries
// implicitly filter on state = 1.
@Where(clause = "state = 1")
@SQLDelete(sql = "update cms_topic set state = 0 where id = ?")
public class Topic extends BaseEntity<Topic>{
    // Primary key, auto-incremented by the database.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    // Topic title.
    private String topic;
    // Free-text description of the topic.
    private String description;
    // URL of the topic's cover image.
    private String coverUrl;
    // Ordering weight used when listing topics.
    private Integer sort;
    // Recommendation flag/status for featuring the topic.
    private Integer recommendStat;
    // Soft-delete state: 1 = active, 0 = deleted (see @Where/@SQLDelete above).
    private Integer state;
    private LocalDateTime createDate;
    private LocalDateTime updateDate;
    @Override
    public void copy(Topic obj) {
        // NOTE(review): intentionally empty? No fields are copied from obj --
        // confirm whether BaseEntity.copy is required to copy state here.
    }
}
|
hanzeitest/mattermost-plugin-solar-lottery
|
server/solarlottery/autofill/queue/autofill.go
|
// Copyright (c) 2019-present Mattermost, Inc. All Rights Reserved.
// See License for license information.
package queue
import (
"github.com/pkg/errors"
sl "github.com/mattermost/mattermost-plugin-solar-lottery/server/solarlottery"
"github.com/mattermost/mattermost-plugin-solar-lottery/server/utils/bot"
)
// Type identifies this autofiller implementation.
const Type = "queue"

// autofiller is a stub sl.Autofiller intended to fill shifts in queue order.
type autofiller struct{}

// Compile-time assertion that *autofiller satisfies sl.Autofiller.
var _ sl.Autofiller = (*autofiller)(nil)

// New returns a queue-based Autofiller.
// NOTE(review): the logger parameter is currently unused; consider storing it
// once FillShift is implemented.
func New(logger bot.Logger) sl.Autofiller {
	return &autofiller{}
}
// FillShift automatically fills the shift. The caller (sl.Guess) is supposed
// to have fully expanded, and deep-cloned the original rotation, so its data is
// not modified. FillShift shallow-clones rotation.Users to preserve the original
// map intact, but when called for a sequence of shifts, it relies on the caller
// to carry the users from one call to the next, presumably by using the same
// rotation object.
//
// The queue autofiller is not yet implemented; this always returns an error.
func (*autofiller) FillShift(rotation *sl.Rotation, shiftNumber int, shift *sl.Shift, logger bot.Logger) (sl.UserMap, error) {
	// Error strings are lowercase and unpunctuated per Go convention (ST1005).
	return nil, errors.New("queue autofill is not implemented")
}
|
xlvchao/spartacus
|
spartacus-auth/src/main/java/com/xlc/spartacus/auth/core/properties/SessionProperties.java
|
<filename>spartacus-auth/src/main/java/com/xlc/spartacus/auth/core/properties/SessionProperties.java
package com.xlc.spartacus.auth.core.properties;
import lombok.Data;
/**
 * Session configuration properties.
 *
 * @author xlc, since 2021
 */
@Data
public class SessionProperties {
    /**
     * Concurrency control:
     * maximum number of concurrent sessions a single user may hold; defaults to 1.
     */
    private int maximumSessions = 1;
    /**
     * Whether to reject new login attempts once the maximum session count is
     * reached. Defaults to false: new logins are allowed and the oldest
     * session is invalidated instead.
     */
    private boolean maxSessionsPreventsLogin = false;
    /**
     * URL to redirect to when the session has become invalid (expired).
     */
    private String sessionInvalidUrl = SecurityConstants.DEFAULT_SESSION_INVALID_URL;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.