repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
Svardl/elasticsearch
|
server/src/test/java/org/apache/lucene/search/QueriesTests.java
|
<filename>server/src/test/java/org/apache/lucene/search/QueriesTests.java<gh_stars>1000+
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.search;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
public class QueriesTests extends ESTestCase {

    public void testNonNestedQuery() {
        for (Version v : VersionUtils.allVersions()) {
            // Queries.newNonNestedFilter returns a custom query type (extends
            // AutomatonQuery); verify equals/hashCode agree for two
            // independently-built instances of the same version.
            Query first = Queries.newNonNestedFilter(v);
            Query second = Queries.newNonNestedFilter(v);
            assertEquals(first, second);
            assertEquals(first.hashCode(), second.hashCode());

            // The filter's expected shape depends on the index version.
            Query expected = v.onOrAfter(Version.V_6_1_0)
                ? new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)
                : new BooleanQuery.Builder()
                    .add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER)
                    .add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT)
                    .build();
            assertEquals(Queries.newNonNestedFilter(v), expected);
        }
    }
}
|
opentaps/opentaps-1
|
opentaps/opentaps-common/src/common/org/opentaps/gwt/common/server/lookup/AccountingTagConfigurationLookupService.java
|
<gh_stars>1-10
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.server.lookup;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.ofbiz.base.util.Debug;
import org.opentaps.domain.organization.AccountingTagConfigurationForOrganizationAndUsage;
import org.opentaps.domain.organization.OrganizationRepositoryInterface;
import org.opentaps.foundation.exception.FoundationException;
import org.opentaps.foundation.repository.RepositoryException;
import org.opentaps.gwt.common.client.lookup.configuration.AccountingTagLookupConfiguration;
import org.opentaps.gwt.common.server.HttpInputProvider;
import org.opentaps.gwt.common.server.InputProviderInterface;
/**
* The RPC service used to retrieve the accounting tag configuration for an organization and usage type.
*/
/**
 * The RPC service used to retrieve the accounting tag configuration for an
 * organization and usage type.
 */
public class AccountingTagConfigurationLookupService extends EntityLookupService {

    private static final String MODULE = AccountingTagConfigurationLookupService.class.getName();

    private String organizationPartyId;
    private String accountingTagUsageTypeId;
    private OrganizationRepositoryInterface repository;

    /**
     * Creates the lookup service, reading the organization and usage-type
     * parameters from the given input provider.
     * @param provider an <code>InputProviderInterface</code> value
     * @throws RepositoryException if the organization repository cannot be obtained
     */
    protected AccountingTagConfigurationLookupService(InputProviderInterface provider) throws RepositoryException {
        super(provider, AccountingTagLookupConfiguration.LIST_OUT_FIELDS);
        organizationPartyId = getProvider().getParameter(AccountingTagLookupConfiguration.IN_ORGANIZATION_PARTY_ID);
        accountingTagUsageTypeId = getProvider().getParameter(AccountingTagLookupConfiguration.IN_TAG_USAGE_TYPE_ID);
        // use the organization repository as this service's repository
        repository = getDomainsDirectory().getOrganizationDomain().getOrganizationRepository();
        setRepository(repository);
    }

    /**
     * AJAX event to fetch the accounting tags configuration.
     * @param request a <code>HttpServletRequest</code> value
     * @param response a <code>HttpServletResponse</code> value
     * @return the resulting JSON response
     * @throws FoundationException if an error occurs
     */
    public static String findAccountingTagsConfiguration(HttpServletRequest request, HttpServletResponse response) throws FoundationException {
        InputProviderInterface input = new HttpInputProvider(request);
        AccountingTagConfigurationLookupService lookupService = new AccountingTagConfigurationLookupService(input);
        // populate the service results (errors are stored on the service itself)
        lookupService.findTagsConfiguration();
        JsonResponse jsonResponse = new JsonResponse(response);
        return jsonResponse.makeLookupResponse(AccountingTagLookupConfiguration.OUT_TAG_INDEX, lookupService, request.getSession(true).getServletContext());
    }

    /**
     * Finds the tag configuration that applies to the given organization and usage type.
     * @return a list of <code>AccountingTagConfigurationForOrganizationAndUsage</code>,
     *         or <code>null</code> when a required parameter is missing or the lookup failed
     */
    private List<AccountingTagConfigurationForOrganizationAndUsage> findTagsConfiguration() {
        // guard: both identifying parameters are required
        if (organizationPartyId == null) {
            Debug.logError("Missing required parameter organizationPartyId", MODULE);
            return null;
        }
        if (accountingTagUsageTypeId == null) {
            Debug.logError("Missing required parameter accountingTagUsageTypeId", MODULE);
            return null;
        }

        try {
            List<AccountingTagConfigurationForOrganizationAndUsage> configurations =
                repository.getAccountingTagConfiguration(organizationPartyId, accountingTagUsageTypeId);
            setResultTotalCount(configurations.size());
            setResults(configurations);
            return configurations;
        } catch (FoundationException e) {
            // keep the exception on the service so the JSON layer can report it
            storeException(e);
            return null;
        }
    }
}
|
i-vikash/node-disk-manager
|
pkg/util/mountutil_test.go
|
<filename>pkg/util/mountutil_test.go
/*
Copyright 2018 OpenEBS Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util
import (
"errors"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
)
// TestOsDiskPath verifies that MountUtil can resolve the device path backing
// the root ("/") mount point from the live /proc/self/mounts table, and that
// the resolved path is a real filesystem entry.
func TestOsDiskPath(t *testing.T) {
	filePath := "/proc/self/mounts"
	mountPointUtil := NewMountUtil(filePath, "/")
	path, err := mountPointUtil.GetDiskPath()
	tests := map[string]struct {
		actualPath    string
		actualError   error
		expectedError error
	}{
		"test case for os disk path": {actualPath: path, actualError: err, expectedError: nil},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			// EvalSymlinks fails for non-existent paths, so this doubles as an
			// existence check on the returned device path.
			_, err := filepath.EvalSymlinks(test.actualPath)
			if err != nil {
				t.Error(err)
			}
			assert.Equal(t, test.expectedError, test.actualError)
		})
	}
}
// TestGetOsPartitionName exercises getPartitionName against a synthetic mounts
// file: one fixture where "/" is present (partition should be found) and one
// where it is not (an error is expected).
func TestGetOsPartitionName(t *testing.T) {
	filePath := "/tmp/data"
	mountPoint := "/"
	fileContent1 := []byte("/dev/sda4 / ext4 rw,relatime,errors=remount-ro,data=ordered 0 0")
	fileContent2 := []byte("/dev/sda4 /newpath ext4 rw,relatime,errors=remount-ro,data=ordered 0 0")
	// NOTE(review): this string (including the "geting" spelling) must stay
	// byte-identical to the error produced by getPartitionName — confirm
	// against the implementation before "fixing" the typo here.
	expectedErr2 := errors.New("error while geting os partition name")
	tests := map[string]struct {
		fileContent   []byte
		osPartition   string
		expectedError error
	}{
		"/ path present":     {fileContent: fileContent1, osPartition: "sda4", expectedError: nil},
		"/ path not present": {fileContent: fileContent2, osPartition: "", expectedError: expectedErr2},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			// ioutil.WriteFile is deprecated in modern Go (os.WriteFile);
			// kept as-is to match the file's import set.
			err := ioutil.WriteFile(filePath, test.fileContent, 0644)
			if err != nil {
				t.Fatal(err)
			}
			mountPointUtil := MountUtil{
				FilePath:   filePath,
				MountPoint: mountPoint,
			}
			partName, err := mountPointUtil.getPartitionName()
			assert.Equal(t, test.osPartition, partName)
			assert.Equal(t, test.expectedError, err)
			// remove the fixture so the invalid-path case below actually
			// sees a missing file
			os.Remove(filePath)
		})
	}
	// Test case for invalid file path: the fixture was removed above, so
	// reading it must fail.
	mountPointUtil := MountUtil{
		FilePath:   filePath,
		MountPoint: mountPoint,
	}
	_, err := mountPointUtil.getPartitionName()
	if err == nil {
		t.Fatal("error should not be nil for invalid path")
	}
}
|
pupper68k/arcusplatform
|
common/arcus-drivers/groovy-bindings/src/main/java/com/iris/driver/metadata/ProtocolEventMatcher.java
|
/*
* Copyright 2019 Arcus Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.iris.driver.metadata;
import org.apache.commons.lang3.StringUtils;
/**
*
*/
/**
 * An {@link EventMatcher} that matches events by protocol name; a null or
 * empty protocol name acts as a wildcard matching any protocol.
 */
public class ProtocolEventMatcher extends EventMatcher {

    private String protocolName;

    public String getProtocolName() {
        return protocolName;
    }

    public void setProtocolName(String name) {
        this.protocolName = name;
    }

    /** Returns {@code true} when no specific protocol is set (wildcard match). */
    public boolean matchesAnyProtocol() {
        return StringUtils.isEmpty(protocolName);
    }

    @Override
    public String toString() {
        return new StringBuilder("ProtocolEventMatcher [protocolName=")
                .append(getStringOrWildcard(protocolName))
                .append(']')
                .toString();
    }
}
|
TBK/argonx
|
common/steamid.hh
|
#pragma once
#include "language.hh"
#include "platform.hh"
#pragma pack(push, 1)
namespace Argonx {
// A 64-bit Steam ID, viewable either as the packed integer or as its
// component bitfields (32-bit account id, 20-bit instance, 4-bit type,
// 8-bit universe — widths taken from the declarations below).
// Packed with #pragma pack(push, 1) at file scope so the struct is exactly
// 8 bytes with no padding.
struct SteamId {
    // Implicit conversion to the packed 64-bit form.
    operator u64() const { return steamId64; };

    SteamId() = default;
    // Construct from an already-packed 64-bit id.
    SteamId(const u64 x) { steamId64 = x; }

    union {
        struct {
            unsigned id : 32;
            unsigned instance : 20;
            unsigned type : 4;
            unsigned universe : 8;
        };
        // NOTE(review): reading the inactive union member is type-punning;
        // well-defined in C but technically UB in standard C++ (works on the
        // compilers this targets) — confirm this is acceptable here.
        u64 steamId64;
    };
};
} // namespace Argonx
#pragma pack(pop)
|
mcheung610/modeify
|
lib/auth0.js
|
<reponame>mcheung610/modeify
const jwt = require('express-jwt')
const fetch = require('isomorphic-fetch')
const moment = require('moment')
const qs = require('querystring')
require('./config')
// express-jwt middleware that validates the incoming bearer token and, on
// success, populates req.user. The verification key/secret comes from the
// AUTH0_SIGNING_CERTIFICATE environment variable (loaded via ./config above).
const jwtMiddleWare = jwt({
  algorithms: ['HS256', 'RS256'],
  secret: process.env.AUTH0_SIGNING_CERTIFICATE
})
// Express middleware: only let the request through when the authenticated
// user's Auth0 app_metadata flags them as an admin; otherwise respond 401.
module.exports.adminRequired = function adminRequired (req, res, next) {
  const isAdmin = req.user && req.user.app_metadata && req.user.app_metadata.isAdmin
  if (isAdmin) {
    next()
  } else {
    res.status(401).send('Unauthorized')
  }
}
// authenticationOptional is additional middleware that catches any error
// from the jwtMiddleWare and simply continues. If the user existed, they'll
// be present in req.user, otherwise things simply continue.
// (Its 4-argument arity marks it as an Express error-handling middleware.)
module.exports.authenticationOptional = (maybeError, req, res, next) => { next() }

module.exports.authenticateUser = jwtMiddleWare

// Module-level cache for the Auth0 Management API machine-to-machine token
// and its expiry (a moment instance); see getAccounts below.
let managementAPIAccessToken
let managementAPIAccessTokenExpirationTime
// getAccounts fetches user accounts from the Auth0 Management API.
// A cached machine-to-machine token is reused while still valid; otherwise a
// fresh one is obtained first via the OAuth2 client-credentials grant.
// callback is node-style: callback(err) or callback(null, users).
module.exports.getAccounts = function (params, callback) {
  function getUsers () {
    fetch(`https://${process.env.AUTH0_DOMAIN}/api/v2/users?${qs.stringify(params)}`, {
      headers: {
        authorization: `Bearer ${managementAPIAccessToken}`,
        'content-type': 'application/json'
      }
    })
      .then((res) => res.json())
      // NOTE(review): non-2xx responses are not detected here — a parsed
      // error body would be passed to the callback as if it were the user
      // list; confirm callers tolerate this.
      .then((json) => {
        callback(null, json)
      })
      .catch(callback)
  }
  // get API token for management API if needed
  if (managementAPIAccessToken &&
    moment.isMoment(managementAPIAccessTokenExpirationTime) &&
    managementAPIAccessTokenExpirationTime.isAfter(moment())
  ) {
    getUsers()
  } else {
    fetch(`https://${process.env.AUTH0_DOMAIN}/oauth/token`, {
      body: JSON.stringify({
        grant_type: 'client_credentials',
        client_id: process.env.AUTH0_NON_INTERACTIVE_CLIENT_ID,
        client_secret: process.env.AUTH0_NON_INTERACTIVE_CLIENT_SECRET,
        audience: `https://${process.env.AUTH0_DOMAIN}/api/v2/`
      }),
      headers: { 'content-type': 'application/json' },
      method: 'POST'
    })
      .then((res) => res.json())
      .then((json) => {
        if (!json.access_token) {
          console.error(json)
          callback(new Error('error connecting to Auth0 Management API'))
        } else {
          console.log('received access to management api')
          // cache the token until its advertised expiry
          managementAPIAccessToken = json.access_token
          managementAPIAccessTokenExpirationTime = moment().add(json.expires_in, 'seconds')
          getUsers()
        }
      })
      .catch(callback)
  }
}
|
ScalablyTyped/SlinkyTyped
|
d/dot-object/src/main/scala/typingsSlinky/dotObject/DotObject.scala
|
<reponame>ScalablyTyped/SlinkyTyped
package typingsSlinky.dotObject
import org.scalablytyped.runtime.Instantiable1
import org.scalablytyped.runtime.Instantiable2
import org.scalablytyped.runtime.Instantiable3
import org.scalablytyped.runtime.Instantiable4
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
  * ScalablyTyped facade for the `dot-object` npm package (Slinky flavour).
  *
  * NOTE(review): this tree is produced by the ScalablyTyped converter; the
  * JSDoc blocks below are carried over verbatim from the upstream TypeScript
  * definitions. Prefer regenerating the bindings over hand-editing them.
  */
object DotObject {

  /** The main `dot-object` API surface; all members delegate to the JS library. */
  @js.native
  trait Dot extends StObject {

    /**
      *
      * Copy a property from one object to another object.
      *
      * If the source path does not exist (undefined)
      * the property on the other object will not be set.
      *
      * @param {String} source
      * @param {String} target
      * @param {Object} obj1
      * @param {Object} obj2
      * @param {Function|Array} mods
      * @param {Boolean} merge
      */
    def copy(source: String, target: String, obj1: js.Any, obj2: js.Any): Unit = js.native
    def copy(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: js.UndefOr[scala.Nothing],
      merge: Boolean
    ): Unit = js.native
    def copy(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: js.Array[ModifierFunctionWrapper]
    ): Unit = js.native
    def copy(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: js.Array[ModifierFunctionWrapper],
      merge: Boolean
    ): Unit = js.native
    def copy(source: String, target: String, obj1: js.Any, obj2: js.Any, mods: ModifierFunctionWrapper): Unit = js.native
    def copy(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: ModifierFunctionWrapper,
      merge: Boolean
    ): Unit = js.native

    /**
      *
      * Remove value from an object using dot notation.
      *
      * @param {String | Array<String>} path
      * @param {Object} obj
      * @return {Mixed} The removed value
      */
    def del(path: String, obj: js.Any): js.Any = js.native
    def del(path: js.Array[String], obj: js.Any): js.Any = js.native

    /**
      *
      * Delete value from an object using dot notation.
      *
      * @param {String | Array<String>} path
      * @param {Object} obj
      * @return {any} The removed value
      */
    def delete(path: String, obj: js.Any): js.Any = js.native
    def delete(path: js.Array[String], obj: js.Any): js.Any = js.native

    /**
      *
      * Convert object to dotted-key/value pair
      *
      * Usage:
      *
      *   var tgt = dot.dot(obj)
      * @param {Object} obj source object
      * @return {Object} result
      */
    def dot(obj: js.Any): js.Any = js.native

    /**
      *
      * Convert object to dotted-key/value pair
      *
      * Usage:
      *   var tgt = {}
      *   dot.dot(obj, tgt)
      *
      * @param {Object} obj source object
      * @param {Object} tgt target object
      */
    def dot(obj: js.Any, tgt: js.Any): Unit = js.native

    /**
      *
      * Keep array
      *
      * example:
      *
      *   var obj = {
      *     "id": "my-id",
      *     "other": [1, 2, 3]
      *     "some": {
      *       "array": ["A", "B"]
      *     }
      *   }
      *
      * if the keepArray property is true:
      *
      *   {
      *     "id": "my-id",
      *     "other": [1, 2, 3],
      *     "some.array": ["A", "B"]
      *   }
      */
    var keepArray: Boolean = js.native

    /**
      *
      * Move a property from one place to the other.
      *
      * If the source path does not exist (undefined)
      * the target property will not be set.
      *
      * @param {String} source
      * @param {String} target
      * @param {Object} obj
      * @param {Function|Array} mods
      * @param {Boolean} merge
      */
    def move(source: String, target: String, obj: js.Any): Unit = js.native
    def move(source: String, target: String, obj: js.Any, mods: js.UndefOr[scala.Nothing], merge: Boolean): Unit = js.native
    def move(source: String, target: String, obj: js.Any, mods: js.Array[ModifierFunctionWrapper]): Unit = js.native
    def move(
      source: String,
      target: String,
      obj: js.Any,
      mods: js.Array[ModifierFunctionWrapper],
      merge: Boolean
    ): Unit = js.native
    def move(source: String, target: String, obj: js.Any, mods: ModifierFunctionWrapper): Unit = js.native
    def move(source: String, target: String, obj: js.Any, mods: ModifierFunctionWrapper, merge: Boolean): Unit = js.native

    /**
      *
      * Converts an object with dotted-key/value pairs to it's expanded version
      *
      * Optionally transformed by a set of modifiers.
      *
      * Usage:
      *
      *   var row = {
      *     'nr': 200,
      *     'doc.name': '  My Document  '
      *   }
      *
      *   var mods = {
      *     'doc.name': [_s.trim, _s.underscored]
      *   }
      *
      *   dot.object(row, mods)
      *
      * @param {Object} obj
      * @param {Object} mods
      */
    def `object`(obj: js.Object): js.Object = js.native
    def `object`(obj: js.Object, mods: js.Array[ModifierFunctionWrapper]): js.Object = js.native
    def `object`(obj: js.Object, mods: ModifierFunctionWrapper): js.Object = js.native

    /**
      *
      * Pick a value from an object using dot notation.
      *
      * Optionally remove the value
      *
      * @param {String} path
      * @param {Object} obj
      * @param {Boolean} remove
      */
    def pick(path: String, obj: js.Any): js.Any = js.native
    def pick(path: String, obj: js.Any, remove: Boolean): js.Any = js.native

    /**
      *
      * Remove value from an object using dot notation.
      *
      * @param {String | Array<String>} path
      * @param {Object} obj
      * @return {Mixed} The removed value
      */
    def remove(path: String, obj: js.Any): js.Any = js.native
    def remove(path: js.Array[String], obj: js.Any): js.Any = js.native

    /**
      *
      * Replace/merge an object to an existing object property
      *
      * @param {String} path dotted path
      * @param {Object} v object to be set
      * @param {Object} obj object to be modified
      * @param {Boolean} merge optional merge
      */
    def set(path: String, v: js.Any, obj: js.Object): Unit = js.native
    def set(path: String, v: js.Any, obj: js.Object, merge: Boolean): Unit = js.native

    /**
      *
      * Replace/create with a string
      *
      * @param {String} path dotted path
      * @param {String} v value to be set
      * @param {Object} obj object to be modified
      * @param {Function|Array} mods optional modifier
      */
    def str(path: String, v: js.Any, obj: js.Object): Unit = js.native
    def str(path: String, v: js.Any, obj: js.Object, mods: js.Array[ModifierFunctionWrapper]): Unit = js.native
    def str(path: String, v: js.Any, obj: js.Object, mods: ModifierFunctionWrapper): Unit = js.native

    /**
      *
      * Transfer a property from one object to another object.
      *
      * If the source path does not exist (undefined)
      * the property on the other object will not be set.
      *
      * @param {String} source
      * @param {String} target
      * @param {Object} obj1
      * @param {Object} obj2
      * @param {Function|Array} mods
      * @param {Boolean} merge
      */
    def transfer(source: String, target: String, obj1: js.Any, obj2: js.Any): Unit = js.native
    def transfer(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: js.UndefOr[scala.Nothing],
      merge: Boolean
    ): Unit = js.native
    def transfer(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: js.Array[ModifierFunctionWrapper]
    ): Unit = js.native
    def transfer(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: js.Array[ModifierFunctionWrapper],
      merge: Boolean
    ): Unit = js.native
    def transfer(source: String, target: String, obj1: js.Any, obj2: js.Any, mods: ModifierFunctionWrapper): Unit = js.native
    def transfer(
      source: String,
      target: String,
      obj1: js.Any,
      obj2: js.Any,
      mods: ModifierFunctionWrapper,
      merge: Boolean
    ): Unit = js.native

    /**
      *
      * Transform an object
      *
      * Usage:
      *
      *   var obj = {
      *     "id": 1,
      *     "some": {
      *       "thing": "else"
      *     }
      *   }
      *
      *   var transform = {
      *     "id": "nr",
      *     "some.thing": "name"
      *   }
      *
      *   var tgt = dot.transform(transform, obj)
      *
      * @param {Object} recipe Transform recipe
      * @param {Object} obj Object to be transformed
      * @param {Array} mods modifiers for the target
      */
    def transform(recipe: js.Any, obj: js.Any): Unit = js.native
    def transform(recipe: js.Any, obj: js.Any, mods: js.Array[ModifierFunctionWrapper]): Unit = js.native
    def transform(recipe: js.Any, obj: js.Any, mods: ModifierFunctionWrapper): Unit = js.native
  }

  /**
    * Constructor type: `new Dot(separator[, override[, useArray[, useBrackets]]])`.
    * The Instantiable* mixins expose each JS constructor arity to Scala.
    */
  @js.native
  trait DotConstructor
    extends Dot
       with Instantiable1[/* separator */ String, Dot]
       with Instantiable2[/* separator */ String, /* override */ Boolean, Dot]
       with Instantiable3[
         /* separator */ String,
         js.UndefOr[/* override */ Boolean],
         /* useArray */ Boolean,
         Dot
       ]
       with Instantiable4[
         /* separator */ String,
         js.UndefOr[/* override */ Boolean],
         js.UndefOr[/* useArray */ Boolean],
         /* useBrackets */ Boolean,
         Dot
       ]

  /** A modifier function: receives a value and returns the transformed value. */
  type ModifierFunctionWrapper = js.Function1[/* arg */ js.Any, js.Any]
}
|
smoe/interproscan
|
core/io/src/main/java/uk/ac/ebi/interpro/scan/io/match/prosite/PrositePfsearchMatchParser.java
|
<gh_stars>0
package uk.ac.ebi.interpro.scan.io.match.prosite;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Required;
import uk.ac.ebi.interpro.scan.io.match.AbstractLineMatchParser;
import uk.ac.ebi.interpro.scan.model.PatternScanMatch;
import uk.ac.ebi.interpro.scan.model.SignatureLibrary;
import uk.ac.ebi.interpro.scan.model.raw.PfScanRawMatch;
import uk.ac.ebi.interpro.scan.model.raw.ProfileScanRawMatch;
import uk.ac.ebi.interpro.scan.model.raw.alignment.CigarAlignmentEncoder;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* @author <NAME>
* @version $Id$
* @since 1.0-SNAPSHOT
*/
/**
 * Parses raw output of the PROSITE {@code pfsearch} binary, producing one
 * {@link PfScanRawMatch} per matching sequence line. Subclasses supply the
 * concrete raw-match type via {@link #buildMatchObject}.
 *
 * @author <NAME>
 * @version $Id$
 * @since 1.0-SNAPSHOT
 */
public abstract class PrositePfsearchMatchParser extends AbstractLineMatchParser<PfScanRawMatch> {

    private static final Logger LOGGER = Logger.getLogger(PrositePfsearchMatchParser.class.getName());

    /** Encodes match alignments into compact CIGAR strings. */
    private CigarAlignmentEncoder cigarEncoder;

    /** Sequence match lines in pfsearch output begin with '&gt;'. */
    private static final String START_OF_MATCH = ">";

    // NOTE(review): a private field holding a Perl-delimited regex literal
    // ("/^>.../" passed to Pattern.compile) was removed here: java.util.regex
    // has no slash-delimiter syntax, so the leading '/' was matched literally
    // and the pattern could never match a '>'-prefixed line. The field was
    // also never referenced — parsing uses
    // PrositeSequenceMatch.SEQUENCE_LINE_PATTERN instead.

    protected PrositePfsearchMatchParser(SignatureLibrary signatureLibrary, String signatureLibraryRelease) {
        super(signatureLibrary, signatureLibraryRelease);
    }

    @Required
    public void setCigarEncoder(CigarAlignmentEncoder cigarEncoder) {
        this.cigarEncoder = cigarEncoder;
    }

    /**
     * Returns {@link uk.ac.ebi.interpro.scan.model.raw.RawMatch} instance using values from parameters,
     * or {@code null} when the line is not a sequence match line.
     *
     * @param line Line read from input file.
     * @return {@link uk.ac.ebi.interpro.scan.model.raw.RawMatch} instance using values from parameters
     */
    @Override
    protected PfScanRawMatch createMatch(String line) {
        // Example input line:
        // >testseq1/7-307 motif=MF_00001|Asp_carb_tr norm_score=38.022 raw_score=6449 level=1 seq_end=-5 motif_start=1 motif_end=-1
        if (LOGGER.isDebugEnabled()) {
            // guard avoids building the concatenated message when debug is off
            LOGGER.debug("parsing line: " + line);
        }
        if (line.startsWith(START_OF_MATCH)) {
            Matcher sequenceMatchLineMatcher = PrositeSequenceMatch.SEQUENCE_LINE_PATTERN.matcher(line);
            if (sequenceMatchLineMatcher.matches()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("We found match ...");
                }
                PrositeSequenceMatch sequenceMatch = new PrositeSequenceMatch(sequenceMatchLineMatcher);
                return buildMatchObject(
                        sequenceMatch.getSequenceIdentifier(),
                        sequenceMatch.getModel(),
                        this.getSignatureLibraryRelease(),
                        sequenceMatch.getSequenceStart(),
                        sequenceMatch.getSequenceEnd(),
                        cigarEncoder.encode(sequenceMatch.getAlignment()),
                        sequenceMatch.getScore(),
                        ProfileScanRawMatch.Level.byLevelString(Integer.toString(sequenceMatch.getLevel())),
                        null
                );
            }
        }
        // Not a match line (headers, alignments, etc.)
        return null;
    }

    /**
     * Method to be implemented that builds the correct kind of PfScanRawMatch.
     *
     * @param sequenceIdentifier      protein sequence identifier
     * @param model                   the accession / ID of the model
     * @param signatureLibraryRelease the current release number
     * @param seqStart                sequence match start coordinate
     * @param seqEnd                  sequence match stop coordinate
     * @param cigarAlign              cigar alignment String
     * @param score                   the score for the match
     * @param profileLevel            optional level for a Profile match
     * @param patternLevel            optional level for a Pattern match
     * @return an implementation of a PfScanRawMatch object.
     */
    protected abstract PfScanRawMatch buildMatchObject(String sequenceIdentifier,
                                                       String model,
                                                       String signatureLibraryRelease,
                                                       int seqStart,
                                                       int seqEnd,
                                                       String cigarAlign,
                                                       Double score,
                                                       ProfileScanRawMatch.Level profileLevel,
                                                       PatternScanMatch.PatternScanLocation.Level patternLevel);
}
|
mnieber/moonleap
|
titan/react_pkg/pkg/scssfilemerger.py
|
<reponame>mnieber/moonleap
import os
from pathlib import Path
from moonleap.render.merge import FileMerger
class ScssFileMerger(FileMerger):
    """FileMerger that concatenates SCSS index files during rendering."""

    # Class-level registry of file names this merger handles; shared across
    # all instances and extended via add_pattern().
    patterns = ["index.scss"]

    @classmethod
    def add_pattern(cls, pattern):
        """Register an additional file name to be merged (idempotent)."""
        if pattern not in cls.patterns:
            cls.patterns.append(pattern)

    def matches(self, fn):
        """Return True if fn's base name is one of the registered patterns.

        Uses ``self.patterns`` (attribute lookup falls back to the class)
        instead of hard-coding ``ScssFileMerger.patterns``, so subclasses that
        shadow ``patterns`` behave correctly — matching ``add_pattern``'s use
        of ``cls``.
        """
        return Path(fn).name in self.patterns

    def merge(self, lhs_content, rhs_content):
        """Concatenate the two SCSS fragments, left-hand side first."""
        return lhs_content + rhs_content
|
devasia1000/chromium
|
android_webview/browser/aw_autofill_manager_delegate.cc
|
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "android_webview/browser/aw_autofill_manager_delegate.h"
#include "android_webview/browser/aw_browser_context.h"
#include "android_webview/browser/aw_content_browser_client.h"
#include "android_webview/browser/aw_pref_store.h"
#include "base/logging.h"
#include "base/prefs/pref_registry_simple.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/pref_service_builder.h"
#include "components/autofill/browser/autocheckout/whitelist_manager.h"
#include "components/autofill/browser/webdata/autofill_webdata_service.h"
#include "components/autofill/common/autofill_pref_names.h"
#include "components/user_prefs/user_prefs.h"
namespace {
// Shows notifications which correspond to PersistentPrefStore's reading errors.
// Intentionally a no-op here: Android WebView has no browser UI surface on
// which to report pref read errors — NOTE(review): confirm this is deliberate.
void HandleReadError(PersistentPrefStore::PrefReadError error) {
}
}  // namespace
namespace android_webview {
// Builds the WebView pref service: registers the autofill prefs (with
// |enabled| as the initial value of kAutofillEnabled), wires the in-memory
// AwPrefStore and the no-op read-error handler, then attaches the resulting
// PrefService to the browser context.
// NOTE(review): |pref_registry| ownership appears to transfer to
// pref_service_builder.Create(); confirm against the PrefServiceBuilder API.
AwAutofillManagerDelegate::AwAutofillManagerDelegate(bool enabled) {
  PrefRegistrySimple* pref_registry = new PrefRegistrySimple();
  pref_registry->RegisterBooleanPref(
      autofill::prefs::kAutofillEnabled, enabled);
  pref_registry->RegisterDoublePref(
      autofill::prefs::kAutofillPositiveUploadRate, 0.0);
  pref_registry->RegisterDoublePref(
      autofill::prefs::kAutofillNegativeUploadRate, 0.0);
  PrefServiceBuilder pref_service_builder;
  pref_service_builder.WithUserPrefs(new AwPrefStore());
  pref_service_builder.WithReadErrorCallback(base::Bind(&HandleReadError));
  AwBrowserContext* context = AwContentBrowserClient::GetAwBrowserContext();
  components::UserPrefs::Set(context,
                             pref_service_builder.Create(pref_registry));
}
AwAutofillManagerDelegate::~AwAutofillManagerDelegate() { }

// Persists the "save form data" (autofill enabled) setting into prefs.
void AwAutofillManagerDelegate::SetSaveFormData(bool enabled) {
  PrefService* service = GetPrefs();
  DCHECK(service);
  service->SetBoolean(autofill::prefs::kAutofillEnabled, enabled);
}

// Reads the "save form data" setting back from prefs.
bool AwAutofillManagerDelegate::GetSaveFormData() {
  PrefService* service = GetPrefs();
  DCHECK(service);
  return service->GetBoolean(autofill::prefs::kAutofillEnabled);
}

// Returns the PrefService attached to the (process-wide) WebView browser
// context in the constructor above.
PrefService* AwAutofillManagerDelegate::GetPrefs() {
  return components::UserPrefs::Get(
      AwContentBrowserClient::GetAwBrowserContext());
}
// The remaining AutofillManagerDelegate hooks are deliberately stubbed out:
// Android WebView does not provide personal-data management, Autocheckout,
// request-autocomplete dialogs, or the autofill popup UI. Each override
// returns NULL or does nothing to satisfy the interface.

autofill::PersonalDataManager*
AwAutofillManagerDelegate::GetPersonalDataManager() {
  return NULL;
}

autofill::autocheckout::WhitelistManager*
AwAutofillManagerDelegate::GetAutocheckoutWhitelistManager() const {
  return NULL;
}

void AwAutofillManagerDelegate::HideRequestAutocompleteDialog() {
}

void AwAutofillManagerDelegate::OnAutocheckoutError() {
}

void AwAutofillManagerDelegate::ShowAutofillSettings() {
}

void AwAutofillManagerDelegate::ConfirmSaveCreditCard(
    const autofill::AutofillMetrics& metric_logger,
    const autofill::CreditCard& credit_card,
    const base::Closure& save_card_callback) {
}

void AwAutofillManagerDelegate::ShowAutocheckoutBubble(
    const gfx::RectF& bounding_box,
    bool is_google_user,
    const base::Callback<void(bool)>& callback) {
}

void AwAutofillManagerDelegate::HideAutocheckoutBubble() {
}

void AwAutofillManagerDelegate::ShowRequestAutocompleteDialog(
    const autofill::FormData& form,
    const GURL& source_url,
    autofill::DialogType dialog_type,
    const base::Callback<void(const autofill::FormStructure*,
                              const std::string&)>& callback) {
}

void AwAutofillManagerDelegate::ShowAutofillPopup(
    const gfx::RectF& element_bounds,
    const std::vector<string16>& values,
    const std::vector<string16>& labels,
    const std::vector<string16>& icons,
    const std::vector<int>& identifiers,
    base::WeakPtr<autofill::AutofillPopupDelegate> delegate) {
}

void AwAutofillManagerDelegate::HideAutofillPopup() {
}

void AwAutofillManagerDelegate::UpdateProgressBar(double value) {
}
} // namespace android_webview
|
bopopescu/SDK
|
lib/surface/deployment_manager/manifests/list.py
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""manifests list command."""
from googlecloudsdk.api_lib.deployment_manager import dm_v2_util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.third_party.apitools.base.py import list_pager
class List(base.Command):
    """List manifests in a deployment.

    Prints a table with summary information on all manifests in the deployment.
    """

    detailed_help = {
        'DESCRIPTION': '{description}',
        'EXAMPLES': """\
          To print out a list of manifests in a deployment, run:
            $ {command} --deployment my-deployment
          To print only the name of each manifest, run:
            $ {command} --deployment my-deployment --simple-list
          """,
    }

    @staticmethod
    def Args(parser):
        """Args is called by calliope to gather arguments for this command.

        Args:
          parser: An argparse parser that you can use to add arguments that go
              on the command line after this command. Positional arguments are
              allowed.
        """
        # NOTE(review): --deployment is read in Run() but not registered here;
        # presumably it is added by the parent command group — confirm.
        parser.add_argument('--limit', type=int,
                            help='The maximum number of results to list.')

    def Run(self, args):
        """Run 'manifests list'.

        Args:
          args: argparse.Namespace, The arguments that this command was invoked
              with.

        Returns:
          The list of manifests for the specified deployment.

        Raises:
          HttpException: An http error response was received while executing api
              request.
        """
        client = self.context['deploymentmanager-client']
        messages = self.context['deploymentmanager-messages']
        project = properties.VALUES.core.project.Get(required=True)
        request = messages.DeploymentmanagerManifestsListRequest(
            project=project,
            deployment=args.deployment,
        )
        # Page through results (500 per request), converting any HTTP error
        # raised during iteration into an HttpException.
        return dm_v2_util.YieldWithHttpExceptions(list_pager.YieldFromList(
            client.manifests, request, field='manifests', limit=args.limit,
            batch_size=500))

    def Display(self, unused_args, result):
        """Display prints information about what just happened to stdout.

        Prints one manifest name per line, or a placeholder message when the
        deployment has no manifests.

        Args:
          unused_args: The same as the args in Run.
          result: a generator of Manifests, where each has a name attribute.

        Raises:
          TypeError: if result is not iterable (e.g. None).
        """
        empty_generator = True
        for manifest in result:
            empty_generator = False
            log.Print(manifest.name)
        if empty_generator:
            log.Print('No Manifests were found in your deployment!')
|
wingenedu/uflo
|
uflo-console/src/main/java/com/bstek/uflo/console/handler/impl/todo/TodoServletHandler.java
|
/*******************************************************************************
* Copyright 2017 Bstek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.bstek.uflo.console.handler.impl.todo;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import com.bstek.uflo.console.handler.impl.PageData;
import com.bstek.uflo.console.handler.impl.RenderPageServletHandler;
import com.bstek.uflo.model.HistoryTask;
import com.bstek.uflo.model.task.Task;
import com.bstek.uflo.model.task.TaskState;
import com.bstek.uflo.query.HistoryTaskQuery;
import com.bstek.uflo.query.TaskQuery;
import com.bstek.uflo.service.HistoryService;
import com.bstek.uflo.service.TaskService;
import com.bstek.uflo.utils.EnvironmentUtils;
/**
* @author Jacky.gao
* @since 2016年12月7日
*/
public class TodoServletHandler extends RenderPageServletHandler {
private TaskService taskService;
private HistoryService historyService;
@Override
public void execute(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String method=retriveMethod(req);
if(method!=null){
invokeMethod(method, req, resp);
}else{
VelocityContext context = new VelocityContext();
context.put("contextPath", req.getContextPath());
resp.setContentType("text/html");
resp.setCharacterEncoding("utf-8");
Template template=ve.getTemplate("uflo-html/todo.html","utf-8");
PrintWriter writer=resp.getWriter();
template.merge(context, writer);
writer.close();
}
}
public void cliamTask(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String loginUsername=EnvironmentUtils.getEnvironment().getLoginUser();
String taskId=req.getParameter("taskId");
taskService.claim(Long.valueOf(taskId), loginUsername);
}
public void loadTodo(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String loginUsername=EnvironmentUtils.getEnvironment().getLoginUser();
String taskName=req.getParameter("taskName");
int pageSize=Integer.valueOf(req.getParameter("pageSize"));
int pageIndex=Integer.valueOf(req.getParameter("pageIndex"));
int firstResult=(pageIndex-1)*pageSize;
TaskQuery query=taskService.createTaskQuery();
query.addTaskState(TaskState.Created);
query.addTaskState(TaskState.InProgress);
query.addTaskState(TaskState.Ready);
query.addTaskState(TaskState.Suspended);
query.addTaskState(TaskState.Reserved);
query.addAssignee(loginUsername).addOrderDesc("createDate").page(firstResult, pageSize);
if(StringUtils.isNotBlank(taskName)){
query.nameLike("%"+taskName+"%");
}
int total=query.count();
List<Task> tasks=query.list();
PageData pageData=new PageData(tasks,pageSize,pageIndex,total);
writeObjectToJson(resp, pageData);
}
public void loadCliam(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String loginUsername=EnvironmentUtils.getEnvironment().getLoginUser();
int pageSize=Integer.valueOf(req.getParameter("pageSize"));
int pageIndex=Integer.valueOf(req.getParameter("pageIndex"));
String taskName=req.getParameter("taskName");
int firstResult=(pageIndex-1)*pageSize;
TaskQuery query=taskService.createTaskQuery();
if(StringUtils.isNotBlank(taskName)){
query.nameLike("%"+taskName+"%");
}
query.addTaskState(TaskState.Ready).addParticipator(loginUsername).addOrderDesc("createDate").page(firstResult, pageSize);
int total=query.count();
List<Task> tasks=query.list();
PageData pageData=new PageData(tasks,pageSize,pageIndex,total);
writeObjectToJson(resp, pageData);
}
public void loadHistory(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String loginUsername=EnvironmentUtils.getEnvironment().getLoginUser();
int pageSize=Integer.valueOf(req.getParameter("pageSize"));
int pageIndex=Integer.valueOf(req.getParameter("pageIndex"));
String taskName=req.getParameter("taskName");
int firstResult=(pageIndex-1)*pageSize;
HistoryTaskQuery query=historyService.createHistoryTaskQuery();
if(StringUtils.isNotBlank(taskName)){
query.nameLike("%"+taskName+"%");
}
query.assignee(loginUsername).addOrderDesc("endDate").page(firstResult, pageSize);
int total=query.count();
List<HistoryTask> tasks=query.list();
PageData pageData=new PageData(tasks,pageSize,pageIndex,total);
writeObjectToJson(resp, pageData);
}
public void setTaskService(TaskService taskService) {
this.taskService = taskService;
}
public void setHistoryService(HistoryService historyService) {
this.historyService = historyService;
}
@Override
public String url() {
return "/todo";
}
}
|
jmandy/Get-word-class-and-lemma
|
src/test/java/net/didion/jwnl/test/generic/ExamplesTest.java
|
package net.didion.jwnl.test.generic;
import junit.framework.TestCase;
import net.didion.jwnl.JWNL;
import net.didion.jwnl.JWNLException;
import net.didion.jwnl.TestDefaults;
import net.didion.jwnl.data.IndexWord;
import net.didion.jwnl.data.POS;
import net.didion.jwnl.dictionary.Dictionary;
public class ExamplesTest extends TestCase {

    /**
     * Looks up "running-away" as a verb, exercising JWNL's morphological
     * processing (hyphen handling and "-ing" stemming).
     */
    public void testMorphological() {
        try {
            JWNL.initialize();
            IndexWord iw = Dictionary.getInstance().lookupIndexWord(POS.VERB, "running-away");
            System.out.println("Index word : " + iw.toString());
        } catch (JWNLException e) {
            // Previously the exception was only printed, so the test passed
            // vacuously when initialization or lookup failed; fail explicitly.
            fail("JWNL morphological lookup failed: " + e);
        }
    }
}
|
tusharchoudhary0003/Custom-Football-Game
|
sources/com/mopub/nativeads/C11586ab.java
|
package com.mopub.nativeads;
import org.jacoco.agent.p025rt.internal_8ff85ea.Offline;
/* renamed from: com.mopub.nativeads.ab */
/* compiled from: SomaMopubAdapterInterstitial */
/*
 * Decompiled, JaCoCo offline-instrumented Runnable (original name per the
 * probe metadata: SomaMopubAdapterInterstitial$7$1). The boolean[] probe
 * array and the a[n] = true assignments are coverage bookkeeping inserted by
 * the instrumenter, not application logic.
 */
class C11586ab implements Runnable {
    /* renamed from: a */
    // Lazily-initialized JaCoCo coverage probe array for this class.
    private static transient /* synthetic */ boolean[] f36115a;
    /* renamed from: b */
    // Synthetic reference to the enclosing (outer) instance.
    final /* synthetic */ C11589bb f36116b;

    /* renamed from: a */
    // Returns the probe array, fetching it from the JaCoCo runtime on first use.
    private static /* synthetic */ boolean[] m38412a() {
        boolean[] zArr = f36115a;
        if (zArr != null) {
            return zArr;
        }
        boolean[] probes = Offline.getProbes(6034332036614564437L, "com/mopub/nativeads/SomaMopubAdapterInterstitial$7$1", 5);
        f36115a = probes;
        return probes;
    }

    C11586ab(C11589bb this$1) {
        boolean[] a = m38412a();
        this.f36116b = this$1;
        a[0] = true;
    }

    public void run() {
        boolean[] a = m38412a();
        // NOTE(review): obfuscated calls — if mo39917d() reports ready/true,
        // mo39921g() is invoked; exact semantics cannot be confirmed from
        // this file alone.
        if (!SomaMopubAdapterInterstitial.m38377c(this.f36116b.f36123b).mo39917d()) {
            a[1] = true;
        } else {
            a[2] = true;
            SomaMopubAdapterInterstitial.m38377c(this.f36116b.f36123b).mo39921g();
            a[3] = true;
        }
        a[4] = true;
    }
}
|
FinnC/moth-SOMns
|
src/tools/dym/nodes/AllocationProfilingNode.java
|
<reponame>FinnC/moth-SOMns
package tools.dym.nodes;
import com.oracle.truffle.api.frame.VirtualFrame;
import tools.dym.profiles.AllocationProfile;
import tools.dym.profiles.AllocationProfile.AllocProfileNode;
/**
 * Dynamic-metrics node that counts allocations (via {@link CountingNode}) and
 * additionally records each allocated result value through the profile's
 * {@link AllocProfileNode}.
 */
public final class AllocationProfilingNode extends CountingNode<AllocationProfile> {

  @Child private AllocProfileNode allocProfile;

  public AllocationProfilingNode(final AllocationProfile profile) {
    super(profile);
    allocProfile = profile.getProfile();
  }

  @Override
  protected void onReturnValue(final VirtualFrame frame, final Object result) {
    // Record the freshly allocated object.
    allocProfile.executeProfiling(result);
  }
}
|
14ms/Minecraft-Disclosed-Source-Modifications
|
Astomero/net/minecraft/block/BlockMushroom.java
|
<reponame>14ms/Minecraft-Disclosed-Source-Modifications<filename>Astomero/net/minecraft/block/BlockMushroom.java
package net.minecraft.block;
import net.minecraft.world.*;
import net.minecraft.util.*;
import net.minecraft.block.state.*;
import java.util.*;
import net.minecraft.init.*;
import net.minecraft.block.properties.*;
import net.minecraft.world.gen.feature.*;
/**
 * Small mushroom plant block. On random ticks it slowly spreads to nearby
 * valid positions, and via IGrowable (bonemeal) it can attempt to grow into a
 * big mushroom structure.
 */
public class BlockMushroom extends BlockBush implements IGrowable
{
    protected BlockMushroom() {
        // Render/collision bounds: a 0.4 x 0.4 x 0.4 box centered in the block.
        final float f = 0.2f;
        this.setBlockBounds(0.5f - f, 0.0f, 0.5f - f, 0.5f + f, f * 2.0f, 0.5f + f);
        // Random ticks drive the spreading behavior in updateTick below.
        this.setTickRandomly(true);
    }

    @Override
    public void updateTick(final World worldIn, BlockPos pos, final IBlockState state, final Random rand) {
        // 1-in-25 chance per random tick to attempt spreading.
        if (rand.nextInt(25) == 0) {
            // Population cap: abort if 5 mushrooms of this type already exist
            // in the surrounding 9x3x9 box.
            int i = 5;
            final int j = 4; // appears unused — likely a decompiler artifact
            for (final BlockPos blockpos : BlockPos.getAllInBoxMutable(pos.add(-4, -1, -4), pos.add(4, 1, 4))) {
                if (worldIn.getBlockState(blockpos).getBlock() == this && --i <= 0) {
                    return;
                }
            }
            // Random walk of up to 4 steps; only advance through positions
            // that are air and where the mushroom could survive.
            BlockPos blockpos2 = pos.add(rand.nextInt(3) - 1, rand.nextInt(2) - rand.nextInt(2), rand.nextInt(3) - 1);
            for (int k = 0; k < 4; ++k) {
                if (worldIn.isAirBlock(blockpos2) && this.canBlockStay(worldIn, blockpos2, this.getDefaultState())) {
                    pos = blockpos2;
                }
                blockpos2 = pos.add(rand.nextInt(3) - 1, rand.nextInt(2) - rand.nextInt(2), rand.nextInt(3) - 1);
            }
            // Place a new mushroom at the walk's final position if valid.
            if (worldIn.isAirBlock(blockpos2) && this.canBlockStay(worldIn, blockpos2, this.getDefaultState())) {
                worldIn.setBlockState(blockpos2, this.getDefaultState(), 2);
            }
        }
    }

    @Override
    public boolean canPlaceBlockAt(final World worldIn, final BlockPos pos) {
        return super.canPlaceBlockAt(worldIn, pos) && this.canBlockStay(worldIn, pos, this.getDefaultState());
    }

    // Generic ground check used by the low-light branch of canBlockStay.
    @Override
    protected boolean canPlaceBlockOn(final Block ground) {
        return ground.isFullBlock();
    }

    @Override
    public boolean canBlockStay(final World worldIn, final BlockPos pos, final IBlockState state) {
        if (pos.getY() >= 0 && pos.getY() < 256) {
            final IBlockState iblockstate = worldIn.getBlockState(pos.down());
            // Survives on mycelium or podzol regardless of light; otherwise
            // requires low light (< 13) and a full supporting block.
            return iblockstate.getBlock() == Blocks.mycelium || (iblockstate.getBlock() == Blocks.dirt && iblockstate.getValue(BlockDirt.VARIANT) == BlockDirt.DirtType.PODZOL) || (worldIn.getLight(pos) < 13 && this.canPlaceBlockOn(iblockstate.getBlock()));
        }
        return false;
    }

    /**
     * Attempts to replace this small mushroom with a big mushroom structure.
     * Returns true if generation succeeded; on failure the small mushroom is
     * restored and false is returned.
     */
    public boolean generateBigMushroom(final World worldIn, final BlockPos pos, final IBlockState state, final Random rand) {
        worldIn.setBlockToAir(pos);
        WorldGenerator worldgenerator = null;
        if (this == Blocks.brown_mushroom) {
            worldgenerator = new WorldGenBigMushroom(Blocks.brown_mushroom_block);
        }
        else if (this == Blocks.red_mushroom) {
            worldgenerator = new WorldGenBigMushroom(Blocks.red_mushroom_block);
        }
        if (worldgenerator != null && worldgenerator.generate(worldIn, rand, pos)) {
            return true;
        }
        // Generation failed: put the small mushroom back.
        worldIn.setBlockState(pos, state, 3);
        return false;
    }

    // IGrowable: always eligible for bonemeal...
    @Override
    public boolean canGrow(final World worldIn, final BlockPos pos, final IBlockState state, final boolean isClient) {
        return true;
    }

    // ...but each bonemeal use only succeeds 40% of the time.
    @Override
    public boolean canUseBonemeal(final World worldIn, final Random rand, final BlockPos pos, final IBlockState state) {
        return rand.nextFloat() < 0.4;
    }

    @Override
    public void grow(final World worldIn, final Random rand, final BlockPos pos, final IBlockState state) {
        this.generateBigMushroom(worldIn, pos, state, rand);
    }
}
|
Governikus/AusweisApp2-Omapi
|
src/services/Service.h
|
/*!
* \copyright Copyright (c) 2017-2019 Governikus GmbH & Co. KG, Germany
*/
#pragma once
#include "AppUpdater.h"
#include "Env.h"
#include <QTimer>
namespace governikus
{
// Singleton (see getInstance/friend Env) that performs configuration and
// application update checks, driven both explicitly and by a timer.
class Service
: public QObject
{
Q_OBJECT
friend class Env;

private:
// Drives the periodic update check (see onTimedUpdateTriggered).
QTimer mTimer;
// True while an update has been scheduled but not yet run
// (NOTE(review): exact lifecycle not visible in this header — confirm in .cpp).
bool mUpdateScheduled;
bool mExplicitSuccessMessage;
// Timer interval base: 24h expressed in milliseconds.
const int mOneDayInMs = 1000 * 60 * 60 * 24;

protected:
Service();
virtual ~Service() = default;
static Service& getInstance();

private Q_SLOTS:
void doConfigurationsUpdate();
void doAppUpdate(bool pIgnoreNextVersionskip);
void onTimedUpdateTriggered();
void onAppUpdateFinished(bool pUpdateAvailable, const GlobalStatus& pError);

public:
void updateConfigurations();
void updateApp(bool pIgnoreNextVersionskip = false);
bool isUpdateScheduled();
void runUpdateIfNeeded();
const AppUpdateData& getUpdateData() const;

Q_SIGNALS:
// Re-emission of onAppUpdateFinished results to external listeners.
void fireAppUpdateFinished(bool pUpdateAvailable, const GlobalStatus& pError);
void fireUpdateScheduled();
};
} // namespace governikus
|
midineo/BotFramework-WebChat
|
packages/component/src/hooks/useDictateInterims.js
|
import { useContext } from 'react';
import { useSelector } from '../WebChatReduxContext';
import WebChatUIContext from '../WebChatUIContext';
// Returns [interims, setInterims]: the current interim dictation results from
// the store (an empty array when none are set) and the setter exposed through
// WebChatUIContext.
export default function useDictateInterims() {
  const interims = useSelector(({ dictateInterims }) => dictateInterims) || [];
  const { setDictateInterims } = useContext(WebChatUIContext);

  return [interims, setDictateInterims];
}
|
shashanksingh28/code-similarity
|
data/Big Java 6th Edition/ch04/section_3/Volume.java
|
import java.util.Scanner;
/**
This program prints the price per liter for a six-pack of cans and
a two-liter bottle.
*/
/**
 * Prints the price per liter for a six-pack of cans and a two-liter bottle,
 * reading both container prices from standard input.
 */
public class Volume
{
   // Six cans of 0.355 l (12 oz.) per pack; bottles hold two liters.
   static final double CANS_PER_PACK = 6;
   static final double CAN_VOLUME = 0.355;
   static final double BOTTLE_VOLUME = 2;

   /**
    * Computes the price of one liter for a container.
    *
    * @param price  the price of the whole container
    * @param liters the container volume in liters (must be non-zero)
    * @return the price per liter
    */
   static double pricePerLiter(double price, double liters)
   {
      return price / liters;
   }

   public static void main(String[] args)
   {
      Scanner in = new Scanner(System.in);

      // Read price per pack
      System.out.print("Please enter the price for a six-pack: ");
      double packPrice = in.nextDouble();

      // Read price per bottle
      System.out.print("Please enter the price for a two-liter bottle: ");
      double bottlePrice = in.nextDouble();

      // Compute and print price per liter
      double packPricePerLiter = pricePerLiter(packPrice, CANS_PER_PACK * CAN_VOLUME);
      double bottlePricePerLiter = pricePerLiter(bottlePrice, BOTTLE_VOLUME);
      System.out.printf("Pack price per liter: %8.2f", packPricePerLiter);
      System.out.println();
      System.out.printf("Bottle price per liter: %8.2f", bottlePricePerLiter);
      System.out.println();
   }
}
|
zealoussnow/chromium
|
storage/browser/file_system/file_stream_test_utils.h
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef STORAGE_BROWSER_FILE_SYSTEM_FILE_STREAM_TEST_UTILS_H_
#define STORAGE_BROWSER_FILE_SYSTEM_FILE_STREAM_TEST_UTILS_H_

// Include what we use: size_t and int64_t were previously only available
// transitively through <string>.
#include <stddef.h>
#include <stdint.h>

#include <string>

namespace storage {

class FileStreamReader;
class FileStreamWriter;

// Reads up to |size| bytes of data from |reader|, an initialized
// FileStreamReader. The read bytes will be written to |data| and the actual
// number of bytes or the error code will be written to |result|.
void ReadFromReader(FileStreamReader* reader,
                    std::string* data,
                    size_t size,
                    int* result);

// Returns the length of the file if it could be successfully retrieved,
// otherwise a net error.
int64_t GetLengthFromReader(FileStreamReader* reader);

// Writes |data| to |writer|, an initialized FileStreamWriter. Returns net::OK
// if successful, otherwise a net error.
int WriteStringToWriter(FileStreamWriter* writer, const std::string& data);

}  // namespace storage

#endif  // STORAGE_BROWSER_FILE_SYSTEM_FILE_STREAM_TEST_UTILS_H_
|
JavaQualitasCorpus/jspwiki-2.8.4
|
tests/com/ecyrd/jspwiki/auth/AllTests.java
|
<reponame>JavaQualitasCorpus/jspwiki-2.8.4
package com.ecyrd.jspwiki.auth;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * Aggregates every test suite of the com.ecyrd.jspwiki.auth package and its
 * sub-packages into a single JUnit suite.
 */
public class AllTests extends TestCase
{
    public AllTests( String s )
    {
        super( s );
    }

    public static Test suite()
    {
        TestSuite tests = new TestSuite("AAA package tests");

        tests.addTest( AuthenticationManagerTest.suite() );
        tests.addTest( AuthorizationManagerTest.suite() );
        tests.addTest( GroupManagerTest.suite() );
        tests.addTest( com.ecyrd.jspwiki.auth.acl.AllTests.suite() );
        tests.addTest( com.ecyrd.jspwiki.auth.authorize.AllTests.suite() );
        tests.addTest( com.ecyrd.jspwiki.auth.login.AllTests.suite() );
        tests.addTest( com.ecyrd.jspwiki.auth.permissions.AllTests.suite() );
        tests.addTest( com.ecyrd.jspwiki.auth.user.AllTests.suite() );
        tests.addTestSuite( com.ecyrd.jspwiki.auth.UserManagerTest.class );

        return tests;
    }
}
|
Pretorer/Drachenhorn
|
docs/search/enumvalues_5.js
|
<gh_stars>0
// Doxygen-generated search index fragment: maps the lower-cased identifier to
// its display name and documentation anchor. Do not edit by hand.
var searchData=
[
  ['generationpoints',['GenerationPoints',['../namespace_drachenhorn_1_1_xml_1_1_template.html#ad30e884897db6e1f29d5c04687adf71facfbabf38350d661e2910e029676e605a',1,'Drachenhorn::Xml::Template']]]
];
|
Bozar/Nand2Tetris
|
06/Symbol/hackSymbolTable.py
|
import re
def parse(parsedCode):
    """Resolve all labels and variable symbols in parsedCode in place.

    Returns the same (mutated) list for convenience.
    """
    table = _getPredefinedSymbol()
    _addLabelToSymbolTable(parsedCode, table)
    _parseSymbol(parsedCode, table)
    return parsedCode
def _parseSymbol(parsedCode, symbolTable):
    """Replace symbolic operands with numeric addresses, in place.

    Labels take their address from symbolTable; previously-unseen variable
    symbols in A-commands are allocated RAM addresses starting at 16.
    """
    nextVarAddress = 16
    for command in parsedCode:
        kind, value = command[0], command[1]
        if _isLCommand(kind):
            command[1] = symbolTable[value]
        elif _isACommand(kind) and _isSymbol(value):
            if value not in symbolTable:
                symbolTable[value] = str(nextVarAddress)
                nextVarAddress += 1
            command[1] = symbolTable[value]
def _addLabelToSymbolTable(parsedCode, symbolTable):
    """Record the ROM address of each (LABEL) pseudo-command.

    Labels do not occupy a ROM line themselves, so the line counter only
    advances on real instructions.
    """
    lineCount = 0
    for command in parsedCode:
        if _isLCommand(command[0]):
            symbolTable[command[1]] = str(lineCount)
        else:
            lineCount += 1
def _isLCommand(checkType):
typeL = 'L_COMMAND'
return checkType == typeL
def _isACommand(checkType):
typeA = 'A_COMMAND'
return checkType == typeA
def _isSymbol(checkValue):
regSymbol = re.compile(r'^\d')
return regSymbol.search(checkValue) == None
def _getPredefinedSymbol():
symbols = {
'SP': '0',
'LCL': '1',
'ARG': '2',
'THIS': '3',
'THAT': '4',
'SCREEN': '16384',
'KBD': '24576',
'R0': '0',
'R1': '1',
'R2': '2',
'R3': '3',
'R4': '4',
'R5': '5',
'R6': '6',
'R7': '7',
'R8': '8',
'R9': '9',
'R10': '10',
'R11': '11',
'R12': '12',
'R13': '13',
'R14': '14',
'R15': '15'
}
return symbols
|
vipalade/solidframe
|
tutorials/mprpc_request/mprpc_request_messages.hpp
|
<reponame>vipalade/solidframe
#pragma once
#include "solid/frame/mprpc/mprpccontext.hpp"
#include "solid/frame/mprpc/mprpcmessage.hpp"
#include "solid/frame/mprpc/mprpcprotocol_serialization_v3.hpp"
#include <map>
#include <vector>
namespace rpc_request {
// Client-to-server request: a regular expression matched against user ids.
// The SOLID_REFLECT_V1 blocks describe each struct's fields to the mprpc
// serialization engine (field, context, numeric tag, name).
struct Request : solid::frame::mprpc::Message {
std::string userid_regex;
Request() {}
Request(std::string&& _ustr)
: userid_regex(std::move(_ustr))
{
}
SOLID_REFLECT_V1(_rr, _rthis, _rctx)
{
_rr.add(_rthis.userid_regex, _rctx, 1, "userid_regex");
}
};
// Plain calendar date; serialized field-by-field like the messages.
struct Date {
uint8_t day;
uint8_t month;
uint16_t year;
SOLID_REFLECT_V1(_rr, _rthis, _rctx)
{
_rr.add(_rthis.day, _rctx, 1, "day").add(_rthis.month, _rctx, 2, "month").add(_rthis.year, _rctx, 3, "year");
}
};
// Per-user record returned in the response map.
struct UserData {
std::string full_name;
std::string email;
std::string country;
std::string city;
Date birth_date;
SOLID_REFLECT_V1(_rr, _rthis, _rctx)
{
_rr.add(_rthis.full_name, _rctx, 1, "full_name").add(_rthis.email, _rctx, 2, "email").add(_rthis.country, _rctx, 3, "country");
_rr.add(_rthis.city, _rctx, 4, "city").add(_rthis.birth_date, _rctx, 5, "birth_date");
}
};
// Server-to-client response: user id -> UserData for all matching users.
// The copy constructor from Message links the response to its request.
struct Response : solid::frame::mprpc::Message {
using UserDataMapT = std::map<std::string, UserData>;
UserDataMapT user_data_map;
Response() {}
Response(const solid::frame::mprpc::Message& _rmsg)
: solid::frame::mprpc::Message(_rmsg)
{
}
SOLID_REFLECT_V1(_rr, _rthis, _rctx)
{
_rr.add(_rthis.user_data_map, _rctx, 1, "user_data_map");
}
};
// Registers both message types (with protocol ids 1 and 2) on a protocol
// registration callable; used by both client and server setup.
template <class Reg>
inline void configure_protocol(Reg _rreg)
{
_rreg(1, "Request", solid::TypeToType<Request>());
_rreg(2, "Response", solid::TypeToType<Response>());
}
} //namespace rpc_request
|
Baizey/CytubeBot
|
bin/core/Time.js
|
const utils = require("./Utils");
class Time {
/**
* @param {Number|String} millis
* @param {Number} seconds
* @param {Number} minutes
* @param {Number} hours
* @param {Number} days
* @returns {Time}
*/
static from(millis = 0, seconds = 0, minutes = 0, hours = 0, days = 0) {
return new Time(millis, seconds, minutes, hours, days);
}
/**
* @returns {Time}
* @param {Number} min
* @param {Number} max
*/
static fromMillis(min = 0, max = min) {
return Time.from(utils.random(min, max));
}
/**
* @returns {Time}
* @param {Number} min
* @param {Number} max
*/
static fromSeconds(min = 0, max = min) {
return Time.from(0, utils.random(min, max));
}
/**
* @returns {Time}
* @param {Number} min
* @param {Number} max
*/
static fromMinutes(min = 0, max = min) {
return Time.from(0, 0, utils.random(min, max));
}
/**
* @returns {Time}
* @param {Number} min
* @param {Number} max
*/
static fromHours(min = 0, max = min) {
return Time.from(0, 0, 0, utils.random(min, max));
}
/**
* @returns {Time}
* @param {Number} min
* @param {Number} max
*/
static fromDays(min = 0, max = min) {
return Time.from(0, 0, 0, 0, utils.random(min, max));
}
/**
* @returns {Time}
*/
static current() {
return Time.from(Date.now());
}
/**
* @param {Number|String} millis
* @param {Number} seconds
* @param {Number} minutes
* @param {Number} hours
* @param {Number} days
*/
constructor(millis = 0, seconds = 0, minutes = 0, hours = 0, days = 0) {
if (typeof millis === "string") {
const temp = millis.split(":");
millis = 0;
seconds = temp.length <= 0 ? 0 : temp[temp.length - 1] - 0;
minutes = temp.length <= 1 ? 0 : temp[temp.length - 2] - 0;
hours = temp.length <= 2 ? 0 : temp[temp.length - 3] - 0;
}
hours += days * 24;
minutes += hours * 60;
seconds += minutes * 60;
this._millis = Math.round(millis + seconds * 1000);
}
/**
* @returns {number}
*/
get days() {
return Math.floor(this.hours / 24);
}
/**
* @returns {number}
*/
get hours() {
return Math.floor(this.minutes / 60);
}
/**
* @returns {number}
*/
get minutes() {
return Math.floor(this.seconds / 60);
}
/**
* @returns {number}
*/
get seconds() {
return Math.floor(this.millis / 1000);
}
/**
* @returns {number}
*/
get millis() {
return this._millis;
}
/**
* Time scaled to best fitting unit(s)
* fx 5 minutes
* or 3 minutes and 32 seconds
* milliseconds will only ever be displayed if there is no bigger unit fitting
* @returns {string}
*/
get asUnit() {
const scale = [ 1, 1000, 60, 60, 24];
const names = ["millisecond", "second", "minute", "hour", "day"];
let time = this.millis;
let i = 1;
for (; i < names.length; i++) {
const temp = time / scale[i];
if (temp < 1) break;
time = temp;
}
const bigUnit = Math.floor(time);
const smallUnit = Math.floor(scale[i - 1] * (time - bigUnit));
const bigUnitString = utils.pluralise(bigUnit, names[i - 1]);
return smallUnit > 0 && i > 2
? `${bigUnitString} and ${utils.pluralise(smallUnit, names[i - 2])}`
: bigUnitString;
}
/**
* @returns {String} Time in HH:MM:SS format
*/
get asPlaytime() {
const seconds = Math.abs(this.seconds);
const hr = `${Math.floor(seconds / 3600)}`.padStart(2, '0');
const min = `${Math.floor(Math.floor(seconds / 60) % 60)}`.padStart(2, '0');
const sec = `${Math.floor(seconds % 60)}`.padStart(2, '0');
return `${hr}:${min}:${sec}`;
}
/**
* @param {Time} other
* @returns {boolean}
*/
isBiggerThan(other) {
return this.millis > other.millis;
}
/**
* @param {Time} other
* @returns {boolean}
*/
isEqual(other) {
return this.millis === other.millis;
}
/**
* @param {Time} other
* @returns {boolean}
*/
isSmallerThan(other) {
return this.millis < other.millis;
}
/**
* @returns {Boolean}
*/
isNegative() {
return this.millis < 0;
}
/**
* @returns {Boolean}
*/
isPositive() {
return this.millis > 0;
}
/**
* @returns {Boolean}
*/
isZero() {
return this.millis === 0;
}
/**
* @param {String|Number|Time} millis
* @param {Number} seconds
* @param {Number} minutes
* @param {Number} hours
* @param {Number} days
*/
add(millis = 0, seconds = 0, minutes = 0, hours = 0, days = 0) {
if (millis instanceof Time)
this._millis += millis.millis;
else
this._millis += new Time(millis, seconds, minutes, hours, days).millis;
this._millis = Math.round(this._millis);
return this;
}
/**
* @param {Number} minMillis
* @param {Number} maxMillis
*/
addMillis(minMillis = 0, maxMillis = minMillis) {
this.add(Time.fromMillis(minMillis, maxMillis));
return this;
}
/**
* @param {Number} minSeconds
* @param {Number} maxSeconds
*/
addSeconds(minSeconds = 0, maxSeconds = minSeconds) {
this.add(Time.fromSeconds(minSeconds, maxSeconds));
return this;
}
/**
* @param {Number} minMinutes
* @param {Number} maxMinutes
*/
addMinutes(minMinutes = 0, maxMinutes = minMinutes) {
this.add(Time.fromMinutes(minMinutes, minMinutes));
return this;
}
/**
* @param {Number} minHours
* @param {Number} maxHours
*/
addHours(minHours = 0, maxHours = minHours) {
this.add(Time.fromHours(minHours, maxHours));
return this;
}
/**
* @param {Number} minDays
* @param {Number} maxDays
*/
addDays(minDays = 0, maxDays = minDays) {
this.add(Time.fromDays(minDays, maxDays));
return this;
}
}
module.exports = Time;
|
renyuanceshi/KingKingRE
|
app/src/main/java/android/support/v4/app/BundleCompat.java
|
package android.support.v4.app;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
/**
 * Compatibility helper for storing an {@link IBinder} in a {@link Bundle}:
 * delegates to the JellybeanMR2 implementation on API 18+ and to the
 * Gingerbread fallback on older platforms.
 */
public final class BundleCompat {

    private BundleCompat() {
        // Static utility class; never instantiated.
    }

    public static IBinder getBinder(Bundle bundle, String str) {
        if (Build.VERSION.SDK_INT >= 18) {
            return BundleCompatJellybeanMR2.getBinder(bundle, str);
        }
        return BundleCompatGingerbread.getBinder(bundle, str);
    }

    public static void putBinder(Bundle bundle, String str, IBinder iBinder) {
        if (Build.VERSION.SDK_INT < 18) {
            BundleCompatGingerbread.putBinder(bundle, str, iBinder);
            return;
        }
        BundleCompatJellybeanMR2.putBinder(bundle, str, iBinder);
    }
}
|
henryyan/springside4
|
modules/core/src/test/java/org/springside/modules/log/MockLog4jAppenderTest.java
|
<reponame>henryyan/springside4
package org.springside.modules.log;
import static org.junit.Assert.*;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests for MockLog4jAppender: capturing warn messages, inspecting and
 * clearing the captured list, and attaching/detaching the appender from
 * loggers both by class and by logger name.
 */
public class MockLog4jAppenderTest {

    @Test
    public void normal() {
        String testString1 = "Hello";
        String testString2 = "World";
        MockLog4jAppender appender = new MockLog4jAppender();
        appender.addToLogger(MockLog4jAppenderTest.class);
        Logger logger = LoggerFactory.getLogger(MockLog4jAppenderTest.class);
        logger.warn(testString1);
        logger.warn(testString2);
        //getFirstLog/getLastLog
        assertEquals(testString1, appender.getFirstMessage());
        assertEquals(testString2, appender.getLastMessage());
        //getAllLogs
        assertEquals(2, appender.getAllLogs().size());
        assertEquals(testString2, appender.getAllLogs().get(1).getMessage());
        //clearLogs — after clearing, no first/last log remains.
        appender.clearLogs();
        assertNull(appender.getFirstLog());
        assertNull(appender.getLastLog());
    }

    @Test
    public void addAndRemoveAppender() {
        String testString = "Hello";
        Logger logger = LoggerFactory.getLogger(MockLog4jAppenderTest.class);
        MockLog4jAppender appender = new MockLog4jAppender();
        // Attach by class: messages are captured until removeFromLogger.
        //class
        appender.addToLogger(MockLog4jAppenderTest.class);
        logger.warn(testString);
        assertNotNull(appender.getFirstLog());
        appender.clearLogs();
        appender.removeFromLogger(MockLog4jAppenderTest.class);
        logger.warn(testString);
        assertNull(appender.getFirstLog());
        // Attach by logger name (package): same capture/detach behavior.
        //name
        appender.clearLogs();
        appender.addToLogger("org.springside.modules.log");
        logger.warn(testString);
        assertNotNull(appender.getFirstLog());
        appender.clearLogs();
        appender.removeFromLogger("org.springside.modules.log");
        logger.warn(testString);
        assertNull(appender.getFirstLog());
    }
}
|
SusheelThapa/Code-With-C-Plus-Plus
|
OOPS/Constructor_Overloading.cpp
|
<filename>OOPS/Constructor_Overloading.cpp<gh_stars>0
#include <iostream>
using namespace std;
class Complex
{
    int real, imaginary;

public:
    // Constructor overloading: the number can be built from both parts,
    // from the real part only, or default-initialized to zero. Member
    // initializer lists replace body assignments.
    Complex(int x, int y) : real(x), imaginary(y) {}

    Complex(int x) : real(x), imaginary(0) {}

    Complex() : real(0), imaginary(0) {}

    // Prints the number in "a + bi" form to standard output.
    void printComplex()
    {
        cout << "Complex Number is " << real << " + " << imaginary << "i" << endl;
    }
};
int main()
{
    // Exercise each overload: two-argument implicitly, one-argument
    // explicitly, and the default (zero) constructor.
    Complex a(4, 5); //Implicit call
    Complex b = Complex(7); //Explicit call
    Complex c;
    a.printComplex();
    b.printComplex();
    c.printComplex();
    return 0;
}
|
gruberjl/gitbit
|
src/gitbit/pages/assets/index.js
|
<gh_stars>0
const React = require('react')
const {fetch} = require('whatwg-fetch')
const {toast} = require('react-toastify')
const {Nav} = require('../../components/nav')
class Assets extends React.Component {
constructor(props) {
super(props)
this.state = {
filename: '',
file: {}
}
}
setFile(event) {
this.setState({file: event.target.files[0]})
}
setName(event) {
this.setState({filename: event.target.value})
}
submit() {
const data = new FormData()
data.append('filename', this.state.filename)
data.append('file', this.state.file)
fetch('/api/assets/create', {method: 'POST', body: data})
.then(response => response.json())
.then(() => {
toast('saved')
})
.catch(err => toast.error(err.toString()))
}
render() {
return (
<div className="pure-g">
<Nav />
<main className="pure-u-4-5">
<h1>Assets</h1>
<form className="pure-form pure-form-stacked">
<input onChange={this.setFile.bind(this)} type="file" className="pure-input-1" />
<input onChange={this.setName.bind(this)} type="text" placeholder="Enter the desired name of file" className="pure-input-1" />
<button onClick={this.submit.bind(this)} type="button" className="pure-input-1">Upload</button>
</form>
</main>
</div>
)
}
}
module.exports = {Assets}
|
joshanderson-kw/SMQTK
|
python/smqtk/representation/descriptor_element_factory.py
|
from smqtk.representation import \
SmqtkRepresentation, \
DescriptorElement
from smqtk.utils.configuration import (
cls_conf_from_config_dict,
cls_conf_to_config_dict,
make_default_config,
)
from smqtk.utils.dict import merge_dict
__author__ = "<EMAIL>"
class DescriptorElementFactory (SmqtkRepresentation):
"""
Factory class for producing DescriptorElement instances of a specified type
and configuration.
"""
def __init__(self, d_type, type_config):
"""
Initialize the factory to produce DescriptorElement instances of the
given type and configuration.
:param d_type: Type of descriptor element this factory should produce.
:type d_type: type
:param type_config: Initialization parameter dictionary that should
contain all additional construction parameters for the provided type
except for the expected `type_str` and `uuid` arguments that should
be the first and second positional arguments respectively.
:type type_config: dict
"""
#: :type: type | smqtk.representation.DescriptorElement
self._d_type = d_type
self._d_type_config = type_config
@classmethod
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.

    This will be primarily used for generating what the configuration
    dictionary would look like for this class without instantiating it.

    It is not guaranteed that the configuration dictionary returned
    from this method is valid for construction of an instance of this class.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    return make_default_config(DescriptorElement.get_impls())
@classmethod
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless an instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: DescriptorElementFactory
    """
    if merge_default:
        config_dict = merge_dict(cls.get_default_config(), config_dict)
    # Split the merged config into the selected DescriptorElement
    # implementation type and its constructor parameters.
    de_type, de_conf = cls_conf_from_config_dict(
        config_dict, DescriptorElement.get_impls()
    )
    return DescriptorElementFactory(de_type, de_conf)
def get_config(self):
return cls_conf_to_config_dict(self._d_type, self._d_type_config)
def new_descriptor(self, type_str, uuid):
"""
Create a new DescriptorElement instance of the configured implementation
:param type_str: Type of descriptor. This is usually the name of the
content descriptor that generated this vector.
:type type_str: str
:param uuid: UUID to associate with the descriptor
:type uuid: collections.abc.Hashable
:return: New DescriptorElement instance
:rtype: smqtk.representation.DescriptorElement
"""
return self._d_type.from_config(self._d_type_config, type_str, uuid)
def __call__(self, type_str, uuid):
"""
Create a new DescriptorElement instance of the configured implementation
:param type_str: Type of descriptor. This is usually the name of the
content descriptor that generated this vector.
:type type_str: str
:param uuid: UUID to associate with the descriptor
:type uuid: collections.abc.Hashable
:return: New DescriptorElement instance
:rtype: smqtk.representation.DescriptorElement
"""
return self.new_descriptor(type_str, uuid)
|
uk-gov-mirror/hmcts.fpl-ccd-configuration
|
service/src/main/java/uk/gov/hmcts/reform/fpl/events/UpcomingHearingsFound.java
|
<gh_stars>1-10
package uk.gov.hmcts.reform.fpl.events;
import lombok.Value;
import uk.gov.hmcts.reform.ccd.client.model.CaseDetails;
import java.time.LocalDate;
import java.util.List;
/**
 * Immutable payload (events package) carrying the cases that have a hearing
 * on a given date. Lombok's {@code @Value} generates the constructor,
 * getters, equals/hashCode and toString.
 */
@Value
public class UpcomingHearingsFound {
// Date for which upcoming hearings were searched.
LocalDate hearingDate;
// Details of the cases with a hearing on that date.
List<CaseDetails> caseDetails;
}
|
sho25/hbase
|
hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java
|
<gh_stars>0
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|procedure2
operator|.
name|store
operator|.
name|region
package|;
end_package
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertEquals
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|IOException
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|procedure2
operator|.
name|Procedure
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|procedure2
operator|.
name|ProcedureStateSerializer
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|procedure2
operator|.
name|ProcedureTestingUtility
operator|.
name|NoopProcedure
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hbase
operator|.
name|thirdparty
operator|.
name|com
operator|.
name|google
operator|.
name|protobuf
operator|.
name|Int64Value
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|shaded
operator|.
name|protobuf
operator|.
name|generated
operator|.
name|ProcedureProtos
operator|.
name|ProcedureState
import|;
end_import
begin_class
specifier|public
class|class
name|RegionProcedureStoreTestProcedure
extends|extends
name|NoopProcedure
argument_list|<
name|Void
argument_list|>
block|{
specifier|private
specifier|static
name|long
name|SEQ_ID
init|=
literal|0
decl_stmt|;
specifier|public
name|RegionProcedureStoreTestProcedure
parameter_list|()
block|{
name|setProcId
argument_list|(
operator|++
name|SEQ_ID
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Override
specifier|protected
name|Procedure
argument_list|<
name|Void
argument_list|>
index|[]
name|execute
parameter_list|(
name|Void
name|env
parameter_list|)
block|{
return|return
literal|null
return|;
block|}
annotation|@
name|Override
specifier|protected
name|void
name|rollback
parameter_list|(
name|Void
name|env
parameter_list|)
block|{ }
annotation|@
name|Override
specifier|protected
name|boolean
name|abort
parameter_list|(
name|Void
name|env
parameter_list|)
block|{
return|return
literal|false
return|;
block|}
annotation|@
name|Override
specifier|protected
name|void
name|serializeStateData
parameter_list|(
name|ProcedureStateSerializer
name|serializer
parameter_list|)
throws|throws
name|IOException
block|{
name|long
name|procId
init|=
name|getProcId
argument_list|()
decl_stmt|;
if|if
condition|(
name|procId
operator|%
literal|2
operator|==
literal|0
condition|)
block|{
name|Int64Value
operator|.
name|Builder
name|builder
init|=
name|Int64Value
operator|.
name|newBuilder
argument_list|()
operator|.
name|setValue
argument_list|(
name|procId
argument_list|)
decl_stmt|;
name|serializer
operator|.
name|serialize
argument_list|(
name|builder
operator|.
name|build
argument_list|()
argument_list|)
expr_stmt|;
block|}
block|}
annotation|@
name|Override
specifier|protected
name|void
name|deserializeStateData
parameter_list|(
name|ProcedureStateSerializer
name|serializer
parameter_list|)
throws|throws
name|IOException
block|{
name|long
name|procId
init|=
name|getProcId
argument_list|()
decl_stmt|;
if|if
condition|(
name|procId
operator|%
literal|2
operator|==
literal|0
condition|)
block|{
name|Int64Value
name|value
init|=
name|serializer
operator|.
name|deserialize
argument_list|(
name|Int64Value
operator|.
name|class
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|procId
argument_list|,
name|value
operator|.
name|getValue
argument_list|()
argument_list|)
expr_stmt|;
block|}
block|}
specifier|public
name|void
name|setParent
parameter_list|(
name|Procedure
argument_list|<
name|?
argument_list|>
name|proc
parameter_list|)
block|{
name|super
operator|.
name|setParentProcId
argument_list|(
name|proc
operator|.
name|getProcId
argument_list|()
argument_list|)
expr_stmt|;
block|}
specifier|public
name|void
name|finish
parameter_list|()
block|{
name|setState
argument_list|(
name|ProcedureState
operator|.
name|SUCCESS
argument_list|)
expr_stmt|;
block|}
block|}
end_class
end_unit
|
LaudateCorpus1/math
|
test/unit/math/opencl/rev/tail_test.cpp
|
<reponame>LaudateCorpus1/math
#ifdef STAN_OPENCL
#include <stan/math/rev.hpp>
#include <test/unit/math/opencl/util.hpp>
#include <test/unit/util.hpp>
#include <gtest/gtest.h>
#include <vector>
// Checks size semantics and bounds checking of stan::math::tail on OpenCL
// types, for both primitive matrix_cl values and reverse-mode
// var_value<matrix_cl> values, on a column vector and a row vector:
//   * tail(v, 0) is empty,
//   * tail(v, n) returns all n elements,
//   * tail(v, n + 1) throws std::out_of_range.
TEST(MathMatrixTailCL, tail_size) {
  using stan::math::matrix_cl;
  using stan::math::tail;
  using stan::math::var_value;
  matrix_cl<double> v(3, 1);
  EXPECT_EQ(0, tail(v, 0).eval().size());
  EXPECT_EQ(3, tail(v, 3).eval().size());
  EXPECT_THROW(tail(v, 4), std::out_of_range);
  matrix_cl<double> rv(1, 3);
  EXPECT_EQ(0, tail(rv, 0).eval().size());
  EXPECT_EQ(3, tail(rv, 3).eval().size());
  EXPECT_THROW(tail(rv, 4), std::out_of_range);
  var_value<matrix_cl<double>> v_var = v;
  EXPECT_EQ(0, tail(v_var, 0).size());
  EXPECT_EQ(3, tail(v_var, 3).size());
  EXPECT_THROW(tail(v_var, 4), std::out_of_range);
  var_value<matrix_cl<double>> rv_var = rv;
  EXPECT_EQ(0, tail(rv_var, 0).size());
  EXPECT_EQ(3, tail(rv_var, 3).size());
  EXPECT_THROW(tail(rv_var, 4), std::out_of_range);
}
// Functor taking the last 5 elements; shared by the CPU-vs-OpenCL
// prim/rev comparison tests below.
auto tail_functor = [](const auto& a) { return stan::math::tail(a, 5); };
// Compares values and reverse-mode gradients of tail() between the CPU and
// OpenCL implementations for a 9-element column vector.
TEST(MathMatrixTailCL, tail_value_check_vector) {
  stan::math::vector_d m1(9);
  m1 << 1, 2, 3, 4, 5, 6, 7, 8, 9;
  stan::math::test::compare_cpu_opencl_prim_rev(tail_functor, m1);
}
// Same comparison as above, but for a 9-element row vector.
TEST(MathMatrixTailCL, tail_value_check_row_vector) {
  stan::math::row_vector_d m1(9);
  m1 << 1, 2, 3, 4, 5, 6, 7, 8, 9;
  stan::math::test::compare_cpu_opencl_prim_rev(tail_functor, m1);
}
#endif
|
tangtaoshadow/Propro-Server
|
src/main/java/com/westlake/air/propro/algorithm/fitter/LinearFitter.java
|
package com.westlake.air.propro.algorithm.fitter;
import com.westlake.air.propro.domain.bean.score.SlopeIntercept;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.math3.fitting.PolynomialCurveFitter;
import org.apache.commons.math3.fitting.WeightedObservedPoints;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Linear fitters used to calibrate retention-time (RT) pairs: an exact
 * least-squares fit, plus two robust iterative fits (classic Huber loss and
 * a log-tailed "Propro" loss) solved by randomised gradient descent with
 * central-difference numerical gradients.
 *
 * Created by Nico
 * Time: 2019-05-23 22:34
 */
@Component("linearFitter")
public class LinearFitter {

    public static final Logger logger = LoggerFactory.getLogger(LinearFitter.class);

    /** Hard cap on gradient-descent iterations for the robust fits. */
    private static final int MAX_EPOCHS = 10000;

    /** Convergence threshold on the absolute gradient of each parameter. */
    private static final double TOLERANCE = 0.001d;

    /**
     * Least-squares linear fit of the RT pairs (degree-1 polynomial fit).
     * The right element of each pair is used as x and the left as y.
     *
     * @param rtPairs pairs of (library RT, measured RT)
     * @return fitted slope and intercept
     */
    public SlopeIntercept leastSquare(List<Pair<Double, Double>> rtPairs) {
        WeightedObservedPoints obs = new WeightedObservedPoints();
        for (Pair<Double, Double> rtPair : rtPairs) {
            obs.add(rtPair.getRight(), rtPair.getLeft());
        }
        PolynomialCurveFitter fitter = PolynomialCurveFitter.create(1);
        // coeff[0] is the constant term, coeff[1] the linear term.
        double[] coeff = fitter.fit(obs.toList());
        SlopeIntercept slopeIntercept = new SlopeIntercept();
        slopeIntercept.setSlope(coeff[1]);
        slopeIntercept.setIntercept(coeff[0]);
        return slopeIntercept;
    }

    /**
     * Robust linear fit using the Huber loss.
     *
     * @param rtPairs pairs of (library RT, measured RT)
     * @param delta   transition point between the quadratic and linear
     *                regions of the Huber loss
     * @return fitted slope and intercept
     * @throws Exception if {@code rtPairs} is empty
     */
    public SlopeIntercept huberFit(List<Pair<Double, Double>> rtPairs, double delta) throws Exception {
        SlopeIntercept slopeIntercept = updateHuberSlopeIntercept(rtPairs, getInitSlopeIntercept(rtPairs), delta);
        int count = 1;
        // BUGFIX: the original condition was
        //   count < 10000 && |slopeGrad| > tol || |interceptGrad| > tol
        // which, by operator precedence, did NOT apply the iteration cap to
        // the intercept-gradient test, so the loop could run forever when the
        // intercept failed to converge. Parentheses now bound both tests.
        while (count < MAX_EPOCHS
                && (Math.abs(getHuberSlopeGradient(rtPairs, slopeIntercept, delta)) > TOLERANCE
                    || Math.abs(getHuberInterceptGradient(rtPairs, slopeIntercept, delta)) > TOLERANCE)) {
            slopeIntercept = updateHuberSlopeIntercept(rtPairs, slopeIntercept, delta);
            count++;
        }
        logger.info("----------------------- Huber " + count + " epochs -----------------------");
        return slopeIntercept;
    }

    /**
     * Robust linear fit using the log-tailed "Propro" loss (quadratic inside
     * {@code delta}, logarithmic outside — grows slower than Huber's linear
     * tail, further de-weighting extreme outliers).
     *
     * @param rtPairs pairs of (library RT, measured RT)
     * @param delta   transition point between the quadratic and log regions
     * @return fitted slope and intercept
     * @throws Exception if {@code rtPairs} is empty
     */
    public SlopeIntercept proproFit(List<Pair<Double, Double>> rtPairs, double delta) throws Exception {
        SlopeIntercept slopeIntercept = updateProproSlopeIntercept(rtPairs, getInitSlopeIntercept(rtPairs), delta);
        int count = 1;
        // BUGFIX: same parenthesisation fix as in huberFit (see above).
        while (count < MAX_EPOCHS
                && (Math.abs(getProproSlopeGradient(rtPairs, slopeIntercept, delta)) > TOLERANCE
                    || Math.abs(getProproInterceptGradient(rtPairs, slopeIntercept, delta)) > TOLERANCE)) {
            slopeIntercept = updateProproSlopeIntercept(rtPairs, slopeIntercept, delta);
            count++;
        }
        logger.info("----------------------- Propro " + count + " epochs -----------------------");
        return slopeIntercept;
    }

    /** Total Huber loss of the line (slope, intercept) over all pairs. */
    private double getHuberLoss(List<Pair<Double, Double>> rtPairs, double slope, double intercept, double delta) {
        double loss = 0d;
        for (Pair<Double, Double> rtPair : rtPairs) {
            double tempDiff = Math.abs(rtPair.getRight() * slope + intercept - rtPair.getLeft());
            if (tempDiff <= delta) {
                // Quadratic region near zero residual.
                loss += 0.5 * tempDiff * tempDiff;
            } else {
                // Linear tail for large residuals.
                loss += delta * tempDiff - 0.5 * delta * delta;
            }
        }
        return loss;
    }

    /** Total Propro loss of the line (slope, intercept) over all pairs. */
    private double getProproLoss(List<Pair<Double, Double>> rtPairs, double slope, double intercept, double delta) {
        double loss = 0d;
        for (Pair<Double, Double> rtPair : rtPairs) {
            double tempDiff = Math.abs(rtPair.getRight() * slope + intercept - rtPair.getLeft());
            if (tempDiff <= delta) {
                // Quadratic region near zero residual.
                loss += 0.5 * tempDiff * tempDiff;
            } else {
                // Logarithmic tail; the constants keep the loss continuous at delta.
                loss += (Math.log(tempDiff) - Math.log(delta) + 0.5d) * delta * delta;
            }
        }
        return loss;
    }

    /** Central-difference gradient of the Huber loss w.r.t. the slope. */
    private double getHuberSlopeGradient(List<Pair<Double, Double>> rtPairs, SlopeIntercept slopeIntercept, double delta) {
        double deltaSlope = 0.00000001d;
        double loss = getHuberLoss(rtPairs, slopeIntercept.getSlope() - deltaSlope, slopeIntercept.getIntercept(), delta);
        double deltaLoss = getHuberLoss(rtPairs, slopeIntercept.getSlope() + deltaSlope, slopeIntercept.getIntercept(), delta) - loss;
        // (L(s+h) - L(s-h)) / (2h)
        return deltaLoss / deltaSlope / 2d;
    }

    /** Central-difference gradient of the Propro loss w.r.t. the slope. */
    private double getProproSlopeGradient(List<Pair<Double, Double>> rtPairs, SlopeIntercept slopeIntercept, double delta) {
        double deltaSlope = 0.00000001d;
        double loss = getProproLoss(rtPairs, slopeIntercept.getSlope() - deltaSlope, slopeIntercept.getIntercept(), delta);
        double deltaLoss = getProproLoss(rtPairs, slopeIntercept.getSlope() + deltaSlope, slopeIntercept.getIntercept(), delta) - loss;
        return deltaLoss / deltaSlope / 2d;
    }

    /** Central-difference gradient of the Huber loss w.r.t. the intercept. */
    private double getHuberInterceptGradient(List<Pair<Double, Double>> rtPairs, SlopeIntercept slopeIntercept, double delta) {
        double deltaIntercept = 0.00000001d;
        double loss = getHuberLoss(rtPairs, slopeIntercept.getSlope(), slopeIntercept.getIntercept() - deltaIntercept, delta);
        double deltaLoss = getHuberLoss(rtPairs, slopeIntercept.getSlope(), slopeIntercept.getIntercept() + deltaIntercept, delta) - loss;
        return deltaLoss / deltaIntercept / 2d;
    }

    /** Central-difference gradient of the Propro loss w.r.t. the intercept. */
    private double getProproInterceptGradient(List<Pair<Double, Double>> rtPairs, SlopeIntercept slopeIntercept, double delta) {
        double deltaIntercept = 0.00000001d;
        double loss = getProproLoss(rtPairs, slopeIntercept.getSlope(), slopeIntercept.getIntercept() - deltaIntercept, delta);
        double deltaLoss = getProproLoss(rtPairs, slopeIntercept.getSlope(), slopeIntercept.getIntercept() + deltaIntercept, delta) - loss;
        return deltaLoss / deltaIntercept / 2d;
    }

    /**
     * One randomised gradient-descent step for the Huber loss with
     * backtracking: the scale {@code sigma} is halved until the candidate
     * step no longer increases the loss.
     */
    private SlopeIntercept updateHuberSlopeIntercept(List<Pair<Double, Double>> rtPairs, SlopeIntercept slopeIntercept, double delta) {
        double slopeStep = 0.000001d, interceptStep = 0.1d;
        double sigma = 1d;
        double oriLoss = getHuberLoss(rtPairs, slopeIntercept.getSlope(), slopeIntercept.getIntercept(), delta);
        double slopeGradient = getHuberSlopeGradient(rtPairs, slopeIntercept, delta);
        double interceptGradient = getHuberInterceptGradient(rtPairs, slopeIntercept, delta);
        double intercept = slopeIntercept.getIntercept() - sigma * Math.random() * interceptStep * interceptGradient;
        double slope = slopeIntercept.getSlope() - sigma * Math.random() * slopeStep * slopeGradient;
        double updatedLoss = getHuberLoss(rtPairs, slope, intercept, delta);
        // Strict '>' guarantees termination: a zero gradient leaves the loss
        // equal to oriLoss and exits immediately.
        while (updatedLoss > oriLoss) {
            sigma = sigma / 2d;
            slope = slopeIntercept.getSlope() - sigma * Math.random() * slopeStep * slopeGradient;
            intercept = slopeIntercept.getIntercept() - sigma * Math.random() * interceptStep * interceptGradient;
            updatedLoss = getHuberLoss(rtPairs, slope, intercept, delta);
        }
        return new SlopeIntercept(slope, intercept);
    }

    /**
     * One randomised gradient-descent step for the Propro loss with
     * backtracking (same scheme as the Huber update, smaller slope step).
     */
    private SlopeIntercept updateProproSlopeIntercept(List<Pair<Double, Double>> rtPairs, SlopeIntercept slopeIntercept, double delta) {
        double slopeStep = 0.00000001d, interceptStep = 0.1d;
        double sigma = 1d;
        double oriLoss = getProproLoss(rtPairs, slopeIntercept.getSlope(), slopeIntercept.getIntercept(), delta);
        double slopeGradient = getProproSlopeGradient(rtPairs, slopeIntercept, delta);
        double interceptGradient = getProproInterceptGradient(rtPairs, slopeIntercept, delta);
        double intercept = slopeIntercept.getIntercept() - sigma * Math.random() * interceptStep * interceptGradient;
        double slope = slopeIntercept.getSlope() - sigma * Math.random() * slopeStep * slopeGradient;
        double updatedLoss = getProproLoss(rtPairs, slope, intercept, delta);
        while (updatedLoss > oriLoss) {
            sigma = sigma / 2d;
            slope = slopeIntercept.getSlope() - sigma * Math.random() * slopeStep * slopeGradient;
            intercept = slopeIntercept.getIntercept() - sigma * Math.random() * interceptStep * interceptGradient;
            updatedLoss = getProproLoss(rtPairs, slope, intercept, delta);
        }
        return new SlopeIntercept(slope, intercept);
    }

    /**
     * Heuristic initial estimate: picks the pairs with the extreme values of
     * (libRT - minLibRT + 10) * measuredRT and draws a line through them.
     *
     * @param rtPairs pairs of (library RT, measured RT)
     * @return initial slope/intercept estimate
     * @throws Exception if {@code rtPairs} is empty
     */
    public SlopeIntercept getInitSlopeIntercept(List<Pair<Double, Double>> rtPairs) throws Exception {
        // BUGFIX: validate before indexing. The original only checked
        // size() == 0 after the loops, so an empty list surfaced as an
        // IndexOutOfBoundsException instead of the intended message.
        if (rtPairs.isEmpty()) {
            throw new Exception("RtPair Size is 0");
        }
        double minLibRT = Double.MAX_VALUE;
        for (Pair<Double, Double> pair : rtPairs) {
            if (pair.getLeft() < minLibRT) {
                minLibRT = pair.getLeft();
            }
        }
        // BUGFIX: Double.MIN_VALUE is the smallest POSITIVE double, so a list
        // whose products were all negative would never update "max"; use the
        // infinities as neutral initial extremes instead.
        double max = Double.NEGATIVE_INFINITY, min = Double.POSITIVE_INFINITY;
        int maxIndex = 0, minIndex = 0;
        for (int i = 0; i < rtPairs.size(); i++) {
            double product = (rtPairs.get(i).getLeft() - minLibRT + 10) * rtPairs.get(i).getRight();
            if (product > max) {
                max = product;
                maxIndex = i;
            }
            if (product < min) {
                min = product;
                minIndex = i;
            }
        }
        // NOTE(review): if the two extreme pairs share the same measured RT
        // the slope below divides by zero — confirm inputs are distinct.
        double slope = (rtPairs.get(maxIndex).getLeft() - rtPairs.get(minIndex).getLeft())
                / (rtPairs.get(maxIndex).getRight() - rtPairs.get(minIndex).getRight());
        double intercept = rtPairs.get(maxIndex).getLeft() - rtPairs.get(maxIndex).getRight() * slope;
        return new SlopeIntercept(slope, intercept);
    }
}
|
wangyum/anaconda
|
pkgs/jupyter_client-4.2.2-py27_0/lib/python2.7/site-packages/jupyter_client/kernelspecapp.py
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import errno
import os.path
import sys
import json
from traitlets.config.application import Application
from jupyter_core.application import (
JupyterApp, base_flags, base_aliases
)
from traitlets import Instance, Dict, Unicode, Bool, List
from . import __version__
from .kernelspec import KernelSpecManager
try:
    raw_input
except NameError:
    # py3: ``raw_input`` was renamed to ``input``; alias it so the rest of
    # this module can call ``raw_input`` on both major versions.
    raw_input = input
class ListKernelSpecs(JupyterApp):
    """Sub-application implementing ``jupyter kernelspec list``."""
    version = __version__
    description = """List installed kernel specifications."""
    # Manager used to discover kernelspecs; created lazily by the trait
    # default below.
    kernel_spec_manager = Instance(KernelSpecManager)
    json_output = Bool(False, help='output spec name and location as machine-readable json.',
                       config=True)

    flags = {'json': ({'ListKernelSpecs': {'json_output': True}},
                      "output spec name and location as machine-readable json."),
             'debug': base_flags['debug'],
             }

    def _kernel_spec_manager_default(self):
        # Trait default factory: manager rooted at this app's data directory.
        return KernelSpecManager(parent=self, data_dir=self.data_dir)

    def start(self):
        """Print the installed kernelspecs, as a table or as JSON."""
        paths = self.kernel_spec_manager.find_kernel_specs()
        specs = self.kernel_spec_manager.get_all_specs()
        if not self.json_output:
            if not specs:
                print("No kernels available")
                return
            # pad to width of longest kernel name
            name_len = len(sorted(paths, key=lambda name: len(name))[-1])

            def path_key(item):
                """sort key function for Jupyter path priority"""
                path = item[1]
                for idx, prefix in enumerate(self.jupyter_path):
                    if path.startswith(prefix):
                        return (idx, path)
                # not in jupyter path, artificially added to the front
                return (-1, path)

            print("Available kernels:")
            for kernelname, path in sorted(paths.items(), key=path_key):
                print(" %s %s" % (kernelname.ljust(name_len), path))
        else:
            print(json.dumps({
                'kernelspecs': specs
            }, indent=2))
class InstallKernelSpec(JupyterApp):
    """Sub-application implementing ``jupyter kernelspec install``."""
    version = __version__
    description = """Install a kernel specification directory.
    Given a SOURCE DIRECTORY containing a kernel spec,
    jupyter will copy that directory into one of the Jupyter kernel directories.
    The default is to install kernelspecs for all users.
    `--user` can be specified to install a kernel only for the current user.
    """
    examples = """
    jupyter kernelspec install /path/to/my_kernel --user
    """
    usage = "jupyter kernelspec install SOURCE_DIR [--options]"
    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        # Trait default factory: manager rooted at this app's data directory.
        return KernelSpecManager(data_dir=self.data_dir)

    # Directory containing the kernel spec to install; set from the
    # positional command-line argument in parse_command_line.
    sourcedir = Unicode()
    kernel_name = Unicode("", config=True,
        help="Install the kernel spec with this name"
    )

    def _kernel_name_default(self):
        # Default installed name: the basename of the source directory.
        return os.path.basename(self.sourcedir)

    user = Bool(False, config=True,
        help="""
        Try to install the kernel spec to the per-user directory instead of
        the system or environment directory.
        """
    )
    prefix = Unicode('', config=True,
        help="""Specify a prefix to install to, e.g. an env.
        The kernelspec will be installed in PREFIX/share/jupyter/kernels/
        """
    )
    replace = Bool(False, config=True,
        help="Replace any existing kernel spec with this name."
    )
    aliases = {
        'name': 'InstallKernelSpec.kernel_name',
        'prefix': 'InstallKernelSpec.prefix',
    }
    aliases.update(base_aliases)

    flags = {'user': ({'InstallKernelSpec': {'user': True}},
                      "Install to the per-user kernel registry"),
             'replace': ({'InstallKernelSpec': {'replace': True}},
                         "Replace any existing kernel spec with this name."),
             'debug': base_flags['debug'],
             }

    def parse_command_line(self, argv):
        """Require the source directory as the positional argument."""
        super(InstallKernelSpec, self).parse_command_line(argv)
        # accept positional arg as profile name
        if self.extra_args:
            self.sourcedir = self.extra_args[0]
        else:
            print("No source directory specified.")
            self.exit(1)

    def start(self):
        """Copy the kernel spec into place, mapping OS errors to messages."""
        if self.user and self.prefix:
            self.exit("Can't specify both user and prefix. Please choose one or the other.")
        try:
            self.kernel_spec_manager.install_kernel_spec(self.sourcedir,
                                                         kernel_name=self.kernel_name,
                                                         user=self.user,
                                                         prefix=self.prefix,
                                                         replace=self.replace,
                                                         )
        except OSError as e:
            if e.errno == errno.EACCES:
                # Permission problem: suggest the common remedies and exit.
                print(e, file=sys.stderr)
                if not self.user:
                    print("Perhaps you want to install with `sudo` or `--user`?", file=sys.stderr)
                self.exit(1)
            elif e.errno == errno.EEXIST:
                print("A kernel spec is already present at %s" % e.filename, file=sys.stderr)
                self.exit(1)
            # Any other OSError is unexpected: propagate with a traceback.
            raise
class RemoveKernelSpec(JupyterApp):
    """Sub-application implementing ``jupyter kernelspec remove``."""
    version = __version__
    description = """Remove one or more Jupyter kernelspecs by name."""
    examples = """jupyter kernelspec remove python2 [my_kernel ...]"""

    force = Bool(False, config=True,
        help="""Force removal, don't prompt for confirmation."""
    )
    # Names of the kernelspecs to remove; filled from positional arguments.
    spec_names = List(Unicode())

    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        # Trait default factory: manager rooted at this app's data directory.
        return KernelSpecManager(data_dir=self.data_dir, parent=self)

    flags = {
        'f': ({'RemoveKernelSpec': {'force': True}}, force.get_metadata('help')),
    }
    flags.update(JupyterApp.flags)

    def parse_command_line(self, argv):
        """Collect the kernelspec names given as positional arguments."""
        super(RemoveKernelSpec, self).parse_command_line(argv)
        # accept positional arg as profile name
        if self.extra_args:
            self.spec_names = sorted(set(self.extra_args)) # remove duplicates
        else:
            self.exit("No kernelspec specified.")

    def start(self):
        """Remove the requested kernelspecs, prompting unless forced."""
        # Disable the manager's implicit native-kernel entry while resolving
        # the specs to delete — presumably so it is not required/listed here;
        # confirm against KernelSpecManager.ensure_native_kernel semantics.
        self.kernel_spec_manager.ensure_native_kernel = False
        spec_paths = self.kernel_spec_manager.find_kernel_specs()
        missing = set(self.spec_names).difference(set(spec_paths))
        if missing:
            self.exit("Couldn't find kernel spec(s): %s" % ', '.join(missing))
        if not self.force:
            # Show what would be removed and ask for confirmation.
            print("Kernel specs to remove:")
            for name in self.spec_names:
                print(" %s\t%s" % (name.ljust(20), spec_paths[name]))
            answer = raw_input("Remove %i kernel specs [y/N]: " % len(self.spec_names))
            if not answer.lower().startswith('y'):
                return
        for kernel_name in self.spec_names:
            try:
                path = self.kernel_spec_manager.remove_kernel_spec(kernel_name)
            except OSError as e:
                if e.errno == errno.EACCES:
                    print(e, file=sys.stderr)
                    print("Perhaps you want sudo?", file=sys.stderr)
                    self.exit(1)
                else:
                    raise
            self.log.info("Removed %s", path)
class InstallNativeKernelSpec(JupyterApp):
    """Deprecated sub-application installing the IPython kernel spec."""
    version = __version__
    description = """[DEPRECATED] Install the IPython kernel spec directory for this Python."""
    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        # Trait default factory: manager rooted at this app's data directory.
        return KernelSpecManager(data_dir=self.data_dir)

    user = Bool(False, config=True,
        help="""
        Try to install the kernel spec to the per-user directory instead of
        the system or environment directory.
        """
    )
    flags = {'user': ({'InstallNativeKernelSpec': {'user': True}},
                      "Install to the per-user kernel registry"),
             'debug': base_flags['debug'],
             }

    def start(self):
        """Delegate installation to ipykernel, if it is importable."""
        try:
            from ipykernel import kernelspec
        except ImportError:
            print("ipykernel not available, can't install its spec.", file=sys.stderr)
            self.exit(1)
        try:
            kernelspec.install(self.kernel_spec_manager, user=self.user)
        except OSError as e:
            if e.errno == errno.EACCES:
                # Permission problem: suggest the common remedies and exit.
                print(e, file=sys.stderr)
                if not self.user:
                    print("Perhaps you want to install with `sudo` or `--user`?", file=sys.stderr)
                self.exit(1)
            # Exit with the original error for any other OS failure.
            self.exit(e)
class KernelSpecApp(Application):
    """Top-level ``jupyter kernelspec`` application dispatching subcommands."""
    version = __version__
    name = "jupyter kernelspec"
    description = """Manage Jupyter kernel specifications."""
    # Map of subcommand name -> (Application class, one-line help text).
    subcommands = Dict({
        'list': (ListKernelSpecs, ListKernelSpecs.description.splitlines()[0]),
        'install': (InstallKernelSpec, InstallKernelSpec.description.splitlines()[0]),
        'uninstall': (RemoveKernelSpec, "Alias for remove"),
        'remove': (RemoveKernelSpec, RemoveKernelSpec.description.splitlines()[0]),
        'install-self': (InstallNativeKernelSpec, InstallNativeKernelSpec.description.splitlines()[0]),
    })
    # No aliases or flags of its own; everything lives on the subcommands.
    aliases = {}
    flags = {}

    def start(self):
        """Run the chosen subcommand, or print usage help if none was given."""
        if self.subapp is None:
            print("No subcommand specified. Must specify one of: %s"% list(self.subcommands))
            print()
            self.print_description()
            self.print_subcommands()
            self.exit(1)
        else:
            return self.subapp.start()
if __name__ == '__main__':
    # Allow running this module directly as a script.
    KernelSpecApp.launch_instance()
|
SableWalnut/mediastore-sdk
|
dist/api/Customer/getAvailableSwitches.js
|
<gh_stars>0
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _appConfigHelper = require("../../util/appConfigHelper");
var _fetchHelper = _interopRequireDefault(require("../../util/fetchHelper"));
var _environmentHelper = _interopRequireDefault(require("../../util/environmentHelper"));
// Babel/regenerator-compiled async function:
//   getAvailableSwitches(offerId) ->
//     GET {API_URL}/customers/{customerId}/subscription_switches/{offerId}/availability
// Resolves with the parsed JSON response body.
var getAvailableSwitches = /*#__PURE__*/function () {
  var _ref = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee(offerId) {
    var API_URL, customerId, url;
    return _regenerator.default.wrap(function _callee$(_context) {
      while (1) {
        switch (_context.prev = _context.next) {
          case 0:
            // Resolve the environment base URL and the current customer id
            // (empty string when CLEENG_CUSTOMER_ID is not stored).
            API_URL = (0, _environmentHelper.default)();
            customerId = (0, _appConfigHelper.getData)('CLEENG_CUSTOMER_ID') || '';
            url = "".concat(API_URL, "/customers/").concat(customerId, "/subscription_switches/").concat(offerId, "/availability");
            // Perform the GET and return the decoded JSON payload.
            return _context.abrupt("return", (0, _fetchHelper.default)(url, {
              method: 'GET'
            }).then(function (res) {
              return res.json();
            }));
          case 4:
          case "end":
            return _context.stop();
        }
      }
    }, _callee);
  }));
  return function getAvailableSwitches(_x) {
    return _ref.apply(this, arguments);
  };
}();

var _default = getAvailableSwitches;
exports.default = _default;
|
youngzil/quickstart-framework
|
quickstart-javase/src/test/java/org/quickstart/javase/network/io/aio/Test.java
|
/**
* 项目名称:quickstart-javase
* 文件名:Test.java
* 版本信息:
* 日期:2018年5月10日
* Copyright yangzl Corporation 2018
* 版权所有 *
*/
package org.quickstart.javase.network.io.aio;
import java.util.Random;
import java.util.Scanner;
import java.util.concurrent.TimeUnit;
import org.quickstart.javase.network.io.nio.Client;
/**
 * Test
 *
 * Driver for the AIO echo demo: starts the {@code Server}, then the NIO
 * {@code Client}, and sends randomly generated arithmetic expressions to it
 * at random intervals, forever.
 *
 * https://blog.csdn.net/anxpp/article/details/51512200
 *
 * @author:<EMAIL>
 * @2018年5月10日 下午12:38:39
 * @since 1.0
 */
public class Test {

    /**
     * Entry point: boots server and client, then loops sending expressions.
     * FIX: removed the unnecessary {@code @SuppressWarnings("resource")} —
     * the Scanner it covered was commented-out dead code, which has been
     * removed as well.
     */
    public static void main(String[] args) throws Exception {
        // Start the server first.
        Server.start();
        // Give the server a moment to bind before the client runs.
        Thread.sleep(100);
        // Start the client.
        Client.start();
        // Wait for the connection to be established: sending before the
        // channel is connected throws java.nio.channels.NotYetConnectedException.
        Thread.sleep(100);
        // FIX: Java-style array declaration instead of C-style "char operators[]".
        char[] operators = {'+', '-', '*', '/'};
        Random random = new Random(System.currentTimeMillis());
        while (true) {
            // Random expression "a op b"; the right operand is 1..10 so that
            // division never divides by zero.
            String expression = random.nextInt(10) + "" + operators[random.nextInt(4)] + (random.nextInt(10) + 1);
            Client.sendMsg(expression);
            try {
                TimeUnit.MILLISECONDS.sleep(random.nextInt(1000));
            } catch (InterruptedException e) {
                // FIX: restore the interrupt flag instead of swallowing it,
                // so callers can still observe the interruption.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
        }
    }
}
|
conormuldoon/unison
|
back-end/src/main/java/eu/acclimatize/unison/result/FogResult.java
|
<gh_stars>0
package eu.acclimatize.unison.result;
import java.io.PrintWriter;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import eu.acclimatize.unison.Constant;
import eu.acclimatize.unison.HarmonieItem;
import eu.acclimatize.unison.csvcontroller.CSVHeaderItem;
/**
 * A Jackson and CSV annotated class that is used to store a result row from a
 * fog data query.
 *
 */
@JsonInclude(Include.NON_NULL)
public class FogResult implements HarmonieItem {

    // Hour the result row relates to (CSV "from hour" column).
    @CSVHeaderItem(Constant.FROM_HOUR)
    @JsonProperty
    private Date date;

    // Amount of fog for that hour, as a percentage.
    @CSVHeaderItem("Fog " + Constant.PERCENTAGE_SYMBOL)
    @JsonProperty
    private Double fog;

    /**
     * Creates an instance of FogResult.
     *
     * @param date The hour to which the result relates.
     * @param fog The amount of fog.
     */
    public FogResult(Date date, Double fog) {
        this.date = date;
        this.fog = fog;
    }

    // Writes the row as "date,fog," — NOTE(review): the trailing comma is
    // presumably expected by the shared CSV writer; confirm against the
    // other HarmonieItem implementations.
    @Override
    public void printItem(PrintWriter pw) {
        pw.println(date.toString() + ',' + fog + ',');
    }
}
|
Study-Tracker/Study-Tracker
|
src/test/java/io/studytracker/test/web/controller/AssayControllerTests.java
|
<filename>src/test/java/io/studytracker/test/web/controller/AssayControllerTests.java
/*
* Copyright 2020 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.studytracker.test.web.controller;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import io.studytracker.Application;
import io.studytracker.example.ExampleDataGenerator;
import io.studytracker.service.UserService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
/**
 * Web-layer integration tests for the assay REST API, run against a full
 * Spring Boot context seeded with the example data set.
 */
@SpringBootTest(classes = Application.class, webEnvironment = WebEnvironment.RANDOM_PORT)
@RunWith(SpringRunner.class)
@AutoConfigureMockMvc
@ActiveProfiles({"web-test", "example"})
public class AssayControllerTests {

    // Expected number of assays seeded by the example data generator.
    private static final int NUM_ASSAYS = ExampleDataGenerator.ASSAY_COUNT;

    @Autowired private MockMvc mockMvc;

    @Autowired private ExampleDataGenerator exampleDataGenerator;

    @Autowired private UserService userService;

    // Username used to authenticate the mock requests; taken from the first
    // example user after the database has been populated.
    private String username;

    @Before
    public void doBefore() {
        // Re-seed the database before each test so results do not depend on
        // execution order.
        exampleDataGenerator.populateDatabase();
        username = userService.findAll().get(0).getUsername();
    }

    // Study methods

    // GET /api/assay returns every seeded assay with its basic fields.
    @Test
    public void findAllTest() throws Exception {
        mockMvc
            .perform(get("/api/assay").with(user(username)).with(csrf()))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$", hasSize(NUM_ASSAYS)))
            .andExpect(jsonPath("$[0]", hasKey("id")))
            .andExpect(jsonPath("$[0]", hasKey("name")))
            .andExpect(jsonPath("$[0]", hasKey("description")));
    }

    // GET /api/assay/{code} returns the assay with that code, including its
    // status and assay type.
    @Test
    public void findByIdTest() throws Exception {
        mockMvc
            .perform(get("/api/assay/PPB-10001-001").with(user(username)).with(csrf()))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$", hasKey("code")))
            .andExpect(jsonPath("$.code", is("PPB-10001-001")))
            .andExpect(jsonPath("$", hasKey("status")))
            .andExpect(jsonPath("$.status", is("ACTIVE")))
            .andExpect(jsonPath("$", hasKey("assayType")))
            .andExpect(jsonPath("$.assayType", hasKey("name")))
            .andExpect(jsonPath("$.assayType.name", is("Generic")));
    }

    // Requesting an unknown assay code yields 404 Not Found.
    @Test
    public void findNonExistentAssayTest() throws Exception {
        mockMvc
            .perform(get("/api/assay/CPA-XXXX-XXXX").with(user(username)).with(csrf()))
            .andExpect(status().isNotFound());
    }
}
|
domydev/Dark-Basic-Pro
|
Dark Basic Public Shared/Dark Basic Pro SDK/Shared/Bullet/Ragdoll/DBPro/include/DBPro/Mesh.hpp
|
<reponame>domydev/Dark-Basic-Pro
/*
File : DBPro/Mesh.hpp
Generated : 2011/09/02 20:15:10
DBPro release : 7.7
*/
#ifndef INCLUDE_GUARD_DBPro_Mesh_hpp
#define INCLUDE_GUARD_DBPro_Mesh_hpp
#ifdef DBPRO__COMPILER_HAS_PRAGMA_ONCE
// Use #pragma once where supported, in addition to the classic include
// guard above.
#pragma once
#endif
// Trim rarely-used Windows API parts from <windows.h> to speed compilation.
#define VC_EXTRALEAN
#include <windows.h>
#include <D3dx9tex.h>
namespace DBPro
{
    // Generated header (DBPro release 7.7): no mesh declarations are
    // emitted here — the namespace is empty in this version.
}
#endif
|
Robert-Stackflow/HUST-Courses
|
04_C++ Experiment/ex5/Qt Creator/mainwindow.h
|
<reponame>Robert-Stackflow/HUST-Courses<gh_stars>1-10
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "searchdialog.h"
#include<QComboBox>
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE

// Main application window. The HUSTMAP action suggests this is a campus-map
// UI with route-endpoint combo boxes and a search dialog — confirm.
// NOTE(review): the on_<object>_<signal> slot names follow Qt's automatic
// connection convention; renaming them would break connectSlotsByName().
class MainWindow : public QMainWindow
{
    Q_OBJECT
public:
    MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
    // Populates the given combo box (presumably with selectable locations).
    void combobox(QComboBox* box);
    // Repaints the window contents.
    void paintEvent(QPaintEvent *);
private slots:
    // Menu-action handlers (auto-connected by name).
    void on_action_2_triggered();
    void on_action_1_triggered();
    void on_action_3_triggered();
    void on_action_4_triggered();
    void on_action_6_triggered();
    void on_action_7_triggered();
    void on_action_HUSTMAP_triggered();
    void on_action_5_triggered();
    void mousePressEvent(QMouseEvent *event);
    // Combo-box selection handlers for start/finish/focus choices.
    void choosebegin(int index);
    void choosefinish(int index);
    void focusorg(int index);
    void on_pushButton_clicked();
    void on_pushButton_2_clicked();
private:
    Ui::MainWindow *ui;          // Designer-generated UI layout.
    SearchDialog* searchdialog;  // Dialog used for searching.
};
#endif // MAINWINDOW_H
|
MarcGuiot/globsframework
|
src/test/java/org/globsframework/utils/MapOfMapsTest.java
|
<filename>src/test/java/org/globsframework/utils/MapOfMapsTest.java
package org.globsframework.utils;
import org.globsframework.utils.collections.MapOfMaps;
import org.junit.Test;
import static org.junit.Assert.*;
public class MapOfMapsTest {
    /**
     * End-to-end check of the MapOfMaps contract: insertion and point lookup,
     * containment queries, removal of single inner entries, removal of whole
     * outer-key groups, and the emptiness invariant.
     */
    @Test
    public void test() throws Exception {
        MapOfMaps<Integer, Integer, String> maps = new MapOfMaps<>();
        maps.put(1, 1, "1");
        maps.put(3, 3, "3");
        maps.put(3, 4, "4");
        maps.put(5, 5, "5");
        // Point lookups address the exact (outer, inner) key pair.
        assertEquals("1", maps.get(1, 1));
        assertEquals("5", maps.get(5, 5));
        TestUtils.assertSetEquals(maps.values(), "1", "3", "4", "5");
        TestUtils.assertSetEquals(maps.iterator(), "1", "3", "4", "5");
        assertTrue(maps.containsKey(1, 1));
        assertFalse(maps.containsKey(1, 0));
        assertFalse(maps.containsKey(0, 1));
        // Removing one inner entry keeps its siblings under the same outer key.
        maps.remove(3, 3);
        TestUtils.assertSetEquals(maps.iterator(), "1", "4", "5");
        maps.put(1, 2, "1bis");
        TestUtils.assertSetEquals(maps.values(1), "1", "1bis");
        TestUtils.assertSetEquals(maps.iterator(), "1", "1bis", "4", "5");
        // removeAll drops the entire group for an outer key at once.
        maps.removeAll(1);
        TestUtils.assertSetEquals(maps.values(), "4", "5");
        assertFalse(maps.isEmpty());
        maps.removeAll(3);
        maps.removeAll(5);
        assertTrue(maps.isEmpty());
    }
}
|
egojit8/fkcloud
|
fkcloud-admin/src/main/java/com/egojit/cloud/controller/MenusController.java
|
package com.egojit.cloud.controller;
import com.egojit.cloud.common.base.BaseResult;
/**
 * Controller contract for menu management in the admin module.
 *
 * @author 高露 QQ:408365330
 * @date $date$
 */
public interface MenusController {
    /**
     * Returns all menus.
     *
     * @return a {@link BaseResult} wrapping the complete menu list
     */
    BaseResult list();
}
|
jackyruslymoonlay/dl-models
|
src/production/finishing-printing/quality-control/inspection-lot-color-item-validator.js
|
<filename>src/production/finishing-printing/quality-control/inspection-lot-color-item-validator.js
require("should");
module.exports = function (data) {
data.should.not.equal(null);
data.should.instanceOf(Object);
data.should.have.property('pcsNo');
data.pcsNo.should.instanceof(String);
data.should.have.property('grade');
data.grade.should.instanceOf(String);
data.should.have.property('lot');
data.lot.should.instanceof(String);
data.should.have.property('status');
data.status.should.instanceof(String);
};
|
kanivel/android-mvc-framework
|
src/com/android_mvc/framework/ui/view/MCheckBox.java
|
package com.android_mvc.framework.ui.view;
import android.content.Context;
import android.widget.CheckBox;
/**
* チェックボックスのラッパークラス。
* @author id:language_and_engineering
*
*/
/**
 * Wrapper class for Android's {@link CheckBox} used by the MVC framework
 * view layer. (Translated from the original Japanese comment.)
 *
 * @author id:language_and_engineering
 */
public class MCheckBox extends CheckBox implements IFWView
{
    public MCheckBox(Context context) {
        super(context);
    }
    /** View parameters are not stored by this wrapper; always returns null. */
    @Override
    public Object getViewParam(String key) {
        return null;
    }
    /** No-op: view parameters are not supported by this wrapper. */
    @Override
    public void setViewParam(String key, Object val) {
    }
    // Attribute helpers below.
    /** Clears the check state and returns this instance for call chaining. */
    public MCheckBox unChecked() {
        this.setChecked(false);
        return this;
    }
}
|
zealoussnow/chromium
|
content/browser/metrics/histogram_subscriber.h
|
<gh_stars>1000+
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_BROWSER_METRICS_HISTOGRAM_SUBSCRIBER_H_
#define CONTENT_BROWSER_METRICS_HISTOGRAM_SUBSCRIBER_H_
#include <string>
#include <vector>
namespace content {
// Objects interested in receiving histograms derive from HistogramSubscriber.
class HistogramSubscriber {
 public:
  // Defaulted virtual destructor (modern idiom; equivalent to the previous
  // empty-body definition) so deletion through the base pointer is safe.
  virtual ~HistogramSubscriber() = default;

  // Send number of pending processes to subscriber. |end| is set to true if it
  // is the last time. This is called on the UI thread.
  virtual void OnPendingProcesses(int sequence_number,
                                  int pending_processes,
                                  bool end) = 0;

  // Send |histogram| back to subscriber.
  // This is called on the UI thread.
  virtual void OnHistogramDataCollected(
      int sequence_number,
      const std::vector<std::string>& pickled_histograms) = 0;
};
} // namespace content
#endif // CONTENT_BROWSER_METRICS_HISTOGRAM_SUBSCRIBER_H_
|
Phaicom/movie-store
|
src/java/jsp/model/movie_pic.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jsp.model;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author Reawpai
*/
/**
 * Row model for the movie_pic table: one picture (pic) attached to a movie
 * (movie_id), identified by pic_id.
 */
public class movie_pic {
    private int pic_id;
    private String pic;
    private int movie_id;

    public int getPic_id() {
        return pic_id;
    }

    public void setPic_id(int pic_id) {
        this.pic_id = pic_id;
    }

    public String getPic() {
        return pic;
    }

    public void setPic(String pic) {
        this.pic = pic;
    }

    public int getMovie_id() {
        return movie_id;
    }

    public void setMovie_id(int movie_id) {
        this.movie_id = movie_id;
    }

    /**
     * Loads every picture row belonging to the given movie.
     *
     * Fix: the original closed the Connection only on the success path and
     * never closed the PreparedStatement/ResultSet, leaking JDBC resources on
     * any exception. try-with-resources now guarantees cleanup in all paths.
     *
     * @param id movie id to look up
     * @return the pictures of the movie, or null when there are none / on
     *         error (the original null-on-empty contract is preserved for
     *         existing callers)
     */
    public static List<movie_pic> getAllImage(int id) {
        List<movie_pic> mp_list = null;
        String s = "SELECT mp.* FROM `movie_pic` mp JOIN movie m on m.movie_id = mp.movie_id WHERE m.movie_id = ?";
        try (Connection con = ConnectionBuilder.getConnection();
             PreparedStatement pstm = con.prepareStatement(s)) {
            pstm.setInt(1, id);
            try (ResultSet rs = pstm.executeQuery()) {
                while (rs.next()) {
                    if (mp_list == null) {
                        mp_list = new ArrayList<>();
                    }
                    movie_pic mp = new movie_pic();
                    mp.setMovie_id(rs.getInt("movie_id"));
                    mp.setPic_id(rs.getInt("pic_id"));
                    mp.setPic(rs.getString("pic"));
                    mp_list.add(mp);
                }
            }
        } catch (Exception ex) {
            // Best-effort logging kept from the original implementation.
            System.out.println("getAllImage error: " + ex);
        }
        return mp_list;
    }

    @Override
    public String toString() {
        return "movie_pic{" + "pic_id=" + pic_id + ", pic=" + pic + ", movie_id=" + movie_id + '}';
    }
}
|
grischard/osmeditor4android
|
src/main/java/de/blau/android/services/package-info.java
|
<filename>src/main/java/de/blau/android/services/package-info.java
/**
* Background services
*/
package de.blau.android.services;
|
brunocampos01/becoming-a-expert-python
|
file_handling/drop_file.py
|
<reponame>brunocampos01/becoming-a-expert-python
import os
def drop_file(path):
    """Best-effort removal of ``path``.

    Fix: the original exists-then-remove sequence had a TOCTOU race (the file
    could vanish between the check and the removal, raising an unhandled
    error).  The removal is now attempted directly; a missing file is simply
    ignored and any other OS error is printed, matching the original
    best-effort behaviour.

    :param path: filesystem path of the file to delete
    """
    try:
        os.remove(path)
    except FileNotFoundError:
        pass  # already gone - nothing to do
    except OSError as err:
        print('IOError: ', err)
|
scionrep/scioncc_new
|
src/ion/util/parse_utils.py
|
<gh_stars>1-10
#!/usr/bin/env python
"""Common utilities to parse external input, e.g. for preload and service gateway"""
__author__ = '<NAME>, <NAME>'
import ast
from pyon.public import BadRequest, IonObject, log
from interface import objects
def get_typed_value(value, schema_entry=None, targettype=None, strict=False):
    """
    Performs a value type check or conversion according to a schema entry or specified target type.
    Supports simplelist and parsedict special type parsing from strings.
    @param strict if True, raise error of type does not match
    """
    # Caller must provide either a schema entry or an explicit target type.
    if not schema_entry and not targettype:
        raise BadRequest("Invalid schema or targettype")
    targettype = targettype or schema_entry["type"]
    if schema_entry and 'enum_type' in schema_entry:
        # Enum coercion: accept an already-typed enum value, an exact value
        # name, or (non-strict only) a case-insensitive value name.
        enum_clzz = getattr(objects, schema_entry['enum_type'])
        if type(value).__name__ == targettype and value in enum_clzz._str_map:
            return value
        if isinstance(value, basestring):  # NOTE: Python 2 only (basestring/unicode/long below)
            if strict and value in enum_clzz._value_map:
                return enum_clzz._value_map[value]
            elif not strict:
                if value in enum_clzz._value_map:
                    return enum_clzz._value_map[value]
                # Fall back to a case-insensitive match of the enum label.
                for enum_key, enum_val in enum_clzz._value_map.iteritems():
                    if enum_key.lower() == value.lower():
                        return enum_val
        raise BadRequest("Value %s is not valid enum value" % value)
    elif targettype == 'str':
        if type(value) is str:
            return value
        elif type(value) is unicode:
            # Encode unicode to a UTF-8 byte string (Python 2 semantics).
            return value.encode("utf8")
        if strict:
            raise BadRequest("Value %s is type %s not str" % (value, type(value).__name__))
        return str(value)
    elif targettype == 'bool':
        if type(value) is bool:
            return value
        if strict:
            raise BadRequest("Value %s is type %s not bool" % (value, type(value).__name__))
        # Accept common textual/numeric boolean representations.
        if value in ('TRUE', 'True', 'true', '1', 1):
            return True
        elif value in ('FALSE', 'False', 'false', '0', 0, '', None):
            return False
        raise BadRequest("Value %s cannot be converted to bool" % value)
    elif targettype == 'int':
        if type(value) in (int, long):
            return value
        if strict:
            raise BadRequest("Value %s is type %s not int" % (value, type(value).__name__))
        try:
            return int(value)
        except Exception:
            pass
        raise BadRequest("Value %s cannot be converted to int" % value)
    elif targettype == 'float':
        if type(value) == float:
            return value
        elif type(value) in (int, long):
            return float(value)
        if strict:
            raise BadRequest("Value %s is type %s not float" % (value, type(value).__name__))
        try:
            return float(value)
        except Exception:
            pass
        raise BadRequest("Value %s cannot be converted to float" % value)
    elif targettype == 'simplelist':
        # Comma-separated string (optionally bracketed) -> list of strings.
        if isinstance(value, basestring):
            return parse_list(value)
        raise BadRequest("Value %s cannot be converted to list as simplelist" % value)
    elif targettype == 'parsedict':
        # "a.b: 1, c: x" style string -> nested dict (see parse_dict).
        if isinstance(value, basestring):
            return parse_dict(value)
        raise BadRequest("Value %s cannot be converted to dict as parsedict" % value)
    elif targettype == 'list':
        if type(value) is list:
            return value
        if strict:
            raise BadRequest("Value %s is type %s not list" % (value, type(value).__name__))
        if isinstance(value, (tuple, set)):
            return list(value)
        elif isinstance(value, basestring):
            # Try to interpret the string as a Python literal list/tuple first.
            try:
                ret_val = ast.literal_eval(value)
            except Exception:
                ret_val = None
            if isinstance(ret_val, list):
                return ret_val
            elif isinstance(ret_val, tuple):
                return list(ret_val)
        if isinstance(value, basestring):
            # Fallback for strings that were not a literal: comma parsing.
            return parse_list(value)
        else:
            # Any other scalar becomes a single-element list.
            return [value]
    elif targettype == 'dict':
        if type(value) is dict:
            return value
        if strict:
            raise BadRequest("Value %s is type %s not dict" % (value, type(value).__name__))
        if isinstance(value, dict):
            # dict subclass instances are normalized to a plain dict.
            return dict(value)
        elif isinstance(value, basestring):
            try:
                ret_val = ast.literal_eval(value)
            except Exception:
                ret_val = None
            if isinstance(ret_val, dict):
                return ret_val
            return parse_dict(value)
        # Any other value is wrapped as {"value": value}.
        return dict(value=value)
    elif targettype == 'NoneType':
        if value is None:
            return None
        if not strict:
            # Accept common textual spellings of "no value".
            if value in ("None", "NONE", "none", "Null", "NULL", "null", ""):
                return None
            elif isinstance(value, basestring):
                return ast.literal_eval(value)
        return value
    elif targettype == 'ANY':
        # Strings are evaluated as Python literals; other values pass through.
        if isinstance(value, basestring):
            return ast.literal_eval(value)
        return value
    else:
        raise BadRequest("Value %s cannot be converted to target type %s" % (value, targettype))
def parse_list(value):
    """
    Parse a string to extract a simple list of string values.
    Assumes comma separated values optionally within [].

    Fix: a bracketed-but-empty input such as "[]" or "[ ]" previously fell
    through to ''.split(',') and returned [''] instead of []; the emptiness
    check now runs after the brackets are stripped.
    """
    if value.startswith('[') and value.endswith(']'):
        value = value[1:-1].strip()
    if not value.strip():
        return []
    return list(value.split(','))
def parse_dict(text):
    """
    Parse a text string to obtain a dictionary of unquoted string keys and values.
    The following substitutions are made:
    keys with dots ('.') will be split into dictionaries.
    booleans "True", "False" will be parsed
    numbers will be parsed as floats unless they begin with "0" or include one "." and end with "0"
    "{}" will be converted to {}
    "[]" will be converted to []
    For example, an entry could be this:
    PARAMETERS.TXWAVESTATS: False,
    PARAMETERS.TXREALTIME: True,
    PARAMETERS.TXWAVEBURST: false,
    SCHEDULER.ACQUIRE_STATUS: {},
    SCHEDULER.CLOCK_SYNC: 48.2
    SCHEDULER.VERSION.number: 3.0
    which would translate back to:
    { "PARAMETERS": { "TXWAVESTATS": False, "TXREALTIME": True, "TXWAVEBURST": "false" },
    "SCHEDULER": { "ACQUIRE_STATUS": {}, "CLOCK_SYNC", 48.2, "VERSION": {"number": "3.0"}}
    }
    """
    substitutions = {"{}": {}, "[]": [], "True": True, "False": False}

    def parse_value(some_val):
        # Fix: return substitution results immediately. The original fed them
        # into int()/float(), which raised an uncaught TypeError for the "{}"
        # and "[]" substitutions ({} / [] are not valid int() arguments),
        # contradicting the documented "{}" -> {} behaviour above.
        if some_val in substitutions:
            return substitutions[some_val]
        # Numbers round-trip exactly: "3" -> 3, "48.2" -> 48.2, but "3.0"
        # stays a string because str(float("3.0")) == "3.0" only sometimes
        # matches the exact input form (see docstring examples).
        try:
            int_val = int(some_val)
            if str(int_val) == some_val:
                return int_val
        except ValueError:
            pass
        try:
            float_val = float(some_val)
            if str(float_val) == some_val:
                return float_val
        except ValueError:
            pass
        return some_val

    def chomp_key_list(out_dict, keys, value):
        """
        turn keys like ['a', 'b', 'c', 'd'] and a value into
        out_dict['a']['b']['c']['d'] = value
        """
        dict_ptr = out_dict
        last_ptr = out_dict
        for i, key in enumerate(keys):
            last_ptr = dict_ptr
            if not key in dict_ptr:
                dict_ptr[key] = {}
            else:
                # A scalar already stored under this key cannot be extended.
                if type(dict_ptr[key]) is not dict:
                    raise BadRequest("Building a dict in %s field, but it exists as %s already" %
                                     (key, type(dict_ptr[key])))
            dict_ptr = dict_ptr[key]
        last_ptr[keys[-1]] = value

    out = {}
    if text is None:
        return out
    pairs = text.split(',')  # pairs separated by commas
    for pair in pairs:
        if pair.count(':') == 0:
            continue  # not a key:value entry - skip silently
        fields = pair.split(':', 1)  # pair separated by first colon
        key = fields[0].strip()
        value = fields[1].strip()
        keyparts = key.split(".")
        chomp_key_list(out, keyparts, parse_value(value))
    return out
def parse_phones(text):
    """Parse a phone-number string into a list of Phone IonObjects.

    Text containing ':' is treated as a parse_dict-style mapping of
    phone_type -> number; otherwise the whole (stripped) text becomes a
    single 'office' number.  Empty/falsy input yields [].

    Fix: use .items() instead of the Python-2-only .iteritems() (works on
    both interpreters) and stop shadowing the builtin name `type`.
    """
    if ':' in text:
        out = []
        for phone_type, number in parse_dict(text).items():
            out.append(IonObject("Phone", phone_number=number, phone_type=phone_type))
        return out
    elif text:
        return [IonObject("Phone", phone_number=text.strip(), phone_type='office')]
    else:
        return []
|
bopopescu/searchparty
|
google-cloud-sdk/lib/surface/dns/dnskeys/describe.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gcloud dns dnskeys describe command."""
from googlecloudsdk.api_lib.dns import util
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dns import flags
from googlecloudsdk.core import properties
from googlecloudsdk.core.resource import resource_projector
ALGORITHM_NUMBERS = {
'RSAMD5': 1,
'DH': 2,
'DSA': 3,
'RSASHA1': 5,
'DSANSEC3SHA1': 6,
'RSASHA1NSEC3SHA1': 7,
'RSASHA256': 8,
'RSASHA512': 10,
'ECCGOST': 12,
'ECDSAP256SHA256': 13,
'ECDSAP384SHA384': 14,
}
DIGEST_TYPE_NUMBERS = {
'SHA1': 1,
'SHA256': 2,
'SHA384': 4,
}
def _GenerateDSRecord(key):
key_tag = str(key.keyTag)
key_algorithm = str(ALGORITHM_NUMBERS[key.algorithm.name])
digest_algorithm = str(DIGEST_TYPE_NUMBERS[key.digests[0].type.name])
digest = key.digests[0].digest
return ' '.join([key_tag, key_algorithm, digest_algorithm, digest])
class Describe(base.DescribeCommand):
    """Get a DnsKey.

    This command displays the details of a single DnsKey.

    ## EXAMPLES

    To get a DnsKey from a managed-zone, run:

      $ {command} my_zone --key_id my_key
    """

    @staticmethod
    def Args(parser):
        # Registers the positional managed-zone argument and the key-id flag.
        flags.GetZoneArg(
            'The name of the managed-zone the DnsKey belongs to'
        ).AddToParser(parser)
        flags.GetKeyArg().AddToParser(parser)

    def Run(self, args):
        # Resolve the zone reference in the caller's project, then fetch the
        # key via the v2beta1 DnsKeys API.
        dns_client = apis.GetClientInstance('dns', 'v2beta1')
        zone_ref = util.GetRegistry('v2beta1').Parse(
            args.zone,
            params={
                'project': properties.VALUES.core.project.GetOrFail,
            },
            collection='dns.managedZones')
        result_object = dns_client.dnsKeys.Get(
            dns_client.MESSAGES_MODULE.DnsDnsKeysGetRequest(
                dnsKeyId=args.key_id,
                managedZone=zone_ref.Name(),
                project=zone_ref.project))
        result_dict = resource_projector.MakeSerializable(result_object)
        # Key-signing keys additionally get a ready-to-paste DS record string.
        if result_object.type.name == 'KEY_SIGNING':
            result_dict['dsRecord'] = _GenerateDSRecord(result_object)
        return result_dict
|
bazaarvoice/cloudbreak
|
core/src/main/java/com/sequenceiq/cloudbreak/converter/stack/cluster/ClusterToClusterResponseConverter.java
|
package com.sequenceiq.cloudbreak.converter.stack.cluster;
import static com.sequenceiq.cloudbreak.common.type.OrchestratorConstants.YARN;
import static com.sequenceiq.cloudbreak.domain.ClusterAttributes.CUSTOM_QUEUE;
import static com.sequenceiq.cloudbreak.logger.ReplaceUtil.anonymize;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.api.client.repackaged.com.google.common.base.Strings;
import com.sequenceiq.cloudbreak.api.model.AmbariRepoDetailsJson;
import com.sequenceiq.cloudbreak.api.model.AmbariStackDetailsResponse;
import com.sequenceiq.cloudbreak.api.model.BlueprintInputJson;
import com.sequenceiq.cloudbreak.api.model.BlueprintResponse;
import com.sequenceiq.cloudbreak.api.model.CustomContainerResponse;
import com.sequenceiq.cloudbreak.api.model.GatewayType;
import com.sequenceiq.cloudbreak.api.model.KerberosResponse;
import com.sequenceiq.cloudbreak.api.model.Port;
import com.sequenceiq.cloudbreak.api.model.SharedServiceResponse;
import com.sequenceiq.cloudbreak.api.model.ldap.LdapConfigResponse;
import com.sequenceiq.cloudbreak.api.model.rds.RDSConfigResponse;
import com.sequenceiq.cloudbreak.api.model.stack.cluster.ClusterResponse;
import com.sequenceiq.cloudbreak.api.model.stack.cluster.gateway.GatewayJson;
import com.sequenceiq.cloudbreak.api.model.stack.cluster.host.HostGroupResponse;
import com.sequenceiq.cloudbreak.api.model.v2.AttachedClusterInfoResponse;
import com.sequenceiq.cloudbreak.blueprint.validation.BlueprintValidator;
import com.sequenceiq.cloudbreak.blueprint.validation.StackServiceComponentDescriptor;
import com.sequenceiq.cloudbreak.blueprint.validation.StackServiceComponentDescriptors;
import com.sequenceiq.cloudbreak.cloud.model.AmbariRepo;
import com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails;
import com.sequenceiq.cloudbreak.controller.exception.CloudbreakApiException;
import com.sequenceiq.cloudbreak.converter.AbstractConversionServiceAwareConverter;
import com.sequenceiq.cloudbreak.converter.mapper.ProxyConfigMapper;
import com.sequenceiq.cloudbreak.domain.Blueprint;
import com.sequenceiq.cloudbreak.domain.ExposedServices;
import com.sequenceiq.cloudbreak.domain.KerberosConfig;
import com.sequenceiq.cloudbreak.domain.RDSConfig;
import com.sequenceiq.cloudbreak.domain.json.Json;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.domain.stack.cluster.Cluster;
import com.sequenceiq.cloudbreak.domain.stack.cluster.gateway.Gateway;
import com.sequenceiq.cloudbreak.domain.stack.cluster.host.HostGroup;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceMetaData;
import com.sequenceiq.cloudbreak.json.JsonHelper;
import com.sequenceiq.cloudbreak.service.ClusterComponentConfigProvider;
import com.sequenceiq.cloudbreak.service.cluster.ClusterService;
import com.sequenceiq.cloudbreak.service.network.NetworkUtils;
import com.sequenceiq.cloudbreak.service.rdsconfig.RdsConfigService;
import com.sequenceiq.cloudbreak.service.stack.StackService;
import com.sequenceiq.cloudbreak.util.StackUtil;
@Component
public class ClusterToClusterResponseConverter extends AbstractConversionServiceAwareConverter<Cluster, ClusterResponse> {
private static final Logger LOGGER = LoggerFactory.getLogger(ClusterToClusterResponseConverter.class);
private static final int SECONDS_PER_MINUTE = 60;
private static final int MILLIS_PER_SECOND = 1000;
@Inject
private BlueprintValidator blueprintValidator;
@Inject
private StackServiceComponentDescriptors stackServiceComponentDescs;
@Inject
private RdsConfigService rdsConfigService;
@Inject
private JsonHelper jsonHelper;
@Inject
private ClusterComponentConfigProvider componentConfigProvider;
@Inject
private StackUtil stackUtil;
@Inject
private ProxyConfigMapper proxyConfigMapper;
@Inject
private ClusterService clusterService;
@Inject
private StackService stackService;
@Override
public ClusterResponse convert(Cluster source) {
try {
return convert(source, ClusterResponse.class);
} catch (IllegalAccessException | InstantiationException e) {
throw new RuntimeException(e);
}
}
protected <R extends ClusterResponse> R convert(Cluster source, Class<R> clazz) throws IllegalAccessException, InstantiationException {
R clusterResponse = clazz.newInstance();
clusterResponse.setId(source.getId());
clusterResponse.setName(source.getName());
clusterResponse.setStatus(source.getStatus());
clusterResponse.setStatusReason(source.getStatusReason());
if (source.getBlueprint() != null) {
clusterResponse.setBlueprintId(source.getBlueprint().getId());
}
long uptime = stackUtil.getUptimeForCluster(source, source.isAvailable());
int minutes = (int) ((uptime / (MILLIS_PER_SECOND * SECONDS_PER_MINUTE)) % SECONDS_PER_MINUTE);
int hours = (int) (uptime / (MILLIS_PER_SECOND * SECONDS_PER_MINUTE * SECONDS_PER_MINUTE));
clusterResponse.setUptime(uptime);
clusterResponse.setHoursUp(hours);
clusterResponse.setMinutesUp(minutes);
Set<RDSConfig> rdsConfigs = source.getRdsConfigs();
convertRdsIds(clusterResponse, rdsConfigs);
if (source.getLdapConfig() != null) {
clusterResponse.setLdapConfigId(source.getLdapConfig().getId());
}
if (source.getAttributes() != null) {
clusterResponse.setAttributes(source.getAttributes().getMap());
}
String ambariIp = stackUtil.extractAmbariIp(source.getStack());
clusterResponse.setAmbariServerIp(ambariIp);
clusterResponse.setUserName(source.getUserName());
clusterResponse.setExecutorType(source.getExecutorType());
clusterResponse.setDescription(source.getDescription() == null ? "" : source.getDescription());
clusterResponse.setExtendedBlueprintText(source.getExtendedBlueprintText() == null
? source.getBlueprint().getBlueprintText() : source.getExtendedBlueprintText());
clusterResponse.setHostGroups(convertHostGroupsToJson(source.getHostGroups()));
clusterResponse.setAmbariServerUrl(getAmbariServerUrl(source, ambariIp));
clusterResponse.setServiceEndPoints(prepareServiceEndpointsMap(source, ambariIp));
clusterResponse.setConfigStrategy(source.getConfigStrategy());
setExtendedBlueprintText(source, clusterResponse);
clusterResponse.setLdapConfig(getConversionService().convert(source.getLdapConfig(), LdapConfigResponse.class));
convertRdsConfigs(source, clusterResponse);
clusterResponse.setBlueprint(getConversionService().convert(source.getBlueprint(), BlueprintResponse.class));
convertKnox(source, clusterResponse);
convertCustomQueue(source, clusterResponse);
convertContainerConfig(source, clusterResponse);
convertComponentConfig(clusterResponse, source);
clusterResponse.setCreationFinished(source.getCreationFinished());
KerberosConfig kerberosConfig = source.getKerberosConfig();
if (source.isSecure() && kerberosConfig != null) {
clusterResponse.setSecure(source.isSecure());
clusterResponse.setKerberosResponse(getConversionService().convert(source.getKerberosConfig(), KerberosResponse.class));
}
decorateResponseWithProxyConfig(source, clusterResponse);
addSharedServiceResponse(source, clusterResponse);
return clusterResponse;
}
private <R extends ClusterResponse> void setExtendedBlueprintText(Cluster source, R clusterResponse) {
if (StringUtils.isEmpty(source.getExtendedBlueprintText())) {
clusterResponse.setExtendedBlueprintText(anonymize(source.getBlueprint().getBlueprintText()));
} else {
clusterResponse.setExtendedBlueprintText(anonymize(source.getExtendedBlueprintText()));
}
}
private void convertComponentConfig(ClusterResponse response, Cluster source) {
try {
AmbariRepo ambariRepo = componentConfigProvider.getAmbariRepo(source.getComponents());
if (ambariRepo != null) {
AmbariRepoDetailsJson ambariRepoDetailsJson = getConversionService().convert(ambariRepo, AmbariRepoDetailsJson.class);
response.setAmbariRepoDetailsJson(ambariRepoDetailsJson);
}
StackRepoDetails stackRepoDetails = componentConfigProvider.getStackRepo(source.getComponents());
if (stackRepoDetails != null) {
AmbariStackDetailsResponse ambariRepoDetailsJson = getConversionService().convert(stackRepoDetails, AmbariStackDetailsResponse.class);
response.setAmbariStackDetails(ambariRepoDetailsJson);
}
} catch (RuntimeException e) {
LOGGER.error("Failed to convert dynamic component.", e);
}
}
private void convertCustomQueue(Cluster source, ClusterResponse clusterResponse) {
if (source.getAttributes().getValue() != null) {
Map<String, Object> attributes = source.getAttributes().getMap();
Object customQueue = attributes.get(CUSTOM_QUEUE.name());
if (customQueue != null) {
clusterResponse.setCustomQueue(customQueue.toString());
} else {
clusterResponse.setCustomQueue("default");
}
}
}
private void addSharedServiceResponse(Cluster cluster, ClusterResponse clusterResponse) {
SharedServiceResponse sharedServiceResponse = new SharedServiceResponse();
if (cluster.getStack().getDatalakeId() != null) {
sharedServiceResponse.setSharedClusterId(cluster.getStack().getDatalakeId());
sharedServiceResponse.setSharedClusterName(stackService.get(cluster.getStack().getDatalakeId()).getName());
} else {
for (Stack stack : stackService.findClustersConnectedToDatalake(cluster.getStack().getId())) {
AttachedClusterInfoResponse attachedClusterInfoResponse = new AttachedClusterInfoResponse();
attachedClusterInfoResponse.setId(stack.getId());
attachedClusterInfoResponse.setName(stack.getName());
sharedServiceResponse.getAttachedClusters().add(attachedClusterInfoResponse);
}
}
clusterResponse.setSharedServiceResponse(sharedServiceResponse);
}
private void convertRdsIds(ClusterResponse clusterResponse, Collection<RDSConfig> rdsConfigs) {
if (rdsConfigs != null && !rdsConfigs.isEmpty()) {
for (RDSConfig rdsConfig : rdsConfigs) {
clusterResponse.getRdsConfigIds().add(rdsConfig.getId());
}
}
}
private void convertRdsConfigs(Cluster source, ClusterResponse clusterResponse) {
Set<RDSConfig> rdsConfigs = rdsConfigService.findUserManagedByClusterId(source.getOwner(), source.getAccount(), source.getId());
for (RDSConfig rdsConfig : rdsConfigs) {
clusterResponse.getRdsConfigs().add(getConversionService().convert(rdsConfig, RDSConfigResponse.class));
}
}
private void convertKnox(Cluster source, ClusterResponse clusterResponse) {
Gateway gateway = source.getGateway();
GatewayJson cloudGatewayJson = new GatewayJson();
cloudGatewayJson.setEnableGateway(gateway.getEnableGateway());
cloudGatewayJson.setTopologyName(gateway.getTopologyName());
Json exposedJson = gateway.getExposedServices();
if (exposedJson != null && StringUtils.isNoneEmpty(exposedJson.getValue())) {
try {
cloudGatewayJson.setExposedServices(exposedJson.get(ExposedServices.class).getServices());
} catch (IOException e) {
LOGGER.error("Failed to add exposedServices to response", e);
throw new CloudbreakApiException("Failed to add exposedServices to response", e);
}
}
cloudGatewayJson.setPath(gateway.getPath());
cloudGatewayJson.setTokenCert(gateway.getTokenCert());
cloudGatewayJson.setSsoProvider(gateway.getSsoProvider());
cloudGatewayJson.setSsoType(gateway.getSsoType());
cloudGatewayJson.setGatewayType(gateway.getGatewayType());
clusterResponse.setGateway(cloudGatewayJson);
}
private void convertContainerConfig(Cluster source, ClusterResponse clusterResponse) {
Json customContainerDefinition = source.getCustomContainerDefinition();
if (customContainerDefinition != null && StringUtils.isNoneEmpty(customContainerDefinition.getValue())) {
try {
Map<String, String> map = customContainerDefinition.get(Map.class);
Map<String, String> result = new HashMap<>();
for (Entry<String, String> stringStringEntry : map.entrySet()) {
result.put(stringStringEntry.getKey(), stringStringEntry.getValue());
}
clusterResponse.setCustomContainers(new CustomContainerResponse(result));
} catch (IOException e) {
LOGGER.error("Failed to add customContainerDefinition to response", e);
throw new CloudbreakApiException("Failed to add customContainerDefinition to response", e);
}
}
}
private Set<BlueprintInputJson> convertBlueprintInputs(Json inputs) {
Set<BlueprintInputJson> blueprintInputJsons = new HashSet<>();
try {
if (inputs != null && inputs.getValue() != null) {
Map<String, String> is = inputs.get(Map.class);
for (Entry<String, String> stringStringEntry : is.entrySet()) {
BlueprintInputJson blueprintInputJson = new BlueprintInputJson();
blueprintInputJson.setName(stringStringEntry.getKey());
blueprintInputJson.setPropertyValue(stringStringEntry.getValue());
blueprintInputJsons.add(blueprintInputJson);
}
}
} catch (IOException ignored) {
LOGGER.error("Could not convert blueprintinputs json to Set.");
}
return blueprintInputJsons;
}
private Set<HostGroupResponse> convertHostGroupsToJson(Iterable<HostGroup> hostGroups) {
Set<HostGroupResponse> jsons = new HashSet<>();
for (HostGroup hostGroup : hostGroups) {
jsons.add(getConversionService().convert(hostGroup, HostGroupResponse.class));
}
return jsons;
}
private Map<String, String> prepareServiceEndpointsMap(Cluster cluster, String ambariIp) {
Set<HostGroup> hostGroups = cluster.getHostGroups();
Blueprint blueprint = cluster.getBlueprint();
Map<String, String> result = new HashMap<>();
List<Port> ports = NetworkUtils.getPorts(Optional.empty());
try {
JsonNode hostGroupsNode = blueprintValidator.getHostGroupNode(blueprint);
Map<String, HostGroup> hostGroupMap = blueprintValidator.createHostGroupMap(hostGroups);
for (JsonNode hostGroupNode : hostGroupsNode) {
String hostGroupName = blueprintValidator.getHostGroupName(hostGroupNode);
JsonNode componentsNode = blueprintValidator.getComponentsNode(hostGroupNode);
HostGroup actualHostgroup = hostGroupMap.get(hostGroupName);
String serviceAddress;
if (actualHostgroup.getConstraint().getInstanceGroup() != null) {
InstanceMetaData next = actualHostgroup.getConstraint().getInstanceGroup().getNotDeletedInstanceMetaDataSet().iterator().next();
serviceAddress = next.getPublicIpWrapper();
} else {
serviceAddress = actualHostgroup.getHostMetadata().iterator().next().getHostName();
}
for (JsonNode componentNode : componentsNode) {
String componentName = componentNode.get("name").asText();
StackServiceComponentDescriptor componentDescriptor = stackServiceComponentDescs.get(componentName);
collectServicePorts(result, ports, ambariIp, serviceAddress, componentDescriptor, cluster);
}
}
} catch (Exception ignored) {
return result;
}
return result;
}
private void collectServicePorts(Map<String, String> result, Iterable<Port> ports, String ambariIp, String serviceAddress,
StackServiceComponentDescriptor componentDescriptor, Cluster cluster) throws IOException {
if (componentDescriptor != null && componentDescriptor.isMaster()) {
List<String> exposedServices = new ArrayList<>();
Gateway gateway = cluster.getGateway();
if (gateway.getExposedServices() != null && gateway.getExposedServices().getValue() != null) {
exposedServices = gateway.getExposedServices().get(ExposedServices.class).getServices();
}
for (Port port : ports) {
collectServicePort(result, port, serviceAddress, ambariIp, componentDescriptor, exposedServices, gateway);
}
}
}
/**
 * Adds the URL for a single exposed service port to {@code result}.
 * With the gateway enabled, a Knox URL is built (central vs. individual topology)
 * and published only for services that have a Knox URL and are configured as exposed;
 * otherwise a plain http URL against the service address is used.
 */
private void collectServicePort(Map<String, String> result, Port port, String serviceAddress, String ambariIp,
        StackServiceComponentDescriptor componentDescriptor, Collection<String> exposedServices, Gateway gateway) {
    if (!port.getExposedService().getServiceName().equals(componentDescriptor.getName())) {
        return;
    }
    if (gateway.getEnableGateway() && ambariIp != null) {
        String knoxUrl = port.getExposedService().getKnoxUrl();
        String url;
        if (GatewayType.CENTRAL == gateway.getGatewayType()) {
            url = String.format("/%s/%s%s", gateway.getPath(), gateway.getTopologyName(), knoxUrl);
        } else {
            url = String.format("https://%s:8443/%s/%s%s", ambariIp, gateway.getPath(), gateway.getTopologyName(), knoxUrl);
        }
        // Filter out what is not exposed, and what is not expected to be exposed
        // (e.g. Zeppelin WS, which has no Knox URL).
        if (!Strings.isNullOrEmpty(knoxUrl) && exposedServices.contains(port.getExposedService().getKnoxService())) {
            result.put(port.getExposedService().getPortName(), url);
        }
    } else if (serviceAddress != null) {
        result.put(port.getExposedService().getPortName(),
                String.format("http://%s:%s%s", serviceAddress, port.getPort(), port.getExposedService().getPostFix()));
    }
}
/**
 * Builds the Ambari server URL for the cluster, or returns {@code null} when no
 * Ambari IP is known. YARN clusters use plain http on 8080; otherwise the URL
 * depends on whether (and how) the gateway is enabled.
 */
private String getAmbariServerUrl(Cluster cluster, String ambariIp) {
    String orchestrator = cluster.getStack().getOrchestrator().getType();
    if (ambariIp == null) {
        return null;
    }
    Gateway gateway = cluster.getGateway();
    if (YARN.equals(orchestrator)) {
        return String.format("http://%s:8080", ambariIp);
    }
    if (!gateway.getEnableGateway()) {
        return String.format("https://%s/ambari/", ambariIp);
    }
    // Central gateways are addressed by a relative path; individual topologies go through port 8443.
    if (GatewayType.CENTRAL == gateway.getGatewayType()) {
        return String.format("/%s/%s/ambari/", gateway.getPath(), gateway.getTopologyName());
    }
    return String.format("https://%s:8443/%s/%s/ambari/", ambariIp, gateway.getPath(), gateway.getTopologyName());
}
// Copies the proxy configuration's name onto the response when a proxy config is set;
// leaves the response untouched otherwise.
private void decorateResponseWithProxyConfig(Cluster source, ClusterResponse clusterResponse) {
if (source.getProxyConfig() != null) {
clusterResponse.setProxyName(source.getProxyConfig().getName());
}
}
}
|
hab278/terraling
|
app/models/search_results/keyword_filter.rb
|
module SearchResults
# Decorates a Filter with keyword narrowing: the values at each depth are
# restricted to those matching the user's keyword, delegating the actual
# matching to a per-model strategy (Ling / Property / Example).
class KeywordFilter < Filter
  attr_accessor :strategy

  # filter: the wrapped Filter; query: the submitted search query.
  # Yields self so callers can configure the filter (e.g. set the strategy).
  def initialize(filter, query)
    super
    yield self if block_given?
  end

  # Strategy name; defaults (and memoizes) to :ling when never assigned.
  def strategy
    @strategy ||= :ling
  end

  def depth_0_vals
    @depth_0_vals ||= filter_vals(Depth::PARENT)
  end

  def depth_1_vals
    @depth_1_vals ||= filter_vals(Depth::CHILD)
  end

  private

  # Resolves e.g. :ling -> SearchResults::LingKeywordStrategy.
  # Fix: go through the +strategy+ reader instead of @strategy so the :ling
  # default applies even when no strategy was explicitly assigned; previously
  # a nil @strategy camelized to "" and resolved to the abstract
  # SearchResults::KeywordStrategy, which has no model_class.
  def strategy_class
    "SearchResults::#{strategy.to_s.camelize}KeywordStrategy".constantize
  end

  def filter_vals(depth)
    @filter_strategy_instance ||= strategy_class.new(@filter, @query)
    result = @filter_strategy_instance.vals_at(depth)
    ##########################################################################
    # Trick to solve issue #1                                                #
    # This shows a NO_DEPTH_1_RESULT as result if no result is retrieved     #
    # in depth 1 search.                                                     #
    ##########################################################################
    # NOTE(review): is_depth_0? / any_error? are presumably defined on the
    # Filter superclass - confirm there.
    is_depth_0?(depth) & any_error?(result) ? [] : result
  end
end
# Base class for model-specific keyword matching. Subclasses must define
# +model_class+ and may override +keyword+ / +vals_at+ / the search scope.
class KeywordStrategy
  def initialize(filter, query)
    @filter, @query = filter, query
  end

  # Query key holding this model's keywords, e.g. :ling_keywords for Ling.
  def query_key
    "#{model_name}_keywords".to_sym
  end

  # Snake-cased model name, e.g. "ling" for Ling.
  # Fix: use ActiveSupport's String#underscore - the previous
  # +name.downcase.underscorize+ called a method that does not exist in
  # ActiveSupport and would raise NoMethodError at runtime.
  def model_name
    model_class.name.underscore
  end

  # Keyword entered for the given depth ("0"/"1"), or nil.
  def keyword(depth)
    @query[query_key] && @query[query_key][depth.to_s]
  end

  def group
    @query.group
  end

  # Values at +depth+, narrowed by the keyword when one was entered.
  def vals_at(depth)
    vals = @filter.vals_at(depth)
    keyword(depth).present? ? select_vals_by_keyword(vals, keyword(depth)) : vals
  end

  # Intersects +vals+ with the records whose name matches +keyword+.
  # Returns the Filter::NO_DEPTH_1_RESULT sentinel when nothing matches.
  def select_vals_by_keyword(vals, keyword)
    result = LingsProperty.select_ids.
      with_id(vals.pluck(:id)).
      joins(model_name.to_sym).
      # Intersect with the result of the keyword search
      merge search_scope_name_by_keyword(keyword)
    result.empty? ? Filter::NO_DEPTH_1_RESULT : result
  end

  # Arel scope over +model_class+ records in the current group whose name
  # starts with or contains +keyword+ (the prefix clause is subsumed by the
  # contains clause but kept for parity with the original query).
  def search_scope_name_by_keyword(keyword)
    model_class.in_group(group).unscoped.
      where((model_class.arel_table[:name].matches("#{keyword}%")).
        or(model_class.arel_table[:name].matches("%#{keyword}%")))
  end
end
# Keyword matching over Ling records; inherits the depth-keyed behaviour
# from KeywordStrategy unchanged.
class LingKeywordStrategy < KeywordStrategy
def model_class
Ling
end
end
# Keyword matching over Property records. Unlike the other strategies, the
# keywords here are keyed by property-category id rather than by depth.
class PropertyKeywordStrategy < KeywordStrategy
def model_class
Property
end
# Keyword entered for the given category id, or nil.
def keyword(category_id)
@query[query_key] && @query[query_key][category_id.to_s]
end
# Maps selected values to their ids; the NO_DEPTH_1_RESULT sentinel maps to
# [-1] so the final id lookup matches nothing.
def map_selected_vals_id(selected_vals)
selected_vals == Filter::NO_DEPTH_1_RESULT ? [-1] : selected_vals.map(&:id)
end
# Collects matching ids per category and resolves them to LingsProperty rows.
# Categories without a keyword contribute all their values, guarded by
# RESULTS_FLATTEN_THRESHOLD to avoid flattening an oversized result set.
def vals_at(depth)
vals = @filter.vals_at(depth)
category_ids = @query.group_prop_category_ids(depth)
collected_ids = category_ids.collect do |category_id|
if keyword(category_id).present?
map_selected_vals_id(select_vals_by_keyword(vals, keyword(category_id)))
else
raise Exceptions::ResultTooBigError if vals.size > Search::RESULTS_FLATTEN_THRESHOLD
map_selected_vals_id(vals)
end
end.flatten
# NOTE(review): comparing the flattened array against NO_DEPTH_1_RESULT
# presumably matches the sentinel's shape - confirm against Filter.
return collected_ids if collected_ids == Filter::NO_DEPTH_1_RESULT
LingsProperty.with_id(collected_ids).select_ids
end
end
# Keyword matching over Example records. The attribute to search (e.g.
# description vs. a stored key/value field) is chosen per depth via
# query[:example_fields].
class ExampleKeywordStrategy < KeywordStrategy
# query: :example_fields=>{"0"=>["origin"], "1"=>["text"]}, :example_keywords=>{"0"=>"gold", "1"=>""}}
def model_class
Example
end
def keyword(depth)
@query[query_key] && @query[query_key][depth.to_s]
end
def vals_at(depth)
vals = @filter.vals_at(depth)
keyword(depth).present? ? select_vals_by_keyword(vals, keyword(depth), depth) : vals
end
# Narrows +vals+ by +keyword+, matching either the example's description or
# a stored key/value attribute depending on the field selected for +depth+.
def select_vals_by_keyword(vals, keyword, depth)
# NOTE(review): per the sample query above, example_fields values look like
# arrays (["origin"]) - Array#to_sym would raise; confirm the actual shape.
example_attribute = @query[:example_fields][depth.to_s].to_sym
keyword_scope = case example_attribute
when :description
search_scope_name_by_keyword(keyword)
else
# keyword search by stored value key/pair
search_scope_value_by_stored_value_key_pair(keyword, example_attribute)
end
LingsProperty.select_ids.where(:id => vals.pluck(:id)).
joins(:examples).merge keyword_scope
# Squeel Syntax
# LingsProperty.select_ids.where{ (:id == my{vals}) } & keyword_scope
end
# Scope over examples in the current group whose stored value for +key+
# starts with or contains +keyword+.
def search_scope_value_by_stored_value_key_pair(keyword, key)
model_class.unscoped.where(:group_id => group.id).
joins("INNER JOIN stored_values ON examples.id = stored_values.storable_id").
merge StoredValue.unscoped.with_key(key).
where( (StoredValue.arel_table[:value].matches("#{keyword}%")).
or(StoredValue.arel_table[:value].matches("%#{keyword}%") ))
# Squeel Syntax
# model_class.unscoped.where{ :group == my{group.id} } &
# StoredValue.unscoped.with_key(key).
# where { (:value =~ "#{keyword}%") || (:value =~ "%#{keyword}%")}
end
end
end
|
Invenso/xdnucleus-senlimo-demo
|
src/main/java/com/invenso/xperido/senlimo/model/db/ContractEntity.java
|
package com.invenso.xperido.senlimo.model.db;
import javax.persistence.*;
import java.sql.Date;
import java.util.Collection;
/**
 * JPA entity mapped to the {@code contract} table (schema {@code public},
 * catalog {@code senlimo}). Column names are backtick-quoted to preserve
 * their exact case in the database.
 */
@Entity
@Table(name = "contract", schema = "public", catalog = "senlimo")
@NamedQuery(name="ContractEntity.byID", query="select c from ContractEntity c where c.contractId = :id")
public class ContractEntity {
// Primary key ("ContractID"); no @GeneratedValue, so ids are assigned by the caller.
private Integer contractId;
private String contractName;
// Contract validity period (java.sql.Date - date only, no time component).
private Date contractStartDate;
private Date contractEndDate;
// Required owning account (join column is nullable = false).
private AccountEntity accountByContractAccount;
// Required owning user.
private EmployeeEntity employeeByContractOwningUser;
// Optional contact person (join column allows null).
private ContactEntity contactByContractContact;
// Inverse side of LineitemEntity.contractByLineItemContract.
private Collection<LineitemEntity> lineitemsByContractId;
@Id
@Column(name = "`ContractID`")
public Integer getContractId() {
return contractId;
}
public void setContractId(Integer contractId) {
this.contractId = contractId;
}
@Basic
@Column(name = "`ContractName`")
public String getContractName() {
return contractName;
}
public void setContractName(String contractName) {
this.contractName = contractName;
}
@Basic
@Column(name = "`ContractStartDate`")
public Date getContractStartDate() {
return contractStartDate;
}
public void setContractStartDate(Date contractStartDate) {
this.contractStartDate = contractStartDate;
}
@Basic
@Column(name = "`ContractEndDate`")
public Date getContractEndDate() {
return contractEndDate;
}
public void setContractEndDate(Date contractEndDate) {
this.contractEndDate = contractEndDate;
}
@ManyToOne
@JoinColumn(name = "`ContractAccount`", referencedColumnName = "`AccountID`", nullable = false)
public AccountEntity getAccountByContractAccount() {
return accountByContractAccount;
}
public void setAccountByContractAccount(AccountEntity accountByContractAccount) {
this.accountByContractAccount = accountByContractAccount;
}
@ManyToOne
@JoinColumn(name = "`ContractOwningUser`", referencedColumnName = "`UserID`", nullable = false)
public EmployeeEntity getEmployeeByContractOwningUser() {
return employeeByContractOwningUser;
}
public void setEmployeeByContractOwningUser(EmployeeEntity employeeByContractOwningUser) {
this.employeeByContractOwningUser = employeeByContractOwningUser;
}
@ManyToOne
@JoinColumn(name = "`ContractContact`", referencedColumnName = "`ContactID`")
public ContactEntity getContactByContractContact() {
return contactByContractContact;
}
public void setContactByContractContact(ContactEntity contactByContractContact) {
this.contactByContractContact = contactByContractContact;
}
@OneToMany(mappedBy = "contractByLineItemContract")
public Collection<LineitemEntity> getLineitemsByContractId() {
return lineitemsByContractId;
}
public void setLineitemsByContractId(Collection<LineitemEntity> lineitemsByContractId) {
this.lineitemsByContractId = lineitemsByContractId;
}
}
|
liupengzhouyi/selection_of_college_graduation_design
|
src/main/java/cn/liupengstudy/selection_of_college_graduation_design/pojo/tools/dataType/CollageIDAndProfessionalIDType.java
|
<reponame>liupengzhouyi/selection_of_college_graduation_design<filename>src/main/java/cn/liupengstudy/selection_of_college_graduation_design/pojo/tools/dataType/CollageIDAndProfessionalIDType.java
package cn.liupengstudy.selection_of_college_graduation_design.pojo.tools.dataType;
import cn.liupengstudy.selection_of_college_graduation_design.pojo.CollageAndProfessionalTable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/** DTO pairing a college id with a major (professional) id. */
@ApiModel(value = "学院ID和专业ID信息实体")
public class CollageIDAndProfessionalIDType {

    @ApiModelProperty(value = "学院ID")
    private Integer collageID;

    @ApiModelProperty(value = "专业ID")
    private Integer professionalID;

    public CollageIDAndProfessionalIDType() {
    }

    public CollageIDAndProfessionalIDType(Integer collageID, Integer professionalID) {
        this.collageID = collageID;
        this.professionalID = professionalID;
    }

    public Integer getCollageID() {
        return collageID;
    }

    public void setCollageID(Integer collageID) {
        this.collageID = collageID;
    }

    public Integer getProfessionalID() {
        return professionalID;
    }

    public void setProfessionalID(Integer professionalID) {
        this.professionalID = professionalID;
    }

    /** Copies both ids from the given table row into this DTO (despite the "get" name, returns nothing). */
    public void getCollageIDAndProfessionalIDTypeByCollageAndProfessionalTable(CollageAndProfessionalTable collageAndProfessionalTable) {
        setCollageID(collageAndProfessionalTable.getCollageid());
        setProfessionalID(collageAndProfessionalTable.getProfessionalid());
    }

    @Override
    public String toString() {
        // %s renders null Integers as "null", matching the original concatenation.
        return String.format("CollageIDAndProfessionalIDType{collageID=%s, professionalID=%s}",
                collageID, professionalID);
    }
}
|
jangalinski/camunda-bpm-fluent-tests
|
camunda-bpm-fluent-engine-api/src/main/java/org/camunda/bpm/engine/fluent/FluentProcessInstanceRepositoryImpl.java
|
<reponame>jangalinski/camunda-bpm-fluent-tests
package org.camunda.bpm.engine.fluent;
/**
* @author <NAME> <<EMAIL>>
* @author <NAME> <<EMAIL>>
*/
/**
 * Default repository implementation that creates fluent process-instance
 * wrappers bound to the engine supplied at construction time.
 */
public class FluentProcessInstanceRepositoryImpl extends AbstractFluentProcessEngineAware implements FluentProcessInstanceRepository {
// The engine reference is stored by the AbstractFluentProcessEngineAware base class.
public FluentProcessInstanceRepositoryImpl(FluentProcessEngine engine) {
super(engine);
}
/**
 * Creates a fluent wrapper for the given process definition key.
 *
 * @see org.camunda.bpm.engine.test.fluent.FluentProcessEngineTests#newProcessInstance(String)
 */
@Override
public FluentProcessInstance newProcessInstance(String processDefinitionKey) {
return new FluentProcessInstanceImpl(engine, processDefinitionKey);
}
}
|
sho25/hbase
|
hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestOrderedString.java
|
<filename>hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestOrderedString.java
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|types
package|;
end_package
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertEquals
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|HBaseClassTestRule
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|testclassification
operator|.
name|MiscTests
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|testclassification
operator|.
name|SmallTests
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|util
operator|.
name|Order
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|util
operator|.
name|PositionedByteRange
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hbase
operator|.
name|util
operator|.
name|SimplePositionedMutableByteRange
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|ClassRule
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Test
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|experimental
operator|.
name|categories
operator|.
name|Category
import|;
end_import
begin_class
annotation|@
name|Category
argument_list|(
block|{
name|MiscTests
operator|.
name|class
block|,
name|SmallTests
operator|.
name|class
block|}
argument_list|)
specifier|public
class|class
name|TestOrderedString
block|{
annotation|@
name|ClassRule
specifier|public
specifier|static
specifier|final
name|HBaseClassTestRule
name|CLASS_RULE
init|=
name|HBaseClassTestRule
operator|.
name|forClass
argument_list|(
name|TestOrderedString
operator|.
name|class
argument_list|)
decl_stmt|;
specifier|private
specifier|static
specifier|final
name|String
index|[]
name|VALUES
init|=
operator|new
name|String
index|[]
block|{
literal|null
block|,
literal|""
block|,
literal|"1"
block|,
literal|"22"
block|,
literal|"333"
block|,
literal|"4444"
block|,
literal|"55555"
block|,
literal|"666666"
block|,
literal|"7777777"
block|,
literal|"88888888"
block|,
literal|"999999999"
block|}
decl_stmt|;
annotation|@
name|Test
specifier|public
name|void
name|testEncodedLength
parameter_list|()
block|{
name|PositionedByteRange
name|buff
init|=
operator|new
name|SimplePositionedMutableByteRange
argument_list|(
literal|20
argument_list|)
decl_stmt|;
for|for
control|(
specifier|final
name|DataType
argument_list|<
name|String
argument_list|>
name|type
range|:
operator|new
name|OrderedString
index|[]
block|{
operator|new
name|OrderedString
argument_list|(
name|Order
operator|.
name|ASCENDING
argument_list|)
block|,
operator|new
name|OrderedString
argument_list|(
name|Order
operator|.
name|DESCENDING
argument_list|)
block|}
control|)
block|{
for|for
control|(
specifier|final
name|String
name|val
range|:
name|VALUES
control|)
block|{
name|buff
operator|.
name|setPosition
argument_list|(
literal|0
argument_list|)
expr_stmt|;
name|type
operator|.
name|encode
argument_list|(
name|buff
argument_list|,
name|val
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"encodedLength does not match actual, "
operator|+
name|val
argument_list|,
name|buff
operator|.
name|getPosition
argument_list|()
argument_list|,
name|type
operator|.
name|encodedLength
argument_list|(
name|val
argument_list|)
argument_list|)
expr_stmt|;
block|}
block|}
block|}
annotation|@
name|Test
specifier|public
name|void
name|testEncodedClassIsFloat
parameter_list|()
block|{
specifier|final
name|DataType
argument_list|<
name|String
argument_list|>
name|type
init|=
operator|new
name|OrderedString
argument_list|(
name|Order
operator|.
name|ASCENDING
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|String
operator|.
name|class
argument_list|,
name|type
operator|.
name|encodedClass
argument_list|()
argument_list|)
expr_stmt|;
block|}
block|}
end_class
end_unit
|
WXM99/SE228_SpringbootDemo
|
ebook-backend/src/main/java/ebook/service/BookService.java
|
<filename>ebook-backend/src/main/java/ebook/service/BookService.java
package ebook.service;
import ebook.model.BookDetails;
import ebook.model.BookInfoBrief;
import ebook.model.outOfDB.WholeBook;
import java.rmi.RemoteException;
import net.sf.json.JSONObject;
import java.util.List;
/**
 * Service contract for book catalogue queries. All lookup/search criteria are
 * carried in a {@link JSONObject} input.
 */
public interface BookService {
/** @return brief info for every book. */
List<BookInfoBrief> allBook();
/** Finds one book's brief info matching the criteria in {@code input}. */
BookInfoBrief findBook(JSONObject input);
/** Loads a whole book; may involve a remote call - hence RemoteException. */
WholeBook find(JSONObject input) throws RemoteException;
/** Paged variant of book lookup; paging parameters come from {@code input}. */
List<BookInfoBrief> findBookWithPage(JSONObject input);
/** Searches books by the criteria in {@code input}. */
List<BookInfoBrief> searchBook(JSONObject input);
/** Adds book details built from {@code input} and returns the stored details. */
BookDetails addDetails(JSONObject input);
}
|
variflight/feeyo-flatbuffers
|
src/main/java/com/feeyo/flattenable/model/test2/FlatEdgeList.java
|
<filename>src/main/java/com/feeyo/flattenable/model/test2/FlatEdgeList.java<gh_stars>1-10
package com.feeyo.flattenable.model.test2;
import com.feeyo.flattenable.FlattenableList;
/**
 * A {@link FlattenableList} specialised to {@link FlatEdge} elements.
 *
 * <p>The element class is passed to the superclass, which presumably uses it
 * for element instantiation/reflection - confirm in FlattenableList.
 */
public class FlatEdgeList extends FlattenableList<FlatEdge> {

    public FlatEdgeList(Class<FlatEdge> clazz) {
        super(clazz);
    }
}
|
bitkylin/ClusterDeviceManager
|
ClusterDevicePlatform-client/src/main/java/cc/bitky/clusterdeviceplatform/client/server/repo/MsgPackage.java
|
package cc.bitky.clusterdeviceplatform.client.server.repo;
import cc.bitky.clusterdeviceplatform.messageutils.msg.statusreply.MsgReplyDeviceStatus;
/**
 * Immutable value object bundling the two status-reply messages for one
 * device: its charge status and its work status.
 */
public class MsgPackage {
    // private final: the pair is fixed at construction and only exposed via getters.
    private final MsgReplyDeviceStatus chargeStatus;
    private final MsgReplyDeviceStatus workStatus;

    public MsgPackage(MsgReplyDeviceStatus chargeStatus, MsgReplyDeviceStatus workStatus) {
        this.chargeStatus = chargeStatus;
        this.workStatus = workStatus;
    }

    public MsgReplyDeviceStatus getChargeStatus() {
        return chargeStatus;
    }

    public MsgReplyDeviceStatus getWorkStatus() {
        return workStatus;
    }
}
|
augustusliu/thingsboard
|
common/data/src/main/java/org/thingsboard/server/common/data/kv/AttributeKvEntry.java
|
<filename>common/data/src/main/java/org/thingsboard/server/common/data/kv/AttributeKvEntry.java<gh_stars>1-10
package org.thingsboard.server.common.data.kv;
/**
* @author <NAME>
*/
/** Key-value entry for a device/entity attribute, extended with update-time metadata. */
public interface AttributeKvEntry extends KvEntry {
/**
 * @return epoch timestamp of the last update to this attribute
 *         (unit not visible here - presumably milliseconds; confirm with implementations)
 */
long getLastUpdateTs();
}
|
mishra-sid/clustering_hyperparameters
|
src/clustering_hyperparameters/optimize/optimize.py
|
from ..cluster.cluster import cluster
from ..utils.type_utils import get_type_from_str
from pathlib import Path
from ax.modelbridge.generation_strategy import GenerationStrategy, GenerationStep
from ax.modelbridge.registry import Models
from ax.service.ax_client import AxClient
from ax.core.metric import Metric
from ax.service.utils.report_utils import exp_to_df
from ray import tune
from ray.tune import report, Callback
from ray.tune.suggest.ax import AxSearch
from omegaconf import OmegaConf
import torch
import pandas as pd
def evaluation_metric_function(parameterization, config):
    """Evaluate one hyperparameter trial and report its clustering metrics.

    Args:
        parameterization (dict): Parameter values taken by each hyperparameter of the model.
        config (dict): Global config object.
    """
    # NOTE(review): this aliases (does not copy) `config`, so the caller's
    # config object is mutated in place when the params are overwritten below -
    # confirm this is intended before reusing `config` across trials.
    resolved_config = config
    resolved_config["model"]["params"] = dict(parameterization)
    # `cluster` returns a mapping of metric name -> value (unpacked with **);
    # `report` forwards it to Ray Tune.
    report(**cluster(resolved_config))
def optimize(config):
    """Run the hyperparameter-optimization procedure described by ``config``.

    Builds an Ax experiment whose generation strategy is Sobol quasi-random
    exploration followed by BoTorch Bayesian optimization, optionally
    re-attaches trials from a previous run's CSV, drives evaluation through
    Ray Tune, and writes the trial data frame (with per-trial compute time)
    to ``<root_dir>/output/<suite>/<exp_name>.csv``.

    Args:
        config (dict): Global config object (OmegaConf-compatible).
    """
    dataset_index = int(config['dataset_index'])
    suite_name = config['suite']['name']
    output_dir = config["root_dir"] + "/output/" + suite_name
    Path(output_dir).mkdir(parents=True, exist_ok=True)
    optim_seed = config["optim"]["run_index"]
    normalized = "_normalized" if config["normalize"] else ''
    exp_name = f"experiment_{config['model']['name']}_{config['suite']['datasets'][dataset_index]['name']}{normalized}_{optim_seed}"
    num_random_trials = config["optim"]["num_random_trials"]
    to_attach = config["optim"]["attach_from_existing_trials"]
    num_bayes_trials = config["optim"]["num_bayes_trials"]
    num_total_trials = num_random_trials + num_bayes_trials
    # Generation strategy: Sobol exploration first, then Bayesian (BoTorch)
    # trials; either phase is omitted when its trial count is zero.
    gen_steps = []
    if num_random_trials > 0:
        gen_steps.append(GenerationStep(
            model=Models.SOBOL,
            num_trials=num_random_trials,
            min_trials_observed=num_random_trials,
            max_parallelism=config["optim"]["compute"]["max_concurrent"],
            model_kwargs={"seed": optim_seed}))
    if num_bayes_trials > 0:
        gen_steps.append(GenerationStep(
            model=Models.BOTORCH,
            num_trials=num_bayes_trials,
            max_parallelism=config["optim"]["compute"]["max_concurrent"]))
    gen_strat = GenerationStrategy(
        steps=gen_steps
    )
    ax_client = AxClient(generation_strategy=gen_strat,
                         enforce_sequential_optimization=False)
    model_params = OmegaConf.to_container((config["model"]["params"]), resolve=True)
    ax_client.create_experiment(name="clustering_hyperparameter_optimization",
                                parameters=model_params,
                                objective_name=config["optim"]["eval_metric"]["name"],
                                minimize=config["optim"]["eval_metric"]["minimize"])
    # Metrics recorded for every trial but not optimized.
    ax_client.experiment.add_tracking_metrics([Metric(name=mname) for mname in ["adjusted_mutual_info_score",
                                                                                "completeness_score",
                                                                                "fowlkes_mallows_score",
                                                                                "homogeneity_score",
                                                                                "mutual_info_score",
                                                                                "normalized_mutual_info_score",
                                                                                "rand_score",
                                                                                "v_measure_score",
                                                                                "homogeneity_completeness_v_measure"]])
    out_csv_path = Path(output_dir) / (exp_name + ".csv")
    if to_attach:
        # Resume mode: seed the experiment with trials from a previous run's
        # CSV so the Bayesian phase can build on them; skip if already done.
        out_bayes_path = Path(output_dir) / (exp_name + "_bayesian.csv")
        if out_bayes_path.exists():
            return
        num_trials_to_attach = config["optim"]["num_trials_to_attach"]
        trials_df_existing = pd.read_csv(out_csv_path)
        # Hoisted out of the row loop: the parameter columns and the
        # name -> declared-value-type lookup do not depend on the row
        # (previously both were recomputed for every attached trial).
        param_cols = [x for x in trials_df_existing.columns if x not in ["adjusted_rand_score",
                                                                         "trial_status",
                                                                         "generator_model",
                                                                         "generation_method",
                                                                         "trial_index",
                                                                         "arm_name",
                                                                         "compute_time"]]

        def get_type(name):
            # Declared 'value_type' of the parameter with this name.
            return list(filter(lambda y: y['name'] == name, model_params))[0]['value_type']

        for ind, row in trials_df_existing.head(num_trials_to_attach).iterrows():
            # Re-cast each CSV cell back to the parameter's declared type.
            params = {col: get_type_from_str(get_type(col))(row[col]) for col in param_cols}
            params, trial_ind = ax_client.attach_trial(params)
            ax_client.complete_trial(trial_index=trial_ind,
                                     raw_data={"adjusted_rand_score": row["adjusted_rand_score"]})
    elif out_csv_path.exists():
        # Fresh run whose output already exists: nothing to do.
        return
    resolved_config = OmegaConf.to_container(config, resolve=True)

    class TuneCallBack(Callback):
        # Re-seed torch before every Tune step for reproducible trials.
        def on_step_begin(self, iteration, trials, **info):
            torch.manual_seed(optim_seed)

    tune.run(
        lambda parameterization: evaluation_metric_function(parameterization, resolved_config),
        name=exp_name,
        num_samples=num_total_trials,
        callbacks=[TuneCallBack()],
        search_alg=AxSearch(
            ax_client=ax_client,
            max_concurrency=config["optim"]["compute"]["max_concurrent"]
        ),
        local_dir=config["root_dir"] + "/ray/" + suite_name,
        resources_per_trial={"cpu": config["optim"]["compute"]["cpu"], "gpu": config["optim"]["compute"]["gpu"]}
    )
    trials_df = ax_client.get_trials_data_frame()
    # Wall-clock duration of each trial in seconds, added before persisting.
    compute_time_col = {index: (trial.time_completed - trial.time_run_started).total_seconds()
                        for index, trial in ax_client.experiment.trials.items()}
    trials_df['compute_time'] = [compute_time_col[trial_index] for trial_index in trials_df.trial_index]
    trials_df.to_csv(output_dir + "/" + exp_name + ".csv", encoding='utf-8', index=False)
|
JorgeCastilloPrz/Corleone
|
sample/src/main/java/com/github/jorgecastilloprz/corleone/sample/di/ApplicationModule.java
|
<gh_stars>10-100
/*
* Copyright (C) 2015 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jorgecastilloprz.corleone.sample.di;
import android.content.Context;
import android.net.ConnectivityManager;
import android.view.LayoutInflater;
import com.github.jorgecastilloprz.corleone.sample.SampleApplication;
import com.github.jorgecastilloprz.corleone.sample.domain.model.GameCatalog;
import com.github.jorgecastilloprz.corleone.sample.domain.model.LucasArtCatalog;
import com.github.jorgecastilloprz.corleone.sample.ui.animator.ToolbarAnimator;
import com.github.jorgecastilloprz.corleone.sample.ui.animator.ToolbarAnimatorImpl;
import com.github.jorgecastilloprz.corleone.sample.ui.mainthread.MainThread;
import com.github.jorgecastilloprz.corleone.sample.ui.mainthread.MainThreadImpl;
import dagger.Module;
import dagger.Provides;
/**
* Dagger module used to inject application context generic dependencies
*
* @author <NAME>
*/
@Module(
injects = { SampleApplication.class },
library = true, complete = false) public class ApplicationModule {
// Application (not Activity) context; safe to retain for the process lifetime.
private final Context appContext;
public ApplicationModule(Context appContext) {
this.appContext = appContext;
}
@Provides Context provideApplicationContext() {
return appContext;
}
@Provides LayoutInflater provideLayoutInflater() {
return LayoutInflater.from(appContext);
}
@Provides ConnectivityManager provideConnectivityManager() {
return (ConnectivityManager) appContext.getSystemService(Context.CONNECTIVITY_SERVICE);
}
// The remaining providers bind interfaces to their concrete implementations.
@Provides MainThread provideMainThread(MainThreadImpl mainThread) {
return mainThread;
}
@Provides GameCatalog provideGameCatalog(LucasArtCatalog catalog) {
return catalog;
}
@Provides ToolbarAnimator provideToolbarAnimator(ToolbarAnimatorImpl animator) {
return animator;
}
}
|
arraycto/d2-crud-plus
|
packages/d2-crud-plus-example/src/business/modules/example/views/form/header/crud.js
|
<gh_stars>0
// d2-crud-plus CRUD configuration demonstrating multi-level table headers:
// parent columns carry only a title plus a `children` array, while leaf
// columns define the actual field behaviour (key/type/search/form/dict).
export const crudOptions = {
columns: [
{
title: '多级表头',
children: [
{
title: '时间',
children: [
{
title: '日期',
key: 'date',
sortable: true,
type: 'date',
// excluded from both the search bar and the add/edit form
search: { disabled: true },
form: { disabled: true }
},
{
title: '日期2',
key: 'date2',
sortable: true,
type: 'date',
search: { disabled: true },
form: { disabled: false }
}
]
},
{
title: '状态',
key: 'status',
sortable: true,
search: { key: 'status', disabled: false },
type: 'select',
form: { rules: [{ required: true, message: '请选择状态' }] },
// select options fetched from the dictionary endpoint
dict: { url: '/dicts/OpenStatusEnum' }
}
]
},
{
title: '地区',
key: 'province',
sortable: true,
search: { key: 'province', disabled: false },
type: 'select',
form: {
rules: [{ required: true, message: '请选择地区' }],
// filterable multi-select with a clear button
component: { props: { filterable: true, multiple: true, clearable: true } }
},
// inline dictionary data (value/label pairs)
dict: {
data: [
{ value: 'sz', label: '深圳' },
{ value: 'gz', label: '广州' },
{ value: 'wh', label: '武汉' },
{ value: 'sh', label: '上海' }
]
}
}
]
}
|
yifanyou/devops-service
|
src/main/java/io/choerodon/devops/domain/application/convertor/DevopsEnvCommitConvertor.java
|
package io.choerodon.devops.domain.application.convertor;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Component;
import io.choerodon.core.convertor.ConvertorI;
import io.choerodon.devops.domain.application.entity.DevopsEnvCommitE;
import io.choerodon.devops.infra.dataobject.DevopsEnvCommitDO;
@Component
public class DevopsEnvCommitConvertor implements ConvertorI<DevopsEnvCommitE, DevopsEnvCommitDO, Object> {
// Data object (DO) -> domain entity (E): copies all same-named bean properties.
@Override
public DevopsEnvCommitE doToEntity(DevopsEnvCommitDO devopsEnvCommitDO) {
DevopsEnvCommitE devopsEnvCommitE = new DevopsEnvCommitE();
BeanUtils.copyProperties(devopsEnvCommitDO, devopsEnvCommitE);
return devopsEnvCommitE;
}
// Domain entity (E) -> data object (DO): the reverse mapping.
@Override
public DevopsEnvCommitDO entityToDo(DevopsEnvCommitE devopsEnvCommitE) {
DevopsEnvCommitDO devopsEnvCommitDO = new DevopsEnvCommitDO();
BeanUtils.copyProperties(devopsEnvCommitE, devopsEnvCommitDO);
return devopsEnvCommitDO;
}
}
|
zugaldia/javacpp-presets
|
arrow/src/gen/java/org/bytedeco/arrow_dataset/DataSource.java
|
<reponame>zugaldia/javacpp-presets<filename>arrow/src/gen/java/org/bytedeco/arrow_dataset/DataSource.java<gh_stars>1-10
// Targeted by JavaCPP version 1.5.3-SNAPSHOT: DO NOT EDIT THIS FILE
package org.bytedeco.arrow_dataset;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import org.bytedeco.arrow.*;
import static org.bytedeco.arrow.global.arrow.*;
import org.bytedeco.parquet.*;
import static org.bytedeco.arrow.global.parquet.*;
import static org.bytedeco.arrow.global.arrow_dataset.*;
/** \brief A basic component of a Dataset which yields zero or more
* DataFragments. A DataSource acts as a discovery mechanism of DataFragments
* and partitions, e.g. files deeply nested in a directory. */
@Namespace("arrow::dataset") @NoOffset @Properties(inherit = org.bytedeco.arrow.presets.arrow_dataset.class)
public class DataSource extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public DataSource(Pointer p) { super(p); }
/** \brief GetFragments returns an iterator of DataFragments. The ScanOptions
* controls filtering and schema inference. */
public native @ByVal DataFragmentIterator GetFragments(@SharedPtr ScanOptions options);
/** \brief An expression which evaluates to true for all data viewed by this DataSource.
* May be null, which indicates no information is available. */
public native @SharedPtr Expression partition_expression();
public native @StdString String type();
}
|
GaoGersy/PasswordManager
|
app/src/main/java/com/gersion/superlock/lockadapter/PatternLockAdapter.java
|
package com.gersion.superlock.lockadapter;
import android.content.Context;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import com.andrognito.patternlockview.PatternLockView;
import com.andrognito.patternlockview.listener.PatternLockViewListener;
import com.andrognito.patternlockview.utils.PatternLockUtils;
import com.andrognito.patternlockview.utils.ResourceUtils;
import com.gersion.superlock.R;
import com.gersion.superlock.utils.ConfigManager;
import com.gersion.superlock.utils.MyConstants;
import java.util.List;
/**
* Created by aa326 on 2018/1/9.
*/
public class PatternLockAdapter implements LockAdapter {
private PatternLockView mPatternLockView;
private LockCallback mLockCallback;
private ConfigManager mConfigManager;
// First pattern drawn during init/reset, compared against the confirmation draw; null until drawn.
private String patternString;
private TextView mTvNotice;
// One of MyConstants.LockMode.{MODE_INIT, MODE_LOCK, MODE_RESET}; -1 means unset.
private int mLockMode = -1;
public PatternLockAdapter(int lockMode){
mLockMode = lockMode;
}
// Pattern widget callbacks: logs progress and forwards the completed pattern
// (serialized to a string) to the mode-specific handler.
private PatternLockViewListener mPatternLockViewListener = new PatternLockViewListener() {
@Override
public void onStarted() {
// Prompt text: "draw unlock pattern"
mTvNotice.setText("绘制解锁图案");
}
@Override
public void onProgress(List<PatternLockView.Dot> progressPattern) {
Log.d(getClass().getName(), "Pattern progress: " +
PatternLockUtils.patternToString(mPatternLockView, progressPattern));
}
@Override
public void onComplete(List<PatternLockView.Dot> pattern) {
String result = PatternLockUtils.patternToString(mPatternLockView, pattern);
handleResult(result);
}
@Override
public void onCleared() {
Log.d(getClass().getName(), "Pattern has been cleared");
}
};
private void handleResult(String result) {
switch (mLockMode){
case MyConstants.LockMode.MODE_INIT:
onInit(result);
break;
case MyConstants.LockMode.MODE_LOCK:
onLock(result);
break;
case MyConstants.LockMode.MODE_RESET:
onReset(result);
break;
default:
break;
}
}
private void onLock(String result) {
if (result.equals(mConfigManager.getPatternString())) {
mLockCallback.onSuccess();
} else {
mLockCallback.onError("解锁失败");
mTvNotice.setText("解锁失败");
}
}
int step = 0;
private void onReset(String result) {
if (step ==0) {
String password = mConfigManager.getPatternString();
if (TextUtils.equals(password, result)) {
mPatternLockView.clearPattern();
mTvNotice.setText("绘制新的解锁图案");
patternString = null;
step++;
}else {
mTvNotice.setText("图案不正确");
}
}else {
onInit(result);
}
}
private void onInit(String result) {
if (patternString != null) {
if (patternString.equals(result)) {
mLockCallback.onSuccess();
mConfigManager.setPatternString(result);
} else {
mLockCallback.onError("两次图案不一致");
}
} else {
patternString = result;
mPatternLockView.clearPattern();
mTvNotice.setText("重复上一次图案");
}
}
public View init(Context context) {
mConfigManager = ConfigManager.getInstance();
View view = LayoutInflater.from(context).inflate(R.layout.view_pattern, null);
mPatternLockView = (PatternLockView) view.findViewById(R.id.patter_lock_view);
mTvNotice = (TextView) view.findViewById(R.id.tv_notice);
mPatternLockView.setDotCount(3);
mPatternLockView.setDotNormalSize((int) ResourceUtils.getDimensionInPx(context, R.dimen.pattern_lock_dot_size));
mPatternLockView.setDotSelectedSize((int) ResourceUtils.getDimensionInPx(context, R.dimen.pattern_lock_dot_selected_size));
mPatternLockView.setPathWidth((int) ResourceUtils.getDimensionInPx(context, R.dimen.pattern_lock_path_width));
mPatternLockView.setAspectRatioEnabled(true);
mPatternLockView.setAspectRatio(PatternLockView.AspectRatio.ASPECT_RATIO_HEIGHT_BIAS);
mPatternLockView.setViewMode(PatternLockView.PatternViewMode.CORRECT);
mPatternLockView.setDotAnimationDuration(150);
mPatternLockView.setPathEndAnimationDuration(100);
mPatternLockView.setCorrectStateColor(ResourceUtils.getColor(context, R.color.colorAccent));
mPatternLockView.setInStealthMode(false);
mPatternLockView.setTactileFeedbackEnabled(true);
mPatternLockView.setInputEnabled(true);
mPatternLockView.addPatternLockListener(mPatternLockViewListener);
return view;
}
@Override
public void onStart() {
}
@Override
public void setLockCallback(LockCallback lockCallback) {
mLockCallback = lockCallback;
}
}
|
Ezeer/VegaStrike_win32FR
|
vegastrike/boost/1_28/boost/config/platform/hpux.hpp
|
// (C) Copyright Boost.org 2001. Permission to copy, use, modify, sell and
// distribute this software is granted provided this copyright notice appears
// in all copies. This software is provided "as is" without express or implied
// warranty, and with no claim as to its suitability for any purpose.

// See http://www.boost.org for most recent version.

// HP-UX specific config options:

#define BOOST_PLATFORM "HP-UX"

// In principle, HP-UX has a nice <stdint.h> under the name <inttypes.h>
// However, it has the following problem:
// Use of UINT32_C(0) results in "0u l" for the preprocessed source
// (verifiable with gcc 2.95.3, assumed for HP aCC)
// #define BOOST_HAS_STDINT_H

// Feature markers disabled for this platform:
#define BOOST_NO_SWPRINTF
#define BOOST_NO_CWCTYPE

// boilerplate code (POSIX feature detection via <unistd.h>):
#define BOOST_HAS_UNISTD_H
#include <boost/config/posix_features.hpp>

// gettimeofday is always available on HP-UX, even if the generic POSIX
// detection above did not define it:
#ifndef BOOST_HAS_GETTIMEOFDAY
#define BOOST_HAS_GETTIMEOFDAY
#endif
|
semantico/spring-security-oauth
|
spring-security-oauth2/src/main/java/org/springframework/security/oauth2/common/OAuth2AccessToken.java
|
<reponame>semantico/spring-security-oauth
package org.springframework.security.oauth2.common;
import java.io.Serializable;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;
import org.codehaus.jackson.map.annotate.JsonDeserialize;
import org.codehaus.jackson.map.annotate.JsonSerialize;
/**
 * Basic access token for OAuth 2.
 *
 * @author <NAME>
 * @author <NAME>
 * @author <NAME>
 */
@JsonSerialize(using = OAuth2AccessTokenSerializer.class)
@JsonDeserialize(using = OAuth2AccessTokenDeserializer.class)
public class OAuth2AccessToken implements Serializable {

    private static final long serialVersionUID = 914967629530462926L;

    // Token-type constants. Declared final: these are protocol constants and
    // must never be reassigned (previously they were mutable public statics).
    public static final String BEARER_TYPE = "Bearer";

    public static final String OAUTH2_TYPE = "OAuth2";

    /**
     * The access token issued by the authorization server. This value is REQUIRED.
     */
    public static final String ACCESS_TOKEN = "access_token";

    /**
     * The type of the token issued as described in <a
     * href="http://tools.ietf.org/html/draft-ietf-oauth-v2-22#section-7.1">Section 7.1</a>. Value is case insensitive.
     * This value is REQUIRED.
     */
    public static final String TOKEN_TYPE = "token_type";

    /**
     * The lifetime in seconds of the access token. For example, the value "3600" denotes that the access token will
     * expire in one hour from the time the response was generated. This value is OPTIONAL.
     */
    public static final String EXPIRES_IN = "expires_in";

    /**
     * The refresh token which can be used to obtain new access tokens using the same authorization grant as described
     * in <a href="http://tools.ietf.org/html/draft-ietf-oauth-v2-22#section-6">Section 6</a>. This value is OPTIONAL.
     */
    public static final String REFRESH_TOKEN = "refresh_token";

    /**
     * The scope of the access token as described by <a
     * href="http://tools.ietf.org/html/draft-ietf-oauth-v2-22#section-3.3">Section 3.3</a>
     */
    public static final String SCOPE = "scope";

    private String value;

    private Date expiration;

    private String tokenType = BEARER_TYPE.toLowerCase();

    private OAuth2RefreshToken refreshToken;

    private Set<String> scope;

    private Map<String, Object> additionalInformation = Collections.emptyMap();

    /**
     * Create an access token from the value provided.
     */
    public OAuth2AccessToken(String value) {
        this.value = value;
    }

    /**
     * Private constructor for JPA and other serialization tools.
     */
    @SuppressWarnings("unused")
    private OAuth2AccessToken() {
        this(null);
    }

    /**
     * The token value.
     *
     * @return The token value.
     */
    public String getValue() {
        return value;
    }

    /**
     * The remaining lifetime of the token in whole seconds, or 0 when no
     * expiration is set. May be negative for an already-expired token.
     */
    public int getExpiresIn() {
        return expiration != null ? Long.valueOf((expiration.getTime() - System.currentTimeMillis()) / 1000L)
                .intValue() : 0;
    }

    /**
     * Sets the expiration to {@code delta} seconds from now.
     *
     * @param delta lifetime in seconds, matching the {@code expires_in} OAuth parameter
     */
    protected void setExpiresIn(int delta) {
        // BUGFIX: delta is in seconds (see EXPIRES_IN and getExpiresIn()), so it
        // must be converted to milliseconds; previously it was added raw, making
        // the token appear to expire almost immediately.
        setExpiration(new Date(System.currentTimeMillis() + delta * 1000L));
    }

    /**
     * The instant the token expires.
     *
     * @return The instant the token expires.
     */
    public Date getExpiration() {
        return expiration;
    }

    /**
     * The instant the token expires.
     *
     * @param expiration The instant the token expires.
     */
    public void setExpiration(Date expiration) {
        this.expiration = expiration;
    }

    /**
     * Convenience method for checking expiration.
     *
     * @return true if the expiration is before the current time
     */
    public boolean isExpired() {
        return expiration != null && expiration.before(new Date());
    }

    /**
     * The token type, as introduced in draft 11 of the OAuth 2 spec. The spec doesn't define (yet) that the valid token
     * types are, but says it's required so the default will just be "undefined".
     *
     * @return The token type, as introduced in draft 11 of the OAuth 2 spec.
     */
    public String getTokenType() {
        return tokenType;
    }

    /**
     * The token type, as introduced in draft 11 of the OAuth 2 spec.
     *
     * @param tokenType The token type, as introduced in draft 11 of the OAuth 2 spec.
     */
    public void setTokenType(String tokenType) {
        this.tokenType = tokenType;
    }

    /**
     * The refresh token associated with the access token, if any.
     *
     * @return The refresh token associated with the access token, if any.
     */
    public OAuth2RefreshToken getRefreshToken() {
        return refreshToken;
    }

    /**
     * The refresh token associated with the access token, if any.
     *
     * @param refreshToken The refresh token associated with the access token, if any.
     */
    public void setRefreshToken(OAuth2RefreshToken refreshToken) {
        this.refreshToken = refreshToken;
    }

    /**
     * The scope of the token.
     *
     * @return The scope of the token.
     */
    public Set<String> getScope() {
        return scope;
    }

    /**
     * The scope of the token.
     *
     * @param scope The scope of the token.
     */
    public void setScope(Set<String> scope) {
        this.scope = scope;
    }

    // Equality is value-based via toString(); note this compares against ANY
    // object whose toString() matches the token value (pre-existing contract,
    // preserved for compatibility).
    @Override
    public boolean equals(Object obj) {
        return obj != null && toString().equals(obj.toString());
    }

    @Override
    public int hashCode() {
        return toString().hashCode();
    }

    @Override
    public String toString() {
        return String.valueOf(getValue());
    }

    /**
     * Builds a token from a decoded token-response parameter map, reading the
     * {@code access_token}, {@code expires_in} (seconds), {@code refresh_token},
     * {@code scope} (space- or comma-delimited) and {@code token_type} entries.
     */
    public static OAuth2AccessToken valueOf(Map<String, String> tokenParams) {
        OAuth2AccessToken token = new OAuth2AccessToken(tokenParams.get(ACCESS_TOKEN));

        if (tokenParams.containsKey(EXPIRES_IN)) {
            long expiration = 0;
            try {
                expiration = Long.parseLong(String.valueOf(tokenParams.get(EXPIRES_IN)));
            }
            catch (NumberFormatException e) {
                // fall through: an unparseable expires_in is treated as 0 seconds.
            }
            token.setExpiration(new Date(System.currentTimeMillis() + (expiration * 1000L)));
        }

        if (tokenParams.containsKey(REFRESH_TOKEN)) {
            String refresh = tokenParams.get(REFRESH_TOKEN);
            OAuth2RefreshToken refreshToken = new OAuth2RefreshToken(refresh);
            token.setRefreshToken(refreshToken);
        }

        if (tokenParams.containsKey(SCOPE)) {
            Set<String> scope = new TreeSet<String>();
            for (StringTokenizer tokenizer = new StringTokenizer(tokenParams.get(SCOPE), " ,"); tokenizer
                    .hasMoreTokens();) {
                scope.add(tokenizer.nextToken());
            }
            token.setScope(scope);
        }

        if (tokenParams.containsKey(TOKEN_TYPE)) {
            token.setTokenType(tokenParams.get(TOKEN_TYPE));
        }

        return token;
    }

    /**
     * Additional information that token granters would like to add to the token, e.g. to support new token types.
     *
     * @return the additional information (default empty)
     */
    public Map<String, Object> getAdditionalInformation() {
        return additionalInformation;
    }

    /**
     * Additional information that token granters would like to add to the token, e.g. to support new token types. If
     * the values in the map are primitive then remote communication is going to always work. It should also be safe to
     * use maps (nested if desired), or something that is explicitly serializable by Jackson.
     *
     * @param additionalInformation the additional information to set
     */
    public void setAdditionalInformation(Map<String, Object> additionalInformation) {
        this.additionalInformation = new LinkedHashMap<String, Object>(additionalInformation);
    }
}
|
gentjankolicaj/EData
|
src/main/java/edata/common/domain/open_weather_map/Main.java
|
package edata.common.domain.open_weather_map;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * The "main" section of an OpenWeatherMap current-weather response payload.
 *
 * <p>NOTE(review): the OpenWeatherMap API serves these fields in snake_case
 * JSON ({@code temp_min}, {@code temp_max}); binding them onto the camelCase
 * fields below requires a matching naming strategy or field-level annotations
 * on the deserializer — confirm the mapper configuration.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class Main {
    // Current temperature.
    private Float temp;
    // Atmospheric pressure.
    private Float pressure;
    // Humidity reading.
    private Float humidity;
    // Minimum observed temperature.
    private Float tempMin;
    // Maximum observed temperature.
    private Float tempMax;
}
|
yuqicxy/tangram-es
|
platforms/linux/src/linuxPlatform.cpp
|
<filename>platforms/linux/src/linuxPlatform.cpp
#include "linuxPlatform.h"
#include "linuxSystemFontHelper.h"
#include "gl/hardware.h"
#include "log.h"
#include <algorithm>
#include <stdio.h>
#include <stdarg.h>
#include <libgen.h>
#include <unistd.h>
#include <sys/resource.h>
#include <sys/syscall.h>
#if defined(TANGRAM_LINUX)
#include <GLFW/glfw3.h>
#elif defined(TANGRAM_RPI)
#include "context.h"
#endif
namespace Tangram {

// Forwards Tangram log output to stderr using printf-style formatting.
void logMsg(const char* fmt, ...) {
    va_list args;
    va_start(args, fmt);
    vfprintf(stderr, fmt, args);
    va_end(args);
}

LinuxPlatform::LinuxPlatform()
    : LinuxPlatform(UrlClient::Options{}) {}

LinuxPlatform::LinuxPlatform(UrlClient::Options urlClientOptions) :
    m_urlClient(std::make_unique<UrlClient>(urlClientOptions)) {
    // Load the system fontconfig database once; owned for the platform's lifetime.
    m_fcConfig = FcInitLoadConfigAndFonts();
}

LinuxPlatform::~LinuxPlatform() {
    FcConfigDestroy(m_fcConfig);
}

void LinuxPlatform::shutdown() {
    // Stop all UrlWorker threads
    m_urlClient.reset();
    Platform::shutdown();
}

void LinuxPlatform::requestRender() const {
    // After shutdown the GLFW event loop may be gone; posting would be unsafe.
    if (m_shutdown) { return; }
    glfwPostEmptyEvent();
}

std::vector<FontSourceHandle> LinuxPlatform::systemFontFallbacksHandle() const {
    // Read system fontconfig to get list of fallback font for each
    // supported language
    auto fallbackFonts = systemFallbackFonts(m_fcConfig);

    // Create FontSourceHandle from the found list of fallback fonts
    std::vector<FontSourceHandle> handles;
    handles.reserve(fallbackFonts.size());
    std::transform(std::begin(fallbackFonts), std::end(fallbackFonts),
                   std::back_inserter(handles),
                   [](auto& path) { return FontSourceHandle(Url(path)); });

    return handles;
}

// Resolves a single system font via fontconfig; returns an empty handle when
// no matching font file is found.
FontSourceHandle LinuxPlatform::systemFont(const std::string& _name,
                                           const std::string& _weight,
                                           const std::string& _face) const {
    auto fontFile = systemFontPath(m_fcConfig, _name, _weight, _face);

    if (fontFile.empty()) { return {}; }

    return FontSourceHandle(Url(fontFile));
}

// Queues an async HTTP request; the response is forwarded to onUrlResponse.
bool LinuxPlatform::startUrlRequestImpl(const Url& _url, const UrlRequestHandle _request, UrlRequestId& _id) {
    _id = m_urlClient->addRequest(_url.string(),
                                  [this, _request](UrlResponse&& response) {
                                      onUrlResponse(_request, std::move(response));
                                  });
    return true;
}

void LinuxPlatform::cancelUrlRequestImpl(const UrlRequestId _id) {
    // m_urlClient is reset during shutdown(); guard against late cancels.
    if (m_urlClient) {
        m_urlClient->cancelRequest(_id);
    }
}

void setCurrentThreadPriority(int priority) {
    setpriority(PRIO_PROCESS, 0, priority);
}

void initGLExtensions() {
    Tangram::Hardware::supportsMapBuffer = true;
}

} // namespace Tangram
|
rsllcinfosec/pwm
|
server/src/main/java/password/pwm/svc/sessiontrack/UserAgentUtils.java
|
/*
* Password Management Servlets (PWM)
* http://www.pwm-project.org
*
* Copyright (c) 2006-2009 Novell, Inc.
* Copyright (c) 2009-2020 The PWM Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package password.pwm.svc.sessiontrack;
import com.blueconic.browscap.Capabilities;
import com.blueconic.browscap.ParseException;
import com.blueconic.browscap.UserAgentParser;
import com.blueconic.browscap.UserAgentService;
import lombok.Value;
import password.pwm.error.ErrorInformation;
import password.pwm.error.PwmError;
import password.pwm.error.PwmUnrecoverableException;
import password.pwm.http.HttpHeader;
import password.pwm.http.PwmRequest;
import password.pwm.http.PwmRequestAttribute;
import password.pwm.util.java.JavaHelper;
import password.pwm.util.java.LazySoftReference;
import password.pwm.util.java.StringUtil;
import password.pwm.util.java.TimeDuration;
import password.pwm.util.logging.PwmLogger;
import java.io.IOException;
import java.io.Serializable;
import java.time.Instant;
import java.util.Optional;
/**
 * Utilities for deriving browser information from the {@code User-Agent}
 * HTTP request header, backed by the browscap database.  Parsed results are
 * cached per-request via {@link PwmRequestAttribute#BrowserInfo}.
 */
public class UserAgentUtils
{
    private static final PwmLogger LOGGER = PwmLogger.forClass( UserAgentUtils.class );

    // The browscap parser is expensive to build, so it is created lazily and
    // held behind a soft reference that can be rebuilt after a GC discard.
    private static final LazySoftReference<UserAgentParser> CACHED_PARSER = new LazySoftReference<>( UserAgentUtils::loadUserAgentParser );

    /** Known browser families, keyed by the name the browscap database reports. */
    public enum BrowserType
    {
        ie( "IE" ),
        ff( "Firefox" ),
        webkit( "Safari" ),
        chrome( "Chrome" ),;

        private final String browserCapName;

        BrowserType( final String browserCapName )
        {
            this.browserCapName = browserCapName;
        }

        static Optional<BrowserType> forBrowserCapName( final String browserCapName )
        {
            for ( final BrowserType browserType : BrowserType.values() )
            {
                if ( browserType.browserCapName.equalsIgnoreCase( browserCapName ) )
                {
                    return Optional.of( browserType );
                }
            }
            return Optional.empty();
        }
    }

    private static UserAgentParser loadUserAgentParser( )
    {
        try
        {
            return new UserAgentService().loadParser();
        }
        catch ( final IOException | ParseException e )
        {
            final String msg = "error loading user-agent parser: " + e.getMessage();
            LOGGER.error( () -> msg, e );
        }

        // Callers must tolerate a null parser (see getBrowserInfo).
        return null;
    }

    /** Eagerly builds the parser so the first real request does not pay the load cost. */
    public static void initializeCache()
    {
        final Instant startTime = Instant.now();
        CACHED_PARSER.get();
        LOGGER.trace( () -> "loaded useragent parser", () -> TimeDuration.fromCurrent( startTime ) );
    }

    /**
     * Rejects requests from Internet Explorer versions below 11.
     *
     * @throws PwmUnrecoverableException if the requesting browser is IE 10 or earlier
     */
    public static void checkIfPreIE11( final PwmRequest pwmRequest ) throws PwmUnrecoverableException
    {
        final Optional<BrowserInfo> optionalBrowserInfo = getBrowserInfo( pwmRequest );
        if ( optionalBrowserInfo.isPresent() )
        {
            final BrowserInfo browserInfo = optionalBrowserInfo.get();
            if ( BrowserType.ie == browserInfo.getBrowserType() )
            {
                // majorVersion of -1 means "unknown"; only reject known-old versions.
                if ( browserInfo.getMajorVersion() <= 10 && browserInfo.getMajorVersion() > -1 )
                {
                    final String errorMsg = "Internet Explorer version is not supported for this function.  Please use Internet Explorer 11 or higher or another web browser.";
                    throw new PwmUnrecoverableException( new ErrorInformation( PwmError.ERROR_UNAUTHORIZED, errorMsg ) );
                }
            }
        }
    }

    /** Convenience accessor for just the browser family of the requesting client. */
    public static Optional<BrowserType> getBrowserType( final PwmRequest pwmRequest )
    {
        final Optional<BrowserInfo> optionalBrowserInfo = getBrowserInfo( pwmRequest );
        if ( optionalBrowserInfo.isPresent() )
        {
            final BrowserInfo browserInfo = optionalBrowserInfo.get();
            return Optional.ofNullable( browserInfo.getBrowserType() );
        }
        return Optional.empty();
    }

    /**
     * Parses (or returns the request-cached) browser information for the request.
     * Returns empty when the User-Agent header is absent or the browscap parser
     * could not be loaded.
     */
    public static Optional<BrowserInfo> getBrowserInfo( final PwmRequest pwmRequest )
    {
        final BrowserInfo cachedBrowserInfo = ( BrowserInfo ) pwmRequest.getAttribute( PwmRequestAttribute.BrowserInfo );
        if ( cachedBrowserInfo != null )
        {
            return Optional.of( cachedBrowserInfo );
        }

        final String userAgentString = pwmRequest.readHeaderValueAsString( HttpHeader.UserAgent );
        if ( StringUtil.isEmpty( userAgentString ) )
        {
            return Optional.empty();
        }

        final UserAgentParser userAgentParser = CACHED_PARSER.get();
        if ( userAgentParser == null )
        {
            // BUGFIX: loadUserAgentParser() returns null when the browscap data
            // fails to load; previously execution fell through to parse() and
            // threw a NullPointerException on every request.
            return Optional.empty();
        }

        final Capabilities capabilities = userAgentParser.parse( userAgentString );
        final String browser = capabilities.getBrowser();
        final String browserMajorVersion = capabilities.getBrowserMajorVersion();
        final int intMajorVersion = JavaHelper.silentParseInt( browserMajorVersion, -1 );

        final Optional<BrowserType> optionalBrowserType = BrowserType.forBrowserCapName( browser );

        final BrowserInfo browserInfo = new BrowserInfo( optionalBrowserType.orElse( null ), intMajorVersion );
        pwmRequest.setAttribute( PwmRequestAttribute.BrowserInfo, browserInfo );

        return Optional.of( browserInfo );
    }

    /** Immutable pairing of browser family and major version (-1 when unknown). */
    @Value
    private static class BrowserInfo implements Serializable
    {
        private final BrowserType browserType;
        private final int majorVersion;
    }
}
|
IBM/rules_extraction_from_healthcare_policy
|
ontology-and-lifting/src/main/java/com/ibm/wh/extractionservice/ontology/search/OntologyIndexSearch.java
|
<gh_stars>1-10
package com.ibm.wh.extractionservice.ontology.search;
import java.util.List;
/**
 * Search abstraction over an ontology index.
 */
public interface OntologyIndexSearch {

    /**
     * Executes {@code queryString} against the index.
     *
     * @param queryString the query to run
     * @param maxNumberOfResults upper bound on the number of results returned
     * @return matching results (ordering determined by the implementation)
     */
    List<OntologySearchResult> search(String queryString, int maxNumberOfResults);

    /** Releases any resources held by the underlying index. */
    void close();
}
|
meritlabs/lightwallet-stack
|
packages/merit-wallet-service/test/testdata.js
|
// Test fixture data for the merit-wallet-service test suite.
// NOTE(review): values reading '<KEY>' are redaction placeholders; the
// original fixtures contain test-only key material.
var keyPair = {
  priv: '<KEY>',
  pub: '026092daeed8ecb2212869395770e956ffc9bf453f803e700f64ffa70c97a00d80',
};
var copayers = [
{
id44: '626452e5e0e35df4d9ae4d3e60653c9ae9a814f00c84dc40f5887069b18e2110',
id45: '<KEY>',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: 'a710be25950738a7d13637e2e09affd7f579a3479fd7cc024bd9459f8fba6659',
pubKey_1H_0: '026e3020913420a5b9425952627f0a074c9235e7a329869b322061f786e997ae0d',
},
{
id44: '<KEY>',
id45: '<KEY>',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: 'ee062ce6dc5ece50e8110646b5e858c98dba9315cdfdd19da85ab0d33dcac74a',
pubKey_1H_0: '02c679bf169233a273dec87fae5a1830481866c4e96a350d56346ac267808c905d',
},
{
id44: '719f4ee61c691fbf0ebefa34e2151a1a3dbe39cf2fa4a498cb6af53600d30d1a',
id45: 'acd666d7c677d9f2c85b55a5fad1610fe272eac46ef7a577c7aeeab0b1474e43',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '5009c8488e9a364fc24a999d99a81ae955271de1d06d46c2f2f09e20c6281b04',
pubKey_1H_0: '03338a3b7c08e9d9832e1baff0758e08f9cc691497dd6e91d4c191cd960fb2f043',
},
{
id44: 'e225a29864060823df67b98432b070a40aad1bf9af517005b0b5fe09c96e29c9',
id45: 'c65a89f64794cb7e1886c7010a32dd6fa362d3e81710bac32e97e325b9109fd8',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '460ee692f05de66b5d8e2fa1d005a8b6bdb1442e2ce6b3facfcee2f9012c9474',
pubKey_1H_0: '<KEY>',
},
{
id44: '120416cd4c427a7e4d94213cebe242f56a06bc6dd5c5c6cae27dc920a0ddf1fb',
id45: '65ae087eb9efdc7e0ada3a7ef954285e9e5ba4b8c7ab2d36747ddd286f7a334f',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '<KEY>',
pubKey_1H_0: '<KEY>',
},
{
id44: '<KEY>',
id45: 'dacc5c350cef4449a3ca12939711c7449d0d6189e5e7f33cff60095a7a29b0f9',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '<KEY>',
pubKey_1H_0: '0307ab8c0d8eea1fe3c3781050a69e71f9e7c8cc8476a77103e08a461506a0e780',
},
{
id44: '4d0c1eaab0aafc08aea7328f9ed1d3fc2812791ad2ebb9cbc1a8537b51b18afa',
id45: '<KEY>',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '87f8a2b92dd04d2782c3d40a34f09f2ab42076bd02b81fbe4a4a72f87ad2e6df',
pubKey_1H_0: '02a0370d6f1213ab3390ac666585614ad71146f3f28ec326e2e779f999c1a497eb',
},
{
id44: '5ae7b75deb3b4d7e251f1fc5613904c9ef8548af7601d93ef668299be4f75ddd',
id45: '<KEY>',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '66230b6b8b65725162ea43313fcc233f4f0dd135cea00d04b73a84d3f681ef25',
pubKey_1H_0: '03f148bde0784c80051acd159b28a30022e685aca56418f8f50100d9f8a0192c37',
},
{
id44: '98e78a9cb2ab340a245c5082897eadb28c367319f97b93e7b51b4d5ca5cdc68e',
id45: 'e1557d3421a8884fe007674f3f0b6f0feafa76289a0edcc5ec736161b4d02257',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'xpub69QL1adPGBJ1ucwm3bbh6283RxQbsB9dityQysMryTuQ9BHK8iAJU2udZZNN2t3MNSGnzFiu97BUCjjMUAXqb4caURCMEStMorDU3y3NtgB',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '9e215580c8e5876215ad101ded325bcacc5ab9d97b26e8fdfab89ef5bb6e0ab7',
pubKey_1H_0: '<KEY>',
},
{
id44: 'f716dbeec58e44c698b34c2d81bae4699ed5a5a522281733ec50aa03caf76a19',
id45: '<KEY>',
xPrivKey:
'<KEY>',
xPubKey:
'<KEY>',
xPrivKey_45H:
'<KEY>',
xPubKey_45H:
'<KEY>',
xPrivKey_44H_0H_0H:
'<KEY>',
xPubKey_44H_0H_0H:
'<KEY>',
xPrivKey_1H:
'<KEY>',
xPubKey_1H:
'<KEY>',
privKey_1H_0: '95951f0e40d31bafe54a3098bd0ed898d370cc5d52a9318d7b7b14568da6cb5c',
pubKey_1H_0: '0266cdb57b8a4d7c1b5b20ddeea43705420c6e3aef2c2979a3768b7b585839a0d3',
},
];
var history = [
{
txid: '0279ef7b21630f859deb723e28beac9e7011660bd1346c2da40321d2f7e34f04',
vin: [
{
txid: 'c8e221141e8bb60977896561b77fa59d6dacfcc10db82bf6f5f923048b11c70d',
vout: 0,
n: 0,
addr: '2N6Zutg26LEC4iYVxi7SHhopVLP1iZPU1rZ',
valueMicros: 485645,
value: 0.00485645,
},
{
txid: '6e599eea3e2898b91087eb87e041c5d8dec5362447a8efba185ed593f6dc64c0',
vout: 1,
n: 1,
addr: '2MyqmcWjmVxW8i39wdk1CVPdEqKyFSY9H1S',
valueMicros: 885590,
value: 0.0088559,
},
],
vout: [
{
value: '0.00045753',
n: 0,
scriptPubKey: {
addresses: ['<KEY>'],
},
},
{
value: '0.01300000',
n: 1,
scriptPubKey: {
addresses: ['<KEY>'],
},
},
],
confirmations: 0,
firstSeenTs: 1424471000,
valueOut: 0.01345753,
valueIn: 0.01371235,
fees: 0.00025482,
},
{
txid: '0279ef7b21630f859deb723e28beac9e7011660bd1346c2da40321d2f7e34f04',
vin: [
{
txid: 'c8e221141e8bb60977896561b77fa59d6dacfcc10db82bf6f5f923048b11c70d',
vout: 0,
n: 0,
addr: '2N6Zutg26LEC4iYVxi7SHhopVLP1iZPU1rZ',
valueMicros: 485645,
value: 0.00485645,
},
{
txid: '6e599eea3e2898b91087eb87e041c5d8dec5362447a8efba185ed593f6dc64c0',
vout: 1,
n: 1,
addr: '2MyqmcWjmVxW8i39wdk1CVPdEqKyFSY9H1S',
valueMicros: 885590,
value: 0.0088559,
},
],
vout: [
{
value: '0.00045753',
n: 0,
scriptPubKey: {
addresses: ['<KEY>'],
},
},
{
value: '0.01300000',
n: 1,
scriptPubKey: {
addresses: ['<KEY>'],
},
},
],
confirmations: 2,
firstSeenTs: 1424471041,
blocktime: 1424471051,
valueOut: 0.01345753,
valueIn: 0.01371235,
fees: 0.00025482,
},
{
txid: 'fad88682ccd2ff34cac6f7355fe9ecd8addd9ef167e3788455972010e0d9d0de',
vin: [
{
txid: '0279ef7b21630f859deb723e28beac9e7011660bd1346c2da40321d2f7e34f04',
vout: 0,
n: 0,
addr: '2NAVFnsHqy5JvqDJydbHPx393LFqFFBQ89V',
valueMicros: 45753,
value: 0.00045753,
},
],
vout: [
{
value: '0.00011454',
n: 0,
scriptPubKey: {
addresses: ['<KEY>'],
},
},
{
value: '0.00020000',
n: 1,
scriptPubKey: {
addresses: ['<KEY>'],
},
},
],
confirmations: 1,
time: 1424472242,
blocktime: 1424472242,
valueOut: 0.00031454,
valueIn: 0.00045753,
fees: 0.00014299,
},
];
// Fixture surface consumed by the test suite.
module.exports.keyPair = keyPair;
module.exports.copayers = copayers;
module.exports.history = history;
|
kamentr/TraineeAtKodar
|
GradleProject/university/src/main/java/net/kodar/university/business/processor/discipline/DisciplineProcessorGeneric.java
|
<reponame>kamentr/TraineeAtKodar<filename>GradleProject/university/src/main/java/net/kodar/university/business/processor/discipline/DisciplineProcessorGeneric.java
package net.kodar.university.business.processor.discipline;
import net.kodar.university.business.processor.Processor;
import net.kodar.university.presentation.depricated.parameter.DisciplineParam;
import net.kodar.university.presentation.depricated.result.DisciplineResult;
interface DisciplineProcessorGeneric extends Processor<DisciplineParam, DisciplineResult> {
}
|
xvicmanx/express-api-starter-kit
|
src/core/helpers.js
|
<filename>src/core/helpers.js
// Helpers file
const DynRequire = require('dyn-require');
const fs = require('fs');
/**
 * Finds all the files in a given directory that contain certain text in
 * their name and invokes a callback with each matching module.
 * @param {string} dir - parent directory
 * @param {string} str - target string that the file should contain in its name
 * @param {function} cb - callback invoked with each matching file's module.
 */
const forEachFile = (dir, str, cb) => {
  const loader = new DynRequire(dir);
  const modulesByName = loader.requireAllEx();
  Object.keys(modulesByName)
    .filter(name => name.indexOf(str) > -1)
    .forEach(name => cb(modulesByName[name]));
};
/**
* Create a directory if it does not exist.
* @param {string} dir - directory to create.
*/
const createDir = (dir) => {
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir);
}
};
/**
 * Returns the version of the API (falls back to 'v1' when unset).
 */
const getAPIVersion = () => {
  const { API_VERSION } = process.env;
  return API_VERSION || 'v1';
};

/**
 * Returns the host name of the API (falls back to 'localhost' when unset).
 */
const getHostName = () => {
  const { HOST_NAME } = process.env;
  return HOST_NAME || 'localhost';
};

/**
 * Returns the port of the server (falls back to 3000 when unset).
 */
const getPort = () => {
  const { PORT } = process.env;
  return PORT || 3000;
};
/**
 * Recursively goes through an object and gets the value of the property
 * queried via a dot-separated path, e.g. queryValue({a: {b: 1}}, 'a.b') === 1.
 *
 * BUGFIX: the previous implementation used truthiness (`result && result[key]`
 * and `value || defaultValue`), so legitimate falsy values (0, false, '')
 * stored at the queried path were discarded and the default returned instead.
 * The default is now used only when the path is missing or resolves to
 * null/undefined.
 *
 * @param {Object} source - target object
 * @param {string} query - property query (dot-separated path)
 * @param {any} defaultValue - default value to return when property is not found
 * @returns {any} - the found property, otherwise the default value
 */
const queryValue = (source, query = '', defaultValue = null) => {
  const value = query.split('.').reduce(
    (result, key) => (result !== null && result !== undefined ? result[key] : undefined),
    source,
  );
  return value === null || value === undefined ? defaultValue : value;
};
/**
 * Builds a handler for a read ("query") promise after validating the request.
 *
 * Sends a 400 for each expected argument missing from the request, then
 * returns a function that, given a promise, sends its resolved value (with a
 * 404 status when the result is falsy or an empty array) or a 500 on
 * rejection.
 *
 * NOTE(review): when an argument is missing this responds with 400 but does
 * NOT return early, so the returned handler may still run and attempt a
 * second send on the same response — confirm callers stop after a 400.
 *
 * @param {Object} req - request object
 * @param {Object} res - response object
 * @param {Array} expectedArgs - expected arguments (dotted paths into req)
 */
const handleQuery = (req, res, expectedArgs = []) => {
  expectedArgs.forEach((arg) => {
    if (!queryValue(req, arg)) {
      res.status(400).send(`Missing argument ${arg.split('.')[0]}`);
    }
  });
  return promise => promise.then((result) => {
    // A falsy result or an empty array is treated as "not found".
    if (!result || (Array.isArray(result) && !result.length)) {
      res.status(404);
    }
    res.send(result);
  }).catch((err) => {
    res.status(500);
    res.send('An error has occured while executing the search', err);
  });
};
/**
 * Builds a handler for a write ("mutation") promise after validating the
 * request.
 *
 * Sends a 400 for each expected argument missing from the request, then
 * returns a function that, given a promise, sends its resolved value or a
 * 500 on rejection.
 *
 * NOTE(review): as with handleQuery, a missing argument sends a 400 without
 * returning early, so the returned handler may still attempt a second send
 * on the same response — confirm callers stop after a 400.
 *
 * @param {Object} req - request object
 * @param {Object} res - response object
 * @param {Array} expectedArgs - expected arguments (dotted paths into req)
 */
const handleMutation = (req, res, expectedArgs = []) => {
  expectedArgs.forEach((arg) => {
    if (!queryValue(req, arg)) {
      res.status(400).send(`Missing argument ${arg}`);
    }
  });
  return promise => promise.then((result) => {
    res.send(result);
  }).catch((err) => {
    res.status(500);
    res.send('An error has occured while executing the search', err);
  });
};
/**
 * Wraps a node-style callback function into a Promise.
 * @param {function} method - function whose last argument is an (error, result) callback
 * @param {Array} args - arguments of the target function
 * @returns {Promise}
 */
const promify = (method, ...args) =>
  new Promise((resolve, reject) => {
    const settle = (error, result) => {
      if (error) {
        reject(error);
        return;
      }
      resolve(result);
    };
    method(...args, settle);
  });
/**
 * Converts snake_case text to camelCase.
 * @param {string} text - text to be converted
 * @return {string} - the converted string
 */
const snakeToCamelCase = text =>
  text.replace(/_([a-z])/g, (fullMatch, letter) => letter.toUpperCase());
/**
 * Converts text to title case (upper-cases the first character only).
 * @param {string} text - text to be converted
 * @return {string} - the converted string
 */
const toTitleCase = (text) => {
  const head = text.substring(0, 1).toUpperCase();
  const tail = text.substring(1);
  return head + tail;
};
// Public API of the helpers module.
module.exports = {
  forEachFile,
  getAPIVersion,
  getHostName,
  getPort,
  createDir,
  queryValue,
  handleQuery,
  handleMutation,
  promify,
  snakeToCamelCase,
  toTitleCase,
};
|
foreverzmy/zent
|
packages/zent/src/popover/withPopover.js
|
import React, { PureComponent } from 'react';
import omit from 'lodash/omit';
import { PopoverContextType } from './Popover';
/**
 * A high order component to expose imperative APIs for popover.
 *
 * Adds a prop (named by `propName`) carrying the popover's imperative API to
 * the wrapped component.
 */
export const exposePopover = propName => Base => {
  return class ExposePopover extends PureComponent {
    // Legacy React context: the enclosing Popover publishes its API here.
    static contextTypes = PopoverContextType;

    render() {
      const { _zentPopover: popover } = this.context || {};
      const context = {
        // Strip the internal descendant-tracking hooks so consumers only see
        // the public imperative API.
        [propName]: omit(popover, [
          'registerDescendant',
          'unregisterDescendant',
        ]),
      };
      return <Base {...this.props} {...context} />;
    }
  };
};

// Default export exposes the API under the conventional `popover` prop.
export default exposePopover('popover');
|
maxim5/webby
|
webby-core/src/main/java/io/webby/common/Lifetime.java
|
package io.webby.common;
import com.google.common.flogger.FluentLogger;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import io.webby.util.func.ThrowRunnable;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.VisibleForTesting;
import java.io.Closeable;
import java.util.ArrayDeque;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * A terminable scope that owns resources: {@link Closeable}s, {@code ThrowRunnable}s and
 * nested child lifetimes. On termination, registered resources are released in reverse
 * registration order (LIFO). The {@link #Eternal} root lifetime never terminates and
 * silently ignores registrations.
 */
public abstract class Lifetime {
    // See https://stackoverflow.com/questions/48755164/referencing-subclass-in-static-variable
    // and https://stackoverflow.com/questions/50021182/java-static-initializers-referring-to-subclasses-avoid-class-loading-deadlock
    @SuppressWarnings("StaticInitializerReferencesSubClass")
    public static final Lifetime Eternal = new Definition();

    /** Returns the current lifecycle status of this lifetime. */
    public abstract @NotNull Status status();

    /** Creates a child lifetime that is terminated together with this lifetime. */
    public @NotNull Lifetime.Definition createNested() {
        Definition child = new Definition();
        attach(child);
        return child;
    }

    /**
     * Creates a child lifetime and runs {@code atomicAction} with it (only while alive).
     * If the action throws, the child is terminated before the exception propagates.
     */
    public @NotNull Lifetime.Definition createNested(@NotNull Consumer<Definition> atomicAction) {
        Definition nested = createNested();
        try {
            nested.executeIfAlive(() -> atomicAction.accept(nested));
            return nested;
        } catch (Throwable throwable) {
            nested.terminate();
            throw throwable;
        }
    }

    /**
     * Runs {@code action} with a temporary child lifetime that is always terminated
     * (even on exception) before this method returns.
     *
     * @return the value produced by {@code action}
     */
    public <T> @NotNull T usingNested(@NotNull Function<Lifetime, T> action) {
        Definition nested = createNested();
        try {
            return action.apply(nested);
        } finally {
            nested.terminate();
        }
    }

    /** Registers {@code closeable} to be closed when this lifetime terminates. */
    public abstract void onTerminate(@NotNull Closeable closeable);

    /** Attaches a child lifetime to be terminated together with this one. */
    protected abstract void attach(@NotNull Lifetime.Definition child);

    public static class Definition extends Lifetime {
        private static final FluentLogger log = FluentLogger.forEnclosingClass();

        // Registered resources; drained in LIFO order by deconstruct().
        private final ArrayDeque<Object> resources = new ArrayDeque<>();
        private final AtomicReference<Status> status = new AtomicReference<>(Status.Alive);

        @Override
        public @NotNull Status status() {
            return status.get();
        }

        @Override
        public void onTerminate(@NotNull Closeable closeable) {
            log.atFiner().log("Adding closeable for terminate: %s...", closeable);
            tryToAdd(closeable);
        }

        @Override
        protected void attach(@NotNull Lifetime.Definition child) {
            if (child == Eternal) {
                throw new IllegalArgumentException("Eternal lifetime can't be attached");
            }
            tryToAdd(child);
        }

        /**
         * Terminates this lifetime and all registered resources exactly once.
         *
         * @return true if this call performed the termination; false when this is
         *         {@link #Eternal} or the lifetime is already terminating/terminated
         */
        @CanIgnoreReturnValue
        public boolean terminate() {
            if (this == Eternal) {
                return false;
            }
            if (status.compareAndSet(Status.Alive, Status.Terminating)) {
                deconstruct();
                status.set(Status.Terminated);
                return true;
            } else {
                if (status.get() == Status.Terminating) {
                    // Re-entrant terminate() (e.g. from a resource callback) is a bug.
                    log.atSevere().log("Lifetime is already terminating");
                }
                return false;
            }
        }

        /** Terminates only if currently alive; repeated calls are silently ignored. */
        public void terminateIfAlive() {
            if (status.get() == Status.Alive) {
                terminate();
            }
        }

        private void tryToAdd(@NotNull Object resource) {
            if (this == Eternal) {
                // The eternal lifetime never terminates, so it never stores resources.
                return;
            }
            if (status.get() == Status.Alive) {
                resources.addLast(resource);
            } else {
                log.atWarning().log("Failed to add a resource due to status=%s: %s", status.get(), resource);
            }
        }

        private void deconstruct() {
            while (!resources.isEmpty()) {
                Object resource = resources.pollLast(); // in reverse order: lowest on the stack added first
                log.atInfo().log("Terminating %s...", resource);
                try {
                    deconstruct(resource);
                } catch (Throwable throwable) {
                    // Keep going: one failing resource must not block termination of the rest.
                    // The cause is attached to the log record; no printStackTrace() needed.
                    log.atWarning().withCause(throwable).log("Resource termination failed: %s", throwable.getMessage());
                }
            }
        }

        @VisibleForTesting
        void deconstruct(@Nullable Object resource) throws Throwable {
            if (resource instanceof Closeable closeable) {
                closeable.close();
            } else if (resource instanceof ThrowRunnable<?> runnable) {
                runnable.run();
            } else if (resource instanceof Definition definition) {
                definition.terminate();
            } else {
                log.atSevere().log("Failed to terminate unexpected resource: %s", resource);
            }
        }

        /** Runs {@code runnable} only if this lifetime is still alive. */
        public <E extends Throwable> void executeIfAlive(@NotNull ThrowRunnable<E> runnable) throws E {
            // A plain atomic read; the original compareAndSet(Alive, Alive) was equivalent.
            if (status.get() == Status.Alive) {
                runnable.run();
            }
        }
    }

    public enum Status {
        Alive,
        Terminating,
        Terminated
    }
}
|
rsassi/hivemind2
|
library/src/java/org/apache/hivemind/lib/util/StrategyRegistry.java
|
<reponame>rsassi/hivemind2
// Copyright 2004, 2005 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.hivemind.lib.util;
/**
* An implementation of the <b>strategy </b> pattern. The strategy pattern allows new functionality
* to be assigned to an existing class. As implemented here, this is a smart lookup between a
* particular class (called the <em>subject class</em>) and some object instance that can provide
* the extra functionality (called the <em>strategy</em>). The implementation of the strategy is
* not relevant to the StrategyRegistry class.
* <p>
* Strategies are registered before they can be used; the registration maps a particular class to a
* strategy instance. The strategy instance will be used when the subject class matches the
* registered class, or the subject class inherits from the registered class.
* <p>
* This means that a search must be made that walks the inheritance tree (upwards from the subject
* class) to find a registered mapping.
* <p>
* In addition, strategies can be registered against <em>interfaces</em>. Searching of interfaces
* occurs after searching of classes. The exact order is:
* <ul>
* <li>Search for the subject class, then each super-class of the subject class (excluding
* java.lang.Object)
* <li>Search interfaces, starting with interfaces implemented by the subject class, continuing
* with interfaces implemented by the super-classes, then interfaces extended by earlier interfaces
* (the exact order is a bit fuzzy)
* <li>Search for a match for java.lang.Object, if any
* </ul>
* <p>
* The first match terminates the search.
* <p>
* The StrategyRegistry caches the results of search; a subsequent search for the same subject class
* will be resolved immediately.
* <p>
* StrategyRegistry does a minor tweak of the "natural" inheritance. Normally, the parent class of
* an object array (i.e., <code>Foo[]</code>) is simply <code>Object</code>, even though you
* may assign <code>Foo[]</code> to a variable of type <code>Object[]</code>. StrategyRegistry
* "fixes" this by searching for <code>Object[]</code> as if it was the superclass of any object
* array. This means that the search path for <code>Foo[]</code> is <code>Foo[]</code>,
* <code>Object[]</code>, then a couple of interfaces {@link java.lang.Cloneable},
* {@link java.io.Serializable}, etc. that are implicitily implemented by arrays), and then,
* finally, <code>Object</code>
* <p>
* This tweak doesn't apply to arrays of primitives, since such arrays may <em>not</em> be
* assigned to <code>Object[]</code>.
*
* @author <NAME>
* @see org.apache.hivemind.lib.util.StrategyRegistryImpl
* @since 1.1
*/
public interface StrategyRegistry
{
    /**
     * Registers a strategy for a registration class. The strategy applies to the
     * registration class and (unless overridden) to its subclasses/implementors.
     *
     * @throws IllegalArgumentException
     *             if a strategy has already been registered for the given class.
     */
    public void register(Class registrationClass, Object strategy);

    /**
     * Gets the strategy object for the specified subjectClass, walking up the
     * inheritance tree and interfaces as described in the class documentation.
     *
     * @throws IllegalArgumentException
     *             if no strategy could be found.
     */
    public Object getStrategy(Class subjectClass);
}
|
ed741/PathBench
|
src/simulator/views/main_view.py
|
from typing import Optional
from simulator.models.model import Model
from simulator.services.debug import DebugLevel
from simulator.services.event_manager.events.event import Event
from simulator.services.services import Services
from simulator.services.event_manager.events.quit_event import QuitEvent
from simulator.views.view import View
from utility.timer import Timer
class MainView(View):
    """
    Draws the model state onto the screen.

    Tracks a small moving average of frames-per-second and logs it once per
    second at DebugLevel.MEDIUM.
    """
    # Fixed: the docstring above was previously a bare string placed after the
    # attribute annotations, so it never became MainView.__doc__.

    MVG_AVG_SIZE = 3  # max number of 1-second samples folded into the FPS average

    # Attribute annotations (instance state set in __init__):
    __frame_timer: Timer          # measures elapsed time of the current 1s window
    __frame_count: int            # frames rendered in the current window
    __frame_mvg_average: int      # number of samples currently in the average (<= MVG_AVG_SIZE)
    __fps: int                    # NOTE(review): becomes a float once averaging kicks in -- confirm int is intended

    def __init__(self, services: Services, model: Model, root_view: Optional[View]) -> None:
        super().__init__(services, model, root_view)

        self._services.ev_manager.register_tick_listener(self)
        self.__frame_timer = Timer()
        self.__frame_count = 0
        self.__frame_mvg_average = 0
        self.__fps = 0
        self.__screen = None
        self._initialised = False

    def notify(self, event: Event) -> None:
        """
        Receive events posted to the message queue.
        """
        super().notify(event)
        if isinstance(event, QuitEvent):
            self._initialised = False
            self._services.graphics.window.quit()

    def initialise(self) -> None:
        # Restart frame timing so the first FPS sample is not skewed by setup time.
        self.__frame_timer = Timer()

    def tick(self) -> None:
        # Skip rendering until initialisation completes (or after a QuitEvent).
        if not self._initialised:
            return
        self.update()

    def update(self) -> None:
        # Once per second: fold the frame count into the FPS moving average.
        if self.__frame_timer.stop() >= 1:
            if self.__frame_mvg_average == 0:
                self.__fps = self.__frame_count
            else:
                self.__fps += (self.__frame_count - self.__fps) / self.__frame_mvg_average
            self.__frame_mvg_average = min(self.__frame_mvg_average + 1, self.MVG_AVG_SIZE)
            self.__frame_count = 0
            self.__frame_timer = Timer()
            self._services.debug.write("FPS: " + str(self.__fps), DebugLevel.MEDIUM)

        for child in self._children:
            child.update()

        # update window
        self._services.graphics.window.update()
        self.__frame_count += 1
|
oneAlon/DemoCode
|
runtime/runtimeDemo6/runtimeDemo6/YAPerson.h
|
//
// YAPerson.h
// runtimeDemo6
//
// Created by xygj on 2018/7/18.
// Copyright © 2018年 xygj. All rights reserved.
//
#import <Foundation/Foundation.h>
// Simple person model used by the runtime demo.
@interface YAPerson : NSObject
// Weight value; units unspecified here -- confirm with callers.
@property (assign, nonatomic) int weight;
// Age value; presumably in years -- confirm with callers.
@property (assign, nonatomic) int age;
// Person's name; `copy` guards against callers passing NSMutableString.
@property (copy, nonatomic) NSString *name;
@end
|
jjzhang166/lullaby
|
lullaby/systems/text/detail/util.h
|
/*
Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef LULLABY_SYSTEMS_TEXT_DETAIL_UTIL_H_
#define LULLABY_SYSTEMS_TEXT_DETAIL_UTIL_H_
#include "mathfu/glsl_mappings.h"
namespace lull {
// Packs signed-distance-field shader parameters into a vec4:
// (dist_offset, dist_scale, sdf_min, sdf_max).
inline mathfu::vec4 CalcSdfParams(float edge_softness, float sdf_dist_offset,
                                  float sdf_dist_scale) {
  // Softness defines how wide the gradient at the edge of glyphs is drawn.
  // A negative edge_softness selects the UX-chosen default.
  constexpr float kDefaultSoftness = 32.0f / 255.0f;
  constexpr float kThreshold = 0.5f;

  const float softness = edge_softness >= 0 ? edge_softness : kDefaultSoftness;
  const float sdf_min = mathfu::Clamp(kThreshold - 0.5f * softness, 0.0f, 1.0f);
  // The +0.001f keeps sdf_max strictly above sdf_min -- presumably to avoid a
  // degenerate interval downstream; confirm against the shader.
  const float sdf_max =
      mathfu::Clamp(kThreshold + 0.5f * softness, sdf_min, 1.0f) + 0.001f;
  return mathfu::vec4(sdf_dist_offset, sdf_dist_scale, sdf_min, sdf_max);
}
} // namespace lull
#endif // LULLABY_SYSTEMS_TEXT_DETAIL_UTIL_H_
|
npocmaka/Windows-Server-2003
|
inetsrv/msmq/src/trigger/trigutil/ruleinfo.cpp
|
<reponame>npocmaka/Windows-Server-2003
//*****************************************************************************
//
// Class Name : CRuntimeRuleInfo
//
// Author : <NAME> (Microsoft Consulting Services)
//
// Description : This class encapsulates information about a trigger rule.
// It is used to cache rule information at runtime about trigger
// rules, as well as accessing the underlying trigger storage
// medium.
//
// Notes : The current implementation uses the registry as the storage
// medium.
//
// This class is used by both the trggers service and the trigger
// COM components.
//
// When | Who | Change Descriptin
// ------------------------------------------------------------------
// 15/01/99 | jsimpson | Initial Release
//
//*****************************************************************************
#include "stdafx.h"
#include "mqtg.h"
#include "stdfuncs.hpp"
#include "Cm.h"
#include "ruleinfo.hpp"
#include "ruleinfo.tmh"
//*****************************************************************************
//
//  Method      : Constructor
//
//  Description : Initialize an empty instance of this class. All rule
//                properties default to empty strings; only the registry
//                path of the rules subtree is derived from pwzRegPath.
//
//*****************************************************************************
CRuntimeRuleInfo::CRuntimeRuleInfo(
    LPCTSTR pwzRegPath
    ) :
    m_MSMQRuleHandler(NULL)
{
    m_bstrRuleID = _T("");
    m_bstrRuleName = _T("");
    m_bstrAction = _T("");
    m_bstrCondition = _T("");
    m_bstrImplementationProgID = _T("");
    m_fShowWindow = false;

    // Rule definitions live under "<pwzRegPath>\<REG_SUBKEY_RULES>".
    // NOTE(review): _snwprintf does not NUL-terminate on truncation; assumes
    // m_wzRuleRegPath is large enough for any configured path -- confirm.
    _snwprintf(m_wzRuleRegPath, TABLE_SIZE(m_wzRuleRegPath), L"%s\\%s", pwzRegPath, REG_SUBKEY_RULES);
}
//*****************************************************************************
//
//  Method      : Constructor
//
//  Description : Initialize an instance of the CRuntimeRuleInfo structure
//                from explicit property values. Optional BSTR arguments
//                (description, condition, prog-id) may be NULL and default
//                to empty strings; name and action are mandatory.
//
//*****************************************************************************
CRuntimeRuleInfo::CRuntimeRuleInfo(
    const _bstr_t& ruleId,
    BSTR bsRuleName,
    BSTR bsRuleDescription,
    BSTR bsRuleCondition,
    BSTR bsRuleAction,
    BSTR bsRuleImplementationProgID,
    LPCTSTR pwzRegPath,
    bool fShowWindow
    ):
    m_bstrRuleID(ruleId),
    m_MSMQRuleHandler(NULL)
{
    ASSERT(bsRuleName != NULL);
    m_bstrRuleName = bsRuleName;

    if(bsRuleDescription == NULL)
    {
        m_bstrRuleDescription = _T("");
    }
    else
    {
        m_bstrRuleDescription = bsRuleDescription;
    }

    ASSERT(bsRuleAction != NULL); //always contains COM or EXE
    m_bstrAction = bsRuleAction;

    if(bsRuleCondition == NULL)
    {
        m_bstrCondition = _T("");
    }
    else
    {
        m_bstrCondition = bsRuleCondition;
    }

    if(bsRuleImplementationProgID == NULL)
    {
        m_bstrImplementationProgID = _T("");
    }
    else
    {
        m_bstrImplementationProgID = bsRuleImplementationProgID;
    }

    m_fShowWindow = fShowWindow;

    // Rule definitions live under "<pwzRegPath>\<REG_SUBKEY_RULES>".
    _snwprintf(m_wzRuleRegPath, TABLE_SIZE(m_wzRuleRegPath), L"%s\\%s", pwzRegPath, REG_SUBKEY_RULES);
}
//*****************************************************************************
//
//  Method      : Destructor
//
//  Description : Does nothing; all members release their own storage via
//                their destructors.
//
//*****************************************************************************
CRuntimeRuleInfo::~CRuntimeRuleInfo()
{
}
//*****************************************************************************
//
//  Method      : GetRuleKeyHandle
//
//  Description : Opens (read-only) and returns a handle to the registry key
//                of the given rule. Returns NULL when the path does not fit
//                in the buffer or the key does not exist.
//
//*****************************************************************************
HKEY
CRuntimeRuleInfo::GetRuleKeyHandle(
    HKEY hRegistry,
    LPCTSTR ruleId
    )
{
    TCHAR rulePath[MAX_REGKEY_NAME_SIZE];
    int n = _snwprintf(rulePath, MAX_REGKEY_NAME_SIZE - 1, L"%s%s", m_wzRuleRegPath, ruleId);
    // _snwprintf may not NUL-terminate on truncation; force a terminator.
    rulePath[MAX_REGKEY_NAME_SIZE - 1] = L'\0';

    if (n < 0)
    {
        // Negative return means the formatted string was truncated.
        TrERROR(GENERAL, "Buffer to small to contain the registry path of a rule.");
        return NULL;
    }

    // Optional: a missing key yields NULL rather than raising an error.
    RegEntry ruleReg(rulePath, NULL, 0, RegEntry::Optional, hRegistry);
    return CmOpenKey(ruleReg, KEY_READ);
}
//*****************************************************************************
//
//  Method      : IsValid
//
//  Description : Returns a boolean value indicating whether the current
//                instance represents a valid rule definition, i.e. every
//                stored property passes its own validation helper.
//
//*****************************************************************************
bool CRuntimeRuleInfo::IsValid()
{
    // Same checks, same order as the original chained expression, written
    // as early-exit guards.
    if (!IsValidRuleID(m_bstrRuleID))
        return false;
    if (!IsValidRuleName(m_bstrRuleName))
        return false;
    if (!IsValidRuleDescription(m_bstrRuleDescription))
        return false;
    if (!IsValidRuleAction(m_bstrAction))
        return false;
    if (!IsValidRuleCondition(m_bstrCondition))
        return false;
    return IsValidRuleProgID(m_bstrImplementationProgID);
}
//*****************************************************************************
//
//  Method      : Update
//
//  Description : Re-writes the definition of this rule to the registry.
//                The rule's key must already exist; returns false when it
//                does not or when flushing the values fails.
//
//*****************************************************************************
bool CRuntimeRuleInfo::Update(HKEY hRegistry)
{
    // Assert that we have valid parameters
    ASSERT(hRegistry != NULL);
    ASSERT(IsValid());

    CRegHandle hRuleKey = GetRuleKeyHandle(hRegistry, m_bstrRuleID );
    if (hRuleKey == NULL)
    {
        TrERROR(GENERAL, "Failed to update rule properties for rule: %ls. Rule does't exist in registry", (LPCWSTR)m_bstrRuleID);
        return false;
    }

    try
    {
        FlushValuesToRegistry(hRuleKey);
        return true;
    }
    catch (const bad_alloc&)
    {
        //
        // ISSUE-2000/10/26-urih: partial success can cause rule inconsistency
        // (some values may already have been written before the failure).
        //
        TrERROR(GENERAL, "Failed to update rule properties for: %ls rule.", (LPCWSTR)m_bstrRuleID);
        return false;
    }
}
//*****************************************************************************
//
//  Method      : Create
//
//  Description : This method creates a new rule definition based on the
//                property values of this class instance. Fails when a rule
//                with the same ID already exists or the registry path does
//                not fit in the buffer.
//
//*****************************************************************************
bool CRuntimeRuleInfo::Create(HKEY hRegistry)
{
    //
    // Assert that we have valid parameters
    //
    ASSERT(hRegistry != NULL);

    //
    // Check that the registry doesn't contain another rule with the same ID
    //
    CRegHandle hNotExistRuleKey = GetRuleKeyHandle(hRegistry, m_bstrRuleID );
    if (hNotExistRuleKey != NULL)
    {
        TrERROR(GENERAL, "Failed to create a key for rule:%ls . Registry already contains rule with same ID.", (LPCWSTR)m_bstrRuleID);
        return false;
    }

    //
    // Assemble the rule registry path
    //
    TCHAR rulePath[MAX_REGKEY_NAME_SIZE];
    int n = _snwprintf(rulePath, MAX_REGKEY_NAME_SIZE - 1, L"%s%s", m_wzRuleRegPath, static_cast<LPCWSTR>(m_bstrRuleID));
    // _snwprintf may not NUL-terminate on truncation; force a terminator.
    rulePath[MAX_REGKEY_NAME_SIZE - 1] = L'\0';
    if (n < 0)
    {
        TrERROR(GENERAL, "Failed to create a key for rule:%ls. Buffer to small to contain the registry path of a rule.", (LPCWSTR)m_bstrRuleID);
        return false;
    }

    try
    {
        //
        // Create a key for the rule in the registry and write all values
        //
        RegEntry ruleReg(rulePath, NULL, 0, RegEntry::MustExist, hRegistry);
        CRegHandle hRuleKey = CmCreateKey(ruleReg, KEY_ALL_ACCESS);

        FlushValuesToRegistry(hRuleKey);
        return true;
    }
    catch(const bad_alloc&)
    {
        //
        // Roll back: remove the key if it was already created
        //
        RegEntry ruleReg(rulePath, NULL, 0, RegEntry::Optional, hRegistry);
        CmDeleteKey(ruleReg);

        TrERROR(GENERAL, "Failed to store rule:%ls in registry.",(LPCWSTR)m_bstrRuleID);
        return false;
    }
}
//*****************************************************************************
//
//  Method      : Delete
//
//  Description : This method will delete the current rule definition from the
//                registry.
//
//  NOTE(review): the original header claimed the rule is re-retrieved and its
//                reference count checked before deletion, but no such check
//                appears in this method -- confirm whether callers guard
//                against deleting a rule that is in use.
//
//*****************************************************************************
bool CRuntimeRuleInfo::Delete(HKEY hRegistry)
{
    try
    {
        // Open the parent "rules" key, then delete this rule's subkey under it.
        RegEntry rulesReg(m_wzRuleRegPath, NULL, 0, RegEntry::MustExist, hRegistry);
        CRegHandle hRuless = CmOpenKey(rulesReg, KEY_ALL_ACCESS);

        RegEntry ruleReg(m_bstrRuleID, NULL, 0, RegEntry::MustExist, hRuless);
        CmDeleteKey(ruleReg);
        return true;
    }
    catch (const exception&)
    {
        TrERROR(GENERAL, "Failed to delete rule:%ls from registry.", (LPCWSTR)m_bstrRuleID);
        return false;
    }
}
//*****************************************************************************
//
//  Method      : Retrieve
//
//  Description : This method retrieves the specified rule ID from the
//                supplied registry key and populates the member variables.
//                Returns false when the key is missing, any mandatory value
//                is absent, or the loaded rule fails validation.
//
//*****************************************************************************
bool CRuntimeRuleInfo::Retrieve(HKEY hRegistry, _bstr_t bstrRuleID)
{
    CRegHandle hRuleKey = GetRuleKeyHandle(hRegistry, bstrRuleID );
    if (hRuleKey == NULL)
    {
        TrERROR(GENERAL, "Failed to retrieve rule properties from registery for %ls. Registery key isn't exist.", (LPCWSTR)m_bstrRuleID);
        return false;
    }

    try
    {
        //
        // Retrieve rule name (MustExist: a missing value throws)
        //
        AP<TCHAR> ruleName = NULL;
        RegEntry ruleNameReg(NULL, REGISTRY_RULE_VALUE_NAME, 0, RegEntry::MustExist, hRuleKey);
        CmQueryValue(ruleNameReg, &ruleName);

        //
        // Retrieve rule description
        //
        AP<TCHAR> ruleDescription = NULL;
        RegEntry ruleDescReg(NULL, REGISTRY_RULE_VALUE_DESCRIPTION, 0, RegEntry::MustExist, hRuleKey);
        CmQueryValue(ruleDescReg, &ruleDescription);

        //
        // Retrieve rule prog-id
        //
        AP<TCHAR> ruleProgid = NULL;
        RegEntry ruleProgidReg(NULL, REGISTRY_RULE_VALUE_IMP_PROGID, 0, RegEntry::MustExist, hRuleKey);
        CmQueryValue(ruleProgidReg, &ruleProgid);

        //
        // Retrieve rule condition
        //
        AP<TCHAR> ruleCond = NULL;
        RegEntry ruleCondReg(NULL, REGISTRY_RULE_VALUE_CONDITION, 0, RegEntry::MustExist, hRuleKey);
        CmQueryValue(ruleCondReg, &ruleCond);

        //
        // Retrieve rule action
        //
        AP<TCHAR> ruleAction = NULL;
        RegEntry ruleActReg(NULL, REGISTRY_RULE_VALUE_ACTION, 0, RegEntry::MustExist, hRuleKey);
        CmQueryValue(ruleActReg, &ruleAction);

        //
        // Retrieve rule "show console window" value
        //
        DWORD ruleShowWindow;
        RegEntry ruleShowWinReg(NULL, REGISTRY_RULE_VALUE_SHOW_WINDOW, 0, RegEntry::MustExist, hRuleKey);
        CmQueryValue(ruleShowWinReg, &ruleShowWindow);

        //
        // Initialise the member vars of this rule instance.
        //
        m_bstrRuleID = bstrRuleID;
        m_bstrRuleName = ruleName;
        m_bstrRuleDescription = ruleDescription;
        m_bstrImplementationProgID = ruleProgid;
        m_bstrCondition = ruleCond;
        m_bstrAction = ruleAction;
        m_fShowWindow = ruleShowWindow != 0;

        if (IsValid())
            return true;

        //
        // Invalid rule. Write a log message and return false.
        //
        TrERROR(GENERAL, "Failed to retrieve rule properties for %ls. Rule property isn't valid", (LPCWSTR)m_bstrRuleID);
        return false;
    }
    catch (const exception&)
    {
        TrERROR(GENERAL, "Failed to retrieve rule %ls from registry", (LPCWSTR)bstrRuleID);
        return false;
    }
}
//*****************************************************************************
//
//  Method      : FlushValuesToRegistry
//
//  Description : This method flushes the member variable values to the
//                supplied registry key. Each property is written as a
//                separate value; failures surface as exceptions from the
//                Cm* helpers (handled by the callers).
//
//*****************************************************************************
void
CRuntimeRuleInfo::FlushValuesToRegistry(
    const HKEY& hRuleKey
    )
{
    //
    // Set the NAME value for this rule
    //
    RegEntry ruleNameReg(NULL, REGISTRY_RULE_VALUE_NAME, 0, RegEntry::MustExist, hRuleKey);
    CmSetValue(ruleNameReg, m_bstrRuleName);

    //
    // Set the DESCRIPTION value for this rule
    //
    RegEntry ruleDescReg(NULL, REGISTRY_RULE_VALUE_DESCRIPTION, 0, RegEntry::MustExist, hRuleKey);
    CmSetValue(ruleDescReg, m_bstrRuleDescription);

    // Create the Implementation value for this rule. Note that in this release we are
    // not allowing the user-supplied prog-id to be used - we are forcing the use of the
    // MS-supplied rule-handler (the member value is overwritten before being persisted).
    m_bstrImplementationProgID = _T("MSQMTriggerObjects.MSMQRuleHandler"); // TO BE REMOVED.
    RegEntry ruleUmpProgReg(NULL, REGISTRY_RULE_VALUE_IMP_PROGID, 0, RegEntry::MustExist, hRuleKey);
    CmSetValue(ruleUmpProgReg, m_bstrImplementationProgID);

    //
    // Set the Condition value for this rule
    //
    RegEntry ruleCondReg(NULL, REGISTRY_RULE_VALUE_CONDITION, 0, RegEntry::MustExist, hRuleKey);
    CmSetValue(ruleCondReg, m_bstrCondition);

    //
    // Set the Action value for this rule
    //
    RegEntry ruleActReg(NULL, REGISTRY_RULE_VALUE_ACTION, 0, RegEntry::MustExist, hRuleKey);
    CmSetValue(ruleActReg, m_bstrAction);

    //
    // Set the "show console window" value (stored as a DWORD flag)
    //
    DWORD dwShowWindow = m_fShowWindow ? 1 : 0;
    RegEntry ruleShowWinReg(NULL, REGISTRY_RULE_VALUE_SHOW_WINDOW, 0, RegEntry::MustExist, hRuleKey);
    CmSetValue(ruleShowWinReg, dwShowWindow);
}
//*****************************************************************************
//
//  Method      : IsValid*
//
//  Description : Validation helpers for CRuntimeRuleInfo properties. Each
//                checks the length constraints of one rule property.
//                (Cleaned up: removed the redundant "? true : false".)
//
//*****************************************************************************
bool CRuntimeRuleInfo::IsValidRuleID(_bstr_t bstrRuleID)
{
    // ID is mandatory and bounded.
    return (bstrRuleID.length() > 0) && (bstrRuleID.length() <= MAX_RULE_ID_LEN);
}

bool CRuntimeRuleInfo::IsValidRuleName(_bstr_t bstrRuleName)
{
    // Name is mandatory and bounded.
    return (bstrRuleName.length() > 0) && (bstrRuleName.length() <= MAX_RULE_NAME_LEN);
}

bool CRuntimeRuleInfo::IsValidRuleDescription(_bstr_t bstrRuleDescription)
{
    // Description is optional (may be empty).
    return bstrRuleDescription.length() <= MAX_RULE_DESCRIPTION_LEN;
}

bool CRuntimeRuleInfo::IsValidRuleCondition(_bstr_t bstrRuleCondition)
{
    // Condition is optional (may be empty).
    return bstrRuleCondition.length() <= MAX_RULE_CONDITION_LEN;
}

bool CRuntimeRuleInfo::IsValidRuleAction(_bstr_t bstrRuleAction)
{
    // Action is mandatory and bounded.
    return (bstrRuleAction.length() > 0) && (bstrRuleAction.length() <= MAX_RULE_ACTION_LEN);
}

bool CRuntimeRuleInfo::IsValidRuleProgID(_bstr_t bstrRuleProgID)
{
    // Prog-ID is mandatory and bounded.
    return (bstrRuleProgID.length() > 0) && (bstrRuleProgID.length() <= MAX_RULE_PROGID_LEN);
}
|
UbuntuEvangelist/lila
|
ui/coachOpening/src/inspect.js
|
var m = require('mithril');
var chessground = require('chessground');
var sections = require('./sections');
var coach = require('coach');
// Renders the three navigation controls shown beside an inspected opening:
// back to the list, previous opening, next opening.
function sideCommands(ctrl) {
  var jump = function(delta) {
    return function() {
      ctrl.jumpBy(delta);
    };
  };
  return [
    m('a.to.back', {
      'data-icon': 'L',
      onclick: ctrl.uninspect
    }),
    m('a.to.prev', {
      'data-icon': 'I',
      onclick: jump(-1)
    }),
    m('a.to.next', {
      'data-icon': 'H',
      onclick: jump(1)
    })
  ];
}
// Renders the "inspect" pane for a single opening (identified by ECO code):
// navigation commands, aggregate results, a board preview and detail sections.
module.exports = function(ctrl, inspecting) {
  var d = ctrl.data;
  var eco = inspecting.eco;
  var o = d.openings[eco];
  // No games for this opening in the selected date range: show a placeholder.
  if (!o) return m('div.top.nodata', [
    sideCommands(ctrl),
    m('p', 'No results for this data range and opening!')
  ]);
  var opening = o.opening;
  var results = o.results;
  var user = ctrl.user.id;
  // Links into game search, filtered to this user, this color and this opening.
  var searchUrl = '/games/search?players.a=' + user + '&players.' + ctrl.color + '=' + user + '&opening=' + eco;
  var analysedUrl = searchUrl + '&analysed=1';
  return m('div.top.inspect', [
    sideCommands(ctrl),
    coach.resultBar(results),
    m('div.main', [
      // Average rating change per game for this opening.
      coach.shared.progress(results.ratingDiff / results.nbGames),
      m('h2', [
        m('strong', [
          opening.eco,
          ' ',
          opening.name
        ]),
        m('em', opening.moves)
      ]),
      m('div.baseline', [
        m('a', {
          href: searchUrl
        }, [
          m('strong', results.nbGames),
          ' games'
        ]),
        ', ',
        m('a', {
          href: analysedUrl
        }, [
          m('strong', results.nbAnalysis),
          ' analysed.'
        ]),
        ' Last played ',
        coach.shared.momentFromNow(results.lastPlayed),
        '.',
      ])
    ]),
    m('div.content', [
      // Board preview of the inspected opening position.
      m('div.board',
        chessground.view(ctrl.vm.inspecting.chessground)
      ),
      m('div.right', [
        // moves(ctrl, results),
        sections(ctrl, results),
        results.bestWin ? [
          m('br'),
          ' Best win: ',
          coach.bestWin(results.bestWin, ctrl.color)
        ] : null
        // m('table', [
        //   m('tr', [
        //     m('tr', [
        //       m('th', 'Average opponent'),
        //       m('tr', m('strong', results.opponentRatingAvg))
        //     ]),
        //     results.bestWin ? m('tr', [
        //       m('th', 'Best win'),
        //       m('tr', bestWin(results.bestWin))
        //     ]) : null
        //   ])
      ])
    ])
  ]);
};
|
matcha-ai/matcha
|
src/bits_of_matcha/Loader.cpp
|
#include "bits_of_matcha/Loader.h"
#include "matcha/dataset"
#include "matcha/tensor"
namespace matcha {

// Remembers the file to load; actual parsing is deferred until the Loader
// is converted to a concrete target type (Dataset, tensor, Flow).
Loader::Loader(const std::string& file)
  : file_(file)
{}

// Convenience factory: matcha::load("data.csv").
Loader load(const std::string& file) {
  return Loader(file);
}

// Interpret the file as a CSV-backed dataset.
Loader::operator Dataset() {
  return dataset::Csv {file_};
}

// NOTE(review): tensor conversion looks like a stub -- always yields 0.
Loader::operator tensor() {
  return 0;
}

// NOTE(review): Flow conversion looks like a stub -- yields an empty flow.
Loader::operator Flow() {
  return {};
}

}
|
ljktest/siconos
|
externals/numeric_bindings/boost/numeric/bindings/blas/level1/dotu.hpp
|
//
// Copyright (c) 2002--2010
// <NAME>, <NAME>, <NAME>,
// <NAME> and <NAME>
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef BOOST_NUMERIC_BINDINGS_BLAS_LEVEL1_DOTU_HPP
#define BOOST_NUMERIC_BINDINGS_BLAS_LEVEL1_DOTU_HPP
#include <boost/numeric/bindings/blas/level1/dot.hpp>
namespace boost {
namespace numeric {
namespace bindings {
namespace blas {
//
// dotu is a synonym for dot: for complex vectors it is the unconjugated
// dot product, which this binding forwards to the generic dot_impl.
//
// Functions for direct use. These functions are overloaded for temporaries,
// so that wrapped types can still be passed and used for write-access.
//
//
// Overloaded function for dotu.
//
// x, y: vector expressions of equal length; returns sum(x[i] * y[i]).
//
template< typename VectorX, typename VectorY >
inline typename dot_impl< typename bindings::value_type< VectorX >::type >::result_type
dotu( const VectorX& x, const VectorY& y ) {
    return dot_impl< typename bindings::value_type< VectorX >::type >::invoke( x, y );
}
} // namespace blas
} // namespace bindings
} // namespace numeric
} // namespace boost
#endif
|
daemon-demon/airflow
|
tests/sensors/test_time_sensor.py
|
<reponame>daemon-demon/airflow<gh_stars>1-10
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime, time
import pendulum
from mock import patch
from parameterized import parameterized
from airflow.models.dag import DAG
from airflow.sensors.time_sensor import TimeSensor
from airflow.utils import timezone
DEFAULT_TIMEZONE = "Asia/Singapore"  # UTC+08:00
# Naive start date: interpreted in the configured default timezone.
DEFAULT_DATE_WO_TZ = datetime(2015, 1, 1)
# Aware start date: pinned to Asia/Singapore regardless of the default timezone.
DEFAULT_DATE_WITH_TZ = datetime(
    2015, 1, 1, tzinfo=pendulum.tz.timezone(DEFAULT_TIMEZONE)
)


# Freeze "now" at 2020-01-01 23:00 UTC (= 2020-01-02 07:00 in Asia/Singapore).
@patch(
    "airflow.sensors.time_sensor.timezone.utcnow",
    return_value=timezone.datetime(2020, 1, 1, 23, 0).replace(tzinfo=timezone.utc),
)
class TestTimeSensor:
    # Cases: (default timezone, DAG start_date, expected poke() result).
    @parameterized.expand(
        [
            ("UTC", DEFAULT_DATE_WO_TZ, True),
            ("UTC", DEFAULT_DATE_WITH_TZ, False),
            (DEFAULT_TIMEZONE, DEFAULT_DATE_WO_TZ, False),
        ]
    )
    def test_timezone(self, mock_utcnow, default_timezone, start_date, expected):
        """poke() compares target_time against 'now' in the DAG's timezone."""
        with patch("airflow.settings.TIMEZONE", pendulum.timezone(default_timezone)):
            dag = DAG("test", default_args={"start_date": start_date})
            op = TimeSensor(task_id="test", target_time=time(10, 0), dag=dag)
            assert op.poke(None) == expected
|
kong0827/SpringBoot-Learning
|
Event/src/main/java/com/kxj/listener/ApplicationEventListener.java
|
package com.kxj.listener;
import org.springframework.boot.context.event.*;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
/**
* @author xiangjin.kong
* @date 2021/1/15 16:43
*/
@Component
public class ApplicationEventListener {

    /** Fires for any published event whose payload is a String. */
    @EventListener(String.class)
    public void listener(String message) {
        System.out.println("事件监听, 消息为:" + message);
    }

    /**
     * Single listener dispatching on the runtime type of the event.
     *
     * NOTE(review): these are independent {@code if}s, not else-if -- one event
     * can match several branches (e.g. ApplicationStartedEvent is also a
     * SpringApplicationEvent), producing multiple log lines per event. Confirm
     * this is intended (it appears to be, for demo purposes).
     */
    @EventListener
    public void handleEvent(Object event) {
        if (event instanceof ApplicationFailedEvent) {
            // Application failed to start.
            System.out.println("注解-项目启动失败事件监听");
        }
        if (event instanceof ApplicationStartedEvent) {
            // Application started.
            System.out.println("注解-项目启动时间监听");
        }
        if (event instanceof ApplicationPreparedEvent) {
            // ApplicationContext prepared.
            System.out.println("注解-上下文context准备时触发");
        }
        if (event instanceof ApplicationReadyEvent) {
            // ApplicationContext fully ready.
            System.out.println("注解-上下文已经准备完毕的时候触发");
        }
        if (event instanceof SpringApplicationEvent) {
            // Any SpringApplication lifecycle event.
            System.out.println("注解-获取SpringApplication");
        }
        if (event instanceof ApplicationEnvironmentPreparedEvent) {
            // Environment prepared (before context creation).
            System.out.println("注解-环境事先准备");
        }
    }
}
|
DCNick3/esp-idf
|
components/riscv/include/esp_debug_helpers.h
|
<gh_stars>10-100
// Copyright 2015-2019 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#ifdef __cplusplus
extern "C" {
#endif
#ifndef __ASSEMBLER__
#include <stdbool.h>
#include "esp_err.h"
#include "soc/soc.h"
#define ESP_WATCHPOINT_LOAD 0x40000000
#define ESP_WATCHPOINT_STORE 0x80000000
#define ESP_WATCHPOINT_ACCESS 0xC0000000
/*
* @brief Structure used for backtracing
*
* This structure stores the backtrace information of a particular stack frame
* (i.e. the PC and SP). This structure is used iteratively with the
* esp_cpu_get_next_backtrace_frame() function to traverse each frame within a
* single stack. The next_pc represents the PC of the current frame's caller, thus
* a next_pc of 0 indicates that the current frame is the last frame on the stack.
*
* @note Call esp_backtrace_get_start() to obtain initialization values for
* this structure
*/
typedef struct {
    uint32_t pc;       /* PC of the current frame */
    uint32_t sp;       /* SP of the current frame */
    uint32_t next_pc;  /* PC of the current frame's caller; 0 means this is the last frame */
} esp_backtrace_frame_t;
/**
* @brief If an OCD is connected over JTAG. set breakpoint 0 to the given function
* address. Do nothing otherwise.
* @param fn Pointer to the target breakpoint position
*/
void esp_set_breakpoint_if_jtag(void *fn);
/**
* @brief Set a watchpoint to break/panic when a certain memory range is accessed.
*
* @param no Watchpoint number. On the ESP32, this can be 0 or 1.
* @param adr Base address to watch
* @param size Size of the region, starting at the base address, to watch. Must
* be one of 2^n, with n in [0..6].
* @param flags One of ESP_WATCHPOINT_* flags
*
* @return ESP_ERR_INVALID_ARG on invalid arg, ESP_OK otherwise
*
* @warning The ESP32 watchpoint hardware watches a region of bytes by effectively
* masking away the lower n bits for a region with size 2^n. If adr does
* not have zero for these lower n bits, you may not be watching the
* region you intended.
*/
esp_err_t esp_set_watchpoint(int no, void *adr, int size, int flags);
/**
* @brief Clear a watchpoint
*
* @param no Watchpoint to clear
*
*/
void esp_clear_watchpoint(int no);
/**
* Get the first frame of the current stack's backtrace
*
* Given the following function call flow (B -> A -> X -> esp_backtrace_get_start),
* this function will do the following.
* - Flush CPU registers and window frames onto the current stack
* - Return PC and SP of function A (i.e. start of the stack's backtrace)
* - Return PC of function B (i.e. next_pc)
*
* @note This function is implemented in assembly
*
* @param[out] pc PC of the first frame in the backtrace
* @param[out] sp SP of the first frame in the backtrace
* @param[out] next_pc PC of the first frame's caller
*/
extern void esp_backtrace_get_start(uint32_t *pc, uint32_t *sp, uint32_t *next_pc);
/**
* Get the next frame on a stack for backtracing
*
* Given a stack frame(i), this function will obtain the next stack frame(i-1)
* on the same call stack (i.e. the caller of frame(i)). This function is meant to be
* called iteratively when doing a backtrace.
*
* Entry Conditions: Frame structure containing valid SP and next_pc
* Exit Conditions:
* - Frame structure updated with SP and PC of frame(i-1). next_pc now points to frame(i-2).
* - If a next_pc of 0 is returned, it indicates that frame(i-1) is last frame on the stack
*
* @param[inout] frame Pointer to frame structure
*
* @return
* - True if the SP and PC of the next frame(i-1) are sane
* - False otherwise
*/
bool esp_backtrace_get_next_frame(esp_backtrace_frame_t *frame);
/**
* @brief Print the backtrace of the current stack
*
* @param depth The maximum number of stack frames to print (should be > 0)
*
* @return
* - ESP_OK Backtrace successfully printed to completion or to depth limit
* - ESP_FAIL Backtrace is corrupted
*/
esp_err_t esp_backtrace_print(int depth);
#endif
#ifdef __cplusplus
}
#endif
|
notapatch/product_feedback_app_rails
|
app/controllers/suggestions_controller.rb
|
<reponame>notapatch/product_feedback_app_rails<gh_stars>0
# Controller for listing user-submitted suggestions on the homepage layout.
class SuggestionsController < ApplicationController
  layout "homepage"

  # The suggestions index is publicly readable; every other action still
  # requires an authenticated user.
  skip_before_action :authenticate_user!, only: [:index]

  # GET /suggestions
  # Builds the suggestion listing, filtered by category and sorted by the
  # requested column, remembers this page for post-action redirects, and
  # authorizes the resulting feedback collection.
  def index
    index_query = Suggestions::SuggestionsIndex.new
    @results = index_query.suggestions_index(category: params[:category],
                                             sort: params[:column_sort])
    store_last_index_page
    authorize @results.feedbacks
  end
end
|
akanksha2306/DSA
|
src/NumberPrinting/num1.java
|
package NumberPrinting;
/**
 * Prints a 5x5 number pattern in which every character of row i is the
 * digit i, i.e. 11111 / 22222 / ... / 55555, each row followed by a
 * single trailing space and a newline (matching the original output).
 */
public class num1 {
    public static void main(String[] args) {
        for (int row = 1; row <= 5; row++) {
            // Build the row once, then emit it followed by the trailing " ".
            StringBuilder line = new StringBuilder();
            for (int repeat = 0; repeat < 5; repeat++) {
                line.append(row);
            }
            System.out.print(line);
            System.out.println(" ");
        }
    }
}
|
nao20010128nao/Neapolitan
|
app/src/main/java/jadx/core/dex/attributes/nodes/JumpInfo.java
|
package jadx.core.dex.attributes.nodes;
import jadx.core.utils.*;
/**
 * Immutable value object describing a jump between two instruction offsets.
 * Two jumps are equal exactly when both their source and destination match.
 */
public class JumpInfo {
    private final int src;
    private final int dest;

    public JumpInfo(int src, int dest) {
        this.src = src;
        this.dest = dest;
    }

    public int getSrc() {
        return src;
    }

    public int getDest() {
        return dest;
    }

    @Override
    public int hashCode() {
        // Same value as before: destination weighted by 31, plus source.
        return dest * 31 + src;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Strict class comparison (not instanceof), preserving the original
        // symmetry guarantee with respect to potential subclasses.
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        JumpInfo that = (JumpInfo) obj;
        return this.src == that.src && this.dest == that.dest;
    }

    @Override
    public String toString() {
        return "JUMP: " + InsnUtils.formatOffset(src) + " -> " + InsnUtils.formatOffset(dest);
    }
}
|
dusg/java_lab
|
src/main/java/j2d/color/ColorPanel.java
|
<gh_stars>0
package j2d.color;
import j2d.Utils;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
/**
 * Swing panel that paints an RGB Venn diagram: three overlapping circles
 * (red, green, blue) whose pairwise and triple intersections are filled
 * with the corresponding additive colour mixes of the {@code red},
 * {@code green} and {@code blue} channel fields.
 */
public class ColorPanel extends JPanel {
    // Channel intensities (expected 0-255) used when constructing fill colours.
    // NOTE(review): package-private and mutated externally, presumably by a
    // sibling control panel — confirm against the callers.
    int red;
    int blue;
    int green;

    public ColorPanel() {
        setPreferredSize(new Dimension(500, 500));
        setBackground(Color.white);
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        Graphics2D g2 = (Graphics2D) g;
        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        // The three base circles: red on top, green lower-left, blue lower-right.
        Shape rc = new Ellipse2D.Double(100, 113, 200, 200);
        Shape gc = new Ellipse2D.Double(50, 200, 200, 200);
        Shape bc = new Ellipse2D.Double(150, 200, 200, 200);
        Area ra = new Area(rc);
        Area ga = new Area(gc);
        Area ba = new Area(bc);
        // Pairwise intersections: red∩green, green∩blue, blue∩red.
        // (Computed from the full circles before any subtraction below.)
        Area rga = new Area(rc);
        rga.intersect(ga);
        Area gba = new Area(gc);
        gba.intersect(ba);
        Area bra = new Area(bc);
        bra.intersect(ra);
        // Triple intersection red∩green∩blue.
        Area rgba = new Area(rga);
        rgba.intersect(ba);
        // Carve the pairwise overlaps out of each base circle so the pure
        // single-colour regions do not get overpainted by the mixes below.
        ra.subtract(rga);
        ra.subtract(bra);
        ga.subtract(rga);
        ga.subtract(gba);
        ba.subtract(bra);
        ba.subtract(gba);
        // Fill order matters: singles first, then pairwise mixes, then the
        // triple mix last so the centre shows the full red+green+blue colour.
        g2.setColor(new Color(red, 0,0));
        g2.fill(ra);
        g2.setColor(new Color(0, green, 0));
        g2.fill(ga);
        g2.setColor(new Color(0, 0, blue));
        g2.fill(ba);
        g2.setColor(new Color(red, green, 0));
        g2.fill(rga);
        g2.setColor(new Color(0, green, blue));
        g2.fill(gba);
        g2.setColor(new Color(red, 0, blue));
        g2.fill(bra);
        g2.setColor(new Color(red, green, blue));
        g2.fill(rgba);
        // Outline all three circles on top of the fills.
        g2.setColor(Color.black);
        g2.draw(rc);
        g2.draw(gc);
        g2.draw(bc);
    }
}
|
visruth/Automotion-Java
|
src/test/java/SystemHelperTest.java
|
import org.junit.Assert;
import org.junit.Test;
import util.general.SystemHelper;
import java.util.HashMap;
import java.util.Map;
/**
 * Unit tests for {@code SystemHelper}'s hex-to-ARGB colour conversion.
 */
public class SystemHelperTest {

    /**
     * Verifies that 6-digit hex colours convert to rgb(...) strings and
     * 8-digit (alpha-prefixed) hex colours convert to rgba(...) strings.
     */
    @Test
    public void testThatColorCanBeConvertedFromHexToRgb() {
        Map<String, String> expectedToHex = new HashMap<>();
        expectedToHex.put("rgb(0,0,0)", "#000000");
        expectedToHex.put("rgb(255,255,255)", "#FFFFFF");
        expectedToHex.put("rgba(255,255,255,1.0)", "#FFFFFFFF");
        expectedToHex.put("rgba(255,255,255,0.0)", "#00FFFFFF");
        expectedToHex.put("rgba(255,255,255,0.2)", "#33FFFFFF");
        expectedToHex.put("rgba(255,255,255,0.5)", "#80FFFFFF");
        expectedToHex.put("rgb(251,220,220)", "#fbdcdc");
        expectedToHex.forEach((expected, hex) ->
                Assert.assertEquals(expected, SystemHelper.hexStringToARGB(hex)));
    }
}
|
weixingtai/FrameworkAlpha
|
module_message/src/main/java/com/alpha/module_message/MessageFragment.java
|
<reponame>weixingtai/FrameworkAlpha
package com.alpha.module_message;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.alpha.module_common.base.BaseFragment;
/**
 * Fragment hosting the message tab UI.
 * Obtain instances through {@link #getInstance()} rather than the constructor.
 */
public class MessageFragment extends BaseFragment {
    // Public no-argument constructor, kept for framework instantiation.
    public MessageFragment(){
    }
    // Factory method; returns a fresh instance on every call (no arguments to bundle).
    public static MessageFragment getInstance(){
        return new MessageFragment();
    }
    @Override
    public View onCreateFragmentView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // attachToRoot is false, so the inflated view is returned unattached
        // for the caller (BaseFragment) to place into the hierarchy.
        return inflater.inflate(R.layout.activity_message,container,false);
    }
}
|
limhawjia/DukeAcademy
|
src/main/java/com/dukeacademy/model/notes/Note.java
|
package com.dukeacademy.model.notes;
import static com.dukeacademy.commons.util.CollectionUtil.requireAllNonNull;
import java.util.UUID;
/**
* Represents a Note in the note bank. Notes can be created by users to store information in the form text or a
* simple sketch. The Note class serves as a model for a user's notes. It contains a title, the text contents and
* also a unique sketch id used to name the image file storing the sketch. Notes are identified from each other by
* their unique sketch ids.
*/
public class Note {
    private final String title;
    private final String content;
    private final UUID sketchId;

    /**
     * Creates a new note with a freshly generated unique sketch id.
     *
     * @param title note title; must not be null
     * @param content note text contents; must not be null
     */
    public Note(String title, String content) {
        requireAllNonNull(title, content);
        this.title = title;
        this.content = content;
        this.sketchId = UUID.randomUUID();
    }

    /**
     * Creates a note with an explicit sketch id (e.g. when restoring from storage).
     * NOTE(review): unlike the other constructor, this one performs no null
     * checks — confirm whether existing callers rely on that before tightening.
     *
     * @param title note title
     * @param content note text contents
     * @param sketchId unique id naming the sketch image file for this note
     */
    public Note(String title, String content, UUID sketchId) {
        this.title = title;
        this.content = content;
        this.sketchId = sketchId;
    }

    public String getTitle() {
        return title;
    }

    public String getContent() {
        return content;
    }

    public UUID getSketchId() {
        return sketchId;
    }

    /**
     * Returns a new Note instance with the same title and unique sketch id, but with its contents updated to the
     * String provided. This new Note instance represents the same note, but with its contents changed. This method
     * is used to maintain immutability of the Note class.
     * @param noteContents the new contents for the note to be updated with
     * @return the updated note
     */
    public Note withNewNoteContents(String noteContents) {
        return new Note(this.title, noteContents, this.sketchId);
    }

    /**
     * Two note instances are equal if their unique sketch ids are the same.
     * @param object the object to be compared to
     * @return true if the object is the same note
     */
    @Override
    public boolean equals(Object object) {
        if (object instanceof Note) {
            return this.sketchId.equals(((Note) object).sketchId);
        }
        return false;
    }

    /**
     * Consistent with {@link #equals(Object)}: notes that compare equal share
     * a sketch id and therefore the same hash code. This override was
     * previously missing, which broke the equals/hashCode contract and made
     * Note unusable as a key in hash-based collections.
     * @return hash code derived from the unique sketch id
     */
    @Override
    public int hashCode() {
        return sketchId.hashCode();
    }

    @Override
    public String toString() {
        // NOTE(review): the leading getTitle() duplicates the " Title: " field
        // below; output kept as-is in case callers depend on this format.
        final StringBuilder builder = new StringBuilder();
        builder.append(getTitle())
                .append(" Title: ")
                .append(title)
                .append(" Content: ")
                .append(content)
                .append(" SketchId: ")
                .append(sketchId);
        return builder.toString();
    }
}
|
fbuloup/docometre
|
MatlabEngine/matlabcontrol-4.1.0/matlabcontrol/src/matlabcontrol/JMIWrapper.java
|
/*******************************************************************************
* Copyright or © or Copr. Institut des Sciences du Mouvement
* (CNRS & Aix Marseille Université)
*
* The DOCoMETER Software must be used with a real time data acquisition
* system marketed by ADwin (ADwin Pro and Gold, I and II) or an Arduino
* Uno. This software, created within the Institute of Movement Sciences,
* has been developed to facilitate their use by a "neophyte" public in the
* fields of industrial computing and electronics. Students, researchers or
* engineers can configure this acquisition system in the best possible
* conditions so that it best meets their experimental needs.
*
* This software is governed by the CeCILL-B license under French law and
* abiding by the rules of distribution of free software. You can use,
* modify and/ or redistribute the software under the terms of the CeCILL-B
* license as circulated by CEA, CNRS and INRIA at the following URL
* "http://www.cecill.info".
*
* As a counterpart to the access to the source code and rights to copy,
* modify and redistribute granted by the license, users are provided only
* with a limited warranty and the software's author, the holder of the
* economic rights, and the successive licensors have only limited
* liability.
*
* In this respect, the user's attention is drawn to the risks associated
* with loading, using, modifying and/or developing or reproducing the
* software by the user in light of its specific status of free software,
* that may mean that it is complicated to manipulate, and that also
* therefore means that it is reserved for developers and experienced
* professionals having in-depth computer knowledge. Users are therefore
* encouraged to load and test the software's suitability as regards their
* requirements in conditions enabling the security of their systems and/or
* data to be ensured and, more generally, to use and operate it in the
* same conditions as regards security.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-B license and that you accept its terms.
*
* Contributors:
* - <NAME> - <EMAIL> - initial API and implementation [25/03/2020]
******************************************************************************/
package matlabcontrol;
/*
* Copyright (c) 2013, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
* - Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other materials provided with the distribution.
* - Neither the name of matlabcontrol nor the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.mathworks.jmi.Matlab;
import com.mathworks.jmi.NativeMatlab;
import java.awt.AWTEvent;
import java.awt.EventQueue;
import java.awt.Toolkit;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.atomic.AtomicReference;
import matlabcontrol.MatlabProxy.MatlabThreadCallable;
import matlabcontrol.MatlabProxy.MatlabThreadProxy;
/**
* Interacts with MATLAB via the undocumented Java MATLAB Interface (JMI).
* <br><br>
* This code is inspired by <a href="mailto:<EMAIL>">Kamin Whitehouse</a>'s
* <a href="http://www.cs.virginia.edu/~whitehouse/matlab/JavaMatlab.html">MatlabControl</a>. Fixes to concurrency
* bugs in this class have been aided by the feedback of several matlabcontrol users, thank you for your feedback!
* <br><br>
* This class runs inside of MATLAB's Java Virtual Machine and relies upon the Java MATLAB Interface which is
* distributed by MathWorks as {@code jmi.jar}. It allows for Java to send {@code eval} and {@code feval} statements to
* MATLAB and receive results. {@code jmi.jar} is <b>not</b> distributed with matlabcontrol as it is the property of
* MathWorks. If you wish to compile the source code you will need to reference the version of {@code jmi.jar} that is
* distributed with your copy of MATLAB. It is located at {@code matlabroot/java/jar/jmi.jar} where {@code matlabroot}
* is the location of your MATLAB installation. The location of {@code matlabroot} can be determined by executing the
* {@code matlabroot} command in the MATLAB Command Window.
* <br><br>
* This is the only class in matlabcontrol which directly links against code in {@code jmi.jar}. (And therefore also the
* only class that needs {@code jmi.jar} to be on the classpath in order to compile.) {@link Configuration} also uses
* code in {@code jmi.jar} but uses reflection to interact with it.
*
* @since 3.0.0
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
class JMIWrapper
{
    /** Stateless singleton that performs the actual eval/feval operations on the MATLAB thread. */
    private static final MatlabThreadOperations THREAD_OPERATIONS = new MatlabThreadOperations();

    /** The AWT system event queue, pumped manually when a call originates on the EDT (see invokeAndWait). */
    private static final EventQueue EVENT_QUEUE = Toolkit.getDefaultToolkit().getSystemEventQueue();

    /** Reflective handle to EventQueue's protected dispatchEvent(AWTEvent) method. */
    private static final Method EVENT_QUEUE_DISPATCH_METHOD;

    static
    {
        try
        {
            EVENT_QUEUE_DISPATCH_METHOD = EventQueue.class.getDeclaredMethod("dispatchEvent", AWTEvent.class);
        }
        catch(NoSuchMethodException e)
        {
            throw new IllegalStateException("java.awt.EventQueue's protected void dispatchEvent(java.awt.AWTEvent) " +
                    "method could not be found", e);
        }
        // dispatchEvent is protected; make it invocable from this class.
        EVENT_QUEUE_DISPATCH_METHOD.setAccessible(true);
    }

    // Not instantiable; all operations are static.
    private JMIWrapper() { }

    /**
     * Exits MATLAB without waiting for MATLAB to return, because MATLAB will not return when exiting.
     *
     * @throws MatlabInvocationException
     */
    static void exit()
    {
        Runnable runnable = new Runnable()
        {
            @Override
            public void run()
            {
                try
                {
                    Matlab.mtFevalConsoleOutput("exit", null, 0);
                }
                //This should never fail, and if it does there is no way to consistently report it back to the caller
                //because this method does not block
                catch(Exception e) { }
            }
        };

        // Run directly when already on the MATLAB thread, otherwise schedule for when MATLAB is idle.
        if(NativeMatlab.nativeIsMatlabThread())
        {
            runnable.run();
        }
        else
        {
            Matlab.whenMatlabIdle(runnable);
        }
    }

    //The following functions wait for MATLAB to complete the computation before returning
    //See MatlabProxy for the method documentation, acts as if running inside MATLAB
    //(A LocalMatlabProxy is just a thin wrapper around these methods)

    static void setVariable(final String variableName, final Object value) throws MatlabInvocationException
    {
        invokeAndWait(new MatlabThreadCallable<Void>()
        {
            @Override
            public Void call(MatlabThreadProxy proxy) throws MatlabInvocationException
            {
                proxy.setVariable(variableName, value);
                return null;
            }
        });
    }

    static Object getVariable(final String variableName) throws MatlabInvocationException
    {
        return invokeAndWait(new MatlabThreadCallable<Object>()
        {
            @Override
            public Object call(MatlabThreadProxy proxy) throws MatlabInvocationException
            {
                return proxy.getVariable(variableName);
            }
        });
    }

    static void eval(final String command) throws MatlabInvocationException
    {
        invokeAndWait(new MatlabThreadCallable<Void>()
        {
            @Override
            public Void call(MatlabThreadProxy proxy) throws MatlabInvocationException
            {
                proxy.eval(command);
                return null;
            }
        });
    }

    static Object[] returningEval(final String command, final int nargout) throws MatlabInvocationException
    {
        return invokeAndWait(new MatlabThreadCallable<Object[]>()
        {
            @Override
            public Object[] call(MatlabThreadProxy proxy) throws MatlabInvocationException
            {
                return proxy.returningEval(command, nargout);
            }
        });
    }

    static void feval(final String functionName, final Object... args) throws MatlabInvocationException
    {
        invokeAndWait(new MatlabThreadCallable<Void>()
        {
            @Override
            public Void call(MatlabThreadProxy proxy) throws MatlabInvocationException
            {
                proxy.feval(functionName, args);
                return null;
            }
        });
    }

    static Object[] returningFeval(final String functionName, final int nargout, final Object... args)
            throws MatlabInvocationException
    {
        return invokeAndWait(new MatlabThreadCallable<Object[]>()
        {
            @Override
            public Object[] call(MatlabThreadProxy proxy) throws MatlabInvocationException
            {
                return proxy.returningFeval(functionName, nargout, args);
            }
        });
    }

    /**
     * Invokes the {@code callable} on the main MATLAB thread and waits for the computation to be completed.
     *
     * Three cases are handled, depending on the calling thread:
     * 1. Already on the MATLAB thread: run the callable directly.
     * 2. On the AWT Event Dispatch Thread: schedule the callable and busy-wait
     *    while manually pumping AWT events — presumably so the EDT is not
     *    blocked while MATLAB may itself need the EDT (TODO confirm rationale).
     * 3. Any other thread: schedule the callable and block on a queue until
     *    the result arrives.
     *
     * @param <T>
     * @param callable
     * @return
     * @throws MatlabInvocationException
     */
    static <T> T invokeAndWait(final MatlabThreadCallable<T> callable) throws MatlabInvocationException
    {
        T result;

        if(NativeMatlab.nativeIsMatlabThread())
        {
            try
            {
                result = callable.call(THREAD_OPERATIONS);
            }
            catch(RuntimeException e)
            {
                // Wrap so the user-caused exception survives transport/reporting.
                ThrowableWrapper cause = new ThrowableWrapper(e);
                throw MatlabInvocationException.Reason.RUNTIME_EXCEPTION.asException(cause);
            }
        }
        else if(EventQueue.isDispatchThread())
        {
            // Holds the eventual result; null until MATLAB finishes.
            final AtomicReference<MatlabReturn<T>> returnRef = new AtomicReference<MatlabReturn<T>>();

            Matlab.whenMatlabIdle(new Runnable()
            {
                @Override
                public void run()
                {
                    MatlabReturn<T> matlabReturn;
                    try
                    {
                        matlabReturn = new MatlabReturn<T>(callable.call(THREAD_OPERATIONS));
                    }
                    catch(MatlabInvocationException e)
                    {
                        matlabReturn = new MatlabReturn<T>(e);
                    }
                    catch(RuntimeException e)
                    {
                        ThrowableWrapper cause = new ThrowableWrapper(e);
                        MatlabInvocationException userCausedException =
                                MatlabInvocationException.Reason.RUNTIME_EXCEPTION.asException(cause);
                        matlabReturn = new MatlabReturn<T>(userCausedException);
                    }
                    returnRef.set(matlabReturn);
                }
            });

            //Pump event queue while waiting for MATLAB to complete the computation
            try
            {
                while(returnRef.get() == null)
                {
                    if(EVENT_QUEUE.peekEvent() != null)
                    {
                        EVENT_QUEUE_DISPATCH_METHOD.invoke(EVENT_QUEUE, EVENT_QUEUE.getNextEvent());
                    }
                }
            }
            catch(InterruptedException e)
            {
                throw MatlabInvocationException.Reason.EVENT_DISPATCH_THREAD.asException(e);
            }
            catch(IllegalAccessException e)
            {
                throw MatlabInvocationException.Reason.EVENT_DISPATCH_THREAD.asException(e);
            }
            catch(InvocationTargetException e)
            {
                throw MatlabInvocationException.Reason.EVENT_DISPATCH_THREAD.asException(e);
            }

            //Process return
            MatlabReturn<T> matlabReturn = returnRef.get();

            //If exception was thrown, rethrow it
            if(matlabReturn.exception != null)
            {
                throw matlabReturn.exception;
            }
            //Return data computed by MATLAB
            else
            {
                result = matlabReturn.data;
            }
        }
        else
        {
            //Used to block the calling thread while waiting for MATLAB to finish computing
            final ArrayBlockingQueue<MatlabReturn<T>> returnQueue = new ArrayBlockingQueue<MatlabReturn<T>>(1);

            Matlab.whenMatlabIdle(new Runnable()
            {
                @Override
                public void run()
                {
                    MatlabReturn<T> matlabReturn;
                    try
                    {
                        matlabReturn = new MatlabReturn<T>(callable.call(THREAD_OPERATIONS));
                    }
                    catch(MatlabInvocationException e)
                    {
                        matlabReturn = new MatlabReturn<T>(e);
                    }
                    catch(RuntimeException e)
                    {
                        ThrowableWrapper cause = new ThrowableWrapper(e);
                        MatlabInvocationException userCausedException =
                                MatlabInvocationException.Reason.RUNTIME_EXCEPTION.asException(cause);
                        matlabReturn = new MatlabReturn<T>(userCausedException);
                    }
                    returnQueue.add(matlabReturn);
                }
            });

            try
            {
                //Wait for MATLAB's main thread to finish computation
                MatlabReturn<T> matlabReturn = returnQueue.take();

                //If exception was thrown, rethrow it
                if(matlabReturn.exception != null)
                {
                    throw matlabReturn.exception;
                }
                //Return data computed by MATLAB
                else
                {
                    result = matlabReturn.data;
                }
            }
            catch(InterruptedException e)
            {
                throw MatlabInvocationException.Reason.INTERRRUPTED.asException(e);
            }
        }

        return result;
    }

    /**
     * Data returned from MATLAB or exception thrown. The two different constructors are needed as opposed to using
     * {@code instanceof} because it is possible the user would want to <strong>return</strong> an exception. The
     * appropriate constructor will always be used because determining which overloaded constructor (or method) is done
     * at compile time, not run time.
     */
    private static class MatlabReturn<T>
    {
        // Exactly one of these two fields is non-null.
        final T data;
        final MatlabInvocationException exception;

        MatlabReturn(T value)
        {
            this.data = value;
            this.exception = null;
        }

        MatlabReturn(MatlabInvocationException exception)
        {
            this.data = null;
            this.exception = exception;
        }
    }

    /**
     * Interacts with MATLAB on MATLAB's main thread. Interacting on MATLAB's main thread is not enforced by this class,
     * that is done by its use in {@link JMIWrapper#invokeAndWait(matlabcontrol.MatlabProxy.MatlabThreadCallable)}.
     */
    private static class MatlabThreadOperations implements MatlabThreadProxy
    {
        @Override
        public void setVariable(String variableName, Object value) throws MatlabInvocationException
        {
            // assignin into the base workspace; 0 return arguments expected.
            this.returningFeval("assignin", 0, "base", variableName, value);
        }

        @Override
        public Object getVariable(String variableName) throws MatlabInvocationException
        {
            // evalin the variable name in the base workspace and take its single result.
            return this.returningFeval("evalin", 1, "base", variableName)[0];
        }

        @Override
        public void eval(String command) throws MatlabInvocationException
        {
            this.returningFeval("evalin", 0, "base", command);
        }

        @Override
        public Object[] returningEval(String command, int nargout) throws MatlabInvocationException
        {
            return this.returningFeval("evalin", nargout, "base", command);
        }

        @Override
        public void feval(String functionName, Object... args) throws MatlabInvocationException
        {
            this.returningFeval(functionName, 0, args);
        }

        @Override
        public Object[] returningFeval(String functionName, int nargout, Object... args) throws MatlabInvocationException
        {
            //Functions with no arguments should be passed null, not an empty array
            if(args != null && args.length == 0)
            {
                args = null;
            }

            try
            {
                Object matlabResult = Matlab.mtFevalConsoleOutput(functionName, args, nargout);

                // Normalize JMI's return into an Object[] of length nargout,
                // validating that MATLAB returned the expected argument count.
                Object[] resultArray;
                if(nargout == 0)
                {
                    resultArray = new Object[0];
                }
                else if(nargout == 1)
                {
                    resultArray = new Object[] { matlabResult };
                }
                //If multiple return values then an Object[] should have been returned
                else
                {
                    if(matlabResult == null)
                    {
                        String errorMsg = "Expected " + nargout + " return arguments, instead null was returned";
                        throw MatlabInvocationException.Reason.NARGOUT_MISMATCH.asException(errorMsg);
                    }
                    else if(!matlabResult.getClass().equals(Object[].class))
                    {
                        String errorMsg = "Expected " + nargout + " return arguments, instead 1 argument was returned";
                        throw MatlabInvocationException.Reason.NARGOUT_MISMATCH.asException(errorMsg);
                    }

                    resultArray = (Object[]) matlabResult;

                    if(nargout != resultArray.length)
                    {
                        String errorMsg = "Expected " + nargout + " return arguments, instead " + resultArray.length +
                                (resultArray.length == 1 ? " argument was" : " arguments were") + " returned";
                        throw MatlabInvocationException.Reason.NARGOUT_MISMATCH.asException(errorMsg);
                    }
                }

                return resultArray;
            }
            catch(Exception e)
            {
                throw MatlabInvocationException.Reason.INTERNAL_EXCEPTION.asException(new ThrowableWrapper(e));
            }
        }
    }
}
|
Azegor/mjc
|
test/sematest/valid/matmul.java
|
/*
 * Semantic-analysis test fixture (located under test/sematest/valid): it only
 * needs to be ACCEPTED by the compiler, not to run correctly.
 *
 * NOTE(review): at runtime this code would fail — the inner arrays of
 * `result` are never allocated (new int[length][] leaves them null), and each
 * loop counter is incremented before its first use, so indices range from 1
 * to `length` inclusive, overflowing the arrays. Presumably intentional for
 * the fixture; confirm against the test suite before "fixing".
 */
class MatrixMultiplication {
    public static void main (String[] args){}
    public int[][] calculate(int[][] left, int[][] right, int length) {
        int[][] result = new int[length][];
        int i = 0;
        while(i < length) {
            i = i + 1;
            int j = 0;
            while(j < length) {
                j = j + 1;
                int k = 0;
                while(k < length) {
                    k = k + 1;
                    result[i][j] = result[i][j] + left[i][k] * right[k][j];
                }
            }
        }
        return result;
    }
}
|
mahairod/catalina
|
java/com/sun/appserv/web/cache/mapping/ConstraintField.java
|
/*
* Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
*
*/
package com.sun.appserv.web.cache.mapping;
import java.util.logging.Logger;
import java.util.logging.Level;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import com.sun.enterprise.web.logging.pwc.LogDomains;
/** ConstraintField class represents a single Field and constraints on its
* values; Field name and its scope are inherited from the Field class.
*/
/** ConstraintField class represents a single Field and constraints on its
 * values; Field name and its scope are inherited from the Field class.
 */
public class ConstraintField extends Field {

    /** Human-readable names for each scope constant, indexed by the inherited
     *  {@code scope} field; used only for trace logging below. */
    private static final String[] SCOPE_NAMES = {
        "", "context.attribute", "request.header", "request.parameter",
        "request.cookie", "request.attribute", "session.attribute",
        "session.id"
    };

    private static Logger _logger = null;
    private static boolean _isTraceEnabled = false;

    // whether to cache if there was a match
    boolean cacheOnMatch = true;
    // whether to cache if there was a failure to match
    boolean cacheOnMatchFailure = false;

    // field value constraints (empty array means "any value is acceptable")
    ValueConstraint constraints[] = new ValueConstraint[0];

    /**
     * create a new cache field, given a string representation of the scope
     * @param name name of this field
     * @param scope scope of this field
     */
    public ConstraintField (String name, String scope)
        throws IllegalArgumentException {
        super(name, scope);
        // Lazily initialize the shared logger on first construction.
        // NOTE(review): this check-then-set is not thread-safe; likely benign
        // since the logger lookup is idempotent, but worth confirming.
        if (_logger == null) {
            _logger = LogDomains.getLogger(LogDomains.PWC_LOGGER);
            _isTraceEnabled = _logger.isLoggable(Level.FINE);
        }
    }

    /** set whether to cache should the constraints check pass
     * @param cacheOnMatch should the constraint check pass, should we cache?
     */
    public void setCacheOnMatch(boolean cacheOnMatch) {
        this.cacheOnMatch = cacheOnMatch;
    }

    /**
     * @return cache-on-match setting
     */
    public boolean getCacheOnMatch() {
        return cacheOnMatch;
    }

    /** set whether to cache should there be a failure forcing the constraint
     * @param cacheOnMatchFailure should there be a constraint check failure,
     *  enable cache?
     */
    public void setCacheOnMatchFailure(boolean cacheOnMatchFailure) {
        this.cacheOnMatchFailure = cacheOnMatchFailure;
    }

    /**
     * @return cache-on-match-failure setting
     */
    public boolean getCacheOnMatchFailure() {
        return cacheOnMatchFailure;
    }

    /**
     * add a constraint for this field
     * @param constraint one constraint associated with this field
     */
    public void addConstraint(ValueConstraint constraint) {
        if (constraint == null)
            return;

        // Grow-by-one copy; System.arraycopy replaces the previous manual loop.
        ValueConstraint results[] =
            new ValueConstraint[constraints.length + 1];
        System.arraycopy(constraints, 0, results, 0, constraints.length);
        results[constraints.length] = constraint;

        constraints = results;
    }

    /**
     * add an array of constraints for this field
     * @param vcs constraints associated with this field
     */
    public void setValueConstraints(ValueConstraint[] vcs) {
        if (vcs == null)
            return;

        // NOTE(review): stores the caller's array without copying, so later
        // external mutation is visible here — confirm callers before changing.
        constraints = vcs;
    }

    /** apply the constraints on the value of the field in the given request.
     * return a true if all the constraints pass; false when the
     * field is not found or the field value doesn't pass the caching
     * constraints.
     */
    public boolean applyConstraints(ServletContext context,
                                    HttpServletRequest request) {

        Object value = getValue(context, request);
        if (value == null) {
            // the field is not present in the request
            if (_isTraceEnabled) {
                _logger.fine(
                    "The constraint field " + name
                    + " is not found in the scope " + SCOPE_NAMES[scope]
                    + "; returning cache-on-match-failure: "
                    + cacheOnMatchFailure);
            }
            return cacheOnMatchFailure;
        } else if (constraints.length == 0) {
            // the field is present but has no value constraints
            if (_isTraceEnabled) {
                _logger.fine(
                    "The constraint field " + name + " value = "
                    + value.toString() + " is found in scope "
                    + SCOPE_NAMES[scope] + "; returning cache-on-match: "
                    + cacheOnMatch);
            }
            return cacheOnMatch;
        }

        // apply all the value constraints; first match wins
        for (int i = 0; i < constraints.length; i++) {
            ValueConstraint c = constraints[i];

            // one of the values matched
            if (c.matches(value)) {
                if (_isTraceEnabled) {
                    _logger.fine(
                        "The constraint field " + name + " value = "
                        + value.toString() + " is found in scope "
                        + SCOPE_NAMES[scope] + "; and matches with a value "
                        + c.toString() + "; returning cache-on-match: "
                        + cacheOnMatch);
                }
                return cacheOnMatch;
            }
        }

        // none of the values matched; should we cache?
        if (_isTraceEnabled) {
            _logger.fine(
                "The constraint field " + name + " value = "
                + value.toString() + " is found in scope " + SCOPE_NAMES[scope]
                + "; but didn't match any of the value constraints; "
                + "returning cache-on-match-failure = "
                + cacheOnMatchFailure);
        }
        return cacheOnMatchFailure;
    }
}
|
Bit-Nation/BITNATION-Pangea-mobile
|
src/PangeaCore/__tests__/UI/Account/Passcode/PinCode/index.js
|
import React from 'react';
import { shallow } from 'enzyme';
import configureStore from 'redux-mock-store';
import PinCodeScreen from '../../../../../UI/Account/Passcode/PinCode';
import navigatorMock from '../../../../../__mocks__/Navigator';
// Snapshot test suite for the PinCode screen.
describe('PinCodeScreen', () => {
  let pinCodeScreen;
  // Empty initial redux state for the mock store — presumably sufficient for
  // a shallow render; confirm if the screen starts reading specific slices.
  const initialStateMock = {};
  const storeMock = configureStore([]);
  // Props contract exercised here: a navigator plus the pin-entry callbacks.
  const propsMock = {
    navigator: navigatorMock,
    shouldShowCancel: false,
    onSubmit: jest.fn(),
    onCancel: jest.fn(),
  };
  beforeEach(() => {
    // Fresh shallow render with a newly built mock store before each test.
    pinCodeScreen = shallow(<PinCodeScreen {...propsMock} store={storeMock(initialStateMock)} />);
  });
  test('Rendering', () => {
    // Guards against unintended UI changes via snapshot comparison.
    expect(pinCodeScreen).toMatchSnapshot();
  });
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.