| repo_name (string, lengths 6–101) | path (string, lengths 4–300) | text (string, lengths 7–1.31M) |
|---|---|---|
dtcooper/jewpizza
|
backend/webcore/tests.py
|
<reponame>dtcooper/jewpizza
import json
from django.core import mail
from django.urls import reverse
from jew_pizza.test_utils import JewPizzaTestCase
class ViewRenderTests(JewPizzaTestCase):
    """Smoke tests: every public webcore page renders, and JS error reports are emailed."""

    def test_home_renders(self):
        self.assertPageRenders("webcore:home", "webcore/home.html", "jew.pizza - <NAME>")

    def test_bio_renders(self):
        self.assertPageRenders("webcore:bio", "webcore/bio.html", "Bio")

    def test_testimonials_renders(self):
        self.assertPageRenders("webcore:testimonials", "webcore/testimonials.html", "Testimonials")

    def test_social_renders(self):
        self.assertPageRenders("webcore:social", "webcore/social.html", "Social")

    def test_log_js_error(self):
        """POSTing a JS error report returns 204 and sends exactly one email."""
        self.assertEqual(len(mail.outbox), 0)
        payload = {
            "url": "http://example.com",
            "title": "test_title",
            "detail": "test_detail",
            "filename": "test_filename.js",
        }
        response = self.client.post(
            reverse("webcore:log-js-error"),
            json.dumps(payload),
            content_type="application/json",
        )
        self.assertEqual(response.status_code, 204)
        self.assertEqual(len(mail.outbox), 1)
|
Junlin-Yin/myLeetCode
|
mine/5-longest_palindromic_substring.cpp
|
<gh_stars>0
class Solution {
public:
    // Return true when s reads the same forwards and backwards.
    // (Kept for interface compatibility; longestPalindrome no longer needs it.)
    bool isPalindromic(std::string s) {
        if (s.length() == 1) return true;
        std::string r = s;
        std::reverse(r.begin(), r.end());
        return r == s;
    }

    // Longest palindromic substring via expand-around-center.
    // O(n^2) time, O(1) extra space.  The original enumerated candidate
    // ranges and re-checked each one by building a reversed copy, which is
    // O(n^3) overall and allocates a temporary string per candidate.
    std::string longestPalindrome(std::string s) {
        const int n = static_cast<int>(s.length());
        if (n == 0) return "";  // guard: empty input has no centers
        int bestStart = 0, bestLen = 1;
        // Each index seeds an odd-length center (i, i) and an even-length
        // center (i, i + 1); expand outwards while the ends match.
        for (int i = 0; i < n; ++i) {
            for (int parity = 0; parity < 2; ++parity) {
                int lo = i, hi = i + parity;
                while (lo >= 0 && hi < n && s[lo] == s[hi]) {
                    --lo;
                    ++hi;
                }
                int len = hi - lo - 1;  // window overshot by one on each side
                if (len > bestLen) {
                    bestLen = len;
                    bestStart = lo + 1;
                }
            }
        }
        return s.substr(bestStart, bestLen);
    }
};
|
Rusev12/JSCore
|
JSFundamentals/StringAndRegex-Exercises/FindVariableInSentences.js
|
function printOnlyVariable(string) {
    // Variable names start with a single leading underscore followed by
    // letters/digits only.  The original pattern /\b( _)(...)/ required a
    // literal " _" preceded by a word boundary, so a valid variable at the
    // very start of the input was silently missed; anchoring on
    // start-of-string OR whitespace fixes that.  The trailing \b still
    // rejects names with a trailing underscore ("_evenMoreInvalidVariable_"),
    // and a double underscore fails because the char class excludes "_".
    let pattern = /(?:^|\s)_([A-Za-z0-9]+)\b/g;
    let variables = [];
    let match;
    while ((match = pattern.exec(string)) !== null) {
        variables.push(match[1]);  // .trim() was redundant: group has no spaces
    }
    console.log(variables.join(','));
}
printOnlyVariable('__invalidVariable _evenMoreInvalidVariable_ _validVariable');
|
ngocjr7/geneticpython
|
geneticpython/core/operators/crossover/prim_crossover.py
|
<reponame>ngocjr7/geneticpython
"""
File: prim_crossover.py
Created by ngocjr7 on 2020-09-10 16:18
Email: <EMAIL>
Github: https://github.com/ngocjr7
Description:
"""
from __future__ import absolute_import
from geneticpython.core.operators.crossover import Crossover
from geneticpython.core.individual import Individual
from geneticpython.utils.validation import check_random_state
from geneticpython.utils import rset
from geneticpython.models.tree import Tree, RootedTree
from copy import deepcopy
from random import Random
from typing import Callable
import random
import numpy as np
class PrimCrossover(Crossover):
    """Crossover for individuals that decode to a :class:`Tree`.

    The operator takes the union of both parents' edge sets and grows each
    child as a random spanning tree of that union, Prim-style: repeatedly
    pick a random eligible edge leaving the connected component.
    """

    def cross(self, father: Individual, mother: Individual, random_state=None):
        """Return two children produced from ``father`` and ``mother``.

        With probability ``1 - self.pc`` the parents are returned unchanged
        (as clones).

        :raises ValueError: when a parent does not decode to a ``Tree``, the
            union of the parents' edges is disconnected, or ``encode`` is not
            implemented on the individual.
        """
        random_state = check_random_state(random_state)
        # idiom: the comparison already yields a bool (was `True if ... else False`)
        do_cross = random_state.random() <= self.pc
        children = father.clone(), mother.clone()
        if not do_cross:
            return children

        trees = children[0].decode(), children[1].decode()
        if not (isinstance(trees[0], Tree) and isinstance(trees[1], Tree)):
            raise ValueError(f"The PrimCrossover is only used on the individual that \
                decodes to an instance of Tree. \
                got father type: {type(trees[0])} and mother type {type(trees[1])}")

        # Union of both parents' edges (undirected) plus its adjacency list.
        edge_union = set()
        potential_adj = [list() for _ in range(trees[0].number_of_vertices)]
        for i in range(2):
            for u, v in trees[i].edges:
                # robustness: check both orientations so a same-oriented
                # duplicate from the second parent is not double-counted
                if (u, v) not in edge_union and (v, u) not in edge_union:
                    edge_union.add((u, v))
                    potential_adj[u].append(v)
                    potential_adj[v].append(u)

        for i in range(2):
            trees[i].initialize()
            # This relies on the tree's `parent` attribute being maintained by
            # initialize()/add_edge().  Only RootedTree is known to update
            # `parent` inside add_edge, hence this guard.
            if not isinstance(trees[i], RootedTree) and len(trees[i].edges) != 0:
                raise Exception("Unexpected error occurred when running PrimCrossover")
            if trees[i].root is not None:
                root = trees[i].root
            else:
                # bug fix: the random root was drawn but never assigned,
                # leaving `root` undefined (NameError) on rootless trees
                root = random_state.randint(0, trees[i].number_of_vertices)
            trees[i].parent[root] = root

            connected = set()   # vertices already in the growing spanning tree
            eligible = rset()   # frontier edges (tree vertex -> outside vertex)
            for u in range(trees[i].number_of_vertices):
                if trees[i].parent[u] != -1:
                    connected.add(u)
                    for v in potential_adj[u]:
                        if v not in connected:
                            eligible.add((u, v))

            while len(connected) < trees[i].number_of_vertices:
                u, v = eligible.random_choice(random_state)
                eligible.remove((u, v))
                if v not in connected:
                    trees[i].add_edge(u, v)
                    connected.add(v)
                    for w in potential_adj[v]:
                        if w not in connected:
                            eligible.add((v, w))
                if len(eligible) == 0 and len(connected) != trees[i].number_of_vertices:
                    raise ValueError('Cannot create random spanning tree from unconnected tree')

            trees[i].repair()

        try:
            children[0].encode(trees[0])
            children[1].encode(trees[1])
        except NotImplementedError:
            # idiom: dropped the original redundant `except Exception as e:
            # raise e` clause; other exceptions now propagate naturally
            raise ValueError("Cannot call encode method. PrimCrossover requires encode method in Individual")
        return children[0], children[1]
|
battheresa/acm-practice
|
src/hanging_out.cpp
|
<reponame>battheresa/acm-practice
#include <iostream>
using namespace std;
int main() {
int max, events, current = 0, added, denied = 0;
string action;
cin >> max >> events;
for (int i = 0; i < events; i++) {
cin >> action >> added;
if (action == "enter") {
if (current + added <= max)
current += added;
else
denied++;
}
else if (action == "leave") {
current -= added;
if (current < 0)
current = 0;
}
}
cout << denied << endl;
return 0;
}
|
crici/gradle-native
|
subprojects/internal-testing/src/main/java/dev/nokee/internal/testing/file/TestNameTestDirectoryProvider.java
|
<filename>subprojects/internal-testing/src/main/java/dev/nokee/internal/testing/file/TestNameTestDirectoryProvider.java
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.nokee.internal.testing.file;
import lombok.val;
import java.io.File;
import java.nio.file.Path;
/**
 * File fixture handing each test its own unique temporary directory.
 */
public final class TestNameTestDirectoryProvider extends AbstractTestDirectoryProvider {
    // NOTE: the space in "test files" is intentional — it exercises paths containing spaces.
    private static final String DEFAULT_ROOT = "build/tmp/test files";

    public TestNameTestDirectoryProvider(Class<?> klass) {
        this(new File(DEFAULT_ROOT).toPath(), klass);
    }

    public TestNameTestDirectoryProvider(Path root, Class<?> klass) {
        super(root, klass);
    }

    public static TestNameTestDirectoryProvider newInstance(Class<?> testClass) {
        return new TestNameTestDirectoryProvider(testClass);
    }

    public static TestNameTestDirectoryProvider newInstance(String methodName, Object target) {
        final TestNameTestDirectoryProvider provider = new TestNameTestDirectoryProvider(target.getClass());
        provider.init(methodName);
        return provider;
    }
}
|
hindog/neo4j-gremlin-bolt
|
src/integTest/java/ta/nemahuta/neo4j/SimpleCreateAndLoadTest.java
|
package ta.nemahuta.neo4j;
import com.google.common.collect.ImmutableList;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.*;
import static ta.nemahuta.neo4j.AbstractExampleGraphTest.ExampleGraphs.COMPLEX;
import static ta.nemahuta.neo4j.AbstractExampleGraphTest.ExampleGraphs.SIMPLE;
/**
 * Integration tests: create example graphs through the Neo4j/Gremlin
 * implementation, mutate them, and read them back — including concurrent
 * streaming of example graphs from multiple threads.
 */
class SimpleCreateAndLoadTest extends AbstractExampleGraphTest {

    // Counted down by each parallel task as it starts; see parallelStream().
    private final CountDownLatch latch = new CountDownLatch(1);

    @Test
    void checkCreateAndReadSmallGraph() throws Exception {
        checkGraph(SIMPLE);
        withGraph(graph -> {
            // Locate the vertex named "josh" in the freshly created graph.
            final Optional<Vertex> joshOpt = ImmutableList.copyOf(graph.vertices()).stream()
                    .filter(v -> Objects.equals(v.property("name").value(), "josh")).findAny();
            assertTrue(joshOpt.isPresent(), "Josh is not present");
            final Vertex josh = joshOpt.get();
            // Property removal must be visible immediately within the session.
            assertTrue(josh.property("age").isPresent());
            josh.property("age").remove();
            assertFalse(josh.property("age").isPresent());
            assertTrue(josh.edges(Direction.OUT).hasNext(), "No out edges for josh");
            // Vertex removal must be visible inside the same transaction...
            josh.remove();
            assertTrue(ImmutableList.copyOf(graph.vertices()).stream()
                    .noneMatch(v -> Objects.equals(v.property("name").value(), "josh")));
            graph.tx().commit();
        });
        // ...and still gone when read back through a fresh graph session.
        withGraph(graph -> {
            assertTrue(ImmutableList.copyOf(graph.vertices()).stream()
                    .noneMatch(v -> Objects.equals(v.property("name").value(), "josh")));
        });
    }

    @Test
    void openAndCloseFactory() throws Exception {
        // Data written before the factory is reset must survive the reset.
        checkGraph(SIMPLE);
        resetGraphFactory();
        withGraph(graph -> {
            assertTrue(ImmutableList.copyOf(graph.vertices()).stream()
                    .anyMatch(v -> Objects.equals(v.property("name").value(), "josh")));
        });
    }

    @Test
    void checkCreateAndReadHugeGraph() throws Exception {
        checkGraph(COMPLEX);
    }

    @Test
    void parallelTest1() throws Exception {
        parallelStream(100, "/graph1-example.xml");
    }

    @Test
    void parallelTest2() throws Exception {
        parallelStream(5, "/graph2-example.xml");
    }

    @Test
    void edgesAreRegisteredOnBothEnds() throws Exception {
        final AtomicReference<Object> vertexId = new AtomicReference<Object>();
        withGraph(graph -> {
            assertFalse(graph.vertices().hasNext());
            final Vertex vertex1 = graph.addVertex("hallo");
            vertexId.set(vertex1.id());
            final Vertex vertex2 = graph.addVertex("hejsan");
            assertFalse(vertex1.edges(Direction.OUT, "svenska").hasNext());
            vertex1.addEdge("svenska", vertex2);
            // The edge must be observable from both endpoints, also when the
            // target vertex is re-fetched by id.
            assertEquals(vertex1, graph.vertices(vertex1.id()).next());
            assertTrue(vertex2.edges(Direction.IN, "svenska").hasNext());
            assertTrue(graph.vertices(vertex2.id()).next().edges(Direction.IN, "svenska").hasNext());
        });
    }

    /**
     * Streams the given example graph {@code max} times concurrently and
     * asserts every task completed without throwing.
     */
    private void parallelStream(final int max, final String source) throws InterruptedException {
        final ExecutorService executor = Executors.newCachedThreadPool();
        final List<Future<Boolean>> futures = executor.invokeAll(
                Stream.<Callable<Boolean>>generate(() -> () -> {
                    latch.countDown();
                    try {
                        streamGraph(source);
                    } catch (Exception e) {
                        throw new IllegalStateException(e);
                    }
                    return true;
                }).limit(max).collect(Collectors.toList())
        );
        executor.shutdown();
        assertTrue(futures.stream().allMatch(f -> {
            try {
                return f.get();
            } catch (InterruptedException | ExecutionException e) {
                throw new IllegalStateException(e);
            }
        }));
    }
}
|
prologic/toolbox
|
recipe/dev/build/info.go
|
<gh_stars>10-100
package build
import (
"encoding/json"
"errors"
"github.com/go-git/go-git/v5"
"github.com/tidwall/gjson"
"github.com/watermint/toolbox/essentials/go/es_project"
"github.com/watermint/toolbox/essentials/log/esl"
"github.com/watermint/toolbox/infra/app"
"github.com/watermint/toolbox/infra/control/app_control"
"github.com/watermint/toolbox/infra/recipe/rc_recipe"
"github.com/watermint/toolbox/infra/security/sc_zap"
"github.com/watermint/toolbox/quality/infra/qt_errors"
"github.com/watermint/toolbox/resources"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
// Info is the `dev build info` recipe: it generates resources/build/info.json.
type Info struct {
	rc_recipe.RemarkSecret // NOTE(review): presumably marks the recipe as hidden/secret in the CLI — confirm
}
// Preset configures default values for the recipe; this recipe has none.
func (z *Info) Preset() {
}
// Exec gathers build metadata — git commit hash and branch, builder/app keys
// from the environment, and a UTC timestamp — and writes it as JSON to
// resources/build/info.json under the repository root.  A missing key does
// not fail the build; it only marks the build as not production-ready.
func (z *Info) Exec(c app_control.Control) error {
	l := c.Log()
	productionReady := true
	prjBase, err := es_project.DetectRepositoryRoot()
	if err != nil {
		l.Debug("Unable to detect the repository root", esl.Error(err))
		return err
	}
	prjGit := filepath.Join(prjBase, ".git")
	repo, err := git.PlainOpen(prjGit)
	if err != nil {
		l.Debug("Unable to open the .git", esl.Error(err))
		return err
	}
	hash, err := repo.ResolveRevision("HEAD")
	if err != nil {
		l.Debug("Unable to detect the hash", esl.Error(err))
		return err
	}
	head, err := repo.Head()
	if err != nil {
		l.Debug("Unable to detect the head", esl.Error(err))
		return err
	}
	headName := string(head.Name())
	if !strings.HasPrefix(headName, "refs/heads") {
		l.Debug("Unexpected ref format", esl.String("head", headName))
		return errors.New("unexpected git refs")
	}
	branch := strings.ReplaceAll(headName, "refs/heads/", "")

	// Builder key is optional: os.LookupEnv already yields "" when unset,
	// so the original's explicit `xap = ""` reset was redundant.
	xap, found := os.LookupEnv(app.EnvNameToolboxBuilderKey)
	if !found {
		l.Info("Builder key not found. Please set the build key for production release", esl.String("key", app.EnvNameToolboxBuilderKey))
		productionReady = false
	}

	// idiom: short variable declaration (was `var zap string; zap = ...`).
	zap := sc_zap.NewZap(hash.String())
	appKeyData, found := os.LookupEnv(app.EnvNameToolboxAppKeys)
	if !found {
		l.Warn("App key data not found. Please set the build key for production release", esl.String("key", app.EnvNameToolboxAppKeys))
		zap = ""
		productionReady = false
	} else {
		if !gjson.Valid(appKeyData) {
			l.Warn("App key data is not look like a JSON data")
			return errors.New("invalid app key data format")
		}
		if err := sc_zap.Zap(zap, prjBase, []byte(appKeyData)); err != nil {
			l.Warn("Unable to zap the data", esl.Error(err))
			return err
		}
	}

	buildTimestamp := time.Now().UTC()
	info := resources.BuildInfo{
		Version:    app.BuildId,
		Hash:       hash.String(),
		Branch:     branch,
		Timestamp:  buildTimestamp.Format(time.RFC3339),
		Year:       buildTimestamp.Year(),
		Zap:        zap,
		Xap:        xap,
		Production: productionReady,
	}
	infoPath := filepath.Join(prjBase, "resources/build", "info.json")
	l.Info("Build info", esl.Any("branch", branch), esl.Any("hash", info.Hash), esl.String("version", app.BuildId), esl.Bool("releaseReady", productionReady))
	infoData, err := json.Marshal(info)
	if err != nil {
		l.Debug("Unable to marshal the data", esl.Error(err))
		return err
	}
	// 0600: the file may embed key material, keep it owner-only.
	if err := ioutil.WriteFile(infoPath, infoData, 0600); err != nil {
		l.Warn("Unable to write the file", esl.Error(err))
		return err
	}
	return nil
}
// Test reports that this recipe requires no dedicated test scenario.
func (z *Info) Test(c app_control.Control) error {
	return qt_errors.ErrorNoTestRequired
}
|
rveerama1/istio
|
pkg/test/framework/resource/flags_test.go
|
<reponame>rveerama1/istio
// Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"testing"
"github.com/google/go-cmp/cmp"
)
// TestValidate exercises the settings validation rules table-style: invalid
// flag combinations must error, and the single --revision flag must be
// folded into the Revisions map.
func TestValidate(t *testing.T) {
	type testCase struct {
		name         string
		settings     *Settings
		expectErr    bool
		expectedRevs RevVerMap
	}
	cases := []testCase{
		{
			name: "fail on deprecation and nocleanup",
			settings: &Settings{
				FailOnDeprecation: true,
				NoCleanup:         true,
			},
			expectErr: true,
		},
		{
			name: "fail on both revision and revisions flag",
			settings: &Settings{
				Revision:      "a",
				Compatibility: false,
				Revisions:     RevVerMap{"b": ""},
			},
			expectErr: true,
		},
		{
			name:      "fail when compatibility mode but no revisions",
			settings:  &Settings{Compatibility: true},
			expectErr: true,
		},
		{
			name: "revision flag converted to revvermap",
			settings: &Settings{
				Revision:      "a",
				Compatibility: false,
			},
			expectedRevs: RevVerMap{"a": ""},
		},
	}
	for _, tt := range cases {
		t.Run(tt.name, func(t *testing.T) {
			err := validate(tt.settings)
			if tt.expectErr {
				if err == nil {
					t.Error("expected error but got none")
				}
				return
			}
			if tt.expectedRevs == nil {
				return
			}
			if diff := cmp.Diff(tt.expectedRevs, tt.settings.Revisions); diff != "" {
				t.Errorf("unexpected revisions, got: %v, want: %v, diff: %v",
					tt.settings.Revisions, tt.expectedRevs, diff)
			}
		})
	}
}
|
gcapo123/b2b-commerce-on-lightning-quickstart
|
force-app/main/default/aura/EMC_OpportunityScore/EMC_OpportunityScoreHelper.js
|
<reponame>gcapo123/b2b-commerce-on-lightning-quickstart<filename>force-app/main/default/aura/EMC_OpportunityScore/EMC_OpportunityScoreHelper.js
({
    /**
     * Persists the opportunity score via the Apex action
     * `c.saveOpportunityScore`; on success fires the Mixpanel event, then
     * refreshes the record view and closes the quick action.
     */
    saveScore : function(component) {
        let action = component.get('c.saveOpportunityScore');
        let score = component.get('v.score');
        let reasons = component.get('v.reasons');
        let recordId = component.get('v.recordId');
        // Attach the record id and the serialized reasons to the payload.
        score['BaseId'] = recordId
        score['Insights'] = JSON.stringify(reasons)
        action.setParams({
            scoreData: score
        })
        action.setCallback(this, function(res){
            let retVal = res.getReturnValue();  // NOTE(review): unused — confirm whether the return value matters
            let state = res.getState();
            if(state === 'SUCCESS'){
                this.sendMixpanelEvent(
                    component,
                    $A.getCallback(function(){
                        // Refresh the record view, then dismiss the quick-action modal.
                        $A.get('e.force:refreshView').fire();
                        $A.get("e.force:closeQuickAction").fire();
                    }))
            } else if(state === 'INCOMPLETE'){
                console.log('INCOMPLETE', res.getError())
            } else {
                console.log('ERROR', res.getError());
            }
        })
        $A.enqueueAction(action);
    },
    /**
     * Fires the component-level `MixpanelEvent` with a fixed
     * 'SDO Event'/'Score Opportunity' payload, then invokes the callback
     * (if provided).
     */
    sendMixpanelEvent: function(component, callback){
        let mixpanelEvent = component.getEvent('MixpanelEvent');
        mixpanelEvent.setParams({
            eventName: 'SDO Event',
            payload: {
                action: 'Score Opportunity'
            }
        });
        mixpanelEvent.fire();
        if(callback){
            callback();
        }
    }
})
|
lczhai/Mds-iOS-Library
|
Source/BMController/ScanQR/QRShaowView.h
|
<filename>Source/BMController/ScanQR/QRShaowView.h<gh_stars>1-10
//
// QRShaowView.h
// MDSBaseLibrary
//
// Created by jony on 2018/10/11.
//
#import <UIKit/UIKit.h>
// Overlay view used on the QR-code scan screen ("Shaow" is the original
// spelling).  NOTE(review): rendering details are not visible in this header.
@interface QRShaowView : UIView
// Size of the effective scan area (original comment: 有效范围, "effective range").
@property (nonatomic,assign) CGSize showSize; /**< effective scan area */
- (void)showAnimation;  // start the scan animation
- (void)stopAnimation;  // stop the scan animation
@end
|
sunabove/ws_map_01
|
PR200_GOODMAP_ENGINE/src_ygis_engine/com/ynhenc/gis/projection/MgrToWgs.java
|
package com.ynhenc.gis.projection;
/**
 * Converts an MGRS-style UTM grid reference (e.g. "02CNR0634657742") into
 * WGS84 latitude/longitude.  The UTM inverse math (easting/northing/zoneCM
 * fields, setVariables() and the phi1/fact* series terms) comes from the
 * {@code UtmToWgs} superclass.
 */
public class MgrToWgs extends UtmToWgs {

    /**
     * Parse a fixed-width MGRS string and return {latitude, longitude} in
     * degrees.  Expected layout (15 chars):
     * zone(2) latZone(1) digraph1(1) digraph2(1) easting(5) northing(5).
     *
     * NOTE(review): no input validation — a shorter string throws
     * StringIndexOutOfBoundsException; confirm callers pre-validate.
     */
    public double[] convertMGRUTMToLatLong(String mgrutm) {
        double[] latlon = { 0.0, 0.0 };
        // Example input: 02CNR0634657742
        int zone = Integer.parseInt(mgrutm.substring(0, 2));
        String latZone = mgrutm.substring(2, 3);
        String digraph1 = mgrutm.substring(3, 4);  // 100km grid column letter
        String digraph2 = mgrutm.substring(4, 5);  // 100km grid row letter
        this.easting = Double.parseDouble(mgrutm.substring(5, 10));
        this.northing = Double.parseDouble(mgrutm.substring(10, 15));
        LatZoneList lz = new LatZoneList();
        double latZoneDegree = lz.getLatZoneDegree(latZone);
        // Approximate northing of the latitude band, snapped to a 2,000km
        // step (40,000,000 m ≈ Earth's meridional circumference).
        double a1 = latZoneDegree * 40000000 / 360.0;
        double a2 = 2000000 * Math.floor(a1 / 2000000.0);
        Digraphs digraphs = new Digraphs();
        double digraph2Index = digraphs.getDigraph2Index(digraph2);
        // Row-letter start index alternates with zone parity (MGRS 100km
        // row lettering scheme) — even zones start at 6.
        double startindexEquator = 1;
        if ((1 + zone % 2) == 1) {
            startindexEquator = 6;
        }
        double a3 = a2 + (digraph2Index - startindexEquator) * 100000;
        if (a3 <= 0) {
            a3 = 10000000 + a3;  // wrap within one 10,000km northing cycle
        }
        this.northing = a3 + this.northing;
        this.zoneCM = -183 + 6 * zone;  // central meridian of the UTM zone, degrees
        double digraph1Index = digraphs.getDigraph1Index(digraph1);
        // Column-letter offset depends on zone mod 3 (A–Z cycle split across
        // three zone groups) — TODO confirm against the MGRS specification.
        int a5 = 1 + zone % 3;
        double[] a6 = { 16, 0, 8 };
        double a7 = 100000 * (digraph1Index - a6[a5 - 1]);
        this.easting = this.easting + a7;
        this.setVariables();
        double latitude = 0;
        // Inverse Transverse Mercator series evaluated via superclass fields.
        latitude = 180 * (this.phi1 - this.fact1 * (this.fact2 + this.fact3 + this.fact4)) / Math.PI;
        if (latZoneDegree < 0) {
            latitude = 90 - latitude;
        }
        double d = this._a2 * 180 / Math.PI;
        double longitude = this.zoneCM - d;
        if (this.getHemisphere(latZone).equals("S")) {
            latitude = -latitude;
        }
        latlon[0] = latitude;
        latlon[1] = longitude;
        return latlon;
    }
}
|
minbox-projects/api-boot-admin
|
admin-services/src/main/java/org/minbox/framework/api/boot/admin/api/SystemUserApi.java
|
<gh_stars>1-10
package org.minbox.framework.api.boot.admin.api;
import com.gitee.hengboy.mybatis.pageable.Page;
import com.gitee.hengboy.mybatis.pageable.request.PageableRequest;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.minbox.framework.api.boot.admin.api.base.BaseApi;
import org.minbox.framework.api.boot.admin.api.request.*;
import org.minbox.framework.api.boot.admin.api.response.CurrentUserResponse;
import org.minbox.framework.api.boot.admin.common.constants.UrlConstants;
import org.minbox.framework.api.boot.admin.common.enums.Status;
import org.minbox.framework.api.boot.admin.common.exception.LogicException;
import org.minbox.framework.api.boot.admin.common.model.ApiResponse;
import org.minbox.framework.api.boot.admin.converter.SystemUserStruct;
import org.minbox.framework.api.boot.admin.entity.SystemUser;
import org.minbox.framework.api.boot.admin.service.SystemUserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import static org.minbox.framework.api.boot.admin.common.constants.UrlConstants.URL_ACTIONS;
import static org.minbox.framework.api.boot.admin.common.constants.UrlConstants.URL_FILTERS;
/**
 * System user API definition.
 *
 * @author 恒宇少年
 */
@RestController
@RequestMapping(value = UrlConstants.USER)
@Api(tags = "系统用户")
public class SystemUserApi extends BaseApi {
    /**
     * System user business-logic service.
     */
    @Autowired
    private SystemUserService systemUserService;

    /**
     * Query the system user list by conditions, with pagination.
     *
     * @return {@link ApiResponse} carrying the requested page of users
     * @throws LogicException on business-logic failure
     */
    @GetMapping(value = URL_FILTERS + "/pageable")
    @ApiOperation(value = "条件分页查询用户列表")
    public ApiResponse<Page<SystemUser>> findByPageable(@Valid SelectSystemUserByParamRequest request) throws LogicException {
        Page<SystemUser> page =
                PageableRequest.of(request.getPage(), request.getSize()).request(() -> systemUserService.findByParams(request));
        return ApiResponse.success().data(page);
    }

    /**
     * Enable system users.
     * Supports enabling several users at once by id ({@link SystemUser#getUserId()}).
     *
     * @return {@link ApiResponse}
     * @throws LogicException logic exception
     */
    @PostMapping(value = URL_ACTIONS + "/enable")
    public ApiResponse enableSystemUser(@Valid @RequestBody EnableSystemUserRequest request) throws LogicException {
        systemUserService.updateUserStatus(request.getUserIds(), Status.ENABLE);
        return ApiResponse.success();
    }

    /**
     * Disable system users.
     * Supports disabling several users at once by id ({@link SystemUser#getUserId()}).
     *
     * @return {@link ApiResponse}
     * @throws LogicException logic exception
     */
    @PostMapping(value = URL_ACTIONS + "/disable")
    public ApiResponse disableSystemUser(@Valid @RequestBody DisableSystemUserRequest request) throws LogicException {
        systemUserService.updateUserStatus(request.getUserIds(), Status.DISABLE);
        return ApiResponse.success();
    }

    /**
     * Logically delete system users (a status change, not a physical delete).
     * Supports deleting several users at once by id ({@link SystemUser#getUserId()}).
     *
     * @return {@link ApiResponse}
     * @throws LogicException logic exception
     */
    @DeleteMapping
    public ApiResponse removeSystemUser(@Valid @RequestBody RemoveSystemUserRequest request) throws LogicException {
        systemUserService.updateUserStatus(request.getUserIds(), Status.DELETE);
        return ApiResponse.success();
    }

    /**
     * Query basic information about the currently logged-in user.
     *
     * @return {@link SystemUser}
     * @throws LogicException on business-logic failure
     */
    @GetMapping
    @ApiOperation(value = "获取当前用户信息")
    public ApiResponse<CurrentUserResponse> getCurrentUserInfo() throws LogicException {
        String username = getCurrentUserName();
        SystemUser systemUser = systemUserService.findByUsernameAndCheck(username);
        CurrentUserResponse response = SystemUserStruct.INSTANCE.fromSystemUser(systemUser);
        // TODO user roles are not yet loaded from the database
        globalLogging.debug("获取当前登录用户:{},获取基本信息完成.", username);
        return ApiResponse.success().data(response);
    }

    /**
     * Add a system user.
     *
     * @param request {@link AddSystemUserRequest}
     * @return {@link ApiResponse} carrying the new user's id
     * @throws LogicException on business-logic failure
     */
    @PostMapping
    @ApiOperation(value = "添加用户")
    public ApiResponse<String> addSystemUser(@Valid @RequestBody AddSystemUserRequest request) throws LogicException {
        SystemUser systemUser = SystemUserStruct.INSTANCE.fromAddSystemUserRequest(request);
        String userId = systemUserService.addUser(systemUser);
        globalLogging.debug("用户:{},添加成功,用户编号为:{}", request.getUsername(), userId);
        return ApiResponse.success().data(userId);
    }
}
|
Nyran/schism
|
scm_gl_core/src/scm/gl_core/state_objects.h
|
// Copyright (c) 2012 <NAME> <<EMAIL>>
// Distributed under the Modified BSD License, see license.txt.
#ifndef SCM_GL_CORE_STATE_OBJECTS_H_INCLUDED
#define SCM_GL_CORE_STATE_OBJECTS_H_INCLUDED
#include <scm/gl_core/state_objects/state_objects_fwd.h>
#include <scm/gl_core/state_objects/blend_state.h>
#include <scm/gl_core/state_objects/depth_stencil_state.h>
#include <scm/gl_core/state_objects/rasterizer_state.h>
#include <scm/gl_core/state_objects/sampler_state.h>
#endif // SCM_GL_CORE_STATE_OBJECTS_H_INCLUDED
|
OtenMoten/DesignPatterns_Structural
|
src/Adapter_DuckTurkey/DuckClasses.java
|
package Adapter_DuckTurkey;
// Target interface of the adapter example (package Adapter_DuckTurkey):
// clients program against ducks.
interface IDuck {
    public void quack();    // make the duck sound
    public void flyLong();  // fly a long distance
}
// Concrete duck implementation; actions are printed to stdout.
class Mallard implements IDuck {
    @Override
    public void quack() {
        System.out.println("Quack Quack");
    }

    @Override
    public void flyLong() {
        System.out.println("I fly long");
    }
}
|
lechium/iOS1351Headers
|
System/Library/PrivateFrameworks/PhotosUICore.framework/PXGCaptureSpriteTextureProvider.h
|
<filename>System/Library/PrivateFrameworks/PhotosUICore.framework/PXGCaptureSpriteTextureProvider.h
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:16:16 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/PrivateFrameworks/PhotosUICore.framework/PhotosUICore
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
#import <PhotosUICore/PhotosUICore-Structs.h>
#import <PhotosUICore/PXGTextureProvider.h>
// classdump-generated header: a PXGTextureProvider subclass that supplies
// textures for capture (camera) sprites in the photos grid.
// NOTE(review): semantics below are inferred from signatures only — confirm.
@interface PXGCaptureSpriteTextureProvider : PXGTextureProvider
// Request textures for the sprites in the given range; returns the range handled.
-(NSRange)requestTexturesForSpritesInRange:(PXGSpriteIndexRange)arg1 geometries:(SCD_Struct_PX13*)arg2 styles:(SCD_Struct_PX77*)arg3 infos:(SCD_Struct_PX17*)arg4 inLayout:(id)arg5 ;
// Private request entry point keyed by a behavior flag and a request id.
-(void)_requestWithBehavior:(unsigned long long)arg1 requestID:(int)arg2 ;
@end
|
k-czajka/fh
|
fhdp/fhdp-commons/fhdp-commons-services/src/main/java/pl/fhframework/dp/commons/services/i18n/ServicesMessageHelper.java
|
<gh_stars>0
package pl.fhframework.dp.commons.services.i18n;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import pl.fhframework.core.i18n.MessageService;
@Component
public class ServicesMessageHelper {
@Autowired
private MessageService messageService;
public String getMessage(String messageKey, Object... params) {
return messageService.getBundle(ServicesMessageSourceConfig.SOURCE_NAME).getMessage(messageKey, params);
}
}
|
42Bastian/lyxass
|
opcode.h
|
<reponame>42Bastian/lyxass
/*
structure that describes an opcode
*/
#ifndef _OPCODE_H_
#define _OPCODE_H_
/* Descriptor for a single assembler opcode. */
struct opcode_s{
  char name[12];      /* mnemonic, NUL-terminated (max 11 chars) */
  int (*func)(int);   /* handler invoked for this opcode; the int in/out
                         semantics are defined by the caller — presumably
                         `misc` in, status out; confirm in the .c files */
  int misc;           /* opcode-specific value forwarded to the handler */
};

/* Search a table of opcodes for the given mnemonic.
   NOTE(review): return-value semantics (index vs. error code) are not
   visible in this header — confirm against the implementation. */
int SearchOpcode(const struct opcode_s *, const char *);
/* Variant that additionally returns the matched handler and its misc
   value through the out parameters. */
int SearchOpcode2(const struct opcode_s *, const char *, int (**)(int ),int *);
#endif /* _OPCODE_H_ */
|
A2-Collaboration/epics
|
modules/synApps_5_6/support/ip-2-13/ipApp/src/devMPC.c
|
/* devMPC.c
* Modifications:
* <NAME> 17-Feb-2001 Added support for TSP and auto-restart
* <NAME> 26-Oct-2002 Fixed problem with reading AMPS on recent MPC
* controllers, they don't send AMPS in the response
* <NAME> 1-Sep-2003 Changed software to use normal mpfSerial server,
* rather than custom server
* <NAME> 22-Sep-2004 Changed from MPF to asyn, and from C++ to C
INP or OUT has form @asyn(port address)command parameter
port is the asyn serial port name
address is the pump address
command is the pump command
 parameter is 1 or 2 depending on which of the two setpoints in the pair we use
Command Record Description
0 - SI Pump Status
1 - AI Read Pressure
2 - AI Read Current
3 - AI Read Voltage
4 - AI Read Pump Size
5 - AI Read Setpoint value 1 or 2
6 - BI Read On/Off of setpoint 1 or 2
7 - AI Read Setpoint value 3 or 4
8 - BI Read On/Off of setpoint 3 or 4
9 - AI Read Setpoint value 5 or 6
10 - BI Read On/Off of setpoint 5 or 6
11 - AI Read Setpoint value 7 or 8
12 - BI Read On/Off of setpoint 7 or 8
13 - BI Read auto-restart status
14 - SI Read TSP status
20 - MBBO Set Pressure Units
21 - MBBO Set Display
22 - AO Set Pump Size
23 - AO Set Setpoint 1 or 2
24 - AO Set Setpoint 3 or 4
25 - AO Set Setpoint 5 or 6
26 - AO Set Setpoint 7 or 8
27 - BO Start /Stop Pump
28 - Used by start/stop pump
29 - BO Keyboard lock/unlock
30 - Used by keyboard lock/unlock
31 - BO Auto-restart on/off
32 - SO TSP timed mode on
33 - BO TSP off
34 - MBBO Select TSP filament
35 - BO TSP filament clear
36 - BO TSP filament auto-advance on/off
37 - BO TSP continuous on/off
38 - SO Set TSP sublimation parameters
39 - BO TSP degass
*/
#include <stdlib.h>
#include <stdio.h>
#include <ctype.h>
#include <string.h>
#include <dbScan.h>
#include <dbDefs.h>
#include <dbAccess.h>
#include <dbCommon.h>
#include <alarm.h>
#include <link.h>
#include <recGbl.h>
#include <recSup.h>
#include <devSup.h>
#include <epicsString.h>
#include <errlog.h>
#include <asynDriver.h>
#include <asynEpicsUtils.h>
#include <asynOctet.h>
#include <aiRecord.h>
#include <aoRecord.h>
#include <biRecord.h>
#include <boRecord.h>
#include <mbboRecord.h>
#include <stringinRecord.h>
#include <stringoutRecord.h>
#include <epicsExport.h>
#include "devMPC.h"
/* Pairing of a command enum value (from devMPC.h) with the string used to
   select it in a record's INP/OUT link "command" field. */
typedef struct {
    int command;
    char *commandString;
} mpcCommandStruct;

/* Lookup table scanned by initCommon() to translate a link's command
   string into its enum value; see the command list in the header comment. */
static mpcCommandStruct mpcCommands[MAX_MPC_COMMANDS] = {
    {GetStatus, "GET_STATUS"},
    {GetPres, "GET_PRESSURE"},
    {GetCur, "GET_CURRENT"},
    {GetVolt, "GET_VOLT"},
    {GetSize, "GET_SIZE"},
    {GetSpVal12, "GET_SPVAL12"},
    {GetSpS12, "GET_SPS12"},
    {GetSpVal34, "GET_SPVAL34"},
    {GetSpS34, "GET_SPS34"},
    {GetSpVal56, "GET_SPVAL56"},
    {GetSpS56, "GET_SPS56"},
    {GetSpVal78, "GET_SPVAL78"},
    {GetSpS78, "GET_SPS78"},
    {GetAutoRestart, "GET_AUTO_RESTART"},
    {GetTSPStat, "GET_TSP_STATUS"},
    {SetUnit, "SET_UNIT"},
    {SetDis, "SET_DISPLAY"},
    {SetSize, "SET_SIZE"},
    {SetSp12, "SET_SP12"},
    {SetSp34, "SET_SP34"},
    {SetSp56, "SET_SP56"},
    {SetSp78, "SET_SP78"},
    {SetStart, "SET_START"},
    {SetStop, "SET_STOP"},
    {SetLock, "SET_LOCK"},
    {SetUnlock, "SET_UNLOCK"},
    {SetAutoRestart, "SET_AUTO_RESTART"},
    {SetTSPTimed, "SET_TSP_TIMED"},
    {SetTSPOff, "SET_TSP_OFF"},
    {SetTSPFilament, "SET_TSP_FILAMENT"},
    {SetTSPClear, "SET_TSP_CLEAR"},
    {SetTSPAutoAdv, "SET_TSP_AUTO_ADVANCE"},
    {SetTSPContinuous, "SET_TSP_CONTINUOUS"},
    {SetTSPSublimation, "SET_TSP_SUBLIMATION"},
    {SetTSPDegas, "SET_TSP_DEGAS"}
};
/* Direction of the record's I/O relative to the pump controller. */
typedef enum {opTypeInput, opTypeOutput} opType;
/* EPICS record type this device-support instance serves. */
typedef enum {recTypeAi, recTypeAo, recTypeBi, recTypeBo,
              recTypeMbbo, recTypeSi, recTypeSo} recType;

#define MPC_BUFFER_SIZE 50   /* size of the send/receive buffers below */
#define MPC_TIMEOUT 3.0      /* asyn I/O timeout, seconds */

/* Per-record private data, allocated in initCommon() and stored in dpvt. */
typedef struct devMPCPvt {
    asynUser *pasynUser;     /* asyn connection handle for this record */
    asynOctet *pasynOctet;   /* octet interface of the serial port */
    void *octetPvt;          /* driver private for the octet interface */
    opType opType;
    recType recType;
    asynStatus status;       /* status of the most recent I/O */
    char recBuf[MPC_BUFFER_SIZE];   /* response buffer — presumably filled by
                                       devMPCCallback; confirm */
    char sendBuf[MPC_BUFFER_SIZE];  /* command being sent to the pump */
    char address[3];         /* pump address as two hex digits + NUL */
    char parameter[2];       /* NOTE(review): fits one digit + NUL only; the
                                sprintf in initCommon would overflow on a
                                multi-digit parameter — confirm range is 1..8 */
    int command;             /* command enum parsed from the link */
} devMPCPvt;

/* EPICS device support entry table (DSET) layout shared by all record types. */
typedef struct dsetMPC{
    long number;
    DEVSUPFUN report;
    DEVSUPFUN init;
    DEVSUPFUN init_record;
    DEVSUPFUN get_ioint_info;
    DEVSUPFUN io;            /* read or write routine */
    DEVSUPFUN convert;       /* linear-conversion routine (ai/ao only) */
} dsetMPC;
static long initCommon(dbCommon *pr, DBLINK *plink, opType ot, recType rt);
static long startIOCommon(dbCommon *pr);
static void devMPCCallback(asynUser *pasynUser);
static long MPCConvert(dbCommon* pr,int pass);
static int buildCommand(devMPCPvt *pPvt, int hexCmd, char *pvalue);
static long initAi(aiRecord *pr);
static long readAi(aiRecord *pr);
static long initAo(aoRecord *pr);
static long writeAo(aoRecord *pr);
static long initBi(biRecord *pr);
static long readBi(biRecord *pr);
static long initBo(boRecord *pr);
static long writeBo(boRecord *pr);
static long initMbbo(mbboRecord *pr);
static long writeMbbo(mbboRecord *pr);
static long initSi(stringinRecord *pr);
static long readSi(stringinRecord *pr);
static long initSo(stringoutRecord *pr);
static long writeSo(stringoutRecord *pr);
/* Device support entry tables registered with EPICS, one per record type.
   Field order: {number, report, init, init_record, get_ioint_info,
   read/write, convert}.  Only ai/ao supply a convert routine. */
dsetMPC devAiMPC = {6,0,0,initAi,0,readAi,MPCConvert};
epicsExportAddress(dset,devAiMPC);
dsetMPC devAoMPC = {6,0,0,initAo,0,writeAo,MPCConvert};
epicsExportAddress(dset,devAoMPC);
dsetMPC devBiMPC = {6,0,0,initBi,0,readBi,0};
epicsExportAddress(dset,devBiMPC);
dsetMPC devBoMPC = {6,0,0,initBo,0,writeBo,0};
epicsExportAddress(dset,devBoMPC);
dsetMPC devMbboMPC = {6,0,0,initMbbo,0,writeMbbo,0};
epicsExportAddress(dset,devMbboMPC);
dsetMPC devSiMPC = {6,0,0,initSi,0,readSi,0};
epicsExportAddress(dset,devSiMPC);
dsetMPC devSoMPC = {6,0,0,initSo,0,writeSo,0};
epicsExportAddress(dset,devSoMPC);
/*
 * Common init_record work for every record type: allocate the private
 * structure, connect to the asyn port named in the link, locate the
 * port's asynOctet interface, and parse "<command> <parameter>" from
 * the link's userParam string into a numeric command code.
 *
 * On any failure the record is left with pact=1 (label "bad") so it is
 * never processed again; the routine still returns 0.
 */
static long initCommon(dbCommon *pr, DBLINK *plink, opType ot, recType rt)
{
    char *port, *userParam;
    int i;
    int address;
    asynUser *pasynUser=NULL;
    asynStatus status;
    asynInterface *pasynInterface;
    devMPCPvt *pPvt=NULL;
    char command[100];
    char *pstring;
    /* Allocate private structure */
    pPvt = calloc(1, sizeof(devMPCPvt));
    pPvt->opType = ot;
    pPvt->recType = rt;
    /* Create an asynUser; devMPCCallback performs the actual I/O when
     * the queued request is dispatched. */
    pasynUser = pasynManager->createAsynUser(devMPCCallback, 0);
    pasynUser->userPvt = pr;
    /* Parse link into port name, device address and user parameter. */
    status = pasynEpicsUtils->parseLink(pasynUser, plink,
        &port, &address, &userParam);
    if (status != asynSuccess) {
        errlogPrintf("devXxMPC::initCommon %s bad link %s\n",
            pr->name, pasynUser->errorMessage);
        goto bad;
    }
    status = pasynManager->connectDevice(pasynUser,port,0);
    if(status!=asynSuccess) goto bad;
    pasynInterface = pasynManager->findInterface(pasynUser,asynOctetType,1);
    if(!pasynInterface) goto bad;
    pPvt->pasynOctet = (asynOctet *)pasynInterface->pinterface;
    pPvt->octetPvt = pasynInterface->drvPvt;
    pPvt->pasynUser = pasynUser;
    pr->dpvt = pPvt;
    if ((userParam == NULL) || strlen(userParam) == 0) {
        errlogPrintf("devMPC::initCommon %s invalid userParam %s\n",
            pr->name, userParam);
        goto bad;
    }
    /* userParam is "<commandString> <parameter>".
     * NOTE(review): the "%s" conversion below is unbounded (command[100])
     * and pPvt->parameter[2] only holds a single digit plus NUL; a long
     * command string or a parameter > 9 would overflow -- confirm the
     * database never supplies such values. */
    sscanf(userParam,"%s %d",command, &i);
    sprintf(pPvt->address,"%02X",address);
    sprintf(pPvt->parameter,"%d",i);
    /* Translate the command string to its numeric code (case-insensitive). */
    for (i=0; i<MAX_MPC_COMMANDS; i++) {
        pstring = mpcCommands[i].commandString;
        if (epicsStrCaseCmp(command, pstring) == 0) {
            pPvt->command = mpcCommands[i].command;
            goto found;
        }
    }
    asynPrint(pasynUser, ASYN_TRACE_ERROR,
        "devMPC::init_common %s, unknown command=%s\n",
        pr->name, command);
    goto bad;
    found:
    asynPrint(pasynUser, ASYN_TRACE_FLOW,
        "devMPC::initCommon name=%s; command string=%s command=%d, address=%s; parameter=%s;\n",
        pr->name, command, pPvt->command, pPvt->address, pPvt->parameter);
    /* Reject command codes outside the ranges handled by the I/O routines. */
    if (pPvt->command<0 ||
        (pPvt->command >GetTSPStat && pPvt->command < SetUnit) ||
        pPvt->command>SetTSPDegas) {
        asynPrint(pasynUser, ASYN_TRACE_ERROR,
            "devMPC::initCommon %s illegal command=%d\n",
            pr->name, pPvt->command);
        goto bad;
    }
    return 0;
    bad:
    if(pasynUser) pasynManager->freeAsynUser(pasynUser);
    if(pPvt) free(pPvt);
    pr->pact = 1;
    return 0;
}
/* init_record wrappers: each binds the record's input or output link
 * and its record type to the shared initCommon() logic. */
static long initAi(aiRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->inp, opTypeInput, recTypeAi));
}
static long initAo(aoRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->out, opTypeOutput, recTypeAo));
}
static long initBi(biRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->inp, opTypeInput, recTypeBi));
}
static long initBo(boRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->out, opTypeOutput, recTypeBo));
}
static long initMbbo(mbboRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->out, opTypeOutput, recTypeMbbo));
}
static long initSi(stringinRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->inp, opTypeInput, recTypeSi));
}
static long initSo(stringoutRecord *pr)
{
    return(initCommon((dbCommon *)pr, &pr->out, opTypeOutput, recTypeSo));
}
/*
 * Build an MPC command string into pPvt->sendBuf.
 *
 * The MPC commands are of the form : "~ AA XX d cc"
 *     AA = Address from 00 - FF
 *     XX = 2 character Hex Command
 *     d  = parameter or data comma seperated
 *     cc = 2 character checksum Hex values
 * The checksum is to be calculated starting from the character after the
 * start character and ending with the space after the data/parm field.
 * Add the sum and divide by 0x100 or decimal 256. The reminder in hex is
 * two character checksum. Follow the checksum with a terminator of CR only.
 * At the current time we are not calculating the checksum due to the fact
 * the device is happy with just "00"
 *
 * Always returns 0.
 */
static int buildCommand(devMPCPvt *pPvt, int hexCmd, char *pvalue)
{
    asynUser *pasynUser = pPvt->pasynUser;
    dbCommon *pr = (dbCommon *)pasynUser->userPvt;

    memset(pPvt->sendBuf, 0, MPC_BUFFER_SIZE);
    /* Build the whole command with one bounded call; the previous
     * strcpy/strcat chain could overrun sendBuf for a long pvalue. */
    snprintf(pPvt->sendBuf, MPC_BUFFER_SIZE, "~ %s %2.2X %s 00",
             pPvt->address, hexCmd, pvalue);
    /* Cast strlen() to int: passing a size_t to %d is undefined on
     * platforms where the two types differ in size. */
    asynPrint(pPvt->pasynUser, ASYN_TRACEIO_DEVICE,
        "devMPC::buildCommand %s command 0x%X len=%d string=|%s|\n",
        pr->name, hexCmd, (int)strlen(pPvt->sendBuf), pPvt->sendBuf);
    return(0);
}
/*
 * Verify that the reply in pPvt->recBuf is at least minSize characters
 * long.  Returns 0 when the reply is long enough, -1 (after logging an
 * error) when it is too short.
 */
static long checkRtnSize(dbCommon *pr, int rtnSize, int minSize)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    if (rtnSize < minSize) {
        /* The test rejects rtnSize < minSize, so the requirement is
         * ">=" -- the old message incorrectly claimed ">". */
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC, %s rtnSize=%d, should be >=%d, response=%s\n",
            pr->name, rtnSize, minSize, pPvt->recBuf);
        return -1;
    }
    return 0;
}
/*
 * ai record I/O.  First phase (pact==0): build and queue the query for
 * the requested quantity.  Second phase: parse the reply that
 * devMPCCallback left in recBuf into VAL, and put the units string into
 * EGU.  Returns 2 so record support skips its own linear conversion.
 */
static long readAi(aiRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    char tempparameter[2];
    int stptNo;
    int hexCmd=0;
    int rtnSize;
    char *ploc;
    char pvalue[10] = "";
    float value=0;
    char *pdata = pPvt->recBuf;
    char *llen = pdata;
    if (!pr->pact) {
        /* For setpoint readback set the correct setpoint number.
         * All odd setpoints are for pump 1 and even for pump 2 */
        switch (pPvt->command) {
        case GetSpVal12:
        case GetSpVal34:
        case GetSpVal56:
        case GetSpVal78:
            hexCmd = 0x3c;
            stptNo = (pPvt->command - GetSpVal12)/2 ;
            stptNo = stptNo * 2 + atoi(pPvt->parameter);
            /* NOTE(review): tempparameter[2] only fits one digit plus
             * NUL; a two-digit setpoint number would overflow. */
            sprintf(tempparameter,"%d",stptNo);
            break;
        case GetPres:
            hexCmd = 0x0b;
            strcpy(tempparameter, pPvt->parameter);
            break;
        case GetCur:
            hexCmd = 0x0a;
            strcpy(tempparameter, pPvt->parameter);
            break;
        case GetVolt:
            hexCmd = 0x0c;
            strcpy(tempparameter, pPvt->parameter);
            break;
        case GetSize:
            hexCmd = 0x11;
            strcpy(tempparameter, pPvt->parameter);
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::readAi %s Wrong record type \n",
                pr->name);
            break;
        }
        buildCommand(pPvt, hexCmd, tempparameter);
        return(startIOCommon((dbCommon *)pr));
    }
    /* Assume failure */
    pr->val = 0;
    strcpy(pr->egu, "");
    if (pPvt->status != asynSuccess) {
        return(2);
    }
    rtnSize = strlen(pPvt->recBuf);
    switch (pPvt->command) {
    case GetPres:
        /* Reply: 7-char value, a space, then the units string. */
        if (checkRtnSize((dbCommon *)pr, rtnSize, 9)) return(2);
        strncpy(pvalue, pPvt->recBuf, 7);
        pvalue[7] = 0;
        value = strtod(pvalue, NULL);
        ploc=&pPvt->recBuf[8];
        strncpy(pvalue, ploc, rtnSize-8);
        pvalue[strlen(ploc)] = 0;
        break;
    case GetCur:
        /* Reply: 7-char value; units are fixed. */
        if (checkRtnSize((dbCommon *)pr, rtnSize, 7)) return(2);
        strncpy(pvalue, pPvt->recBuf, 7);
        pvalue[7] = 0;
        value = strtod(pvalue, NULL);
        strcpy(pvalue, "AMPS");
        break;
    case GetVolt:
        /* Whole reply is the numeric value; units are fixed. */
        if (checkRtnSize((dbCommon *)pr, rtnSize, 1)) return(2);
        strncpy(pvalue, pPvt->recBuf, rtnSize);
        pvalue[rtnSize] = 0;
        value = strtod(pvalue, NULL);
        strcpy(pvalue, "VOLTS");
        break;
    case GetSize:
        /* Reply is "<number>L..."; truncate at the 'L'. */
        llen = strchr(pdata, 'L');
        if (llen == NULL) {
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::readAi, %s cannot find L, response=%s\n",
                pr->name, pdata);
            return(2);
        }
        *llen = 0;
        strcpy(pvalue, pdata);
        value = strtod(pvalue, NULL);
        strcpy(pvalue, "L/S");
        break;
    case GetSpVal12:
    case GetSpVal34:
    case GetSpVal56:
    case GetSpVal78:
        /* Setpoint value starts at offset 4 and is 7 characters long. */
        if (checkRtnSize((dbCommon *)pr, rtnSize, 11)) return(2);
        ploc=&pPvt->recBuf[4];
        strncpy(pvalue, ploc, 7);
        pvalue[7] = 0;
        value = strtod(pvalue, NULL);
        strcpy(pvalue, "TORR");
        break;
    default:
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::readAi %s Wrong record type \n",
            pr->name);
        break;
    }
    pr->val = value;
    strcpy(pr->egu, pvalue);
    pr->udf=0;
    return(2);
}
/*
 * bi record I/O.  First phase queues a setpoint-state or auto-restart
 * query; second phase converts the reply into RVAL (0/1).
 */
static long readBi(biRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    char tempparameter[2];
    int stptNo;
    int hexCmd=0;
    int rtnSize;
    char *pvalue;
    int value=0;
    if (!pr->pact) {
        /* For setpoint readback set the correct setpoint number.
         * All odd setpoints are for pump 1 and even for pump 2 */
        switch (pPvt->command) {
        case GetSpS12:
        case GetSpS34:
        case GetSpS56:
        case GetSpS78:
            hexCmd = 0x3c;
            /* NOTE(review): this setpoint-number arithmetic differs
             * from readAi's (no *2 factor) -- confirm intended. */
            stptNo = (pPvt->command - GetSpS12) + atoi(pPvt->parameter);
            sprintf(tempparameter, "%d", stptNo);
            break;
        case GetAutoRestart:
            hexCmd = 0x34;
            strcpy(tempparameter, "");
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::readBi %s Wrong record type \n",
                pr->name);
            break;
        }
        buildCommand(pPvt, hexCmd, tempparameter);
        return(startIOCommon((dbCommon *)pr));
    }
    pr->rval = 0;
    if (pPvt->status != asynSuccess) {
        return(0);
    }
    rtnSize = strlen(pPvt->recBuf);
    if (checkRtnSize((dbCommon *)pr, rtnSize, 2)) return(0);
    switch (pPvt->command) {
    case GetSpS12:
    case GetSpS34:
    case GetSpS56:
    case GetSpS78:
        /* The state is the last character of the reply. */
        pvalue = &pPvt->recBuf[rtnSize-1];
        sscanf(pvalue, "%d", &value);
        break;
    case GetAutoRestart:
        /* Device answers "YES" or "NO". */
        if (strcmp(pPvt->recBuf, "YES") == 0) value=1; else value=0;
        break;
    default:
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::readBi %s Wrong record type \n",
            pr->name);
        break;
    }
    pr->rval = value;
    pr->udf=0;
    return(0);
}
/*
 * stringin record I/O.  First phase queues a status (0x0d) or TSP
 * status (0x2a) query; second phase copies the reply into VAL,
 * rejecting replies that would overflow the 40-character field.
 */
static long readSi(stringinRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    int hexCmd=0;
    int rtnSize;
    if (!pr->pact) {
        switch (pPvt->command) {
        case GetStatus:
            hexCmd = 0x0d;
            buildCommand(pPvt, hexCmd, pPvt->parameter);
            break;
        case GetTSPStat:
            hexCmd = 0x2a;
            buildCommand(pPvt, hexCmd, "");
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::readSi %s Wrong record type \n",
                pr->name);
            break;
        }
        return(startIOCommon((dbCommon *)pr));
    }
    strcpy(pr->val, "");
    if (pPvt->status != asynSuccess) {
        return(0);
    }
    rtnSize = strlen(pPvt->recBuf);
    /* VAL holds at most 39 characters plus the terminating NUL. */
    if (rtnSize > 39) {
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::readSi message too big in %s \n",
            pr->name);
        return(0);
    }
    strcpy(pr->val, pPvt->recBuf);
    pr->udf=0;
    return(0);
}
/*
 * ao record I/O.  First phase builds and queues a pump-size (0x12) or
 * setpoint (0x3d) command from VAL; second phase only checks that the
 * device acknowledged.  Returns 2 to skip record-support conversion.
 */
static long writeAo(aoRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    char tempparameter[25]="";
    char pvalue[10]="";
    int stptNo;
    int hexCmd=0;
    int rtnSize;
    if (!pr->pact) {
        /* For setting setpoint the command is of the form :n,s,x.xE-yy,x.xE-yy
         * n - setpoint number.
         * s - supply here 1 for pump1 and 2 for pump 2
         * values are for On and Off and will be set the same as pr->val field.
         * All odd setpoints are for pump 1 and even for pump 2 */
        switch (pPvt->command) {
        case SetSize:
            hexCmd = 0x12;
            strcpy(tempparameter, pPvt->parameter);
            strcat(tempparameter, ",");
            sprintf(pvalue, "%d", (int) pr->val);
            strcat(tempparameter, pvalue); /* Pump Size */
            break;
        case SetSp12:
        case SetSp34:
        case SetSp56:
        case SetSp78:
            hexCmd = 0x3d;
            stptNo = (pPvt->command - SetSp12) *2 + atoi(pPvt->parameter);
            sprintf(tempparameter, "%1.1d", stptNo);
            strcat(tempparameter, ",");
            strcat(tempparameter, pPvt->parameter); /* for supply number */
            strcat(tempparameter, ",");
            /* On and Off thresholds are both set to VAL. */
            sprintf(pvalue, "%7.1E", pr->val);
            strcat(tempparameter, pvalue); /* for On pressure */
            strcat(tempparameter, ",");
            strcat(tempparameter, pvalue); /* for Off pressure */
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::writeAo %s Wrong record type \n",
                pr->name);
            break;
        }
        buildCommand(pPvt, hexCmd, tempparameter);
        return(startIOCommon((dbCommon *)pr));
    }
    if (pPvt->status != asynSuccess) return(2);
    /* A successful write is acknowledged with at most "OK" (2 chars). */
    rtnSize = strlen(pPvt->recBuf);
    if (rtnSize > 2) {
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::writeAo message too big in %s\n",
            pr->name);
        return(2);
    }
    pr->udf=0;
    return(2);
}
/*
 * bo record I/O.  First phase maps the record's command and VAL onto
 * the appropriate MPC command code and queues it; second phase only
 * checks the acknowledgement length.
 */
static long writeBo(boRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    int hexCmd=0;
    int rtnSize;
    if (!pr->pact) {
        switch (pPvt->command) {
        case SetStart:
        case SetStop:
            /* VAL selects between the two codes (0x37 / 0x38). */
            if (pr->val == 0) hexCmd = 0x37; else hexCmd = 0x38;
            buildCommand(pPvt, hexCmd, pPvt->parameter);
            break;
        case SetLock:
        case SetUnlock:
            /* VAL selects between the two codes (0x44 / 0x45). */
            if (pr->val == 0) hexCmd = 0x44; else hexCmd = 0x45;
            buildCommand(pPvt, hexCmd, pPvt->parameter);
            break;
        case SetAutoRestart:
            hexCmd = 0x33;
            if (pr->val)
                buildCommand(pPvt, hexCmd, "YES");
            else
                buildCommand(pPvt, hexCmd, "NO");
            break;
        case SetTSPAutoAdv:
            hexCmd = 0x2c;
            if (pr->val)
                buildCommand(pPvt, hexCmd, "YES");
            else
                buildCommand(pPvt, hexCmd, "NO");
            break;
        case SetTSPOff:
            hexCmd = 0x28;
            buildCommand(pPvt, hexCmd, "");
            break;
        case SetTSPClear:
            hexCmd = 0x2b;
            buildCommand(pPvt, hexCmd, "");
            break;
        case SetTSPContinuous:
            hexCmd = 0x2d;
            buildCommand(pPvt, hexCmd, "");
            break;
        case SetTSPDegas:
            hexCmd = 0x2f;
            buildCommand(pPvt, hexCmd, "");
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::writeBo %s Wrong record type \n",
                pr->name);
            break;
        }
        return(startIOCommon((dbCommon *)pr));
    }
    if (pPvt->status != asynSuccess) return(0);
    /* A successful write is acknowledged with at most "OK" (2 chars). */
    rtnSize = strlen(pPvt->recBuf);
    if (rtnSize > 2) {
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::writeBo message too big in %s\n",
            pr->name);
        return(0);
    }
    pr->udf=0;
    return(0);
}
/*
 * mbbo record I/O.  First phase builds a display-unit (0x0e), display
 * (0x25) or TSP-filament (0x29) command from RVAL; second phase only
 * checks the acknowledgement length.
 */
static long writeMbbo(mbboRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    char tempparameter[10];
    int hexCmd=0;
    int rtnSize;
    if (!pr->pact) {
        switch (pPvt->command) {
        case SetUnit:
            hexCmd = 0x0e;
            /* Parameter followed by the display string for RVAL.
             * NOTE(review): tempparameter[10] -- confirm DisplayStr
             * entries are short enough not to overflow. */
            strcpy(tempparameter, pPvt->parameter);
            strcat(tempparameter, DisplayStr[pr->rval]);
            break;
        case SetDis:
            hexCmd = 0x25;
            strcpy(tempparameter, pPvt->parameter);
            strcat(tempparameter, DisplayStr[pr->rval]);
            break;
        case SetTSPFilament:
            hexCmd = 0x29;
            /* NOTE(review): rval is unsigned; "%d" assumes it fits in
             * an int -- confirm. */
            sprintf(tempparameter, "%d", pr->rval);
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::writeMbbo %s Wrong record type \n",
                pr->name);
            break;
        }
        buildCommand(pPvt, hexCmd, tempparameter);
        return(startIOCommon((dbCommon *)pr));
    }
    if (pPvt->status != asynSuccess) return(0);
    /* A successful write is acknowledged with at most "OK" (2 chars). */
    rtnSize = strlen(pPvt->recBuf);
    if (rtnSize > 2) {
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::writeMbbo message too big in %s\n",
            pr->name);
        return(0);
    }
    pr->udf=0;
    return(0);
}
/*
 * stringout record I/O.  First phase sends VAL verbatim as the data
 * field of a TSP timed (0x27) or sublimation (0x2e) command; second
 * phase only checks the acknowledgement length.
 */
static long writeSo(stringoutRecord *pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    int hexCmd=0;
    int rtnSize;
    if (!pr->pact) {
        switch (pPvt->command) {
        case SetTSPTimed:
            hexCmd = 0x27;
            break;
        case SetTSPSublimation:
            hexCmd = 0x2e;
            break;
        default:
            asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
                "devMPC::writeSo %s Wrong record type \n",
                pr->name);
            break;
        }
        buildCommand(pPvt, hexCmd, pr->val);
        return(startIOCommon((dbCommon *)pr));
    }
    if (pPvt->status != asynSuccess) return(0);
    /* A successful write is acknowledged with at most "OK" (2 chars). */
    rtnSize = strlen(pPvt->recBuf);
    if (rtnSize > 2) {
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        asynPrint(pPvt->pasynUser, ASYN_TRACE_ERROR,
            "devMPC::writeSo message too big in %s\n",
            pr->name);
        return(0);
    }
    pr->udf=0;
    return(0);
}
/*
 * Mark the record active and queue the prepared command; devMPCCallback
 * will be invoked to perform the actual write/read.  Returns asynSuccess
 * when queued, -1 otherwise.
 */
static long startIOCommon(dbCommon* pr)
{
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    int queued;

    pr->pact = 1;
    queued = pasynManager->queueRequest(pPvt->pasynUser, 0, 0);
    return (queued == asynSuccess) ? queued : -1;
}
/*
 * Port-thread callback: write the prepared command in sendBuf, read the
 * reply, strip its protocol framing into recBuf, then process the
 * record again (pact==1) so the record routine can parse recBuf.
 * pPvt->status carries the outcome of the transaction.
 */
static void devMPCCallback(asynUser *pasynUser)
{
    dbCommon *pr = (dbCommon *)pasynUser->userPvt;
    devMPCPvt *pPvt = (devMPCPvt *)pr->dpvt;
    char readBuffer[MPC_BUFFER_SIZE];
    struct rset *prset = (struct rset *)(pr->rset);
    int eomReason;
    size_t nread, nwrite;
    memset(pPvt->recBuf, 0, MPC_BUFFER_SIZE);
    pPvt->pasynUser->timeout = MPC_TIMEOUT;
    pPvt->status = pPvt->pasynOctet->write(pPvt->octetPvt, pasynUser,
        pPvt->sendBuf, strlen(pPvt->sendBuf),
        &nwrite);
    if (pPvt->status != asynSuccess) {
        asynPrint(pasynUser, ASYN_TRACE_ERROR,
            "devMPC::devMPCCallback write error, status=%d error= %s\n",
            pPvt->status, pasynUser->errorMessage);
        recGblSetSevr(pr, WRITE_ALARM, INVALID_ALARM);
        goto done;
    }
    /* NOTE(review): nwrite/nread are size_t but printed with %d below;
     * on LP64 platforms this is a format mismatch. */
    asynPrint(pasynUser, ASYN_TRACEIO_DEVICE,
        "devMPC::devMPCCallback %s nwrite=%d, output=%s\n",
        pr->name, nwrite, pPvt->sendBuf);
    pPvt->status = pPvt->pasynOctet->read(pPvt->octetPvt, pasynUser,
        readBuffer, MPC_BUFFER_SIZE,
        &nread, &eomReason);
    if (pPvt->status != asynSuccess) {
        asynPrint(pasynUser, ASYN_TRACE_ERROR,
            "devMPC::devMPCCallback %s read error, status=%d error= %s\n",
            pr->name, pPvt->status, pasynUser->errorMessage);
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        goto done;
    }
    asynPrint(pasynUser, ASYN_TRACEIO_DEVICE,
        "devMPC::devMPCCallback %s nread=%d, input=%s\n",
        pr->name, nread, readBuffer);
    if (nread < 4) {
        asynPrint(pasynUser, ASYN_TRACE_ERROR,
            "devMPC::devMPCCallback %s message too small=%d\n",
            pr->name, nread);
        recGblSetSevr(pr, READ_ALARM, INVALID_ALARM);
        pPvt->status = asynError;
        goto done;
    }
    asynPrint(pasynUser, ASYN_TRACEIO_DEVICE,
        "devMPC: %s command (%d) received (before processing) len=%d, |%s|\n",
        pr->name, pPvt->command, nread, readBuffer);
    /* "OK" at offset 3 marks a positive acknowledgement; short replies
     * carry no data field, longer ones have a 9-character header and a
     * 3-character trailer (space + checksum) stripped off. */
    if(readBuffer[3]=='O' && readBuffer[4] == 'K') {
        if (nread < 12 ) {
            strcpy(pPvt->recBuf, "OK");
        } else {
            char *pdata = &readBuffer[9]; /* strip off the header cmds */
            /* strip off 3 trailing character (space, checksum) */
            strncpy(pPvt->recBuf, pdata, nread-12);
        }
    }
    asynPrint(pasynUser, ASYN_TRACEIO_DEVICE,
        "devMPC: %s command (%d) received (after processing) |%s|\n",
        pr->name, pPvt->command, pPvt->recBuf);
    done:
    /* Process the record. This will result in the readX or writeX routine
       being called again, but with pact=1 */
    dbScanLock(pr);
    (*prset->process)(pr);
    dbScanUnlock(pr);
}
/*
 * Linear-conversion hook for the analog records: force ESLO=1, ROFF=0
 * so the raw value passes through unchanged.
 * NOTE(review): the dbCommon* is cast to aiRecord* although this routine
 * is also registered in devAoMPC; this relies on eslo/roff having
 * compatible offsets in aoRecord -- confirm.
 */
static long MPCConvert(dbCommon* pr,int pass)
{
    aiRecord* pai = (aiRecord*)pr;
    pai->eslo=1.0;
    pai->roff=0;
    return 0;
}
|
attila-sim/attila-sim
|
src/trace/D3DDriver/AD3D9/AIResourceImp_9.h
|
/**************************************************************************
*
* Copyright (c) 2002 - 2011 by Computer Architecture Department,
* Universitat Politecnica de Catalunya.
* All rights reserved.
*
* The contents of this file may not be disclosed to third parties,
* copied or duplicated in any form, in whole or in part, without the
* prior permission of the authors, Computer Architecture Department
* and Universitat Politecnica de Catalunya.
*
*/
#ifndef AIRESOURCEIMP_9_H
#define AIRESOURCEIMP_9_H
/*
 * Implementation of the IDirect3DResource9 COM interface.  Accessed
 * through a single shared instance (getInstance()); the constructor is
 * private so the class cannot be instantiated directly.
 */
class AIResourceImp9 : public IDirect3DResource9{
public:
    // Return the single shared instance.
    static AIResourceImp9 &getInstance();
    // IUnknown methods.
    HRESULT D3D_CALL QueryInterface ( REFIID riid , void** ppvObj );
    ULONG D3D_CALL AddRef ( );
    ULONG D3D_CALL Release ( );
    // IDirect3DResource9 methods.
    HRESULT D3D_CALL GetDevice ( IDirect3DDevice9** ppDevice );
    HRESULT D3D_CALL SetPrivateData ( REFGUID refguid , CONST void* pData , DWORD SizeOfData , DWORD Flags );
    HRESULT D3D_CALL GetPrivateData ( REFGUID refguid , void* pData , DWORD* pSizeOfData );
    HRESULT D3D_CALL FreePrivateData ( REFGUID refguid );
    DWORD D3D_CALL SetPriority ( DWORD PriorityNew );
    DWORD D3D_CALL GetPriority ( );
    void D3D_CALL PreLoad ( );
    D3DRESOURCETYPE D3D_CALL GetType ( );
private:
    // Private: instances are only obtained through getInstance().
    AIResourceImp9();
};
#endif
|
ghafooridev/glass_accounting
|
src/pages/product/filter.js
|
import React, { useState } from "react";
import { Grid, TextField, MenuItem, Button, Divider } from "@material-ui/core";
const Filter = ({ onFilter, category }) => {
const [filterData, setFilterData] = useState({ category: "" });
const handleChange = (event) => {
// setFilterData(event.target.value);
setFilterData({ ...filterData, category: event.target.value });
};
const onSubmit = () => {
if (typeof onFilter === "function") {
onFilter(filterData);
}
};
return (
<Grid container spacing={3} alignItems="center" style={{ padding: 10 }}>
<Grid item lg={3} xs={12}>
<TextField
select
label="دسته بندی"
onChange={handleChange}
value={filterData.category}
variant="outlined"
fullWidth
size="small"
>
{category.map((option) => (
<MenuItem key={option.value} value={option.value}>
{option.label}
</MenuItem>
))}
</TextField>
</Grid>
<Grid item lg={3} xs={12}>
<Button variant="contained" color="primary" onClick={onSubmit}>
تایید
</Button>
</Grid>
<Divider style={{ width: "100%" }} />
</Grid>
);
};
export default Filter;
|
JM-Ski/CarGame
|
include/player.h
|
<gh_stars>0
#pragma once
#include "rectangle.h"
#include "physics.h"
//!This class is used to create and control a player.
//! This class is used to create and control a player.
class Player : public Rectangle
{
private:
    /*! True while the rotate-right key is held. */
    bool bRotateRight;
    /*! True while the rotate-left key is held. */
    bool bRotateLeft;
    /*! True while the up key is held. */
    bool bPressedUp;
    /*! True while the down key is held. */
    bool bPressedDown;
    /*! Physics state for the player. */
    Physics m_rectPhysics;
public:
    /*! Default constructor; members are left uninitialized. */
    Player(){};
    /*! Constructor. */
    /**@param position = Position of the player.
       @param dimensions = Size of the player.
       @param texture = Texture to load.*/
    Player(sf::Vector2f& position, sf::Vector2f& dimensions, sf::Texture& texture);
    /*! Processes key events and records which control keys are pressed. */
    /**@param e = Event variable needed to capture any key presses.*/
    void processEvent(sf::Event& e);
    /*! Updates the player. */
    /**@param timeStep = Time step which advances the physics state, for example velocity.*/
    void update(float timeStep);
    /*! Sets the physics position of the player. */
    /**@param other = New position vector.*/
    void setPositionPhys(sf::Vector2f other);
    /*! Sets the physics velocity of the player. */
    /**@param other = New velocity vector.*/
    void setVelocityPhys(sf::Vector2f other);
    /*! Returns the current physics velocity vector. */
    /**@return The player's current velocity.*/
    sf::Vector2f getVelocityPhys();
    /*! Resets the player's variables to their initial state. */
    void resetPlayer();
};
|
tempoz/irods
|
server/api/include/rsPhyPathReg.hpp
|
<reponame>tempoz/irods<filename>server/api/include/rsPhyPathReg.hpp
#ifndef RS_PHY_PATH_REG_HPP
#define RS_PHY_PATH_REG_HPP
#include "rodsConnect.h"
#include "dataObjInpOut.h"
// Server-side API for registering a physical path into the iRODS
// catalog.  rsPhyPathReg() is the public entry point; the remaining
// routines appear to be its helper stages (permission-free variant,
// local/remote dispatch, file/dir/struct-file registration, mount and
// unmount handling) -- confirm against the implementation file.
int rsPhyPathReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp );
int phyPathRegNoChkPerm( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp );
int irsPhyPathReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp );
int remotePhyPathReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp, rodsServerHost_t *rodsServerHost );
int _rsPhyPathReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp, const char *_resc_name, rodsServerHost_t *rodsServerHost );
int filePathReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp, const char *_resc_name );
int filePathRegRepl( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp, char *filePath, const char *_resc_name );
int dirPathReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp, char *filePath, const char *_resc_name );
int mountFileDir( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp, char *filePath, const char *rescVaultPath );
int structFileReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp );
int unmountFileDir( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp );
int structFileSupport( rsComm_t *rsComm, char *collection, char *collType, char* );
int linkCollReg( rsComm_t *rsComm, dataObjInp_t *phyPathRegInp );
#endif
|
sdinot/hipparchus
|
hipparchus-core/src/main/java/org/hipparchus/analysis/differentiation/FieldDerivativeStructure.java
|
/*
* Licensed to the Hipparchus project under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The Hipparchus project licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hipparchus.analysis.differentiation;
import org.hipparchus.CalculusFieldElement;
import org.hipparchus.Field;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.exception.MathRuntimeException;
import org.hipparchus.util.FastMath;
import org.hipparchus.util.FieldSinCos;
import org.hipparchus.util.FieldSinhCosh;
import org.hipparchus.util.MathArrays;
import org.hipparchus.util.MathUtils;
/** Class representing both the value and the differentials of a function.
* <p>This class is similar to {@link DerivativeStructure} except function
* parameters and value can be any {@link CalculusFieldElement}.</p>
* <p>Instances of this class are guaranteed to be immutable.</p>
* @see DerivativeStructure
* @see FDSFactory
* @see DSCompiler
* @param <T> the type of the field elements
*/
public class FieldDerivativeStructure<T extends CalculusFieldElement<T>>
implements FieldDerivative<T, FieldDerivativeStructure<T>> {
/** Factory that built the instance. */
private final FDSFactory<T> factory;

/** Combined array holding the value and all partial derivatives,
 * ordered according to {@link DSCompiler}. */
private final T[] data;

/** Build an instance from a combined array of value and derivatives.
 * <p>The array is defensively copied.</p>
 * @param factory factory that built the instance
 * @param data combined array holding all values
 */
FieldDerivativeStructure(final FDSFactory<T> factory, final T[] data) {
    this.factory = factory;
    this.data = data.clone();
}

/** Build an instance with all values and derivatives set to 0.
 * @param factory factory that built the instance
 * @since 1.4
 */
FieldDerivativeStructure(final FDSFactory<T> factory) {
    this.factory = factory;
    this.data = MathArrays.buildArray(factory.getValueField(), factory.getCompiler().getSize());
}
/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> newInstance(final double value) {
    return factory.constant(value);
}

/** Get the factory that built the instance.
 * @return factory that built the instance
 */
public FDSFactory<T> getFactory() {
    return factory;
}

/** {@inheritDoc} */
@Override
public int getFreeParameters() {
    return getFactory().getCompiler().getFreeParameters();
}

/** {@inheritDoc} */
@Override
public int getOrder() {
    return getFactory().getCompiler().getOrder();
}

/** {@inheritDoc} */
@Override
public double getReal() {
    return data[0].getReal();
}
/** Set a derivative component.
 * <p>
 * This method is package-private (no modifier specified), as it is intended
 * to be used only by {@link FDSFactory} since it relied on the ordering of
 * derivatives within the class. This allows avoiding checks on the index,
 * for performance reasons.
 * </p>
 * @param index index of the derivative
 * @param value of the derivative to set
 * @since 1.4
 */
void setDerivativeComponent(final int index, final T value) {
    data[index] = value;
}

/** Get the value part of the derivative structure.
 * <p>The value is always stored at index 0 of the combined array.</p>
 * @return value part of the derivative structure
 * @see #getPartialDerivative(int...)
 */
@Override
public T getValue() {
    return data[0];
}

/** {@inheritDoc} */
@Override
public T getPartialDerivative(final int ... orders)
    throws MathIllegalArgumentException {
    return data[factory.getCompiler().getPartialDerivativeIndex(orders)];
}

/** Get all partial derivatives.
 * @return a fresh copy of partial derivatives, in an array sorted according to
 * {@link DSCompiler#getPartialDerivativeIndex(int...)}
 */
public T[] getAllDerivatives() {
    return data.clone();
}
/** '+' operator.
 * @param a right hand side parameter of the operator
 * @return this+a
 */
public FieldDerivativeStructure<T> add(T a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    System.arraycopy(data, 0, ds.data, 0, data.length);
    // adding a constant only shifts the value; derivatives are unchanged
    ds.data[0] = ds.data[0].add(a);
    return ds;
}

/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> add(final double a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    System.arraycopy(data, 0, ds.data, 0, data.length);
    // adding a constant only shifts the value; derivatives are unchanged
    ds.data[0] = ds.data[0].add(a);
    return ds;
}

/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> add(final FieldDerivativeStructure<T> a)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(a.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().add(data, 0, a.data, 0, ds.data, 0);
    return ds;
}

/** '-' operator.
 * @param a right hand side parameter of the operator
 * @return this-a
 */
public FieldDerivativeStructure<T> subtract(final T a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    System.arraycopy(data, 0, ds.data, 0, data.length);
    // subtracting a constant only shifts the value; derivatives are unchanged
    ds.data[0] = ds.data[0].subtract(a);
    return ds;
}

/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> subtract(final double a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    System.arraycopy(data, 0, ds.data, 0, data.length);
    // subtracting a constant only shifts the value; derivatives are unchanged
    ds.data[0] = ds.data[0].subtract(a);
    return ds;
}

/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> subtract(final FieldDerivativeStructure<T> a)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(a.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().subtract(data, 0, a.data, 0, ds.data, 0);
    return ds;
}
/** '×' operator.
 * @param a right hand side parameter of the operator
 * @return this×a
 */
public FieldDerivativeStructure<T> multiply(final T a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    // multiplying by a constant scales value and all derivatives alike
    for (int i = 0; i < ds.data.length; ++i) {
        ds.data[i] = data[i].multiply(a);
    }
    return ds;
}

/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> multiply(final int n) {
    return multiply((double) n);
}

/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> multiply(final double a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    // multiplying by a constant scales value and all derivatives alike
    for (int i = 0; i < ds.data.length; ++i) {
        ds.data[i] = data[i].multiply(a);
    }
    return ds;
}

/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> multiply(final FieldDerivativeStructure<T> a)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(a.factory);
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().multiply(data, 0, a.data, 0, result.data, 0);
    return result;
}

/** '÷' operator.
 * @param a right hand side parameter of the operator
 * @return this÷a
 */
public FieldDerivativeStructure<T> divide(final T a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    // dividing by a constant scales value and all derivatives alike
    for (int i = 0; i < ds.data.length; ++i) {
        ds.data[i] = data[i].divide(a);
    }
    return ds;
}

/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> divide(final double a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    // dividing by a constant scales value and all derivatives alike
    for (int i = 0; i < ds.data.length; ++i) {
        ds.data[i] = data[i].divide(a);
    }
    return ds;
}

/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> divide(final FieldDerivativeStructure<T> a)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(a.factory);
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().divide(data, 0, a.data, 0, result.data, 0);
    return result;
}
/** IEEE remainder operator.
 * @param a right hand side parameter of the operator
 * @return this - n × a where n is the closest integer to this/a
 * (the even integer is chosen for n if this/a is halfway between two integers)
 */
public FieldDerivativeStructure<T> remainder(final T a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    System.arraycopy(data, 0, ds.data, 0, data.length);
    // only the value part changes; derivatives are copied unchanged
    ds.data[0] = data[0].remainder(a);
    return ds;
}

/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> remainder(final double a) {
    final FieldDerivativeStructure<T> ds = factory.build();
    System.arraycopy(data, 0, ds.data, 0, data.length);
    // only the value part changes; derivatives are copied unchanged
    ds.data[0] = data[0].remainder(a);
    return ds;
}

/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> remainder(final FieldDerivativeStructure<T> a)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(a.factory);
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().remainder(data, 0, a.data, 0, result.data, 0);
    return result;
}

/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> negate() {
    final FieldDerivativeStructure<T> ds = factory.build();
    // negation applies to the value and every derivative
    for (int i = 0; i < ds.data.length; ++i) {
        ds.data[i] = data[i].negate();
    }
    return ds;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> abs() {
    if (Double.doubleToLongBits(data[0].getReal()) < 0) {
        // we use the bits representation to also handle -0.0
        return negate();
    } else {
        return this;
    }
}

/** {@inheritDoc}
 * <p>The rounding functions are step functions, so the result is a
 * constant (all derivatives zero).</p>
 */
@Override
public FieldDerivativeStructure<T> ceil() {
    return factory.constant(data[0].ceil());
}

/** {@inheritDoc}
 * <p>The rounding functions are step functions, so the result is a
 * constant (all derivatives zero).</p>
 */
@Override
public FieldDerivativeStructure<T> floor() {
    return factory.constant(data[0].floor());
}

/** {@inheritDoc}
 * <p>The rounding functions are step functions, so the result is a
 * constant (all derivatives zero).</p>
 */
@Override
public FieldDerivativeStructure<T> rint() {
    return factory.constant(data[0].rint());
}

/** {@inheritDoc}
 * <p>The sign function is a step function, so the result is a
 * constant (all derivatives zero).</p>
 */
@Override
public FieldDerivativeStructure<T> sign() {
    return factory.constant(data[0].sign());
}
/**
 * Returns the instance with the sign of the argument.
 * A NaN {@code sign} argument is treated as positive.
 *
 * @param sign the sign for the returned value
 * @return the instance with the same sign as the {@code sign} argument
 */
public FieldDerivativeStructure<T> copySign(final T sign) {
    // compare raw IEEE754 bit patterns so that -0.0 is handled and
    // NaN (whose bit pattern is positive) counts as positive
    final long selfBits = Double.doubleToLongBits(data[0].getReal());
    final long signBits = Double.doubleToLongBits(sign.getReal());
    // the XOR of two bit patterns is negative exactly when their sign bits differ
    return ((selfBits ^ signBits) < 0) ? negate() : this;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> copySign(final double sign) {
    // compare raw IEEE754 bit patterns (-0.0 aware, NaN treated as positive)
    final long selfBits = Double.doubleToLongBits(data[0].getReal());
    final long signBits = Double.doubleToLongBits(sign);
    // XOR is negative exactly when the two sign bits differ
    return ((selfBits ^ signBits) < 0) ? negate() : this;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> copySign(final FieldDerivativeStructure<T> sign) {
    // compare raw IEEE754 bit patterns (-0.0 aware, NaN treated as positive)
    final long selfBits = Double.doubleToLongBits(data[0].getReal());
    final long signBits = Double.doubleToLongBits(sign.data[0].getReal());
    // XOR is negative exactly when the two sign bits differ
    return ((selfBits ^ signBits) < 0) ? negate() : this;
}
/**
 * Return the exponent of the instance value, removing the bias.
 * <p>
 * For double numbers of the form 2<sup>x</sup>, the unbiased
 * exponent is exactly x.
 * </p>
 * @return exponent for instance in IEEE754 representation, without bias
 */
@Override
public int getExponent() {
    // only the function value is relevant here; derivatives are ignored
    return data[0].getExponent();
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> scalb(final int n) {
    // multiplication by 2^n is linear and exact, so the value and
    // every partial derivative are scaled independently
    final FieldDerivativeStructure<T> scaled = factory.build();
    int index = 0;
    for (final T entry : data) {
        scaled.data[index++] = entry.scalb(n);
    }
    return scaled;
}
/** {@inheritDoc}
 * <p>
 * The {@code ulp} function is a step function, hence all its derivatives are 0.
 * </p>
 * @since 2.0
 */
@Override
public FieldDerivativeStructure<T> ulp() {
    final FieldDerivativeStructure<T> ds = factory.build();
    // only the value entry is set; factory.build() presumably initializes
    // the derivative entries to zero, matching the step-function
    // semantics documented above -- TODO confirm against FDSFactory
    ds.data[0] = FastMath.ulp(data[0]);
    return ds;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> hypot(final FieldDerivativeStructure<T> y)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(y.factory);
    if (data[0].isInfinite() || y.data[0].isInfinite()) {
        // IEEE754 convention: hypot is +∞ if either argument is infinite,
        // even when the other one is NaN
        return factory.constant(Double.POSITIVE_INFINITY);
    } else if (data[0].isNaN() || y.data[0].isNaN()) {
        return factory.constant(Double.NaN);
    } else {
        final int expX = getExponent();
        final int expY = y.getExponent();
        if (expX > expY + 27) {
            // y is negligible with respect to x: a separation of 2^27
            // exceeds half the 52-bit double significand, so y² cannot
            // affect even the last bit of x²
            return abs();
        } else if (expY > expX + 27) {
            // x is negligible with respect to y
            return y.abs();
        } else {
            // find an intermediate scale to avoid both overflow and underflow
            final int middleExp = (expX + expY) / 2;
            // scale parameters without losing precision (powers of 2 are exact)
            final FieldDerivativeStructure<T> scaledX = scalb(-middleExp);
            final FieldDerivativeStructure<T> scaledY = y.scalb(-middleExp);
            // compute scaled hypotenuse
            final FieldDerivativeStructure<T> scaledH =
                    scaledX.multiply(scaledX).add(scaledY.multiply(scaledY)).sqrt();
            // remove scaling
            return scaledH.scalb(middleExp);
        }
    }
}
/**
 * Returns the hypotenuse of a triangle with sides {@code x} and {@code y}
 * - sqrt(<i>x</i><sup>2</sup> +<i>y</i><sup>2</sup>)
 * avoiding intermediate overflow or underflow.
 *
 * <ul>
 * <li> If either argument is infinite, then the result is positive infinity.</li>
 * <li> else, if either argument is NaN then the result is NaN.</li>
 * </ul>
 *
 * @param x a value
 * @param y a value
 * @return sqrt(<i>x</i><sup>2</sup> +<i>y</i><sup>2</sup>)
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 * @param <T> the type of the field elements
 */
public static <T extends CalculusFieldElement<T>> FieldDerivativeStructure<T>
    hypot(final FieldDerivativeStructure<T> x, final FieldDerivativeStructure<T> y)
    throws MathIllegalArgumentException {
    // static convenience form: delegate to the instance implementation
    return x.hypot(y);
}
/** Compute composition of the instance by a univariate function.
 * @param f array of value and derivatives of the function at
 * the current point (i.e. [f({@link #getValue()}),
 * f'({@link #getValue()}), f''({@link #getValue()})...]).
 * @return f(this)
 * @exception MathIllegalArgumentException if the number of derivatives
 * in the array is not equal to {@link #getOrder() order} + 1
 */
@SafeVarargs
public final FieldDerivativeStructure<T> compose(final T ... f)
    throws MathIllegalArgumentException {
    // exactly order+1 univariate derivatives are required by the compiler
    MathUtils.checkDimension(f.length, getOrder() + 1);
    final FieldDerivativeStructure<T> result = factory.build();
    // the compiler combines the univariate derivatives in f with the
    // partial derivatives of this instance
    factory.getCompiler().compose(data, 0, f, result.data, 0);
    return result;
}
/** Compute composition of the instance by a univariate function.
 * @param f array of value and derivatives of the function at
 * the current point (i.e. [f({@link #getValue()}),
 * f'({@link #getValue()}), f''({@link #getValue()})...]).
 * @return f(this)
 * @exception MathIllegalArgumentException if the number of derivatives
 * in the array is not equal to {@link #getOrder() order} + 1
 */
public FieldDerivativeStructure<T> compose(final double ... f)
    throws MathIllegalArgumentException {
    // primitive-double variant of the method above
    MathUtils.checkDimension(f.length, getOrder() + 1);
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().compose(data, 0, f, result.data, 0);
    return result;
}
/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> reciprocal() {
    final FieldDerivativeStructure<T> result = factory.build();
    // the reciprocal is implemented as the power -1
    factory.getCompiler().pow(data, 0, -1, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> sqrt() {
    // square root is the n-th root with n = 2
    return rootN(2);
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> cbrt() {
    // cubic root is the n-th root with n = 3
    return rootN(3);
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> rootN(final int n) {
    final FieldDerivativeStructure<T> result = factory.build()<link>
    final FieldDerivativeStructure<T> result = factory.build();
    // delegate differentiation of the n-th root to the compiler
    factory.getCompiler().rootN(data, 0, n, result.data, 0);
    return result;
}
/** {@inheritDoc} */
@Override
public Field<FieldDerivativeStructure<T>> getField() {
    // the field of derivative structures is cached by the factory
    return factory.getDerivativeField();
}
/** Compute a<sup>x</sup> where a is a double and x a {@link FieldDerivativeStructure}
 * @param a number to exponentiate
 * @param x power to apply
 * @param <T> the type of the field elements
 * @return a<sup>x</sup>
 */
public static <T extends CalculusFieldElement<T>> FieldDerivativeStructure<T> pow(final double a, final FieldDerivativeStructure<T> x) {
    // the exponent carries the derivatives here, so the result is built
    // from the exponent's factory
    final FieldDerivativeStructure<T> result = x.factory.build();
    x.factory.getCompiler().pow(a, x.data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> pow(final double p) {
    final FieldDerivativeStructure<T> result = factory.build();
    // constant (double) exponent: delegate to the compiler
    factory.getCompiler().pow(data, 0, p, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> pow(final int n) {
    final FieldDerivativeStructure<T> result = factory.build();
    // integer exponent: delegate to the compiler
    factory.getCompiler().pow(data, 0, n, result.data, 0);
    return result;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> pow(final FieldDerivativeStructure<T> e)
    throws MathIllegalArgumentException {
    // both base and exponent carry derivatives: structures must be compatible
    factory.checkCompatibility(e.factory);
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().pow(data, 0, e.data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> exp() {
    // all elementary functions below follow the same pattern: allocate a
    // fresh structure and let the compiler fill result.data with the
    // value and all partial derivatives
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().exp(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> expm1() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().expm1(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> log() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().log(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> log1p() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().log1p(data, 0, result.data, 0);
    return result;
}
/** Base 10 logarithm.
 * @return base 10 logarithm of the instance
 */
@Override
public FieldDerivativeStructure<T> log10() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().log10(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> cos() {
    // trigonometric functions delegate to the compiler, which writes the
    // value and all partial derivatives into result.data
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().cos(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> sin() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().sin(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldSinCos<FieldDerivativeStructure<T>> sinCos() {
    // sine and cosine are produced by a single compiler call and returned together
    final FieldDerivativeStructure<T> sin = factory.build();
    final FieldDerivativeStructure<T> cos = factory.build();
    factory.getCompiler().sinCos(data, 0, sin.data, 0, cos.data, 0);
    return new FieldSinCos<>(sin, cos);
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> tan() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().tan(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> acos() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().acos(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> asin() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().asin(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> atan() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().atan(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> atan2(final FieldDerivativeStructure<T> x)
    throws MathIllegalArgumentException {
    // this instance plays the role of y in atan2(y, x)
    factory.checkCompatibility(x.factory);
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().atan2(data, 0, x.data, 0, result.data, 0);
    return result;
}
/** Two arguments arc tangent operation.
 * @param y first argument of the arc tangent
 * @param x second argument of the arc tangent
 * @param <T> the type of the field elements
 * @return atan2(y, x)
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
public static <T extends CalculusFieldElement<T>> FieldDerivativeStructure<T> atan2(final FieldDerivativeStructure<T> y,
                                                                                    final FieldDerivativeStructure<T> x)
    throws MathIllegalArgumentException {
    // static convenience form: delegate to the instance implementation
    return y.atan2(x);
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> cosh() {
    // hyperbolic functions delegate to the compiler, which writes the
    // value and all partial derivatives into result.data
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().cosh(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> sinh() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().sinh(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldSinhCosh<FieldDerivativeStructure<T>> sinhCosh() {
    // hyperbolic sine and cosine are produced by a single compiler call
    final FieldDerivativeStructure<T> sinh = factory.build();
    final FieldDerivativeStructure<T> cosh = factory.build();
    factory.getCompiler().sinhCosh(data, 0, sinh.data, 0, cosh.data, 0);
    return new FieldSinhCosh<>(sinh, cosh);
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> tanh() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().tanh(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> acosh() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().acosh(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> asinh() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().asinh(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> atanh() {
    final FieldDerivativeStructure<T> result = factory.build();
    factory.getCompiler().atanh(data, 0, result.data, 0);
    return result;
}
/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> toDegrees() {
    // radians -> degrees is a linear rescaling, applied entry by entry
    // to the value and to every partial derivative
    final FieldDerivativeStructure<T> converted = factory.build();
    int index = 0;
    for (final T entry : data) {
        converted.data[index++] = entry.toDegrees();
    }
    return converted;
}
/** {@inheritDoc} */
@Override
public FieldDerivativeStructure<T> toRadians() {
    // degrees -> radians is a linear rescaling, applied entry by entry
    final FieldDerivativeStructure<T> converted = factory.build();
    int index = 0;
    for (final T entry : data) {
        converted.data[index++] = entry.toRadians();
    }
    return converted;
}
/** Evaluate Taylor expansion of a derivative structure.
 * @param delta parameters offsets (Δx, Δy, ...)
 * @return value of the Taylor expansion at x + Δx, y + Δy, ...
 * @throws MathRuntimeException if factorials becomes too large
 */
@SafeVarargs
public final T taylor(final T ... delta) throws MathRuntimeException {
    // the compiler evaluates the polynomial built from the stored derivatives
    return factory.getCompiler().taylor(data, 0, delta);
}
/** Evaluate Taylor expansion of a derivative structure.
 * @param delta parameters offsets (Δx, Δy, ...)
 * @return value of the Taylor expansion at x + Δx, y + Δy, ...
 * @throws MathRuntimeException if factorials becomes too large
 */
public T taylor(final double ... delta) throws MathRuntimeException {
    // primitive-double variant of the method above
    return factory.getCompiler().taylor(data, 0, delta);
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final FieldDerivativeStructure<T>[] a,
                                                     final FieldDerivativeStructure<T>[] b)
    throws MathIllegalArgumentException {
    // compute an accurate value, taking care of cancellations
    final T[] aT = MathArrays.buildArray(factory.getValueField(), a.length);
    for (int i = 0; i < a.length; ++i) {
        aT[i] = a[i].getValue();
    }
    final T[] bT = MathArrays.buildArray(factory.getValueField(), b.length);
    for (int i = 0; i < b.length; ++i) {
        bT[i] = b[i].getValue();
    }
    final T accurateValue = aT[0].linearCombination(aT, bT);
    // compute a simple value, with all partial derivatives
    FieldDerivativeStructure<T> simpleValue = a[0].getField().getZero();
    for (int i = 0; i < a.length; ++i) {
        simpleValue = simpleValue.add(a[i].multiply(b[i]));
    }
    // create a result with accurate value and all derivatives (not necessarily as accurate as the value)
    // only entry 0 (the value) is replaced; the derivative entries keep
    // the naively-summed values from simpleValue
    final T[] all = simpleValue.getAllDerivatives();
    all[0] = accurateValue;
    return factory.build(all);
}
/**
 * Compute a linear combination.
 * @param a Factors.
 * @param b Factors.
 * @return <code>Σ<sub>i</sub> a<sub>i</sub> b<sub>i</sub></code>.
 * @throws MathIllegalArgumentException if arrays dimensions don't match
 */
public FieldDerivativeStructure<T> linearCombination(final T[] a, final FieldDerivativeStructure<T>[] b)
    throws MathIllegalArgumentException {
    // compute an accurate value, taking care of cancellations
    final T[] bT = MathArrays.buildArray(factory.getValueField(), b.length);
    for (int i = 0; i < b.length; ++i) {
        bT[i] = b[i].getValue();
    }
    final T accurateValue = bT[0].linearCombination(a, bT);
    // compute a simple value, with all partial derivatives
    FieldDerivativeStructure<T> simpleValue = b[0].getField().getZero();
    for (int i = 0; i < a.length; ++i) {
        simpleValue = simpleValue.add(b[i].multiply(a[i]));
    }
    // create a result with accurate value and all derivatives (not necessarily as accurate as the value)
    // only the value entry benefits from the compensated summation
    final T[] all = simpleValue.getAllDerivatives();
    all[0] = accurateValue;
    return factory.build(all);
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final double[] a, final FieldDerivativeStructure<T>[] b)
    throws MathIllegalArgumentException {
    // compute an accurate value, taking care of cancellations
    final T[] bT = MathArrays.buildArray(factory.getValueField(), b.length);
    for (int i = 0; i < b.length; ++i) {
        bT[i] = b[i].getValue();
    }
    final T accurateValue = bT[0].linearCombination(a, bT);
    // compute a simple value, with all partial derivatives
    FieldDerivativeStructure<T> simpleValue = b[0].getField().getZero();
    for (int i = 0; i < a.length; ++i) {
        simpleValue = simpleValue.add(b[i].multiply(a[i]));
    }
    // create a result with accurate value and all derivatives (not necessarily as accurate as the value)
    final T[] all = simpleValue.getAllDerivatives();
    all[0] = accurateValue;
    return factory.build(all);
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final FieldDerivativeStructure<T> a1, final FieldDerivativeStructure<T> b1,
                                                     final FieldDerivativeStructure<T> a2, final FieldDerivativeStructure<T> b2)
    throws MathIllegalArgumentException {
    // compute an accurate value, taking care of cancellations
    final T accurateValue = a1.getValue().linearCombination(a1.getValue(), b1.getValue(),
                                                            a2.getValue(), b2.getValue());
    // compute a simple value, with all partial derivatives
    final FieldDerivativeStructure<T> simpleValue = a1.multiply(b1).add(a2.multiply(b2));
    // create a result with accurate value and all derivatives (not necessarily as accurate as the value)
    // only the value entry is replaced by the compensated sum
    final T[] all = simpleValue.getAllDerivatives();
    all[0] = accurateValue;
    return factory.build(all);
}
/**
 * Compute a linear combination.
 * @param a1 first factor of the first term
 * @param b1 second factor of the first term
 * @param a2 first factor of the second term
 * @param b2 second factor of the second term
 * @return a<sub>1</sub>×b<sub>1</sub> +
 * a<sub>2</sub>×b<sub>2</sub>
 * @see #linearCombination(double, FieldDerivativeStructure, double, FieldDerivativeStructure)
 * @see #linearCombination(double, FieldDerivativeStructure, double, FieldDerivativeStructure, double, FieldDerivativeStructure, double, FieldDerivativeStructure)
 * @exception MathIllegalArgumentException if number of free parameters or orders are inconsistent
 */
public FieldDerivativeStructure<T> linearCombination(final T a1, final FieldDerivativeStructure<T> b1,
                                                     final T a2, final FieldDerivativeStructure<T> b2)
    throws MathIllegalArgumentException {
    // scalar coefficients: the whole combination can be delegated to the compiler
    factory.checkCompatibility(b1.factory);
    factory.checkCompatibility(b2.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().linearCombination(a1, b1.data, 0,
                                            a2, b2.data, 0,
                                            ds.data, 0);
    return ds;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final double a1, final FieldDerivativeStructure<T> b1,
                                                     final double a2, final FieldDerivativeStructure<T> b2)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(b1.factory);
    factory.checkCompatibility(b2.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().linearCombination(a1, b1.data, 0,
                                            a2, b2.data, 0,
                                            ds.data, 0);
    return ds;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final FieldDerivativeStructure<T> a1, final FieldDerivativeStructure<T> b1,
                                                     final FieldDerivativeStructure<T> a2, final FieldDerivativeStructure<T> b2,
                                                     final FieldDerivativeStructure<T> a3, final FieldDerivativeStructure<T> b3)
    throws MathIllegalArgumentException {
    // compute an accurate value, taking care of cancellations
    final T accurateValue = a1.getValue().linearCombination(a1.getValue(), b1.getValue(),
                                                            a2.getValue(), b2.getValue(),
                                                            a3.getValue(), b3.getValue());
    // compute a simple value, with all partial derivatives
    final FieldDerivativeStructure<T> simpleValue = a1.multiply(b1).add(a2.multiply(b2)).add(a3.multiply(b3));
    // create a result with accurate value and all derivatives (not necessarily as accurate as the value)
    // only the value entry is replaced by the compensated sum
    final T[] all = simpleValue.getAllDerivatives();
    all[0] = accurateValue;
    return factory.build(all);
}
/**
 * Compute a linear combination.
 * @param a1 first factor of the first term
 * @param b1 second factor of the first term
 * @param a2 first factor of the second term
 * @param b2 second factor of the second term
 * @param a3 first factor of the third term
 * @param b3 second factor of the third term
 * @return a<sub>1</sub>×b<sub>1</sub> +
 * a<sub>2</sub>×b<sub>2</sub> + a<sub>3</sub>×b<sub>3</sub>
 * @see #linearCombination(double, FieldDerivativeStructure, double, FieldDerivativeStructure)
 * @see #linearCombination(double, FieldDerivativeStructure, double, FieldDerivativeStructure, double, FieldDerivativeStructure, double, FieldDerivativeStructure)
 * @exception MathIllegalArgumentException if number of free parameters or orders are inconsistent
 */
public FieldDerivativeStructure<T> linearCombination(final T a1, final FieldDerivativeStructure<T> b1,
                                                     final T a2, final FieldDerivativeStructure<T> b2,
                                                     final T a3, final FieldDerivativeStructure<T> b3)
    throws MathIllegalArgumentException {
    // scalar coefficients: delegate the whole combination to the compiler
    factory.checkCompatibility(b1.factory);
    factory.checkCompatibility(b2.factory);
    factory.checkCompatibility(b3.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().linearCombination(a1, b1.data, 0,
                                            a2, b2.data, 0,
                                            a3, b3.data, 0,
                                            ds.data, 0);
    return ds;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final double a1, final FieldDerivativeStructure<T> b1,
                                                     final double a2, final FieldDerivativeStructure<T> b2,
                                                     final double a3, final FieldDerivativeStructure<T> b3)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(b1.factory);
    factory.checkCompatibility(b2.factory);
    factory.checkCompatibility(b3.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().linearCombination(a1, b1.data, 0,
                                            a2, b2.data, 0,
                                            a3, b3.data, 0,
                                            ds.data, 0);
    return ds;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final FieldDerivativeStructure<T> a1, final FieldDerivativeStructure<T> b1,
                                                     final FieldDerivativeStructure<T> a2, final FieldDerivativeStructure<T> b2,
                                                     final FieldDerivativeStructure<T> a3, final FieldDerivativeStructure<T> b3,
                                                     final FieldDerivativeStructure<T> a4, final FieldDerivativeStructure<T> b4)
    throws MathIllegalArgumentException {
    // compute an accurate value, taking care of cancellations
    final T accurateValue = a1.getValue().linearCombination(a1.getValue(), b1.getValue(),
                                                            a2.getValue(), b2.getValue(),
                                                            a3.getValue(), b3.getValue(),
                                                            a4.getValue(), b4.getValue());
    // compute a simple value, with all partial derivatives
    final FieldDerivativeStructure<T> simpleValue = a1.multiply(b1).add(a2.multiply(b2)).add(a3.multiply(b3)).add(a4.multiply(b4));
    // create a result with accurate value and all derivatives (not necessarily as accurate as the value)
    // only the value entry is replaced by the compensated sum
    final T[] all = simpleValue.getAllDerivatives();
    all[0] = accurateValue;
    return factory.build(all);
}
/**
 * Compute a linear combination.
 * @param a1 first factor of the first term
 * @param b1 second factor of the first term
 * @param a2 first factor of the second term
 * @param b2 second factor of the second term
 * @param a3 first factor of the third term
 * @param b3 second factor of the third term
 * @param a4 first factor of the fourth term
 * @param b4 second factor of the fourth term
 * @return a<sub>1</sub>×b<sub>1</sub> +
 * a<sub>2</sub>×b<sub>2</sub> + a<sub>3</sub>×b<sub>3</sub> +
 * a<sub>4</sub>×b<sub>4</sub>
 * @see #linearCombination(double, FieldDerivativeStructure, double, FieldDerivativeStructure)
 * @see #linearCombination(double, FieldDerivativeStructure, double, FieldDerivativeStructure, double, FieldDerivativeStructure)
 * @exception MathIllegalArgumentException if number of free parameters or orders are inconsistent
 */
public FieldDerivativeStructure<T> linearCombination(final T a1, final FieldDerivativeStructure<T> b1,
                                                     final T a2, final FieldDerivativeStructure<T> b2,
                                                     final T a3, final FieldDerivativeStructure<T> b3,
                                                     final T a4, final FieldDerivativeStructure<T> b4)
    throws MathIllegalArgumentException {
    // scalar coefficients: delegate the whole combination to the compiler
    factory.checkCompatibility(b1.factory);
    factory.checkCompatibility(b2.factory);
    factory.checkCompatibility(b3.factory);
    factory.checkCompatibility(b4.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().linearCombination(a1, b1.data, 0,
                                            a2, b2.data, 0,
                                            a3, b3.data, 0,
                                            a4, b4.data, 0,
                                            ds.data, 0);
    return ds;
}
/** {@inheritDoc}
 * @exception MathIllegalArgumentException if number of free parameters
 * or orders do not match
 */
@Override
public FieldDerivativeStructure<T> linearCombination(final double a1, final FieldDerivativeStructure<T> b1,
                                                     final double a2, final FieldDerivativeStructure<T> b2,
                                                     final double a3, final FieldDerivativeStructure<T> b3,
                                                     final double a4, final FieldDerivativeStructure<T> b4)
    throws MathIllegalArgumentException {
    factory.checkCompatibility(b1.factory);
    factory.checkCompatibility(b2.factory);
    factory.checkCompatibility(b3.factory);
    factory.checkCompatibility(b4.factory);
    final FieldDerivativeStructure<T> ds = factory.build();
    factory.getCompiler().linearCombination(a1, b1.data, 0,
                                            a2, b2.data, 0,
                                            a3, b3.data, 0,
                                            a4, b4.data, 0,
                                            ds.data, 0);
    return ds;
}
/** {@inheritDoc}
 */
@Override
public FieldDerivativeStructure<T> getPi() {
    // π is provided by the derivative field singleton
    return factory.getDerivativeField().getPi();
}
}
|
rogeriotadeudosreis/Projeto-Torrentz-Em-Grupo
|
src/br/com/torrentzfilmes/dal/PlanoDal.java
|
<reponame>rogeriotadeudosreis/Projeto-Torrentz-Em-Grupo
/*
* Faculdade de Tecnologia
* ADS - Análise e Desenvolvimento de Sistemas
* Projeto Torrentz Filmes
* Atividade integrando as disciplinas:
* Arquitetura e Projeto de Software;
* Gestão de Projetos;
* Modelagem de Banco de Dados
* Alunos: <NAME>, <NAME>, <NAME> e <NAME>
*/
package br.com.torrentzfilmes.dal;
import br.com.torrentzfilmes.model.Plano;
import br.com.torrentzfilmes.util.Conexao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author roger
*/
/**
 * Data-access object for the {@code tb_planos} table.
 * <p>
 * All SQL statements are parameterized, and JDBC statements/result sets are
 * closed via try-with-resources so handles are not leaked.
 */
public class PlanoDal {

    // shared connection obtained from the Conexao utility
    private final Connection conexao;

    /** Opens the connection provided by the {@code Conexao} utility. */
    public PlanoDal() {
        conexao = Conexao.getConexao();
    }

    /**
     * Inserts a new plan (the id is generated by the database).
     *
     * @param plano plan to persist
     * @throws Exception if the insert fails
     */
    public void addPlano(Plano plano) throws Exception {
        String sql = "INSERT INTO tb_planos (pla_acesso_simultaneo,pla_descricao,"
                + "pla_preco) VALUES (?,?,?)";
        try (PreparedStatement preparedStatement = conexao.prepareStatement(sql)) {
            preparedStatement.setInt(1, plano.getAcessoSimultaneo());
            preparedStatement.setString(2, plano.getDescricao().trim());
            preparedStatement.setBigDecimal(3, plano.getPreco());
            preparedStatement.executeUpdate();
        }
    }

    /**
     * Deletes the plan with the given id (no-op if it does not exist).
     *
     * @param id primary key of the plan
     * @throws Exception if the delete fails
     */
    public void deletePlano(int id) throws Exception {
        String sql = "DELETE FROM tb_planos WHERE pla_iden=?";
        try (PreparedStatement preparedStatement = conexao.prepareStatement(sql)) {
            preparedStatement.setInt(1, id);
            preparedStatement.executeUpdate();
        }
    }

    /**
     * Updates all editable columns of an existing plan.
     *
     * @param plano plan carrying the new values and the target id
     * @throws Exception if the update fails
     */
    public void updatePlano(Plano plano) throws Exception {
        String sql = "UPDATE tb_planos SET pla_acesso_simultaneo=?, pla_descricao=?,"
                + "pla_preco=? WHERE pla_iden=?";
        try (PreparedStatement preparedStatement = conexao.prepareStatement(sql)) {
            preparedStatement.setInt(1, plano.getAcessoSimultaneo());
            preparedStatement.setString(2, plano.getDescricao().trim());
            preparedStatement.setBigDecimal(3, plano.getPreco());
            preparedStatement.setInt(4, plano.getId());
            preparedStatement.executeUpdate();
        }
    }

    /**
     * Loads every plan stored in the table.
     *
     * @return list of all plans (possibly empty)
     * @throws Exception if the query fails
     */
    public List<Plano> getAllPlanos() throws Exception {
        List<Plano> listaPlanos = new ArrayList<>();
        String sql = "SELECT * FROM tb_planos";
        try (Statement statement = conexao.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                listaPlanos.add(toPlano(rs));
            }
        }
        return listaPlanos;
    }

    /**
     * Loads one plan by primary key.
     *
     * @param id primary key
     * @return the plan, or an empty {@code Plano} if no row matches
     *         (kept for backward compatibility with previous behavior)
     * @throws Exception if the query fails
     */
    public Plano getPlanoById(int id) throws Exception {
        Plano plano = new Plano();
        String sql = "SELECT * FROM tb_planos WHERE pla_iden=?";
        try (PreparedStatement preparedStatement = conexao.prepareStatement(sql)) {
            preparedStatement.setInt(1, id);
            try (ResultSet rs = preparedStatement.executeQuery()) {
                if (rs.next()) {
                    plano = toPlano(rs);
                }
            }
        }
        return plano;
    }

    /**
     * Case-insensitive search over the plan descriptions.
     *
     * @param dados search text typed by the user
     * @return plans whose description contains the text
     * @throws Exception when no plan matches
     */
    public ArrayList sourcePlanos(String dados) throws Exception {
        // normalize the input the same way the stored descriptions are
        // normalized below; previously an upper-case input never matched
        String textoDigitado = dados == null ? "" : dados.toLowerCase().trim();
        ArrayList<Plano> resultado = new ArrayList<>();
        for (Plano plano : getAllPlanos()) {
            if (plano.getDescricao().toLowerCase().trim().contains(textoDigitado)) {
                resultado.add(plano);
            }
        }
        if (resultado.isEmpty()) {
            throw new Exception("Registro não encontrado!\n");
        }
        return resultado;
    }

    /**
     * Prefix search used by the "smart search" UI.
     * <p>
     * NOTE: the statement and result set are intentionally left open because
     * the caller consumes the {@link ResultSet} directly; the caller is
     * responsible for closing it.
     *
     * @param nome description prefix
     * @return open result set, or {@code null} if the query failed
     */
    public ResultSet sourceInteligente(String nome) {
        ResultSet rs = null;
        String sql = "SELECT * FROM tb_planos where pla_descricao like ?";
        try {
            PreparedStatement pst = conexao.prepareStatement(sql);
            pst.setString(1, nome + "%");
            rs = pst.executeQuery();
        } catch (Exception e) {
            // previously swallowed silently; at least record the failure
            e.printStackTrace();
        }
        return rs;
    }

    /** Maps the current row of a result set to a {@link Plano} bean. */
    private static Plano toPlano(ResultSet rs) throws SQLException {
        Plano plano = new Plano();
        plano.setId(rs.getInt("pla_iden"));
        plano.setAcessoSimultaneo(rs.getInt("pla_acesso_simultaneo"));
        plano.setDescricao(rs.getString("pla_descricao"));
        plano.setPreco(rs.getBigDecimal("pla_preco"));
        return plano;
    }
}
|
Felon03/CppPrimer
|
Ch 01/1.10.cpp
|
/* Print the integers from 10 down to 0. */
#include<iostream>
/* Print the integers from 10 down to 0, one per line. */
int main5()
{
    for (int value = 10; value >= 0; --value)
    {
        std::cout << value << std::endl;
    }
    return 0;
}
|
BulkSecurityGeneratorProject/Ablams
|
ablams-backend/src/main/java/de/teberhardt/ablams/service/impl/AudioFileMetadataService.java
|
<reponame>BulkSecurityGeneratorProject/Ablams
package de.teberhardt.ablams.service.impl;
import org.jaudiotagger.audio.AudioFile;
import org.jaudiotagger.audio.AudioFileIO;
import org.jaudiotagger.audio.exceptions.CannotReadException;
import org.jaudiotagger.audio.exceptions.InvalidAudioFrameException;
import org.jaudiotagger.audio.exceptions.ReadOnlyFileException;
import org.jaudiotagger.tag.TagException;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.file.Path;
@Service
public class AudioFileMetadataService {
    /**
     * Parses the audio file at the given path with jaudiotagger.
     * <p>
     * NOTE(review): the parsed {@link AudioFile} is currently discarded —
     * this method only proves the file is readable. The commented line
     * below hints that tag extraction is still to be implemented.
     */
    void readAllMetadata(Path p) throws TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException, IOException {
        AudioFile read = AudioFileIO.read(p.toFile());
        //read.getTag().getFields().next().
    }
}
|
hephaistionn/AgeOfCivilization3
|
client/Model/Engine/Entity/Building/Market.js
|
const Entity = require('../Entity');
const ee = require('../../../../services/eventEmitter');
/**
 * Market building: stores food and announces itself so a 'Seller'
 * entity is created for it.
 */
class Market extends Entity {
    constructor(params) {
        super(params);
        // stored food stock; starts at 0 unless restored from params
        this.food = params.food || 0;
        // broadcast creation so the engine spawns a Seller tied to this market
        ee.emit('newEntity', {sourceId: this._id, type: 'Seller'});
    }
    // add harvested food to the market stock
    store(value) {
        this.food += value;
    }
    /**
     * Position/angle a worker should occupy next to the building.
     * @param {number} step work step; step 2 shifts the slot slightly on x
     * @returns {{x: number, y: number, z: number, a: number}} slot pose
     */
    getWorkerSlot(step) {
        const x = this.x + (step === 2 ? 0.2 : 0);
        const z = this.z + 0.4;
        return {x: x, y: this.y, z: z, a: Math.PI / 2}
    }
}
// static gameplay configuration read by the engine
Market.selectable = true;
Market.description = 'This building increase the prosperity of your city';
Market.tile_x = 1;
Market.tile_z = 1;
Market.cost = {wood: 5, stone: 5};
Market.require = {inactive: 2};
Market.enabled = {wood: 5, population: 4};
Market.walkable = 0;
// NOTE(review): property name is misspelled ("constuctDuration"); the engine
// presumably reads this exact key, so renaming must be coordinated with
// the consumer code -- do not fix locally
Market.constuctDuration = 1000;
Market.instances = [];
module.exports = Market;
|
davejones74/ccd-data-store-api
|
src/main/java/uk/gov/hmcts/ccd/domain/service/callbacks/EventTokenService.java
|
<reponame>davejones74/ccd-data-store-api<filename>src/main/java/uk/gov/hmcts/ccd/domain/service/callbacks/EventTokenService.java
package uk.gov.hmcts.ccd.domain.service.callbacks;
import uk.gov.hmcts.ccd.ApplicationParams;
import uk.gov.hmcts.ccd.domain.model.callbacks.EventTokenProperties;
import uk.gov.hmcts.ccd.domain.model.definition.CaseDetails;
import uk.gov.hmcts.ccd.domain.model.definition.CaseEvent;
import uk.gov.hmcts.ccd.domain.model.definition.CaseType;
import uk.gov.hmcts.ccd.domain.model.definition.Jurisdiction;
import uk.gov.hmcts.ccd.domain.service.common.CaseService;
import uk.gov.hmcts.ccd.endpoint.exceptions.BadRequestException;
import uk.gov.hmcts.ccd.endpoint.exceptions.EventTokenException;
import uk.gov.hmcts.ccd.endpoint.exceptions.ResourceNotFoundException;
import uk.gov.hmcts.ccd.infrastructure.RandomKeyGenerator;
import java.util.Date;
import com.google.common.collect.Maps;
import io.jsonwebtoken.Claims;
import io.jsonwebtoken.ExpiredJwtException;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import io.jsonwebtoken.SignatureException;
import io.jsonwebtoken.impl.TextCodec;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class EventTokenService {

    /** Placeholder case used when issuing/validating a token before a case exists. */
    private static final CaseDetails EMPTY_CASE = new CaseDetails();

    static {
        EMPTY_CASE.setData(Maps.newHashMap());
    }

    private final RandomKeyGenerator randomKeyGenerator;
    private final String tokenSecret;
    private final CaseService caseService;

    @Autowired
    public EventTokenService(final RandomKeyGenerator randomKeyGenerator,
                             final ApplicationParams applicationParams,
                             final CaseService caseService) {
        this.randomKeyGenerator = randomKeyGenerator;
        this.tokenSecret = applicationParams.getTokenSecret();
        this.caseService = caseService;
    }

    /**
     * Generates an event token for a case-creation trigger (no existing case):
     * delegates to the full overload with {@link #EMPTY_CASE}.
     */
    public String generateToken(final String uid,
                                final CaseEvent event,
                                final Jurisdiction jurisdiction,
                                final CaseType caseType) {
        return generateToken(uid, EMPTY_CASE, event, jurisdiction, caseType);
    }

    /**
     * Issues an HS256-signed JWT binding the user, event, jurisdiction, case type
     * and a hash of the case data, so that the subsequent event submission can be
     * checked against the start trigger that produced it.
     *
     * @param uid          id of the user the token is issued for (JWT subject)
     * @param caseDetails  case the event applies to ({@link #EMPTY_CASE} for creation)
     * @param event        the event trigger being started
     * @param jurisdiction jurisdiction of the case type
     * @param caseType     case type of the case
     * @return compact serialized JWT
     */
    public String generateToken(final String uid,
                                final CaseDetails caseDetails,
                                final CaseEvent event,
                                final Jurisdiction jurisdiction,
                                final CaseType caseType) {
        return Jwts.builder()
            .setId(randomKeyGenerator.generate())
            .setSubject(uid)
            .setIssuedAt(new Date())
            .signWith(SignatureAlgorithm.HS256, TextCodec.BASE64.encode(tokenSecret))
            .claim(EventTokenProperties.CASE_ID, caseDetails.getId())
            .claim(EventTokenProperties.TRIGGER_EVENT_ID, event.getId())
            .claim(EventTokenProperties.CASE_TYPE_ID, caseType.getId())
            .claim(EventTokenProperties.JURISDICTION_ID, jurisdiction.getId())
            .claim(EventTokenProperties.CASE_STATE, caseDetails.getState())
            .claim(EventTokenProperties.CASE_VERSION, caseService.hashData(caseDetails))
            .claim(EventTokenProperties.ENTITY_VERSION, caseDetails.getVersion())
            .compact();
    }

    /**
     * Parses and signature-checks a token, returning its claims.
     *
     * @throws EventTokenException when the token is expired or its signature is invalid
     */
    public EventTokenProperties parseToken(final String token) {
        try {
            final Claims claims = Jwts.parser()
                .setSigningKey(TextCodec.BASE64.encode(tokenSecret))
                .parseClaimsJws(token).getBody();
            return new EventTokenProperties(
                claims.getSubject(),
                toString(claims.get(EventTokenProperties.CASE_ID)),
                toString(claims.get(EventTokenProperties.JURISDICTION_ID)),
                toString(claims.get(EventTokenProperties.TRIGGER_EVENT_ID)),
                toString(claims.get(EventTokenProperties.CASE_TYPE_ID)),
                toString(claims.get(EventTokenProperties.CASE_VERSION)),
                toString(claims.get(EventTokenProperties.CASE_STATE)),
                toString(claims.get(EventTokenProperties.ENTITY_VERSION)));
        } catch (ExpiredJwtException | SignatureException e) {
            throw new EventTokenException(e.getMessage());
        }
    }

    /** Validates a token for a case-creation trigger (no existing case). */
    public void validateToken(final String token,
                              final String uid,
                              final CaseEvent event,
                              final Jurisdiction jurisdiction,
                              final CaseType caseType) {
        validateToken(token, uid, EMPTY_CASE, event, jurisdiction, caseType);
    }

    /**
     * Validates that the token was issued for this user/event/case/jurisdiction/type.
     * Each claim is checked only when present in the token (null means "not bound").
     *
     * @throws BadRequestException       when the token is missing
     * @throws ResourceNotFoundException when any present claim does not match
     * @throws SecurityException         when the token is expired or tampered with
     */
    public void validateToken(final String token,
                              final String uid,
                              final CaseDetails caseDetails,
                              final CaseEvent event,
                              final Jurisdiction jurisdiction,
                              final CaseType caseType) {
        if (token == null || token.isEmpty()) {
            throw new BadRequestException("Missing start trigger token");
        }
        try {
            final EventTokenProperties eventTokenProperties = parseToken(token);
            // Bug fix: the original expression relied on `a == null || a.equals(x) && ...`
            // which, because `&&` binds tighter than `||` in Java, did NOT group each
            // null-guard with its own equality check. Each claim comparison is now an
            // explicitly parenthesised, independently short-circuited boolean.
            final boolean eventMatches = eventTokenProperties.getEventId() == null
                || eventTokenProperties.getEventId().equalsIgnoreCase(event.getId());
            final boolean caseMatches = eventTokenProperties.getCaseId() == null
                || eventTokenProperties.getCaseId().equalsIgnoreCase(caseDetails.getId().toString());
            final boolean jurisdictionMatches = eventTokenProperties.getJurisdictionId() == null
                || eventTokenProperties.getJurisdictionId().equalsIgnoreCase(jurisdiction.getId());
            final boolean caseTypeMatches = eventTokenProperties.getCaseTypeId() == null
                || eventTokenProperties.getCaseTypeId().equalsIgnoreCase(caseType.getId());
            final boolean uidMatches = eventTokenProperties.getUid() == null
                || eventTokenProperties.getUid().equalsIgnoreCase(uid);
            if (!(eventMatches && caseMatches && jurisdictionMatches && caseTypeMatches && uidMatches)) {
                throw new ResourceNotFoundException("Cannot find matching start trigger");
            }
            // Restore the optimistic-lock version captured when the token was issued.
            if (eventTokenProperties.getEntityVersion() != null) {
                caseDetails.setVersion(Integer.parseInt(eventTokenProperties.getEntityVersion()));
            }
        } catch (EventTokenException e) {
            throw new SecurityException("Token is not valid");
        }
    }

    /**
     * @param object Object to convert to string
     * @return <code>object.toString()</code> when object is not null; <code>null</code> otherwise
     */
    private String toString(Object object) {
        if (null == object) {
            return null;
        }
        return object.toString();
    }
}
|
polly3d/leetcode
|
897. Increasing Order Search Tree/897. Increasing Order Search Tree.go
|
/**
* Definition for a binary tree node.
* type TreeNode struct {
* Val int
* Left *TreeNode
* Right *TreeNode
* }
*/
// increasingBST rebuilds the BST rooted at root as a right-leaning chain
// whose nodes carry the tree's values in ascending (in-order) sequence.
// New nodes are allocated; the input tree is left untouched.
func increasingBST(root *TreeNode) *TreeNode {
	if root == nil {
		return nil
	}
	values := []int{}
	inorder(root, &values)
	// Thread the collected values onto a dummy head, then drop the head.
	dummy := &TreeNode{}
	tail := dummy
	for _, val := range values {
		tail.Right = &TreeNode{Val: val}
		tail = tail.Right
	}
	return dummy.Right
}
// inorder appends the values of the subtree rooted at node to *acc in
// left-root-right order (ascending for a BST).
func inorder(node *TreeNode, acc *[]int) {
	if node == nil {
		return
	}
	inorder(node.Left, acc)
	*acc = append(*acc, node.Val)
	inorder(node.Right, acc)
}
|
kumasento/gradient-scaling
|
chainerlp/hooks/zero_mult_hook.py
|
<filename>chainerlp/hooks/zero_mult_hook.py<gh_stars>1-10
""" Hook to sample the zero multiplications """
import os
import pickle
import numpy as np
import pandas as pd
from collections import OrderedDict
import chainer
import chainer.links as L
import chainer.functions as F
from chainer import backend
from chainer import link_hook
from chainer import function_hook
from chainer.training import Trainer
class ZeroMultFuncHook(function_hook.FunctionHook):
    """Chainer function hook that samples "zero multiplications".

    Every ``sample_per_n_iteration`` training iterations the hook inspects
    each Convolution2D forward call and counts multiplications whose product
    is exactly zero even though both operands are nonzero (i.e. underflow to
    zero). Results accumulate in ``self.results`` and can be written to CSV
    via :meth:`snapshot`.
    """
    name = "ZeroMultFuncHook"
    def __init__(self, trainer=None, sample_per_n_iteration=100, snapshot_dir=None):
        """CTOR.

        :param trainer: the running ``chainer.training.Trainer`` (required).
        :param sample_per_n_iteration: sampling period in iterations.
        :param snapshot_dir: directory where :meth:`snapshot` writes its CSV.
        """
        assert isinstance(trainer, Trainer)
        self.trainer = trainer
        self.sample_per_n_iteration = sample_per_n_iteration
        self.results = []  # record the final result (one row per sampled conv call)
        # Per-label call counters, reset at the start of each sampled iteration.
        self.counters = OrderedDict()
        self.n_iter = 0
        self.snapshot_dir = snapshot_dir
    @property
    def optim(self):
        # The "main" optimizer of the trainer's updater.
        optims = self.trainer.updater.get_all_optimizers()
        return optims["main"]
    @property
    def current_iteration(self):
        return self.trainer.updater.iteration
    @property
    def current_epoch(self):
        return self.optim.epoch
    def inc_counter(self, func):
        """ Increase the counter of the current label func """
        # First call for a label yields id 0, subsequent calls 1, 2, ...
        if func.label not in self.counters:
            self.counters[func.label] = 0
        else:
            self.counters[func.label] += 1
        return self.counters[func.label]
    def reset_counter(self):
        self.counters.clear()
    def forward_preprocess(self, func, in_data):
        """ called before running the forward pass """
        # Only sample on iterations congruent to 1 modulo the sampling period.
        if self.current_iteration % self.sample_per_n_iteration != 1:
            return None
        if self.n_iter != self.current_iteration:  # reset the counter
            self.n_iter = self.current_iteration
            self.reset_counter()
        if func.label in ["Convolution2DFunction"]:
            self.process_convolution2d(func, in_data)
    def process_convolution2d(self, func, in_data):
        """ Work on the convolution2d input. """
        assert len(in_data) == 2
        func_id = self.inc_counter(func)
        # X: conv input, W: filter bank. Assumes X is (N, C, H, W) and
        # W is (out_ch, in_ch, kH, kW) — standard Convolution2D layout.
        X, W = in_data
        xp = backend.get_array_module(X)
        ksize = W.shape[2]
        # NOTE(review): assumes stride 1 and "same" padding for 3x3 kernels,
        # zero padding otherwise — confirm against the actual layer config.
        stride = 1
        pad = 1 if ksize == 3 else 0
        # im2col unfolds patches so each output position becomes one row;
        # X_ ends up (N * H_out * W_out, C * kH * kW).
        X_ = F.im2col(X, ksize, stride=stride, pad=pad).reshape(
            [X.shape[0], -1, X.shape[2] * X.shape[3]]
        )
        X_ = F.transpose(X_, axes=(0, 2, 1)).reshape([-1, X_.shape[1]])
        X_ = X_.array
        W_ = W.reshape([W.shape[0], -1])
        # Nonzero masks: a "zero mult" only counts when both operands are nonzero.
        W_nz = (W_ != 0).astype("bool")
        X_nz = (X_ != 0).astype("bool")
        n_zm = 0
        for i in range(W_.shape[0]):
            M = xp.multiply(X_, W_[i, :])  # multiply
            # zero mult: product is 0 while both factors are nonzero
            ZM = xp.logical_and(M == 0, xp.logical_and(W_nz[i, :], X_nz))
            n_zm += ZM.sum()
        # One result row per sampled conv call; n_total is the count of all
        # scalar multiplications performed by this convolution.
        self.results.append(
            [
                self.current_epoch,
                self.current_iteration,
                func_id,
                func.label,
                n_zm.item(),
                W_.shape[0] * X_.shape[0] * X_.shape[1],
            ]
        )
    def snapshot(self):
        """ Take a snapshot of zero mult results """
        fp = os.path.join(self.snapshot_dir, "zero_mult.csv")
        df = pd.DataFrame(
            self.results,
            columns=["n_epoch", "n_iter", "func_id", "func_label", "n_zm", "n_total"],
        )
        df.to_csv(fp)
|
prisms-center/materialscommons.org
|
backend/servers/mcapid/config/routes.js
|
<reponame>prisms-center/materialscommons.org
// RESTful route map for the actionhero API: maps HTTP verb + path to action names.
exports['default'] = {
    routes: (api) => {
        return {
            // Public, read-only endpoints for published datasets.
            get: [
                {path: '/datasets/views/all', action: 'allPublishedDatasets'},
                {path: '/datasets/views/top', action: 'topViewedPublishedDatasets'},
                {path: '/datasets/views/recent', action: 'recentlyPublishedDatasets'},
                {path: '/datasets/:dataset_id', action: 'getPublishedDataset'}
            ]
            /* ---------------------
            routes.js

            For web clients (http and https) you can define an optional RESTful mapping to help route requests to actions.
            If the client doesn't specify and action in a param, and the base route isn't a named action, the action will attempt to be discerned from this routes.js file.
            Learn more here: http://www.actionherojs.com/docs/#routes

            examples:

            get: [
              {path: '/datasets/views/all', action: 'allPublishedDatasets'},
              {path: '/datasets/views/top', action: 'topViewedPublishedDatasets'},
              {path: '/datasets/views/recent', action: 'recentlyPublishedDatasets'},
              {path: '/datasets/:dataset_id', action: 'getPublishedDataset'}
            ],

            post: [
              { path: '/login/:userID(^\\d{3}$)', action: 'login' } // (POST) /api/login/123
            ],

            all: [
              { path: '/user/:userID', action: 'user', matchTrailingPathParts: true } // (*) /api/user/123, api/user/123/stuff
            ]
            ---------------------- */
        }
    }
}
|
hugoresende27/Java
|
Guanabara_JavaIntro/TesteFuncao02/src/testefuncao02/Operacoes.java
|
<reponame>hugoresende27/Java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package testefuncao02;
/**
*
* @author Hugo
*
*/
/**
 * Utility operations for the Guanabara Java-intro exercise.
 */
public class Operacoes {

    /**
     * Builds a space-separated counting string from {@code i} up to and including
     * {@code f} — e.g. {@code contador(2, 5)} returns {@code "2 3 4 5 "}.
     *
     * <p>Bug fix: the original implementation ignored the {@code i} parameter and
     * always counted from 0, contradicting its own comment ("prints from the start
     * to the end of parameters i and f"). The loop now starts at {@code i}.
     * Also switched to {@link StringBuilder} to avoid quadratic string
     * concatenation in the loop.</p>
     *
     * @param i first number of the sequence (inclusive)
     * @param f last number of the sequence (inclusive); if {@code f < i} the
     *          result is the empty string
     * @return the numbers from i to f, each followed by a single space
     */
    public static String contador(int i, int f) {
        StringBuilder sb = new StringBuilder();
        for (int c = i; c <= f; c++) {
            sb.append(c).append(' ');
        }
        return sb.toString();
    }
}
|
Hoppelite/active_campaign_webhooks
|
lib/active_campaign/webhooks/request/base_request.rb
|
# frozen_string_literal: true
module ActiveCampaign
  module Webhooks
    module Request
      # Base Request
      #
      # Attribute set shared by every ActiveCampaign webhook request payload.
      # Concrete request classes extend this with event-specific attributes.
      class BaseRequest < BaseRecord
        # @return [String]
        attribute :type
        # @return [DateTime]
        attribute :date_time, :DateTime
        # @return [String]
        attribute :initiated_from
        # @return [String]
        attribute :initiated_by
        # @return [Integer]
        attribute :list, :Integer
      end
    end
  end
end
|
philip-alldredge/AGREE
|
com.rockwellcollins.atc.agree.analysis/src/com/rockwellcollins/atc/agree/analysis/handlers/VerifyRealizabilityHandler.java
|
package com.rockwellcollins.atc.agree.analysis.handlers;
import org.eclipse.jface.preference.IPreferenceStore;
import com.rockwellcollins.atc.agree.analysis.Activator;
import com.rockwellcollins.atc.agree.analysis.AgreeException;
import com.rockwellcollins.atc.agree.analysis.preferences.PreferenceConstants;
/**
 * Command handler for AGREE's "Verify Realizability" action: a non-recursive,
 * non-monolithic verification job that requires the Z3 solver.
 */
public class VerifyRealizabilityHandler extends VerifyHandler {

    @Override
    protected boolean isRecursive() {
        return false;
    }

    @Override
    protected boolean isMonolithic() {
        return false;
    }

    @Override
    protected String getJobName() {
        return "AGREE - Verify Realizability";
    }

    @Override
    protected boolean isRealizability() {
        final IPreferenceStore preferences = Activator.getDefault().getPreferenceStore();
        final String selectedSolver = preferences.getString(PreferenceConstants.PREF_SOLVER);
        // Realizability checking is only implemented for Z3; any other
        // configured solver is rejected up front.
        if (PreferenceConstants.SOLVER_Z3.equals(selectedSolver)) {
            return true;
        }
        throw new AgreeException("You must select Z3 as your solver to check realizability.");
    }
}
|
Hyperfoil/Hyperfoil
|
api/src/main/java/io/hyperfoil/api/session/ObjectAccess.java
|
<reponame>Hyperfoil/Hyperfoil<filename>api/src/main/java/io/hyperfoil/api/session/ObjectAccess.java
package io.hyperfoil.api.session;
/**
 * Write access to a session variable holding an arbitrary object value.
 */
public interface ObjectAccess extends WriteAccess {
    /**
     * Set the variable in the given session to {@code value}.
     *
     * @param session Session with variables.
     * @param value   New value for the variable.
     */
    void setObject(Session session, Object value);

    /**
     * Make variable set without changing its (pre-allocated) value.
     *
     * @param session Session with variables.
     * @return Variable value
     */
    Object activate(Session session);
}
|
kubeup/archon2
|
vendor/k8s.io/heapster/metrics/sinks/metric/metric_sink_test.go
|
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metricsink
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"k8s.io/heapster/metrics/core"
)
// makeBatches builds three DataBatch fixtures timestamped 180s, 60s and 20s
// before now, all for pod metric set `key` in namespace ns1 (batch3 also adds
// `otherKey`). Batches 2 and 3 additionally carry labeled metrics so tests can
// exercise label-based retrieval.
func makeBatches(now time.Time, key, otherKey string) (core.DataBatch, core.DataBatch, core.DataBatch) {
	// Oldest batch: plain gauges m1/m2 only.
	batch1 := core.DataBatch{
		Timestamp: now.Add(-180 * time.Second),
		MetricSets: map[string]*core.MetricSet{
			key: {
				Labels: map[string]string{
					core.LabelMetricSetType.Key: core.MetricSetTypePod,
					core.LabelPodNamespace.Key:  "ns1",
				},
				MetricValues: map[string]core.MetricValue{
					"m1": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   60,
					},
					"m2": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   666,
					},
				},
			},
		},
	}
	// Middle batch: same gauges plus two distinct labeled metrics.
	batch2 := core.DataBatch{
		Timestamp: now.Add(-60 * time.Second),
		MetricSets: map[string]*core.MetricSet{
			key: {
				Labels: map[string]string{
					core.LabelMetricSetType.Key: core.MetricSetTypePod,
					core.LabelPodNamespace.Key:  "ns1",
				},
				MetricValues: map[string]core.MetricValue{
					"m1": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   40,
					},
					"m2": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   444,
					},
				},
				LabeledMetrics: []core.LabeledMetric{
					{
						Name:   "somelblmetric",
						Labels: map[string]string{"lbl1": "val1.1", "lbl2": "val2.1"},
						MetricValue: core.MetricValue{
							ValueType:  core.ValueInt64,
							MetricType: core.MetricGauge,
							IntValue:   8675,
						},
					},
					{
						Name:   "otherlblmetric",
						Labels: map[string]string{"lbl1": "val1.1", "lbl2": "val2.1"},
						MetricValue: core.MetricValue{
							ValueType:  core.ValueInt64,
							MetricType: core.MetricGauge,
							IntValue:   1234,
						},
					},
				},
			},
		},
	}
	// Newest batch: adds a second label combination for somelblmetric and a
	// second metric set under otherKey.
	batch3 := core.DataBatch{
		Timestamp: now.Add(-20 * time.Second),
		MetricSets: map[string]*core.MetricSet{
			key: {
				Labels: map[string]string{
					core.LabelMetricSetType.Key: core.MetricSetTypePod,
					core.LabelPodNamespace.Key:  "ns1",
				},
				MetricValues: map[string]core.MetricValue{
					"m1": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   20,
					},
					"m2": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   222,
					},
				},
				LabeledMetrics: []core.LabeledMetric{
					{
						Name:   "somelblmetric",
						Labels: map[string]string{"lbl1": "val1.1", "lbl2": "val2.1"},
						MetricValue: core.MetricValue{
							ValueType:  core.ValueInt64,
							MetricType: core.MetricGauge,
							IntValue:   309,
						},
					},
					{
						Name:   "somelblmetric",
						Labels: map[string]string{"lbl1": "val1.2", "lbl2": "val2.1"},
						MetricValue: core.MetricValue{
							ValueType:  core.ValueInt64,
							MetricType: core.MetricGauge,
							IntValue:   5678,
						},
					},
				},
			},
			otherKey: {
				Labels: map[string]string{
					core.LabelMetricSetType.Key: core.MetricSetTypePod,
					core.LabelPodNamespace.Key:  "ns1",
				},
				MetricValues: map[string]core.MetricValue{
					"m1": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   123,
					},
				},
			},
		},
	}
	return batch1, batch2, batch3
}
// TestGetMetrics exports three batches into a sink with a 45s short store
// (non-whitelisted metrics) and a 120s long store (whitelisted "m1"), then
// checks window- and store-based eviction via GetMetric/GetMetricNames.
func TestGetMetrics(t *testing.T) {
	now := time.Now()
	key := core.PodKey("ns1", "pod1")
	otherKey := core.PodKey("ns1", "other")
	batch1, batch2, batch3 := makeBatches(now, key, otherKey)
	metrics := NewMetricSink(45*time.Second, 120*time.Second, []string{"m1"})
	metrics.ExportData(&batch1)
	metrics.ExportData(&batch2)
	metrics.ExportData(&batch3)

	//batch1 is discarded by long store
	result1 := metrics.GetMetric("m1", []string{key}, now.Add(-120*time.Second), now)
	assert.Equal(t, 2, len(result1[key]))
	assert.Equal(t, int64(40), result1[key][0].MetricValue.IntValue)
	assert.Equal(t, int64(20), result1[key][1].MetricValue.IntValue)
	assert.Equal(t, 1, len(metrics.GetMetric("m1", []string{otherKey}, now.Add(-120*time.Second), now)[otherKey]))

	//batch1 is discarded by long store and batch2 doesn't belong to time window
	assert.Equal(t, 1, len(metrics.GetMetric("m1", []string{key}, now.Add(-30*time.Second), now)[key]))

	//batch1 and batch2 are discarded by short store
	assert.Equal(t, 1, len(metrics.GetMetric("m2", []string{key}, now.Add(-120*time.Second), now)[key]))

	//nothing is in time window
	assert.Equal(t, 0, len(metrics.GetMetric("m2", []string{key}, now.Add(-10*time.Second), now)[key]))

	metricNames := metrics.GetMetricNames(key)
	assert.Equal(t, 2, len(metricNames))
	assert.Contains(t, metricNames, "m1")
	assert.Contains(t, metricNames, "m2")
}
// TestGetLabeledMetrics checks that GetLabeledMetric filters by the exact
// label set: only batch3's (val1.1, val2.1) sample of "somelblmetric" survives
// the short store, so a single timestamped value (309) is expected.
func TestGetLabeledMetrics(t *testing.T) {
	now := time.Now().UTC()
	key := core.PodKey("ns1", "pod1")
	otherKey := core.PodKey("ns1", "other")
	batch1, batch2, batch3 := makeBatches(now, key, otherKey)
	metrics := NewMetricSink(45*time.Second, 120*time.Second, []string{"m1"})
	metrics.ExportData(&batch1)
	metrics.ExportData(&batch2)
	metrics.ExportData(&batch3)

	result := metrics.GetLabeledMetric("somelblmetric", map[string]string{"lbl1": "val1.1", "lbl2": "val2.1"}, []string{key}, now.Add(-120*time.Second), now)
	assert.Equal(t, []core.TimestampedMetricValue{
		{
			Timestamp: now.Add(-20 * time.Second),
			MetricValue: core.MetricValue{
				ValueType:  core.ValueInt64,
				MetricType: core.MetricGauge,
				IntValue:   309,
			},
		},
	}, result[key])
}
// TestGetNames verifies the name-discovery helpers (GetPods,
// GetPodsFromNamespace, GetMetricSetKeys) after exporting a single batch with
// two pods in different namespaces.
func TestGetNames(t *testing.T) {
	now := time.Now()
	key := core.PodKey("ns1", "pod1")
	otherKey := core.PodKey("ns1", "other")

	batch := core.DataBatch{
		Timestamp: now.Add(-20 * time.Second),
		MetricSets: map[string]*core.MetricSet{
			key: {
				Labels: map[string]string{
					core.LabelMetricSetType.Key: core.MetricSetTypePod,
					core.LabelPodNamespace.Key:  "ns1",
					core.LabelNamespaceName.Key: "ns1",
					core.LabelPodName.Key:       "pod1",
				},
				MetricValues: map[string]core.MetricValue{
					"m1": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   20,
					},
					"m2": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   222,
					},
				},
			},
			// NOTE: keyed under otherKey (ns1/other) but labeled ns2/pod2 —
			// the name helpers read the labels, not the map key.
			otherKey: {
				Labels: map[string]string{
					core.LabelMetricSetType.Key: core.MetricSetTypePod,
					core.LabelPodNamespace.Key:  "ns2",
					core.LabelNamespaceName.Key: "ns2",
					core.LabelPodName.Key:       "pod2",
				},
				MetricValues: map[string]core.MetricValue{
					"m1": {
						ValueType:  core.ValueInt64,
						MetricType: core.MetricGauge,
						IntValue:   123,
					},
				},
			},
		},
	}
	metrics := NewMetricSink(45*time.Second, 120*time.Second, []string{"m1"})
	metrics.ExportData(&batch)

	assert.Contains(t, metrics.GetPods(), "ns1/pod1")
	assert.Contains(t, metrics.GetPods(), "ns2/pod2")
	assert.Contains(t, metrics.GetPodsFromNamespace("ns1"), "pod1")
	assert.NotContains(t, metrics.GetPodsFromNamespace("ns1"), "pod2")
	assert.Contains(t, metrics.GetMetricSetKeys(), key)
	assert.Contains(t, metrics.GetMetricSetKeys(), otherKey)
}
|
RubenDguez/GuitarStore
|
project0/src/main/java/com/revature/guitarstore/utils/DAOUtils.java
|
package com.revature.guitarstore.utils;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.revature.guitarstore.exceptions.GuitarStoreException;
/**
 * Shared JDBC lookup helpers and field-validation rules for the guitar-store DAOs.
 */
public class DAOUtils {

    final static int MIN_CODE_LENGTH = 3;
    final static int MAX_CODE_LENGTH = 10;
    final static int MIN_DESCRIPTION_LENGTH = 5;
    final static int MAX_DESCRIPTION_LENGTH = 255;

    // Fixed: was registered under DBConn.class, which misattributed log output.
    private final static Logger logger = LogManager.getLogger(DAOUtils.class);

    /** @return true when a row with the given UNIQUEID exists in {@code table}. */
    public static boolean uniqueIdExists(int id, String table) {
        return rowExists(table, "UNIQUEID", id);
    }

    /** @return true when a row with the given CODE exists in {@code table}. */
    public static boolean isCodeDuplicated(String code, String table) {
        return rowExists(table, "CODE", code);
    }

    /** @return true when a row with the given DESCRIPTION exists in {@code table}. */
    public static boolean isDescriptionDuplicated(String description, String table) {
        return rowExists(table, "DESCRIPTION", description);
    }

    /**
     * Shared existence query extracted from the three public lookups.
     * Statement and result set are now closed via try-with-resources
     * (the originals relied on connection close to reclaim them).
     *
     * <p>SECURITY NOTE: {@code table} and {@code column} are concatenated into
     * the SQL because identifiers cannot be bound as parameters — they must
     * only ever come from trusted, hard-coded call sites, never user input.</p>
     *
     * @return true when a matching row exists; false on no match or SQL error
     *         (errors are logged, matching the original best-effort behavior)
     */
    private static boolean rowExists(String table, String column, Object value) {
        String sql = "SELECT * FROM " + table + " WHERE " + column + " = ?";
        try (Connection conn = DBConn.getConnection();
             PreparedStatement stmt = conn.prepareStatement(sql)) {
            stmt.setObject(1, value);
            try (ResultSet rs = stmt.executeQuery()) {
                return rs.next();
            }
        } catch (SQLException e) {
            logger.error(e.getMessage());
        }
        return false;
    }

    /**
     * Validates a product code: non-empty and between {@link #MIN_CODE_LENGTH}
     * and {@link #MAX_CODE_LENGTH} characters.
     *
     * @return true when valid
     * @throws GuitarStoreException describing the violated rule
     */
    public static boolean isValidCodeField(String value) throws GuitarStoreException {
        // Fixed: `value == ""` compared references (only matches the interned
        // literal); also test null first so the length checks cannot NPE.
        if (value == null || value.isEmpty())
            throw new GuitarStoreException("Code must not be empty.");

        if (value.length() < MIN_CODE_LENGTH)
            throw new GuitarStoreException("Code must not be less than " + MIN_CODE_LENGTH + " characters");

        if (value.length() > MAX_CODE_LENGTH)
            throw new GuitarStoreException("Code must not be greater than " + MAX_CODE_LENGTH + " characters");

        return true;
    }

    /**
     * Validates a product description: non-empty and between
     * {@link #MIN_DESCRIPTION_LENGTH} and {@link #MAX_DESCRIPTION_LENGTH} characters.
     *
     * @return true when valid
     * @throws GuitarStoreException describing the violated rule
     */
    public static boolean isValidDescriptionField(String value) throws GuitarStoreException {
        if (value == null || value.isEmpty())
            throw new GuitarStoreException("Description must not be empty.");

        if (value.length() < MIN_DESCRIPTION_LENGTH)
            throw new GuitarStoreException(
                    "Description must not be less than " + MIN_DESCRIPTION_LENGTH + " characters");

        // Fixed: the error message previously reported MAX_CODE_LENGTH (10)
        // instead of the actual description limit.
        if (value.length() > MAX_DESCRIPTION_LENGTH)
            throw new GuitarStoreException("Description must not be greater than " + MAX_DESCRIPTION_LENGTH + " characters");

        return true;
    }
}
|
project2WMDD/Beerfondbackend
|
routes/brewery.js
|
<gh_stars>0
const router = require('express').Router();
const { createBrewery, getBrewery, getAllBreweries } = require('../controllers/breweryController');
// Brewery endpoints: GET /:id → one brewery; GET / → list; POST / → create.
router.get('/:id', getBrewery);
router.get('/', getAllBreweries);
router.post('/', createBrewery);

module.exports = router;
|
unepwcmc/protectedplanet-api
|
api/v3/protected_areas.rb
|
<filename>api/v3/protected_areas.rb
require 'models/protected_area'
# Grape endpoints for protected areas (v3): paginated listing, filtered search,
# the BIOPAMA (ACP) subset, and single-record lookup by WDPA id. All routes
# require an authenticated API token.
class API::V3::ProtectedAreas < Grape::API
  include Grape::Kaminari

  before do
    authenticate!
  end

  # Surface parameter-validation failures as HTTP 400 rather than 500.
  rescue_from Grape::Exceptions::ValidationErrors do |e|
    error! e, 400
  end

  # == annotations
  ################
  desc "Get all protected areas, paginated."
  paginate per_page: 25, max_per_page: 50
  params { optional :with_geometry, default: false, type: Boolean }
  # == body
  #########
  get rabl: "v3/views/protected_areas" do
    collection = ProtectedArea
    # Geometry columns are heavy; skip them unless explicitly requested.
    collection = collection.without_geometry unless params[:with_geometry]

    @with_geometry = params[:with_geometry]
    @protected_areas = paginate(collection)
  end

  # == annotations
  ################
  desc "Search for a subset of protected areas."
  paginate per_page: 25, max_per_page: 50
  params do
    optional :country, type: String, regexp: /[a-zA-Z]{3}/
    optional :marine, type: Boolean
    optional :is_green_list, type: Boolean
    optional :designation, type: Integer
    optional :jurisdiction, type: Integer
    optional :governance, type: Integer
    optional :iucn_category, type: Integer
    optional :with_geometry, default: false, type: Boolean

    # At least one real filter must be supplied (with_geometry alone is not one).
    at_least_one_of :country, :marine, :is_green_list, :designation,
      :jurisdiction, :governance, :iucn_category
  end
  # == body
  #########
  get :search, rabl: "v3/views/protected_areas" do
    collection = ProtectedArea.search(declared(params, include_missing: false))

    @with_geometry = params[:with_geometry]
    @protected_areas = paginate(collection)
  end

  # == annotations
  ################
  desc "Get ACP countries protected areas."
  params { optional :with_geometry, default: false, type: Boolean }
  # == body
  #########
  get :biopama, rabl: "v3/views/protected_areas" do
    # NOTE: unlike the other listings this endpoint is not paginated.
    collection = ProtectedArea.biopama.with_pame_evaluations
    collection = collection.without_geometry unless params[:with_geometry]

    @with_geometry = params[:with_geometry]
    @protected_areas = collection
  end

  # == annotations
  ################
  desc "Get a protected area via its wdpa_id."
  params { optional :with_geometry, default: true, type: Boolean }
  # == body
  #########
  get ":wdpa_id", rabl: "v3/views/protected_area" do
    @with_geometry = params[:with_geometry]
    @protected_area = ProtectedArea.find_by_wdpa_id(
      params[:wdpa_id]
    ) or error!(:not_found, 404)
  end
end
|
shamanland/xdroid
|
lib-adapter/src/main/java/xdroid/adapter/ViewBinder.java
|
<gh_stars>10-100
package xdroid.adapter;
import android.view.View;
/**
* @author <NAME> (<EMAIL>)
*/
public interface ViewBinder<D, V extends View> {
    /**
     * Implementation should prepare passed <b>view</b> for future call {@link #onNewData}.
     *
     * @param position index of data item
     * @param view newly created instance
     */
    void onNewView(int position, V view);

    /**
     * Binds the data item to a view previously prepared by {@link #onNewView}.
     * Implementations should derive all visible state from <b>data</b> so that
     * recycled views show nothing stale.
     *
     * @param position index of data item
     * @param view view to populate
     * @param data data item to display
     */
    void onNewData(int position, V view, D data);
}
|
allenbyerly/jellyfish-api
|
spec/requests/product_types_spec.rb
|
<reponame>allenbyerly/jellyfish-api
require 'rails_helper'
# Request spec for the product-types index endpoint.
RSpec.describe 'Product Types API' do
  describe 'GET index' do
    it 'returns a collection of all product types' do
      sign_in_as create(:staff, :admin)
      2.times { create(:product_type) }

      get '/api/v1/product_types'

      expect(json.length).to eq 2
    end
  end
end
|
robertkowalski/hops
|
packages/spec/helpers.js
|
exports.handleConsoleOutput = (msg) => {
const type = msg.type();
const text = msg.text();
if (type === 'error') {
throw new Error(`${type} in browser console: ${text}`);
}
};
|
peter-clemenko/cesium
|
ThirdParty/dojo-release-1.8.3-src/dojox/calendar/nls/pl/buttons.js
|
// Polish (pl) translations for the dojox/calendar toolbar button labels.
// Keys must mirror the root buttons.js bundle; values are runtime UI strings.
define( {
	previousButton: "◄",
	nextButton: "►",
	todayButton: "Dzisiaj",
	dayButton: "Dzień",
	weekButton: "Tydzień",
	fourDaysButton: "4 dni",
	monthButton: "Miesiąc"
}
);
|
tlemoult/spectroDb
|
scheduler/in-request.py
|
import datetime,time
from datetime import datetime
import sys,os
import urllib.request, urllib.parse, urllib.error,glob
import libsdb.dbSpectro as dbSpectro
import libsdb.cds as cds #mes modules
# CLI script: insert an observation request into the spectro database.
# Usage: in-request.py <project> <cds_object_name> <priority> <exposure_seconds>
# Resolves the object against the CDS/Simbad service; new objects also get
# their photometric/spectral data filled in.
print("Ajout demande observation dans la base")
configFilePath="../config/config.json"
db=dbSpectro.init_connection(configFilePath)
#dbSpectro.listObs(db)
#BasePath=sys.path[0]

# Exactly 4 user arguments are required (argv[0] is the script name).
if len(sys.argv)<5:
    print("nombre d'argument incorrect")
    print("""utiliser 4 arguments: Projet 'nom_cds' priorite exposure""")
    exit(1)

project=sys.argv[1]
objname=sys.argv[2]
priority=int(sys.argv[3])
exposure=int(sys.argv[4])

# Check whether the object is already known in the database (objId == 0 → new).
print("verifie si object",objname,"est connus")
objId=dbSpectro.getObjId_fromObjName(db,objname)
if objId==0:
    print("nouvel objet")
else:
    print("object connus")

# Resolve coordinates via the CDS/Simbad lookup; abort if unknown there.
print('interroge le CDS obj="'+objname+'" ', end=' ')
cdsInfo=cds.getsimbadMesurement(objname)
if 'alpha' in list(cdsInfo.keys()):  # object known to the CDS?
    print(" OK")
    ra=cdsInfo['alpha']
    dec=cdsInfo['delta']
else:
    print(" Inconnus du CDS... On ne le prend pas dans la base")
    exit()

print("Insertion objet ",'"project="'+project+'" objname="'+objname+'" ra="'+ra+'" dec="'+dec+'" priority=',priority,"exposure time=",exposure)
(objId,isNewObject)=dbSpectro.insert_request_observation_with_name(db,project,objname,ra,dec,priority,exposure)  # insert the observation into the database
if isNewObject:
    print("Object "+objname+" inconnus, on update les donnees avec mesures photometrique, type spectral, etc...")
    # complete the record with the HD and Bayer identifiers
    cdsname=cds.getHD_and_BayerIdentifier(objname)
    cdsInfo['bayerName']=cdsname['bayerName']
    cdsInfo['noHD']=cdsname['HDno']
    # update measurements (photometry, spectral type, ...)
    dbSpectro.update_Obj_info(db,cdsInfo,objId)  # fill in the object's details
else:
    print("Object "+objname+" connus")

db.close()
|
Advencher/web_GIS
|
public/js/page2Phyto/dataGp/dataForInsertAndDelete.js
|
// Map a deleted data-grid row (grid-side field names) back to the
// server-side field names so the deletion can be undone by re-inserting it.
function dataForCancelDeleteGroup(row) {
    return {
        id_phyto: row.IDphyto,
        id_group: row.IDGroup,
        total_species_in_group: row.TotalSpecies,
        total_percent: row.TotalPercent,
        biomass_percent: row.BiomassPercent,
        number: row.Number,
        biomass: row.Biomass
    };
}
// Server-side payload for a brand-new (all-zero) group row in a phyto record.
// Numeric fields are string '0' to match the server's expected format.
function dataForInsertGroup(id_phyto) {
    return {
        id_phyto: id_phyto,
        id_group: '0',
        total_species_in_group: '0',
        total_percent: '0',
        biomass_percent: '0',
        number: '0',
        biomass: '0'
    };
}
// Grid-side (data-grid field names) representation of a freshly inserted
// group row; mirrors dataForInsertGroup but with the grid's column keys and
// a placeholder group name ("group not determined", in Russian for the UI).
function dataForInsertDGGroup(id_phyto, id_group_in_phyto) {
    return {
        ID: id_group_in_phyto,
        IDphyto: id_phyto,
        IDGroup: '0',
        TotalSpecies: '0',
        TotalPercent: '0',
        BiomassPercent: '0',
        Number: '0',
        Biomass: '0',
        GroupName: 'Группа не определена'
    };
}
// Server-side delete payload: only the group-in-phyto id is needed.
function dataForDeleteGroup(data) {
    return { id_group_in_phyto: data.ID };
}
|
wayfinder/Wayfinder-Server
|
Server/Shared/include/ExternalSearchConsts.h
|
/*
Copyright (c) 1999 - 2010, Vodafone Group Services Ltd
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the Vodafone Group Services Ltd nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef EXTERNAL_SEARCH_CONSTS_H
#define EXTERNAL_SEARCH_CONSTS_H
#include "config.h"
/**
* Constants common to Servers and ExtSearchModule
*/
/**
 * Constants common to Servers and ExtSearchModule.
 *
 * The search_fields_t values identify input fields in an external-search
 * request form; the static uint32 members are external-service ids shared
 * with ExtService::service_t in ExtServices.h (keep the two in sync).
 */
class ExternalSearchConsts {
public:
   /// Types of SearchFields.
   enum search_fields_t {
      /// A country drop down
      country = 1,
      /// Name or phone number
      name_or_phone = 2,
      /// Address or city
      address_or_city = 3,
      /// First name
      first_name = 4,
      /// Last name
      last_name = 5,
      /// Telephone number
      phone_number = 6,
      /// Name
      name = 7,
      /// Zip code
      zip_code = 8,
      /// City
      city = 9,
      /// Address
      address = 10,
      /// Postal area
      postal_area = 11,
      /// Company
      company = 12,
      /// Search word
      search_word = 13,
      /// Company or search word
      company_or_search_word = 14,
      /// City or area
      city_or_area = 15,
      /// Category name
      category = 16,
      /// Name of the country
      country_name = 17,
      /// Top region center in wgs84 as "lat,lon" format.
      top_region_center = 18,
      /// Top region span in wgs84 as "lat,lon" format.
      top_region_span = 19,
      /// The category id
      category_id = 20
   };

   // Some shared ExtService::service_t from ExtServices.h.
   static const uint32 not_external = 0;
   static const uint32 google_local_search = 1;
   static const uint32 qype = 2;
   static const uint32 adserver = 3;
};
#endif
|
kumarlakshya24/INFO6205-Algorithms
|
Code Lab/132Pattern/src/edu/northeastern/lakshya/Pattern.java
|
package edu.northeastern.lakshya;
class Pattern {
    /**
     * Returns {@code true} if {@code nums} contains a "132 pattern": indices
     * i &lt; j &lt; k such that nums[i] &lt; nums[k] &lt; nums[j].
     *
     * Uses the standard right-to-left monotonic-stack scan, which runs in
     * O(n) time and O(n) space instead of the naive O(n^2) double loop:
     * the stack holds candidate "3" values, and {@code third} is the largest
     * value ever popped — i.e. the best candidate "2" seen so far. As soon
     * as a current element is strictly smaller than {@code third}, it is a
     * valid "1" and the pattern exists.
     *
     * @param nums input array; null or arrays shorter than 3 yield false
     * @return true if a 132 pattern exists, false otherwise
     */
    public boolean pattern132(int[] nums) {
        if (nums == null || nums.length < 3) {
            return false;
        }
        java.util.Deque<Integer> stack = new java.util.ArrayDeque<>();
        // long sentinel so Integer.MIN_VALUE elements are still handled.
        long third = Long.MIN_VALUE;
        for (int i = nums.length - 1; i >= 0; i--) {
            // nums[i] plays the "1": some j < k to the right already gave
            // nums[k] (= third) < nums[j] (still on the stack).
            if (nums[i] < third) {
                return true;
            }
            // Everything smaller than nums[i] becomes a better "2" candidate;
            // nums[i] becomes the new "3" candidate.
            while (!stack.isEmpty() && stack.peek() < nums[i]) {
                third = stack.pop();
            }
            stack.push(nums[i]);
        }
        return false;
    }
}
|
jjperezaguinaga/MyBit-Go.website
|
components/constants/statement.js
|
export const ecosystem = (translator) => {
return {
title: translator('common:mybit_home_diamond_platform_title'),
paragraph: translator('common:mybit_home_diamond_platform'),
}
}
// Static diamond-card entries with hard-coded English copy — unlike the
// translator-driven entries in this file. NOTE(review): presumably these
// predate i18n; confirm whether they should be translated too.
export const community = {
  title: 'Community',
  paragraph: `
A project powered by Ethereum, driven by the community.
`,
  icon: 'community',
  link: 'community',
}

export const products = {
  title: 'Products',
  paragraph: `
Smart investing paired with a decentralized IoT exchange.
`,
  icon: 'products',
  link: 'products',
}

// NOTE(review): no icon/link fields here — confirm consumers tolerate their
// absence.
export const involved = {
  title: 'Get involved',
  paragraph: `
MyBit offers opportunities for everyone to participate in the economy of tomorrow.
`
}
export const howItWorks = (translator, currentLanguage) => {
return {
title: translator('common:mybit_home_diamond_how_it_works_title'),
paragraph: translator('common:mybit_home_diamond_how_it_works'),
link: currentLanguage !== "en-US" && currentLanguage !== "en" ? `how-it-works?lng=${currentLanguage}` : 'how-it-works',
buttonClassName: 'Home__btn-start-here Home__btn-start-here--is-statement',
label: translator('common:mybit_home_diamond_start_here'),
}
}
export const investors = (translator) => {
return {
title: translator('common:mybit_home_diamond_investors_title'),
paragraph: translator('common:mybit_home_diamond_investors'),
icon: 'investors',
}
}
export const asset = (translator) => {
return{
title: translator('common:mybit_home_diamond_asset_managers_title'),
paragraph: translator('common:mybit_home_diamond_asset_managers'),
icon: 'asset-manager',
}
}
export const locking = (translator) => {
return{
title: translator('common:mybit_home_diamond_locking_title'),
paragraph: translator('common:mybit_home_diamond_locking'),
icon: 'locking',
}
}
export const token = (translator, currentLanguage) => {
return {
title: translator('common:mybit_home_diamond_token_title'),
paragraph: translator('common:mybit_home_diamond_token'),
link: currentLanguage !== "en-US" && currentLanguage !== "en" ? `access-layer?lng=${currentLanguage}` : 'access-layer',
}
}
export const staking = (translator) => {
return {
title: translator('common:mybit_home_diamond_staking_title'),
paragraph: translator('common:mybit_home_diamond_staking'),
icon: 'staking',
}
}
export const access = (translator) => {
return {
title: translator('common:mybit_home_diamond_access_title'),
paragraph: translator('common:mybit_home_diamond_access'),
icon: 'access',
}
}
|
Lenddo/java-lenddo
|
LenddoApi/src/com/lenddo/javaapi/models/ApplicationVerification.java
|
<gh_stars>1-10
package com.lenddo.javaapi.models;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.ArrayList;
import java.util.List;
/**
* Created by <NAME> on 12/8/15.
* Updated: 01/24/2017 <j.antonio> SDK-30
*/
@XmlRootElement
public class ApplicationVerification {
public String partner_script_id;
public Integer updated;
public Integer application_created;
public Integer created;
public List<String> duplicate_profiles = new ArrayList<String>();
public String facebook_photo_url;
public List<String> flags = new ArrayList<String>();
public Verifications verifications;
public String client_id;
public String application_id;
public String partner_id;
public Probes probes;
public Boolean verified_by_facebook;
public static class Verifications {
public Boolean name;
public Boolean university;
public Boolean employer;
public Boolean phone;
public Boolean birthday;
public Boolean email;
public Object top_employer;
}
public static class Probes {
public List<String> name = new ArrayList<String>();
public University university;
public Employer employer;
public String phone;
public List<Integer> birthday = new ArrayList<Integer>();
public String email;
public Object top_employer;
}
public static class Employer {
public String employer;
}
public static class University {
public String university;
}
}
|
phoenixeliot/design-system
|
src/MLIcon/StepForwardOutlined.js
|
import { createWrappedMLIcon } from './icon-wrappers'
import AntStepForwardOutlined from '@ant-design/icons/StepForwardOutlined'
// Ant Design's StepForwardOutlined icon wrapped with the project's ML icon
// wrapper so it picks up the design-system defaults.
const StepForwardOutlined = createWrappedMLIcon(AntStepForwardOutlined)

export default StepForwardOutlined
|
doyaGu/C0501Q_HWJL01
|
sdk/sdk/driver/ts_demuxer/ts_txt_conv/iso8859.c
|
/*
* Copyright (c) 2007 SMedia Technology Corp. All Rights Reserved.
*/
/** @file iso8859.c
* Ultility functions used to convert ISO/IEC 8859 series of character sets
* to Unicode. The mapping table from ISO 8859 character sets to Unicode can
* be found in http://unicode.org/Public/MAPPINGS/ISO8859/.
*
* @author <NAME>
* @version 0.1
*/
#include "iso8859.h"
//=============================================================================
// Private Function Declaration
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_1ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_2ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_3ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_4ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_5ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_6ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_7ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_8ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_9ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_10ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_11ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_13ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_14ToUTF16(
MMP_UINT8 ch);
MMP_INLINE uint16_t
_ISO8859_15ToUTF16(
MMP_UINT8 ch);
//=============================================================================
// Global Data Definition
//=============================================================================
static uint16_t _ISO8859_2[96] =
{
0x00A0, 0x0104, 0x02D8, 0x0141, 0x00A4, 0x013D, 0x015A, 0x00A7,
0x00A8, 0x0160, 0x015E, 0x0164, 0x0179, 0x00AD, 0x017D, 0x017B,
0x00B0, 0x0105, 0x02DB, 0x0142, 0x00B4, 0x013E, 0x015B, 0x02C7,
0x00B8, 0x0161, 0x015F, 0x0165, 0x017A, 0x02DD, 0x017E, 0x017C,
0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7,
0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E,
0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7,
0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF,
0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7,
0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F,
0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7,
0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9
};
static uint16_t _ISO8859_3_0xA0TO0xBF[32] =
{
0x00A0, 0x0126, 0x02D8, 0x00A3, 0x00A4, 0x00A5, 0x0124, 0x00A7,
0x00A8, 0x0130, 0x015E, 0x011E, 0x0134, 0x00AD, 0x00AE, 0x017B,
0x00B0, 0x0127, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x0125, 0x00B7,
0x00B8, 0x0131, 0x015F, 0x011F, 0x0135, 0x00BD, 0x00BE, 0x017C
};
static uint16_t _ISO8859_3_0xF0TO0xFF[16] =
{
0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x0121, 0x00F6, 0x00F7,
0x011D, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x016D, 0x015D, 0x02D9
};
static uint16_t _ISO8859_4[96] =
{
0x00A0, 0x0104, 0x0138, 0x0156, 0x00A4, 0x0128, 0x013B, 0x00A7,
0x00A8, 0x0160, 0x0112, 0x0122, 0x0166, 0x00AD, 0x017D, 0x00AF,
0x00B0, 0x0105, 0x02DB, 0x0157, 0x00B4, 0x0129, 0x013C, 0x02C7,
0x00B8, 0x0161, 0x0113, 0x0123, 0x0167, 0x014A, 0x017E, 0x014B,
0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E,
0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x012A,
0x0110, 0x0145, 0x014C, 0x0136, 0x00D4, 0x00D5, 0x00D6, 0x00D7,
0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x0168, 0x016A, 0x00DF,
0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F,
0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x012B,
0x0111, 0x0146, 0x014D, 0x0137, 0x00F4, 0x00F5, 0x00F6, 0x00F7,
0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x0169, 0x016B, 0x02D9
};
static uint16_t _ISO8859_7_0xA0TO0xBD[30] =
{
0x00A0, 0x2018, 0x2019, 0x00A3, 0x20AC, 0x20AF, 0x00A6, 0x00A7,
0x00A8, 0x00A9, 0x037A, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x2015,
0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x0385, 0x0386, 0x00B7,
0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD
};
static uint16_t _ISO8859_10[96] =
{
0x00A0, 0x0104, 0x0112, 0x0122, 0x012A, 0x0128, 0x0136, 0x00A7,
0x013B, 0x0110, 0x0160, 0x0166, 0x017D, 0x00AD, 0x016A, 0x014A,
0x00B0, 0x0105, 0x0113, 0x0123, 0x012B, 0x0129, 0x0137, 0x00B7,
0x013C, 0x0111, 0x0161, 0x0167, 0x017E, 0x2015, 0x016B, 0x014B,
0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E,
0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x00CF,
0x00D0, 0x0145, 0x014C, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x0168,
0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F,
0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x00EF,
0x00F0, 0x0146, 0x014D, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x0169,
0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x0138
};
static uint16_t _ISO8859_13[96] =
{
0x00A0, 0x201D, 0x00A2, 0x00A3, 0x00A4, 0x201E, 0x00A6, 0x00A7,
0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6,
0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x201C, 0x00B5, 0x00B6, 0x00B7,
0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6,
0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112,
0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B,
0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7,
0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF,
0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113,
0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C,
0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7,
0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x2019
};
static uint16_t _ISO8859_14_0xA0TO0xBF[32] =
{
0x00A0, 0x1E02, 0x1E03, 0x00A3, 0x010A, 0x010B, 0x1E0A, 0x00A7,
0x1E80, 0x00A9, 0x1E82, 0x1E0B, 0x1EF2, 0x00AD, 0x00AE, 0x0178,
0x1E1E, 0x1E1F, 0x0120, 0x0121, 0x1E40, 0x1E41, 0x00B6, 0x1E56,
0x1E81, 0x1E57, 0x1E83, 0x1E60, 0x1EF3, 0x1E84, 0x1E85, 0x1E61
};
static uint16_t _ISO8859_15_0xA4TO0xA8[5] =
{
0x20AC, 0x00A5, 0x0160, 0x00A7, 0x0161
};
static uint16_t _ISO8859_15_0xB4TO0xBE[11] =
{
0x017D, 0x00B5, 0x00B6, 0x00B7, 0x017E, 0x00B9, 0x00BA, 0x00BB,
0x0152, 0x0153, 0x0178
};
/* Converter dispatch table, indexed by (standard number - 1): entry 0 is
 * ISO 8859-1, entry 14 is ISO 8859-15. Slot 11 is MMP_NULL because
 * ISO 8859-12 was never published. */
static ISO8859ToUTF16 _ISO8859ToUTF16_TABLE[15] =
{
    _ISO8859_1ToUTF16,
    _ISO8859_2ToUTF16,
    _ISO8859_3ToUTF16,
    _ISO8859_4ToUTF16,
    _ISO8859_5ToUTF16,
    _ISO8859_6ToUTF16,
    _ISO8859_7ToUTF16,
    _ISO8859_8ToUTF16,
    _ISO8859_9ToUTF16,
    _ISO8859_10ToUTF16,
    _ISO8859_11ToUTF16,
    MMP_NULL,           /* ISO 8859-12 does not exist */
    _ISO8859_13ToUTF16,
    _ISO8859_14ToUTF16,
    _ISO8859_15ToUTF16,
};
//=============================================================================
// Public Function Definition
//=============================================================================
//=============================================================================
/**
* Get the ISO 8859 to Unicode converter.
*
* @param stdIndex An index value indicating which standard the converter
* supports.
* ex. 1 => ISO8559-1
* 2 => ISO8559-2
* 3 => ISO8559-3
* ...
* 15 => ISO8559-15
* @return The ISO 8859 to Unicode converter.
*/
//=============================================================================
ISO8859ToUTF16
ISO8859_GetConverter(
    MMP_UINT stdIndex)
{
    /* Only ISO 8859-1 .. -15 are supported; anything else has no converter.
     * (Slot 12 in the table is MMP_NULL: ISO 8859-12 was never published.) */
    if (stdIndex < 1 || 15 < stdIndex)
        return MMP_NULL;

    return _ISO8859ToUTF16_TABLE[stdIndex - 1];
}
//=============================================================================
// Private Function Definition
//=============================================================================
//=============================================================================
/**
* Convert an ISO 8859-1 character to Unicode.
*
* @param ch An ISO 8859-1 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_1ToUTF16(
    MMP_UINT8 ch)
{
    /* Latin-1 coincides with the first 256 Unicode code points, so the
     * conversion is the identity. */
    return (uint16_t)ch;
}
//=============================================================================
/**
* Convert an ISO 8859-2 character to Unicode.
*
* @param ch An ISO 8859-2 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_2ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; the high half is table-driven. */
    if (ch <= 0xA0)
        return ch;

    return _ISO8859_2[ch - 0xA0];
}
//=============================================================================
/**
* Convert an ISO 8859-3 character to Unicode.
*
* @param ch An ISO 8859-3 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_3ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; only the high half needs mapping. */
    if (0xA0 < ch)
    {
        /* Dispatch on the high nibble of the code point. */
        switch(ch & 0xF0)
        {
        case 0xA0:
        case 0xB0:
            /* 0xA1-0xBF: table-driven lookup. */
            return _ISO8859_3_0xA0TO0xBF[ch - 0xA0];
        case 0xC0:
            switch (ch)
            {
            case 0xC5: return 0x10A;    /* LATIN CAPITAL LETTER C WITH DOT ABOVE */
            case 0xC6: return 0x108;    /* LATIN CAPITAL LETTER C WITH CIRCUMFLEX */
            }
            break;
        case 0xD0:
            switch (ch)
            {
            case 0xD5: return 0x120;    /* LATIN CAPITAL LETTER G WITH DOT ABOVE */
            case 0xD8: return 0x11C;    /* LATIN CAPITAL LETTER G WITH CIRCUMFLEX */
            case 0xDD: return 0x16C;    /* LATIN CAPITAL LETTER U WITH BREVE */
            case 0xDE: return 0x15C;    /* LATIN CAPITAL LETTER S WITH CIRCUMFLEX */
            }
            break;
        case 0xE0:
            switch (ch)
            {
            case 0xE5: return 0x10B;    /* LATIN SMALL LETTER C WITH DOT ABOVE */
            case 0xE6: return 0x109;    /* LATIN SMALL LETTER C WITH CIRCUMFLEX */
            }
            break;
        case 0xF0:
            return _ISO8859_3_0xF0TO0xFF[ch - 0xF0];
        }
    }
    /* Everything not special-cased above maps to itself. */
    return ch;
}
//=============================================================================
/**
* Convert an ISO 8859-4 character to Unicode.
*
* @param ch An ISO 8859-4 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_4ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; the high half is table-driven. */
    if (ch <= 0xA0)
        return ch;

    return _ISO8859_4[ch - 0xA0];
}
//=============================================================================
/**
* Convert an ISO 8859-5 character to Unicode.
*
* @param ch An ISO 8859-5 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_5ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode. */
    if (ch <= 0xA0)
        return ch;

    /* Three code points break the uniform offset rule. */
    if (ch == 0xAD)
        return 0x00AD;  /* SOFT HYPHEN */
    if (ch == 0xF0)
        return 0x2116;  /* NUMERO SIGN */
    if (ch == 0xFD)
        return 0x00A7;  /* SECTION SIGN */

    /* The rest of the high half maps into the Cyrillic block. */
    return ch + 0x360;
}
//=============================================================================
/**
* Convert an ISO 8859-6 character to Unicode.
*
* @param ch An ISO 8859-6 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_6ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0, 0xA4 (currency sign) and 0xAD (soft hyphen) keep their
     * Latin-1 values; the rest of the high half shifts into the Arabic
     * block. */
    if (ch <= 0xA0 || ch == 0xA4 || ch == 0xAD)
        return ch;

    return ch + 0x560;
}
//=============================================================================
/**
* Convert an ISO 8859-7 character to Unicode.
*
* @param ch An ISO 8859-7 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_7ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode. */
    if (ch <= 0xA0)
        return ch;

    /* 0xA1-0xBD is irregular and table-driven; 0xBE upward is a uniform
     * shift into the Greek block. */
    if (ch <= 0xBD)
        return _ISO8859_7_0xA0TO0xBD[ch - 0xA0];

    return ch + 0x2D0;
}
//=============================================================================
/**
* Convert an ISO 8859-8 character to Unicode.
*
* @param ch An ISO 8859-8 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_8ToUTF16(
    MMP_UINT8 ch)
{
    /* Below 0xAA everything is identical to Unicode. */
    if (ch < 0xAA)
        return ch;

    if (ch <= 0xDF)         /* ch = 0xAA ~ 0xDF */
    {
        switch (ch)
        {
        case 0xAA: return 0x00D7;   /* MULTIPLICATION SIGN */
        case 0xBA: return 0x00F7;   /* DIVISION SIGN */
        case 0xDF: return 0x2017;   /* DOUBLE LOW LINE */
        }
        return ch;
    }

    if (ch <= 0xFA)         /* ch = 0xE0 ~ 0xFA: Hebrew letters */
        return ch + 0x4F0;

    return ch + 0x1F11;     /* ch = 0xFB ~ 0xFF */
}
//=============================================================================
/**
* Convert an ISO 8859-9 character to Unicode.
*
* @param ch An ISO 8859-9 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_9ToUTF16(
    MMP_UINT8 ch)
{
    /* ISO 8859-9 equals Latin-1 except for six Turkish letters. */
    switch (ch)
    {
    case 0xD0: return 0x11E;    /* G with breve */
    case 0xDD: return 0x130;    /* I with dot above */
    case 0xDE: return 0x15E;    /* S with cedilla */
    case 0xF0: return 0x11F;    /* g with breve */
    case 0xFD: return 0x131;    /* dotless i */
    case 0xFE: return 0x15F;    /* s with cedilla */
    default:   return ch;
    }
}
//=============================================================================
/**
* Convert an ISO 8859-10 character to Unicode.
*
* @param ch An ISO 8859-10 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_10ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; the high half is table-driven. */
    if (ch <= 0xA0)
        return ch;

    return _ISO8859_10[ch - 0xA0];
}
//=============================================================================
/**
* Convert an ISO 8859-11 character to Unicode.
*
* @param ch An ISO 8859-11 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_11ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; the high half is a uniform shift
     * into the Thai block. */
    if (ch <= 0xA0)
        return ch;

    return ch + 0xD60;
}
//=============================================================================
/**
* Convert an ISO 8859-13 character to Unicode.
*
* @param ch An ISO 8859-13 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_13ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; the high half is table-driven. */
    if (ch <= 0xA0)
        return ch;

    return _ISO8859_13[ch - 0xA0];
}
//=============================================================================
/**
* Convert an ISO 8859-14 character to Unicode.
*
* @param ch An ISO 8859-14 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_14ToUTF16(
    MMP_UINT8 ch)
{
    /* 0x00-0xA0 are identical to Unicode; only the high half needs mapping. */
    if (0xA0 < ch)
    {
        /* Dispatch on the high nibble of the code point. */
        switch (ch & 0xF0)
        {
        case 0xA0:
        case 0xB0:
            /* 0xA1-0xBF: table-driven lookup. */
            return _ISO8859_14_0xA0TO0xBF[ch - 0xA0];
        case 0xC0:
        case 0xE0:
            /* These rows coincide with Latin-1. */
            return ch;
        case 0xD0:
            switch (ch)
            {
            case 0xD0: return 0x0174;   /* LATIN CAPITAL LETTER W WITH CIRCUMFLEX */
            case 0xD7: return 0x1E6A;   /* LATIN CAPITAL LETTER T WITH DOT ABOVE */
            case 0xDE: return 0x0176;   /* LATIN CAPITAL LETTER Y WITH CIRCUMFLEX */
            }
            return ch;
        case 0xF0:
            switch (ch)
            {
            case 0xF0: return 0x0175;   /* LATIN SMALL LETTER W WITH CIRCUMFLEX */
            case 0xF7: return 0x1E6B;   /* LATIN SMALL LETTER T WITH DOT ABOVE */
            case 0xFE: return 0x0177;   /* LATIN SMALL LETTER Y WITH CIRCUMFLEX */
            }
            return ch;
        }
    }
    return ch;
}
//=============================================================================
/**
* Convert an ISO 8859-15 character to Unicode.
*
* @param ch An ISO 8859-15 character.
* @return The corresponding Unicode character of the input character.
*/
//=============================================================================
MMP_INLINE uint16_t
_ISO8859_15ToUTF16(
    MMP_UINT8 ch)
{
    /* ISO 8859-15 equals Latin-1 outside 0xA4-0xBE. */
    if (ch < 0xA4 || 0xBE < ch)
        return ch;

    if (ch <= 0xA8)                         /* ch = 0xA4 ~ 0xA8 */
        return _ISO8859_15_0xA4TO0xA8[ch - 0xA4];

    if (ch <= 0xB3)                         /* ch = 0xA9 ~ 0xB3: unchanged */
        return ch;

    return _ISO8859_15_0xB4TO0xBE[ch - 0xB4];   /* ch = 0xB4 ~ 0xBE */
}
|
SebastianTirado/Cpp-Learning-Archive
|
DiscoveringModernCpp/c++11/type_traits_overloading.cpp
|
<filename>DiscoveringModernCpp/c++11/type_traits_overloading.cpp
#include <utility>
#include <iostream>
#include <type_traits>
#include <cmath>
#include <typeinfo>
namespace dmc {

#if 0
// Alternative catch-all as a template overload instead of a C-style ellipsis.
template <typename T>
constexpr std::false_type is_a_matrix(const T&) { return {}; }
#endif

// Fallback overload: matches anything without a more specific is_a_matrix
// overload, making std::false_type the default answer.
constexpr std::false_type is_a_matrix(...) { return {}; }

// Trait computed by overload resolution: the return type of is_a_matrix on a
// T value is std::true_type or std::false_type.
template <typename T>
using is_matrix= decltype(is_a_matrix(std::declval<T>()));

#if 0 // requires C++14
template <typename T>
constexpr bool is_matrix_v= is_matrix<T>::value;
#endif

// Two concrete matrix types (bodies irrelevant for the trait demo).
template <typename Value> struct dense_matrix {};
template <typename Value> struct sparse_matrix {};

// Opt both matrix types into the trait via dedicated overloads.
template <typename Value>
constexpr std::true_type is_a_matrix(const dense_matrix<Value>&)
{ return {}; }

template <typename Value>
constexpr std::true_type is_a_matrix(const sparse_matrix<Value>&)
{ return {}; }

struct negate_functor
{
    template <typename Value>
    Value operator()(const Value& x) const { return -x; }
};

struct abs_functor
{
    // use auto return type in C++14
    template <typename Value>
    Value operator()(const Value& x) const
    {
        // Unqualified call with std::abs in scope enables ADL for user types.
        using std::abs;
        return abs(x);
    }
};

// Lazy element-wise view; the static_assert rejects non-matrix arguments at
// compile time with a readable message.
template <typename Matrix, typename Functor>
struct map_view
{
    static_assert(is_matrix<Matrix>::value, "First argument must be a matrix.");
    // ...
};

// Views over matrices are matrices too.
template <typename Matrix, typename Functor>
constexpr std::true_type is_a_matrix(const map_view<Matrix, Functor>&)
{ return {}; }

template <typename Matrix>
struct negate_view
  : map_view<Matrix, negate_functor>
{};

template <typename Matrix>
struct abs_view
  : map_view<Matrix, abs_functor>
{};

// Test helper: prints and statically checks whether T is a matrix.
template <typename T, bool Expected= true>
void check_matrix()
{
    std::cout << typeid(T).name() << " is " << (is_matrix<T>{} ? "" : "not ") << "a matrix.\n";
    static_assert(is_matrix<T>{} == Expected, "Matrix property wrong.");
}

}
int main()
{
    using namespace dmc;
    using namespace std;

    // Plain int is not a matrix; concrete matrices and the views wrapping
    // them all satisfy the trait.
    check_matrix<int, false>();
    check_matrix<dense_matrix<int>>();
    check_matrix<sparse_matrix<int>>();
    check_matrix<negate_view<dense_matrix<int>>>();
    check_matrix<abs_view<sparse_matrix<int>>>();
}
|
lambdaxymox/barrelfish
|
usr/eclipseclp/Alog/src/alog_evntdfs.h
|
<reponame>lambdaxymox/barrelfish
/****************************************************************************
These are the reserved event types for logfile header records. Unspecified
fields are either 0 or (in the case of string data) null.
e_type proc_id task_id int_data cycle timestamp string_data
-1 creator and date
-2 # events
-3 # procs
-4 # tasks
-5 # event types
-6 start_time
-7 end_time
-8 # timer_cycles
-9 event_type description
-10 event_type printf string
*************************************************************************/
/* Reserved (negative) event types for logfile header records; the table in
 * the block comment above documents the payload of each record. */
#define SYSTEM_TYPE -1      /* creator and date */
#define NUM_EVENTS -2       /* total number of events */
#define NUM_PROCS -3        /* number of processes */
#define NUM_TASKS -4        /* number of tasks */
#define NUM_EVTYPES -5      /* number of event types */
#define START_TIME -6       /* start_time */
#define END_TIME -7         /* end_time */
#define NUM_CYCLES -8       /* number of timer cycles */
#define EVTYPE_DESC -9      /* event_type description */
#define EPRINT_FORMAT -10   /* event_type printf string */

/* NOTE(review): clock-sync and paired-event record types — not covered by
 * the header table above; confirm semantics against the ALOG writer. */
#define ALOG_EVENT_SYNC -101
#define ALOG_EVENT_PAIR_A1 -102
#define ALOG_EVENT_PAIR_A2 -103
#define ALOG_EVENT_PAIR_B1 -104
|
daystram/ratify
|
ratify-be/utils/token.go
|
<gh_stars>1-10
package utils
import (
"crypto/rand"
"github.com/dgrijalva/jwt-go"
"github.com/daystram/ratify/ratify-be/config"
)
// Alphabet for generated strings: 62 alphanumeric characters.
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

// GenerateRandomString returns a cryptographically random string of the given
// length drawn uniformly from letterBytes.
//
// Rejection sampling replaces the previous plain modulo: len(letterBytes) == 62
// does not divide 256, so `b % 62` over-selected the first 256%62 == 8
// characters — a bias that matters for security tokens.
func GenerateRandomString(length int) string {
	result := make([]byte, 0, length)
	buf := make([]byte, length)
	// Largest multiple of 62 representable in a byte (248); bytes at or above
	// it are rejected so the remaining range maps uniformly onto the alphabet.
	limit := byte(256 - (256 % len(letterBytes)))
	for len(result) < length {
		// crypto/rand.Read is documented to fill the buffer; the error path is
		// effectively unreachable, matching the original's ignored error.
		_, _ = rand.Read(buf)
		for _, b := range buf {
			if b >= limit {
				continue // reject to avoid modulo bias
			}
			result = append(result, letterBytes[b%byte(len(letterBytes))])
			if len(result) == length {
				break
			}
		}
	}
	return string(result)
}
// GenerateJWT signs the given claims with HMAC-SHA256 using the secret from
// application config and returns the compact JWT string (or the signing
// error).
func GenerateJWT(claims jwt.Claims) (string, error) {
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
	return token.SignedString([]byte(config.AppConfig.JWTSecret))
}
|
Hevelian/hevelian-odata-elasticsearch
|
olastic-core/src/test/java/com/hevelian/olastic/core/api/edm/provider/MultyElasticIndexCsdlEdmProviderTest.java
|
package com.hevelian.olastic.core.api.edm.provider;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.olingo.commons.api.edm.FullQualifiedName;
import org.apache.olingo.commons.api.edm.provider.CsdlComplexType;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainer;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainerInfo;
import org.apache.olingo.commons.api.edm.provider.CsdlEntitySet;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityType;
import org.apache.olingo.commons.api.edm.provider.CsdlNavigationProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlNavigationPropertyBinding;
import org.apache.olingo.commons.api.edm.provider.CsdlProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlPropertyRef;
import org.apache.olingo.commons.api.edm.provider.CsdlSchema;
import org.apache.olingo.commons.api.ex.ODataException;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.ImmutableOpenMap.Builder;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.hevelian.olastic.core.common.NestedTypeMapper;
import com.hevelian.olastic.core.elastic.ElasticConstants;
import com.hevelian.olastic.core.elastic.mappings.DefaultElasticToCsdlMapper;
import com.hevelian.olastic.core.elastic.mappings.ElasticToCsdlMapper;
import com.hevelian.olastic.core.elastic.mappings.MappingMetaDataProvider;
import com.hevelian.olastic.core.utils.MetaDataUtils;
/**
* JUnit tests for {@link MultyElasticIndexCsdlEdmProvider} class.
*
* @author rdidyk
*/
@RunWith(MockitoJUnitRunner.class)
public class MultyElasticIndexCsdlEdmProviderTest {
// Fixture constants: two Elasticsearch indices (authors, writers) and two
// document types used throughout the tests.
private static final String AUTHOR_TYPE = "author";
private static final String AUTHORS_INDEX = "authors";
private static final String WRITERS_INDEX = "writers";
private static final String BOOK_TYPE = "book";

// Fully qualified names derived from the indices via addNamespace().
private static final FullQualifiedName AUTHORS_FQN = new FullQualifiedName(
        addNamespace(AUTHORS_INDEX));
private static final FullQualifiedName AUTHOR_FQN = new FullQualifiedName(
        addNamespace(AUTHORS_INDEX), AUTHOR_TYPE);
private static final FullQualifiedName WRITERS_FQN = new FullQualifiedName(
        addNamespace(WRITERS_INDEX));
private static final FullQualifiedName BOOK_FQN = new FullQualifiedName(
        addNamespace(AUTHORS_INDEX), BOOK_TYPE);

private static final String AUTHORS_FQN_STRING = AUTHORS_FQN.getFullQualifiedNameAsString();
private static final String WRITERS_FQN_STRING = WRITERS_FQN.getFullQualifiedNameAsString();

// Index set shared by all tests; populated once in setUpBeforeClass().
private static Set<String> indices;

@Mock
private MappingMetaDataProvider metaDataProvider;
@Mock
private NestedTypeMapper nestedTypeMapper;
@BeforeClass
public static void setUpBeforeClass() {
    // Shared index set used by every provider instance under test.
    indices = new HashSet<String>(Arrays.asList(AUTHORS_INDEX, WRITERS_INDEX));
}
/**
 * Builds a namespace string by appending the given path segments to the
 * default namespace.
 *
 * Uses StringBuilder instead of the original StringBuffer: the builder is a
 * local variable, so the synchronization StringBuffer pays for is wasted.
 */
private static String addNamespace(String... path) {
    StringBuilder result = new StringBuilder(DefaultElasticToCsdlMapper.DEFAULT_NAMESPACE);
    for (int i = 0; i < path.length; i++) {
        // NOTE(review): a separator precedes every segment except the last of
        // a multi-segment path, so ("a","b") yields NAMESPACE + ".a" + "b".
        // All callers in this file pass a single segment; confirm the
        // multi-segment behavior is intentional before relying on it.
        if (i == 0 || i != path.length - 1) {
            result.append(MetaDataUtils.NAMESPACE_SEPARATOR);
        }
        result.append(path[i]);
    }
    return result.toString();
}
@Before
public void setUp() {
    // Stub the nested-type mapper so tests don't depend on real mapping
    // logic: answer with a FullQualifiedName built directly from the second
    // (namespace) and third (name) arguments of getComplexType().
    when(nestedTypeMapper.getComplexType(anyString(), anyString(), anyString()))
            .thenAnswer(new Answer<FullQualifiedName>() {
                @Override
                public FullQualifiedName answer(InvocationOnMock invocation) throws Throwable {
                    Object[] args = invocation.getArguments();
                    return new FullQualifiedName((String) args[1], (String) args[2]);
                }
            });
}
// Minimal constructor: CSDL mapper and nested-type mapper fall back to defaults.
@Test
public void constructor_MappingMetadataProvider_Setted() {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    assertEquals(metaDataProvider, edmProvider.getMappingMetaDataProvider());
    assertNotNull(edmProvider.getCsdlMapper());
    assertNotNull(edmProvider.getNestedTypeMapper());
}

// Custom CSDL mapper is retained; nested-type mapper still defaults.
@Test
public void constructor_MappingMetadataProviderAndCsdlMapper_Setted() {
    ElasticToCsdlMapper csdlMapper = mock(ElasticToCsdlMapper.class);
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices, csdlMapper);
    assertEquals(metaDataProvider, edmProvider.getMappingMetaDataProvider());
    assertEquals(csdlMapper, edmProvider.getCsdlMapper());
    assertNotNull(edmProvider.getNestedTypeMapper());
}

// Fully explicit constructor: both custom mappers are retained.
@Test
public void constructor_MappingMetadataProviderAndCsdlMapperAndNestedMappingStrategy_Setted() {
    ElasticToCsdlMapper csdlMapper = mock(ElasticToCsdlMapper.class);
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices, csdlMapper, nestedTypeMapper);
    assertEquals(metaDataProvider, edmProvider.getMappingMetaDataProvider());
    assertEquals(csdlMapper, edmProvider.getCsdlMapper());
    assertNotNull(edmProvider.getNestedTypeMapper());
    assertEquals(nestedTypeMapper, edmProvider.getNestedTypeMapper());
}

// One schema namespace per configured index.
@Test
public void getSchemaNamespaces_SetOfIndices_ShemaNamespacesRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    List<String> schemaNamespaces = edmProvider.getSchemaNamespaces();
    assertEquals(2, schemaNamespaces.size());
    assertTrue(schemaNamespaces.contains(AUTHORS_FQN_STRING));
    assertTrue(schemaNamespaces.contains(WRITERS_FQN_STRING));
}

// No indices configured -> no namespaces.
@Test
public void getSchemaNamespaces_EmptyIndices_EmptyShemaNamespacesRetrieved()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, new HashSet<String>());
    List<String> schemaNamespaces = edmProvider.getSchemaNamespaces();
    assertTrue(schemaNamespaces.isEmpty());
}

// Known namespaces map back to their index; unknown namespaces yield null.
@Test
public void namespaceToIndex_DifferentNamespaces_ExpectedValuesRetrieved()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    assertEquals(AUTHORS_INDEX, edmProvider.namespaceToIndex(AUTHORS_FQN_STRING));
    assertEquals(WRITERS_INDEX, edmProvider.namespaceToIndex(WRITERS_FQN_STRING));
    assertNull(edmProvider.namespaceToIndex("Olingo.Test.authors"));
}
@Test
public void getProperties_TypeNameAndCorrectMetaData_ListOfCsdlPropertiesRetrieved()
throws IOException, ODataException {
MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
metaDataProvider, indices, nestedTypeMapper);
List<CsdlProperty> csdlProperties = edmProvider.getProperties(AUTHORS_INDEX, AUTHOR_TYPE,
getStubProperties());
assertEquals(2, csdlProperties.size());
for (CsdlProperty property : csdlProperties) {
assertTrue(property instanceof ElasticCsdlProperty);
assertEquals(AUTHORS_INDEX, ((ElasticCsdlProperty) property).getESIndex());
assertEquals(AUTHOR_TYPE, ((ElasticCsdlProperty) property).getESType());
assertEquals(property.getName(), ((ElasticCsdlProperty) property).getESField());
assertNotNull(property.getTypeAsFQNObject());
}
}
@Test(expected = ODataException.class)
// getProperties(): an IOException from the mapping source is wrapped as ODataException.
public void getProperties_MetaDataThrowsIOException_ODataExceptionRetrieved()
        throws IOException, ODataException {
    MappingMetaData mappingMetaData = mock(MappingMetaData.class);
    when(mappingMetaData.sourceAsMap()).thenThrow(new IOException("test cause"));
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    edmProvider.getProperties(AUTHORS_INDEX, AUTHOR_TYPE, mappingMetaData);
}
@Test
// getNavigationProperties(): no "_parent" field mappings -> no navigation properties.
public void getNavigationProperties_EntityTypeNameAndEmptyMappings_EmptyListRetrieved() {
    Builder<String, FieldMappingMetaData> builder = ImmutableOpenMap.builder();
    ImmutableOpenMap<String, FieldMappingMetaData> map = builder.build();
    when(metaDataProvider.getMappingsForField(AUTHORS_INDEX, ElasticConstants.PARENT_PROPERTY))
            .thenReturn(map);
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    assertTrue(edmProvider.getNavigationProperties(AUTHORS_INDEX, AUTHOR_TYPE).isEmpty());
}
@Test
// getNavigationProperties(): a "_parent" mapping whose source map is empty is ignored.
public void getNavigationProperties_EntityTypeNameAndMappingsEmptyValueMap_EmptyListRetrieved() {
    Builder<String, FieldMappingMetaData> builder = ImmutableOpenMap.builder();
    FieldMappingMetaData mappingMetaData = mock(FieldMappingMetaData.class);
    when(mappingMetaData.sourceAsMap()).thenReturn(new HashMap<String, Object>());
    builder.put(BOOK_TYPE, mappingMetaData);
    ImmutableOpenMap<String, FieldMappingMetaData> map = builder.build();
    when(metaDataProvider.getMappingsForField(AUTHORS_INDEX, ElasticConstants.PARENT_PROPERTY))
            .thenReturn(map);
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    assertTrue(edmProvider.getNavigationProperties(AUTHORS_INDEX, AUTHOR_TYPE).isEmpty());
}
@Test
// getNavigationProperties(): for the parent side (author) a collection-valued
// navigation to its children (books) is produced, partnered with the parent type.
public void getNavigationProperties_EntityTypeNameAndMappings_OneChildPropertyRetrieved() {
    doReturn(getParentChildMappings()).when(metaDataProvider).getMappingsForField(AUTHORS_INDEX,
            ElasticConstants.PARENT_PROPERTY);
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    List<ElasticCsdlNavigationProperty> navigationProperties = edmProvider
            .getNavigationProperties(AUTHORS_INDEX, AUTHOR_TYPE);
    assertEquals(1, navigationProperties.size());
    CsdlNavigationProperty navigationProperty = navigationProperties.get(0);
    assertEquals(BOOK_TYPE, navigationProperty.getName());
    assertTrue(navigationProperty.isCollection());
    assertEquals(AUTHOR_TYPE, navigationProperty.getPartner());
}
@Test
// getNavigationProperties(): for the child side (book) a single-valued
// navigation to its parent (author) is produced.
public void getNavigationProperties_EntityTypeNameAndMappings_OneParentPropertyRetrieved() {
    doReturn(getParentChildMappings()).when(metaDataProvider).getMappingsForField(AUTHORS_INDEX,
            ElasticConstants.PARENT_PROPERTY);
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    List<ElasticCsdlNavigationProperty> navigationProperties = edmProvider
            .getNavigationProperties(AUTHORS_INDEX, BOOK_TYPE);
    assertEquals(1, navigationProperties.size());
    CsdlNavigationProperty navigationProperty = navigationProperties.get(0);
    assertEquals(AUTHOR_TYPE, navigationProperty.getName());
    assertFalse(navigationProperty.isCollection());
    assertEquals(BOOK_TYPE, navigationProperty.getPartner());
}
@Test
// getEntityType(): unknown namespace (no matching index) -> null.
public void getEntityType_IndexDoesntExist_NullRetrived() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    assertNull(
            edmProvider.getEntityType(new FullQualifiedName("Test.IllegalNamespace.entity")));
}
@Test
// getEntityType(): the entity type whose ES type matches the FQN's name is returned.
public void getEntityType_IndexExistAndEntityTypeList_EntityTypeRetrived()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    ElasticCsdlEntityType type1 = mock(ElasticCsdlEntityType.class);
    when(type1.getESType()).thenReturn(BOOK_TYPE);
    ElasticCsdlEntityType type2 = mock(ElasticCsdlEntityType.class);
    when(type2.getESType()).thenReturn(AUTHOR_TYPE);
    doReturn(Arrays.asList(type1, type2)).when(edmProvider).getEntityTypes(AUTHORS_INDEX);
    assertEquals(type2, edmProvider.getEntityType(AUTHOR_FQN));
}
@Test
// getEntityType(): index resolves but has no entity types -> null.
public void getEntityType_IndexExistAndEmptyEntityTypeList_EntityTypeRetrived()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    doReturn(Collections.emptyList()).when(edmProvider).getEntityTypes(AUTHORS_INDEX);
    assertNull(edmProvider.getEntityType(AUTHOR_FQN));
}
@Test
// createEntityType(): produces an entity type with the mapped properties, an
// appended "_id" key property, and one child navigation (author -> books).
public void createEntityType_IndexAndType_EntityTypeRetrived()
        throws ODataException, IOException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    doReturn(getStubProperties()).when(metaDataProvider).getMappingForType(AUTHORS_INDEX,
            AUTHOR_TYPE);
    doReturn(getParentChildMappings()).when(metaDataProvider).getMappingsForField(AUTHORS_INDEX,
            ElasticConstants.PARENT_PROPERTY);
    ElasticCsdlEntityType entityType = edmProvider.createEntityType(AUTHORS_INDEX, AUTHOR_TYPE);
    assertTrue(entityType instanceof ElasticCsdlEntityType);
    assertEquals(AUTHORS_INDEX, ((ElasticCsdlEntityType) entityType).getESIndex());
    assertEquals(AUTHOR_TYPE, ((ElasticCsdlEntityType) entityType).getESType());
    // two mapped properties plus the synthetic _id property
    List<CsdlProperty> properties = entityType.getProperties();
    assertEquals(3, properties.size());
    CsdlProperty idProperty = properties.get(2);
    assertEquals(ElasticConstants.ID_FIELD_NAME, idProperty.getName());
    // _id is the single key property
    List<CsdlPropertyRef> keys = entityType.getKey();
    assertEquals(1, keys.size());
    CsdlPropertyRef idRef = keys.get(0);
    assertEquals(ElasticConstants.ID_FIELD_NAME, idRef.getName());
    List<CsdlNavigationProperty> navigationProperties = entityType.getNavigationProperties();
    assertEquals(1, navigationProperties.size());
    ElasticCsdlNavigationProperty bookProperty = (ElasticCsdlNavigationProperty) navigationProperties
            .get(0);
    assertEquals(BOOK_TYPE, bookProperty.getName());
    assertEquals(BOOK_TYPE, bookProperty.getESType());
    assertEquals(AUTHORS_INDEX, bookProperty.getESIndex());
    assertEquals(BOOK_FQN, bookProperty.getTypeFQN());
    assertEquals(AUTHOR_TYPE, bookProperty.getPartner());
}
@Test
// createEntityType(): even with no mapped properties, the synthetic "_id"
// property is added and registered as the key.
public void createEntityType_IndexAndTypeWithCustomIdProperty_EntityTypeRetrived()
        throws ODataException, IOException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    MappingMetaData metaData = mock(MappingMetaData.class);
    when(metaDataProvider.getMappingForType(AUTHORS_INDEX, AUTHOR_TYPE)).thenReturn(metaData);
    doReturn(new ArrayList<>()).when(edmProvider).getProperties(AUTHORS_INDEX, AUTHOR_TYPE,
            metaData);
    doReturn(new ArrayList<>()).when(edmProvider).getNavigationProperties(AUTHORS_INDEX,
            AUTHOR_TYPE);
    ElasticCsdlEntityType entityType = edmProvider.createEntityType(AUTHORS_INDEX, AUTHOR_TYPE);
    assertTrue(entityType instanceof ElasticCsdlEntityType);
    assertEquals(AUTHORS_INDEX, ((ElasticCsdlEntityType) entityType).getESIndex());
    assertEquals(AUTHOR_TYPE, ((ElasticCsdlEntityType) entityType).getESType());
    List<CsdlProperty> properties = entityType.getProperties();
    assertEquals(1, properties.size());
    CsdlProperty idProperty = properties.get(0);
    assertEquals(ElasticConstants.ID_FIELD_NAME, idProperty.getName());
    List<CsdlPropertyRef> keys = entityType.getKey();
    assertEquals(1, keys.size());
    CsdlPropertyRef idRef = keys.get(0);
    assertEquals(ElasticConstants.ID_FIELD_NAME, idRef.getName());
}
@Test(expected = ODataException.class)
// createEntityType(): missing type mapping (null) -> ODataException.
public void createEntityType_MappingsAreNull_ODataExceptionRetrieved()
        throws ODataException, IOException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    doReturn(null).when(metaDataProvider).getMappingForType(AUTHORS_INDEX, AUTHOR_TYPE);
    edmProvider.createEntityType(AUTHORS_INDEX, AUTHOR_TYPE);
}
@Test
// createEntitySet(): entity set is named after the ES type and carries a
// navigation-property binding for each parent/child relation.
public void createEntitySet_IndexAndType_EntitySetRetrieved() {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    doReturn(getParentChildMappings()).when(metaDataProvider).getMappingsForField(AUTHORS_INDEX,
            ElasticConstants.PARENT_PROPERTY);
    ElasticCsdlEntitySet entitySet = edmProvider.createEntitySet(AUTHORS_INDEX, AUTHOR_TYPE);
    assertEquals(AUTHORS_INDEX, entitySet.getESIndex());
    assertEquals(AUTHOR_TYPE, entitySet.getESType());
    assertEquals(AUTHOR_TYPE, entitySet.getName());
    List<CsdlNavigationPropertyBinding> propertyBindings = entitySet
            .getNavigationPropertyBindings();
    assertEquals(1, propertyBindings.size());
    CsdlNavigationPropertyBinding propertyBinding = propertyBindings.get(0);
    assertEquals(BOOK_TYPE, propertyBinding.getPath());
    assertEquals(BOOK_TYPE, propertyBinding.getTarget());
}
@Test
// getEntitySet(): with the provider's own container name, the lookup is
// delegated to the entity container.
public void getEntitySet_ContainerNameAndSetName_EntitySetRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    CsdlEntityContainer container = mock(CsdlEntityContainer.class);
    doReturn(container).when(edmProvider).getEntityContainer();
    when(container.getEntitySet(BOOK_TYPE))
            .thenAnswer(answer -> new ElasticCsdlEntitySet().setName(answer.getArgument(0)));
    ElasticCsdlEntitySet entitySet = edmProvider.getEntitySet(edmProvider.getContainerName(),
            BOOK_TYPE);
    assertEquals(BOOK_TYPE, entitySet.getName());
}
@Test(expected = ODataException.class)
// getEntitySet(): a container name other than the provider's own is rejected.
public void getEntitySet_OtherContainerNameAndSetName_ODataExceptionRetrieved()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    edmProvider.getEntitySet(new FullQualifiedName("Other.Container"), BOOK_TYPE);
}
@Test
// getEntityContainerInfo(): null name means "the default container".
public void getEntityContainerInfo_ContainerNameNull_EntityContainerRetieved() {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    CsdlEntityContainerInfo entityContainerInfo = edmProvider.getEntityContainerInfo(null);
    assertNotNull(entityContainerInfo);
    assertEquals(edmProvider.getContainerName(), entityContainerInfo.getContainerName());
}
@Test
// getEntityContainerInfo(): unknown container name -> null.
public void getEntityContainerInfo_ContainerName_NullRetieved() {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    assertNull(edmProvider.getEntityContainerInfo(new FullQualifiedName("Test.ContainerName")));
}
@Test
// getSchemas(): no indices -> no schemas.
public void getSchemas_EmptyNamespaces_EmptySchemaListRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, new HashSet<String>());
    assertTrue(edmProvider.getSchemas().isEmpty());
}
@Test
// getSchemas(): one schema per configured index; entity types, complex types
// and the container are stubbed so only the count is verified here.
public void getSchemas_Namespaces_SchemaListRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    NestedTypeMapper nestedTypeMapper = mock(NestedTypeMapper.class);
    doReturn(new ArrayList<CsdlEntityType>()).when(edmProvider).getEntityTypes(anyString());
    doReturn(new ArrayList<CsdlComplexType>()).when(nestedTypeMapper)
            .getComplexTypes(anyString());
    doReturn(nestedTypeMapper).when(edmProvider).getNestedTypeMapper();
    doReturn(mock(CsdlEntityContainer.class)).when(edmProvider)
            .getEntityContainerForSchema(anyString());
    List<CsdlSchema> schemas = edmProvider.getSchemas();
    assertEquals(2, schemas.size());
}
@Test
// getEntityTypes(): index with no type mappings -> empty list.
public void getEnityTypes_IndexWithEmptyMappings_EmptyListRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    Builder<String, MappingMetaData> metadataBuilder = ImmutableOpenMap.builder();
    when(metaDataProvider.getAllMappings(WRITERS_INDEX)).thenReturn(metadataBuilder.build());
    assertTrue(edmProvider.getEntityTypes(WRITERS_INDEX).isEmpty());
}
@Test
// getEntityTypes(): one entity type is created per mapped ES type;
// createEntityType() is stubbed to return a minimally populated type.
public void getEnityTypes_IndexWithMappings_ListEntityTypesRetrieved()
        throws ODataException, IOException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    Builder<String, MappingMetaData> mappingsBuilder = ImmutableOpenMap.builder();
    mappingsBuilder.put(BOOK_TYPE, null);
    when(metaDataProvider.getAllMappings(AUTHORS_INDEX)).thenReturn(mappingsBuilder.build());
    doAnswer(answer -> new ElasticCsdlEntityType().setESIndex(answer.getArgument(0))
            .setName(answer.getArgument(1))).when(edmProvider).createEntityType(AUTHORS_INDEX,
                    BOOK_TYPE);
    List<ElasticCsdlEntityType> enityTypes = edmProvider.getEntityTypes(AUTHORS_INDEX);
    assertEquals(1, enityTypes.size());
    ElasticCsdlEntityType entityType = enityTypes.get(0);
    assertEquals(AUTHORS_INDEX, entityType.getESIndex());
    assertEquals(BOOK_TYPE, entityType.getESType());
    assertEquals(BOOK_TYPE, entityType.getName());
}
@Test
// getEntityContainerForSchema(): the container is named after the provider's
// container name and holds one entity set per mapped ES type.
public void getEntityContainerForSchema_Namespace_EntityContainerWithEntitySetsRetrieved()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    Builder<String, MappingMetaData> mappingsBuilder = ImmutableOpenMap.builder();
    mappingsBuilder.put(AUTHOR_TYPE, null);
    mappingsBuilder.put(BOOK_TYPE, null);
    when(metaDataProvider.getAllMappings(AUTHORS_INDEX)).thenReturn(mappingsBuilder.build());
    doReturn(new ArrayList<>()).when(edmProvider).getNavigationProperties(AUTHORS_INDEX,
            AUTHOR_TYPE);
    doReturn(new ArrayList<>()).when(edmProvider).getNavigationProperties(AUTHORS_INDEX,
            BOOK_TYPE);
    CsdlEntityContainer entityContainer = edmProvider
            .getEntityContainerForSchema(AUTHORS_INDEX);
    assertEquals(edmProvider.getContainerName().getName(), entityContainer.getName());
    assertEquals(2, entityContainer.getEntitySets().size());
}
@Test
// getEntityContainerForSchema(): no mappings -> a named container with no entity sets.
public void getEntityContainerForSchema_NamespaceAndEmptyMetadata_EntityContainerWithEmptyEntitySetsRetrieved()
        throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = new MultyElasticIndexCsdlEdmProvider(
            metaDataProvider, indices);
    Builder<String, MappingMetaData> mappingsBuilder = ImmutableOpenMap.builder();
    when(metaDataProvider.getAllMappings(AUTHORS_INDEX)).thenReturn(mappingsBuilder.build());
    CsdlEntityContainer entityContainer = edmProvider
            .getEntityContainerForSchema(AUTHORS_INDEX);
    assertEquals(edmProvider.getContainerName().getName(), entityContainer.getName());
    assertTrue(entityContainer.getEntitySets().isEmpty());
}
@Test
// getEntityContainer(): merges entity sets from all schemas, keeping only
// those flagged for the service document (one of the three stubbed sets).
public void getEntityContainer_ContainerWithEntitySetsRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    List<CsdlSchema> schemas = new ArrayList<>();
    CsdlSchema schema1 = mock(CsdlSchema.class);
    CsdlEntityContainer container = mock(CsdlEntityContainer.class);
    List<CsdlEntitySet> entitySets = new ArrayList<>();
    CsdlEntitySet set1 = mock(CsdlEntitySet.class);
    when(set1.isIncludeInServiceDocument()).thenReturn(true);
    entitySets.add(set1);
    entitySets.add(mock(CsdlEntitySet.class));
    when(container.getEntitySets()).thenReturn(entitySets);
    when(schema1.getEntityContainer()).thenReturn(container);
    schemas.add(schema1);
    CsdlSchema schema2 = mock(CsdlSchema.class);
    when(schema2.getEntityContainer()).thenReturn(mock(CsdlEntityContainer.class));
    schemas.add(schema2);
    doReturn(schemas).when(edmProvider).getSchemas();
    CsdlEntityContainer entityContainer = edmProvider.getEntityContainer();
    assertEquals(edmProvider.getContainerName().getName(), entityContainer.getName());
    assertEquals(1, entityContainer.getEntitySets().size());
}
@Test
// getComplexType(): resolved via the schema matching the FQN's namespace;
// unknown namespace -> null.
public void getComplexType_DifferenetNames_ExpectedValuesRetrieved() throws ODataException {
    MultyElasticIndexCsdlEdmProvider edmProvider = spy(
            new MultyElasticIndexCsdlEdmProvider(metaDataProvider, indices));
    List<CsdlSchema> schemas = new ArrayList<>();
    CsdlSchema schema = mock(CsdlSchema.class);
    String namespace = "OData";
    when(schema.getNamespace()).thenReturn(namespace);
    ElasticCsdlComplexType expectedComplexType = mock(ElasticCsdlComplexType.class);
    String complexTypeName = "dimension";
    when(schema.getComplexType(complexTypeName)).thenReturn(expectedComplexType);
    schemas.add(schema);
    doReturn(schemas).when(edmProvider).getSchemas();
    ElasticCsdlComplexType actualComplexType = edmProvider
            .getComplexType(new FullQualifiedName(namespace, complexTypeName));
    assertEquals(expectedComplexType, actualComplexType);
    assertNull(edmProvider.getComplexType(new FullQualifiedName("Test", "complex")));
}
// Builds mock type-mapping metadata declaring two fields:
// "dimension" (nested object with name/state) and "current" (boolean).
private static MappingMetaData getStubProperties() throws IOException {
    Map<String, Object> dimension = new HashMap<>();
    dimension.put("type", "nested");
    HashMap<Object, Object> dimensionProperties = new HashMap<>();
    dimensionProperties.put("name", "string");
    dimensionProperties.put("state", "boolean");
    dimension.put("properties", dimensionProperties);
    Map<String, Object> properties = new HashMap<>();
    HashMap<Object, Object> currentProperties = new HashMap<>();
    currentProperties.put("type", "boolean");
    properties.put("dimension", dimension);
    properties.put("current", currentProperties);
    Map<String, Object> metadata = new HashMap<>();
    metadata.put("properties", properties);
    MappingMetaData mappingMetaData = mock(MappingMetaData.class);
    when(mappingMetaData.sourceAsMap()).thenReturn(metadata);
    return mappingMetaData;
}
// Builds mock "_parent" field mappings declaring that BOOK_TYPE's parent
// type is AUTHOR_TYPE (i.e. author 1..n books).
private static ImmutableOpenMap<String, FieldMappingMetaData> getParentChildMappings() {
    Builder<String, FieldMappingMetaData> mappingsBuilder = ImmutableOpenMap.builder();
    FieldMappingMetaData mappingMetaData = mock(FieldMappingMetaData.class);
    HashMap<Object, Object> parentProperties = new HashMap<>();
    parentProperties.put("type", AUTHOR_TYPE);
    mappingsBuilder.put(BOOK_TYPE, mappingMetaData);
    HashMap<String, Object> parent = new HashMap<String, Object>();
    parent.put(ElasticConstants.PARENT_PROPERTY, parentProperties);
    when(mappingMetaData.sourceAsMap()).thenReturn(parent);
    return mappingsBuilder.build();
}
}
|
mahaplatform/mahaplatform.com
|
src/apps/crm/admin/components/contactfieldsfield/index.js
|
import { Container, Button } from '@admin'
import PropTypes from 'prop-types'
import React from 'react'
import Edit from './edit'
import New from './new'
import _ from 'lodash'
// Admin form field for composing the list of contact fields a CRM program
// form collects. First/Last Name and Email are always present; additional
// fields are added, edited and removed through stacked form panels
// (New / Edit) pushed onto the enclosing form context.
class ContactFieldsField extends React.PureComponent {

  static contextTypes = {
    form: PropTypes.object
  }

  static propTypes = {
    program: PropTypes.object,
    fields: PropTypes.array,
    defaultValue: PropTypes.object,
    onChange: PropTypes.func,
    onReady: PropTypes.func
  }

  static defaultProps = {
    onChange: () => {},
    onReady: () => {}
  }

  state = {
    fields: null
  }

  // Bind once; _handleEdit/_handleRemove need per-row args so they are
  // bound inline in render() instead.
  _handleAdd = this._handleAdd.bind(this)
  _handleBack = this._handleBack.bind(this)
  _handleNew = this._handleNew.bind(this)

  render() {
    const { fields } = this.state
    if(!fields) return null
    return (
      <div className="contactfieldsfield">
        { /* the first three rows are fixed, non-removable fields */ }
        <div className="contactfieldsfield-field">
          <div className="contactfieldsfield-field-label">
            First Name <span>(textfield)</span>
          </div>
        </div>
        <div className="contactfieldsfield-field">
          <div className="contactfieldsfield-field-label">
            Last Name <span>(textfield)</span>
          </div>
        </div>
        <div className="contactfieldsfield-field">
          <div className="contactfieldsfield-field-label">
            Email <span>(emailfield)</span>
          </div>
        </div>
        { fields.map((field, index) => (
          <div className="contactfieldsfield-field" key={`field_${index}`}>
            <div className="contactfieldsfield-field-label">
              { field.name.value } <span>({ this._getType(field) })</span>
            </div>
            <div className="contactfieldsfield-field-action" onClick={ this._handleEdit.bind(this, field, index)}>
              <i className="fa fa-pencil" />
            </div>
            <div className="contactfieldsfield-field-action" onClick={ this._handleRemove.bind(this, index)}>
              <i className="fa fa-times" />
            </div>
          </div>
        ))}
        <div className="contactfieldsfield-add">
          <Button { ...this._getButton() } />
        </div>
      </div>
    )
  }

  componentDidMount() {
    const { defaultValue } = this.props
    this.setState(defaultValue || { fields: [] })
    this.props.onReady()
  }

  componentDidUpdate(prevProps, prevState) {
    const { fields } = this.state
    // deep compare: fields are plain objects rebuilt by the nested panels
    if(!_.isEqual(fields, prevState.fields)) {
      this._handleChange()
    }
  }

  // Builds the grouped list of fields still available to add: standard
  // contact fields, the program's own fields, and consent checkboxes
  // (SMS/voice consent only once a phone field has been chosen).
  // Already-selected fields and empty groups are filtered out.
  _getAvailable() {
    const { program } = this.props
    const { fields } = this.state
    const contactfields = fields.filter(field => {
      return field.type === 'contactfield'
    }).map(field => {
      return field.contactfield.name
    })
    const available = [
      { label: 'Contact', fields: [
        { label: 'Phone', name: 'phone', type: 'phonefield' },
        { label: 'Address', name: 'address', type: 'addressfield' },
        { label: 'Birthday', name: 'birthday', type: 'textfield' },
        { label: 'Spouse', name: 'spouse', type: 'textfield' }
      ] },
      {
        label: program.title,
        fields: this.props.fields.map(field => ({
          code: field.code,
          label: field.label,
          name: `values.${field.code}`,
          type: field.type,
          instructions: field.instructions,
          config: field.config
        }))
      },
      { label: 'Consent', fields: [
        { label: 'Email Consent', name: 'consent.email', type: 'checkbox', prompt: '<p>Please send me emails</p>' },
        ..._.includes(contactfields, 'phone') ? [
          { label: 'SMS Consent', name: 'consent.sms', type: 'checkbox', prompt: '<p>Please send me text messages</p>' },
          { label: 'Voice Consent', name: 'consent.voice', type: 'checkbox', prompt: '<p>Please call me</p>' }
        ] : []
      ] }
    ]
    return available.map(group => ({
      ...group,
      fields: group.fields.filter(field => {
        return _.find(fields, {
          contactfield: {
            name: field.name
          }
        }) === undefined
      })
    })).filter(group => {
      return group.fields.length > 0
    })
  }

  _getButton() {
    return {
      label: 'Add field',
      className: 'link',
      handler: this._handleNew
    }
  }

  // Props for the Edit panel of the field at `index`.
  _getEdit(field, index) {
    return {
      field,
      fields: this._getAvailable(),
      onBack: this._handleBack,
      onDone: this._handleUpdate.bind(this, index)
    }
  }

  // Props for the New-field panel.
  _getNew() {
    return {
      fields: this._getAvailable(),
      onBack: this._handleBack,
      onDone: this._handleAdd
    }
  }

  // Display type: the underlying contact-field type when wrapped, else the raw type.
  _getType(field) {
    return field.contactfield ? field.contactfield.type: field.type
  }

  _handleAdd(field) {
    this.setState({
      fields: [
        ...this.state.fields,
        field
      ]
    })
    this.context.form.pop()
  }

  _handleBack() {
    this.context.form.pop()
  }

  _handleChange() {
    const { fields } = this.state
    this.props.onChange({ fields })
  }

  _handleEdit(field, index) {
    this.context.form.push(Edit, this._getEdit(field, index))
  }

  _handleNew() {
    this.context.form.push(New, this._getNew())
  }

  _handleRemove(index) {
    this.setState({
      fields: this.state.fields.filter((field, i) => {
        return i !== index
      })
    })
  }

  _handleUpdate(index, newfield) {
    this.setState({
      fields: this.state.fields.map((field, i) => {
        return i === index ? newfield : field
      })
    })
    this.context.form.pop()
  }

}
// Declares the admin-API endpoint the Container HOC fetches into the
// component's `fields` prop (the program's custom fields).
const mapResources = (props, context) => ({
  fields: `/api/admin/crm/programs/${props.program.id}/fields`
})

export default Container(mapResources)(ContactFieldsField)
|
maximsmol/rolmodl
|
docs/search/files_12.js
|
// Doxygen-generated search index (files starting with "v") — do not edit by
// hand; regenerated by the docs build.
var searchData=
[
  ['valarray_3306',['valarray',['http://en.cppreference.com/w/cpp/header/valarray.html',1,'']]],
  ['variant_3307',['variant',['http://en.cppreference.com/w/cpp/header/variant.html',1,'']]],
  ['vector_3308',['vector',['http://en.cppreference.com/w/cpp/header/vector.html',1,'']]],
  ['version_3309',['version',['http://en.cppreference.com/w/cpp/header/version.html',1,'']]]
];
|
wentzlau/kervi
|
kervi-cli/kervi_cli/scripts/commands/detect.py
|
import click
import socket
import json
import time
# Top-level `kervi detect` command group; subcommands (applications, devices)
# register themselves on it below.
@click.group()
def detect():
    """Detect devices and applications"""
    pass
@detect.command()
@click.option('--socket_port', default=9434, help='socket port to broadcast over')
@click.option('--timeout', default = 5, help='max scan time')
@click.option('--challenge', default = "kervi", help='challenge to present to kervi api')
def applications(socket_port, timeout, challenge):
    """Detect running kervi applications on your local network"""
    print("Detect applications pleas wait...")

    # UDP broadcast socket; 2s receive timeout so the scan loop stays responsive.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    sock.settimeout(2)

    use_local = True
    # Apps are expected to echo the challenge back in their JSON reply.
    message = "Are you a kervi app with challenge: " + challenge
    message_reply = "I am a kervi app with challenge: " + challenge
    found_apps = []
    time_start = time.time()
    try:
        while time.time() - time_start < timeout:
            # Alternate between loopback and LAN broadcast so apps on this
            # host and on the network are both reached.
            if use_local:
                server_address = ('127.255.255.255', socket_port)
            else:
                server_address = ('255.255.255.255', socket_port)
            use_local = not use_local
            sock.sendto(message.encode(), server_address)
            try:
                data, server = sock.recvfrom(1000)
                response = json.loads(data.decode("UTF-8"))
                # Accept only replies that start with the expected challenge echo.
                if response["challenge"].index(message_reply) == 0:
                    server_ip = str(server[0])
                    try:
                        # Dedupe by ip/app-id: .index() raises if not seen yet.
                        found_apps.index(server_ip + "/" + response["id"])
                    except ValueError:
                        print(response["name"] + " (web: " + response["web"] + " id:" +response["id"] + " ipc_port:" + server_ip + ":" + str(response["port"]) +")")
                        found_apps.append(server_ip + "/" + response["id"])
                else:
                    print('Verification failed')
            except KeyboardInterrupt:
                break
            except TimeoutError:
                # No reply within the socket timeout; keep broadcasting.
                pass
            except Exception as ex:
                # Best-effort scan: malformed replies are silently skipped.
                pass
    except KeyboardInterrupt:
        pass
    finally:
        sock.close()
    if len(found_apps)==0:
        print("No applications found")
def _pretty_print(d, indent=0):
if isinstance(d, dict):
for key in d.keys():
value = d[key]
if isinstance(value, dict) or isinstance(value, list):
print( ' ' * indent + str(key))
_pretty_print(value, indent+1)
else:
print(' ' * (indent+1) + key + ":" + str(value))
elif isinstance(d, list):
for item in d:
if isinstance(item, dict) or isinstance(item, list):
_pretty_print(item, indent+1)
else:
print(' ' * (indent+1) + str(item))
else:
pass
@detect.command()
@click.option('--hw-platform', default = "auto", help='Specify platform driver. Valid values are auto, windows, linux, linux(rpi), darwin. Default is auto where the framework tries to detect the platform')
def devices(hw_platform):
    """Detect devices that are found on this device."""
    # Imported lazily so the rest of the CLI works even when the HAL's
    # platform dependencies are not installed.
    import kervi.hal as hal
    platform = hal._load(hw_platform)
    print("platform", platform)
    devices = hal.detect_devices()
    _pretty_print(devices)
|
seaCheng/animation-
|
qt-mvvm/source/libmvvm_view/mvvm/widgets/propertytreeview.h
|
<filename>qt-mvvm/source/libmvvm_view/mvvm/widgets/propertytreeview.h
// ************************************************************************** //
//
// Model-view-view-model framework for large GUI applications
//
//! @license GNU General Public License v3 or higher (see COPYING)
//! @authors see AUTHORS
//
// ************************************************************************** //
#ifndef MVVM_WIDGETS_PROPERTYTREEVIEW_H
#define MVVM_WIDGETS_PROPERTYTREEVIEW_H
#include "mvvm/widgets/itemstreeview.h"
namespace ModelView {
//! Widget holding standard QTreeView and intended for displaying all properties of given
//! SessionItem.
class MVVM_VIEW_EXPORT PropertyTreeView : public ItemsTreeView {
    Q_OBJECT

public:
    PropertyTreeView(QWidget* parent = nullptr);
    ~PropertyTreeView();

    //! Sets the SessionItem whose properties the tree should display.
    void setItem(SessionItem* item);
};
} // namespace ModelView
#endif // MVVM_WIDGETS_PROPERTYTREEVIEW_H
|
walterfan/snippets
|
cpp/exam/calculator.cpp
|
<filename>cpp/exam/calculator.cpp
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <string>
using namespace std;
// Token categories produced by the expression scanner.
// Fix: the original `typedef enum { ... } TokenKind` was missing its
// terminating semicolon (compile error).
typedef enum {
    BAD_TOKEN,
    NUM_TOKEN,
    ADD_OPT_TOKEN,
    SUB_OPT_TOKEN,
    MUL_OPT_TOKEN,
    DIV_OPT_TOKEN,
    LINE_END_TOKEN
} TokenKind;

const size_t MAX_TOKEN_SIZE = 100;

// One scanned token: its kind, numeric value (for NUM_TOKEN) and raw text.
// Fix: the original `typedef struct { ... }` had no typedef name, which is
// ill-formed; named it Token.
typedef struct {
    TokenKind token;
    double value;
    char str[MAX_TOKEN_SIZE];
} Token;

// Expression calculator: parse() tokenizes/analyzes the expression,
// calculate() evaluates it (definitions live elsewhere).
// NOTE(review): "Calcultor" looks like a typo for "Calculator"; the name is
// kept unchanged to avoid breaking any external references.
class Calcultor
{
public:
    int calculate(char* expression);
    int parse(char* expression);
private:
    string m_strExp;   // expression under evaluation
    int m_nPos;        // fix: original declaration was missing its semicolon
};
// Format the current local time into `buffer` using the locale's date and
// time representations ("%x %X"); returns `buffer` for inline use.
// Fix: strftime previously received a hard-coded size of 30, ignoring the
// `len` parameter and risking an overrun when a smaller buffer/len is passed.
// Also dropped the pointless embedded '\0' in the format literal.
char* get_cur_time(char* buffer, int len=30)
{
    time_t rawtime;
    struct tm * timeinfo;

    time(&rawtime);
    timeinfo = localtime(&rawtime);

    strftime(buffer, len, "%x %X", timeinfo);
    return buffer;
}
// Entry point: prints a banner stamped with the current local time.
// The calculator class itself is not wired up yet.
int main(int argc, char* argv[])
{
    char stamp[30] = {'\0'};
    printf("# [%s] --- calculator ---\n", get_cur_time(stamp));
    return 0;
}
|
chalant/pluto
|
pluto/assets/exchange.py
|
class Exchange(object):
    """Description of a trading exchange.

    Stores the exchange name, the asset types it trades, and the country
    it operates in. Attributes are exposed through read-only properties.
    """

    def __init__(self, name, asset_types, country_code):
        '''
        Parameters
        ----------
        name : str
        asset_types : list
        country_code : str
        '''
        self._name = name
        self._asset_types = asset_types
        self._country_code = country_code

    @property
    def name(self):
        """str: exchange name."""
        return self._name

    @property
    def asset_types(self):
        """list: asset types traded on this exchange."""
        return self._asset_types

    @property
    def country_code(self):
        """str: country the exchange operates in."""
        return self._country_code

    def __repr__(self):
        # Debug-friendly representation; mirrors the constructor signature.
        return '{}(name={!r}, asset_types={!r}, country_code={!r})'.format(
            type(self).__name__, self._name, self._asset_types, self._country_code)
|
jfalcou/kiwaku
|
test/misc/shape/swap.cpp
|
//==================================================================================================
/**
KIWAKU - Containers Well Made
Copyright 2020 <NAME>
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
SPDX-License-Identifier: MIT
**/
//==================================================================================================
#include "test.hpp"
#include <kiwaku/shape.hpp>
// Each case builds two shapes of the same rank, keeps untouched copies,
// swaps the originals via kwk::shape::swap, and checks the values were
// exchanged.
TTS_CASE( "swap behavior on 1D shape" )
{
  kwk::shape first{4} , second{2};
  kwk::shape old_first{4} , old_second{2};

  first.swap(second);

  TTS_EQUAL(first , old_second);
  TTS_EQUAL(second, old_first );
};

TTS_CASE( "swap behavior on 2D shape" )
{
  kwk::shape first{4,2} , second{6,9};
  kwk::shape old_first{4,2} , old_second{6,9};

  first.swap(second);

  TTS_EQUAL(first , old_second);
  TTS_EQUAL(second, old_first );
};

TTS_CASE( "swap behavior on 3D shape" )
{
  kwk::shape first{13,3,7} , second{1,66,4};
  kwk::shape old_first{13,3,7} , old_second{1,66,4};

  first.swap(second);

  TTS_EQUAL(first , old_second);
  TTS_EQUAL(second, old_first );
};

TTS_CASE( "swap behavior on 4D shape" )
{
  kwk::shape first{1,3,3,7} , second{1,6,6,4};
  kwk::shape old_first{1,3,3,7} , old_second{1,6,6,4};

  first.swap(second);

  TTS_EQUAL(first , old_second);
  TTS_EQUAL(second, old_first );
};
|
showthesunli/liuli
|
src/api/views/api_v1/bp_user.py
|
"""
Created by howie.hu at 2022-04-12.
Description: 用户API
Changelog: all notable changes to this file will be documented
"""
import datetime
import time
from flask import Blueprint, current_app, request
from flask_jwt_extended import create_access_token
from src.api.common import (
ResponseCode,
ResponseField,
ResponseReply,
UniResponse,
jwt_required,
response_handle,
)
from src.databases import MongodbBase, mongodb_find, mongodb_update_data
from src.utils import LOGGER, md5_encryption
bp_user = Blueprint("user", __name__, url_prefix="/user")
@bp_user.route("/token_valid", methods=["POST"], strict_slashes=False)
@jwt_required()
def token_valid():
    """Check whether the caller's JWT is still valid.

    The @jwt_required decorator rejects invalid/expired tokens before this
    body runs, so reaching it at all means the token is valid.

    eg:
        {
            "username": "liuli"
        }

    Returns:
        Response: Flask response object
    """
    return response_handle(request=request, dict_value=UniResponse.SUCCESS)
@bp_user.route("/change_pwd", methods=["POST"], strict_slashes=False)
@jwt_required()
def change_pwd():
    """Change the user's (default) password.

    eg:
        {
            "username": "liuli",
            "o_password": "<PASSWORD>",
            "n_password": "<PASSWORD>"
        }

    Returns:
        Response: Flask response object
    """
    # Shared application resources
    mongodb_base: MongodbBase = current_app.config["mongodb_base"]
    app_logger: LOGGER = current_app.config["app_logger"]
    coll = mongodb_base.get_collection(coll_name="liuli_user")
    # Request payload
    post_data: dict = request.json
    username = post_data.get("username") or ""
    o_password = post_data.get("o_password") or ""
    n_password = post_data.get("n_password") or ""
    # NOTE(review): passwords are stored as plain MD5 (md5_encryption) —
    # weak for password hashing; consider a salted KDF (bcrypt/argon2).
    user_db_res = mongodb_find(
        coll_conn=coll,
        filter_dict={"username": username, "password": md5_encryption(o_password)},
        return_dict={"_id": 0},
    )
    user_info_list = user_db_res["info"]
    if username and n_password and user_db_res["status"] and len(user_info_list) == 1:
        # Old credentials matched exactly one user: store the new password hash
        db_res = mongodb_update_data(
            coll_conn=coll,
            filter_dict={"username": username},
            update_data={
                "$set": {
                    "password": md5_encryption(n_password),
                    "updated_at": int(time.time()),
                }
            },
        )
        if db_res["status"]:
            result = {
                ResponseField.DATA: {"username": username},
                ResponseField.MESSAGE: ResponseReply.SUCCESS,
                ResponseField.STATUS: ResponseCode.SUCCESS,
            }
        else:
            result = UniResponse.CHANGE_PWD_ERROR
            err_info = f"change user pwd failed! DB response info -> {db_res['info']}"
            app_logger.error(err_info)
    else:
        result = UniResponse.CHANGE_PWD_ERROR
        err_info = f"change user pwd failed! DB response info -> {user_db_res}"
        app_logger.error(err_info)
    return response_handle(request=request, dict_value=result)
@bp_user.route("/login", methods=["POST"], strict_slashes=False)
def login():
    """User login endpoint: verifies credentials and issues a JWT.

    eg:
        {
            "username": "liuli",
            "password": "<PASSWORD>"
        }

    Token Demo:
        "<KEY>"

    Returns:
        Response: Flask response object
    """
    # Shared application resources
    mongodb_base: MongodbBase = current_app.config["mongodb_base"]
    app_logger: LOGGER = current_app.config["app_logger"]
    coll = mongodb_base.get_collection(coll_name="liuli_user")
    # Request payload
    post_data: dict = request.json
    username = post_data.get("username", "")
    password = post_data.get("password", "")
    user_db_res = mongodb_find(
        coll_conn=coll,
        filter_dict={"username": username, "password": md5_encryption(password)},
        return_dict={"_id": 0},
    )
    user_info_list = user_db_res["info"]
    if username and password and user_db_res["status"] and len(user_info_list) == 1:
        # Token lifetime: 259200 minutes (~180 days, i.e. half a year)
        expires_delta = datetime.timedelta(minutes=259200)
        access_token = create_access_token(
            identity=username, expires_delta=expires_delta
        )
        result = {
            ResponseField.DATA: {"token": access_token, "username": username},
            ResponseField.MESSAGE: ResponseReply.SUCCESS,
            ResponseField.STATUS: ResponseCode.SUCCESS,
        }
    else:
        result = {
            ResponseField.DATA: {},
            ResponseField.MESSAGE: ResponseReply.USER_LOGIN_ERROR,
            ResponseField.STATUS: ResponseCode.USER_LOGIN_ERROR,
        }
        err_info = f"login failed! DB response info -> {user_db_res}"
        app_logger.error(err_info)
    return response_handle(request=request, dict_value=result)
|
stravinci/AIS-home-assistant
|
homeassistant/components/extalife/helpers/device.py
|
"""Provides device automations for Exta Life."""
import logging
from typing import List
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
entity_registry,
)
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from ..pyextalife import (
DEVICE_ARR_ALL_TRANSMITTER,
MODEL_LEDIX_P260,
MODEL_P501,
MODEL_P520,
MODEL_P521L,
MODEL_P4572,
MODEL_P4574,
MODEL_P4578,
MODEL_P45736,
MODEL_RNK22,
MODEL_RNK24,
MODEL_RNM24,
MODEL_RNP21,
MODEL_RNP22,
)
from .const import (
CONF_EXTALIFE_EVENT_TRANSMITTER,
CONF_PROCESSOR_EVENT_STAT_NOTIFICATION,
DOMAIN,
TRIGGER_BUTTON_DOUBLE_CLICK,
TRIGGER_BUTTON_DOWN,
TRIGGER_BUTTON_LONG_PRESS,
TRIGGER_BUTTON_SINGLE_CLICK,
TRIGGER_BUTTON_TRIPLE_CLICK,
TRIGGER_BUTTON_UP,
TRIGGER_SUBTYPE,
TRIGGER_SUBTYPE_BUTTON_TEMPLATE,
TRIGGER_TYPE,
)
from .typing import ExtaLifeTransmitterEventProcessorType
_LOGGER = logging.getLogger(__name__)
class DeviceEvent:
    """Immutable pairing of a Home Assistant event name with its source id."""

    def __init__(self, event, unique_id):
        """
        Args:
            event: name of the event as fired in Home Assistant.
            unique_id: unique identifier of the event source,
                e.g. a unique device id.
        """
        self._ha_event = event
        self._source_id = unique_id

    @property
    def event(self):
        """Return the Home Assistant event name."""
        return self._ha_event

    @property
    def unique_id(self):
        """Return the unique id of the event source."""
        return self._source_id
class Device:
def __init__(self, device: DeviceEntry, type):
"""dev_info - device info - the same passed to Device Registry
type - Exta Life module type e.g 10 = ROP-21"""
self._type = type
self._device = device
self._event_processor = None
@property
def model(self):
return self._device.model
@property
def type(self):
return self._type
@property
def identifiers(self) -> set:
return self._device.identifiers
@property
def unique_id(self):
# unpack tuple from set and return unique_id by list generator and list index 0
return [tuple for tuple in self.identifiers][0][1]
@property
def registry_id(self) -> str:
return self._device.id
@property
def triggers(self) -> list:
pass
def controller_event(self, dataa):
_LOGGER.debug("Device.controller_event")
pass
@property
def config_entry_id(self):
return [t for t in self._device.config_entries][
0
] # the same device can exist only in 1 Config Entry
@property
def event(self) -> DeviceEvent:
return DeviceEvent(CONF_EXTALIFE_EVENT_TRANSMITTER, self.unique_id)
class DeviceFactory:
    """Create the concrete :class:`Device` subclass for a module type."""

    @staticmethod
    def get_device(device: "DeviceEntry", type) -> "Device":
        """Return a Device subclass instance wrapping *device*.

        Raises:
            NotImplementedError: if the module type has no wrapper class.
        """
        if type not in DEVICE_ARR_ALL_TRANSMITTER:
            raise NotImplementedError
        return TransmitterDevice(device, type)
class TransmitterDevice(Device):
    """Exta Life transmitter (remote control / push-button panel) device."""

    def __init__(self, device: "DeviceEntry", type):
        """Create the transmitter wrapper and its event processor."""
        # Local import avoids a circular import with .event.
        from .event import ExtaLifeTransmitterEventProcessor

        super().__init__(device, type)
        self._event_processor = ExtaLifeTransmitterEventProcessor(self)

    @property
    def triggers(self) -> list:
        """Return the list of device triggers (type + button subtype).

        The number of physical buttons depends on the transmitter model;
        every button supports the full set of press/click trigger types.
        """
        triggers = []
        trigger_types = (
            TRIGGER_BUTTON_UP,
            TRIGGER_BUTTON_DOWN,
            TRIGGER_BUTTON_SINGLE_CLICK,
            TRIGGER_BUTTON_DOUBLE_CLICK,
            TRIGGER_BUTTON_TRIPLE_CLICK,
            TRIGGER_BUTTON_LONG_PRESS,
        )
        buttons = 0
        if self.model in (MODEL_RNK22, MODEL_P4572):
            buttons = 2
        elif self.model in (
            MODEL_RNK24,
            MODEL_P4574,
            MODEL_RNM24,
            MODEL_RNP21,
            MODEL_RNP22,
        ):
            buttons = 4
        # BUG FIX: the original tested `self.model in (MODEL_P4578)` —
        # `(X)` is not a tuple, so for string constants this was a substring
        # test (and a TypeError for non-iterable constants).
        elif self.model == MODEL_P4578:
            buttons = 8
        elif self.model == MODEL_P45736:
            buttons = 36
        for button in range(1, buttons + 1):
            for trig in trigger_types:  # renamed from `type` (shadowed builtin)
                triggers.append(
                    {
                        TRIGGER_TYPE: trig,
                        TRIGGER_SUBTYPE: TRIGGER_SUBTYPE_BUTTON_TEMPLATE.format(button),
                    }
                )
        return triggers

    def controller_event(self, data):
        """Forward a controller status notification to the event processor."""
        _LOGGER.debug("TransmitterDevice.controller_event")
        super().controller_event(data)
        self._event_processor.process_event(
            data, event_type=CONF_PROCESSOR_EVENT_STAT_NOTIFICATION
        )
class DeviceManager:
    """Registers Exta Life devices in the HA Device Registry and tracks them."""

    def __init__(self, config_entry: ConfigEntry, core: "Core"):
        # Imported locally to resolve the "Core" annotation while avoiding
        # a circular import with .core.
        from .core import Core

        self._core = core
        self._config_entry = config_entry
        # Mapping: HA Device Registry id -> Device wrapper instance.
        self._devices = dict()

    async def register_in_dr(self, dev_info: dict) -> DeviceEntry:
        """Register (or fetch) the device described by *dev_info* in the
        HA Device Registry and return its registry entry."""
        device_registry = await dr.async_get_registry(self._core.hass)
        device_entry = device_registry.async_get_or_create(
            config_entry_id=self._config_entry.entry_id, **dev_info
        )
        return device_entry

    async def async_add(self, type, dev_info=None, ha_device=None) -> Device:
        """Create a Device wrapper and cache it by its registry id.

        Args:
            type: Exta Life module type, e.g. 10 = ROP-21.
            dev_info: device info in HA Device Registry format; used to
                register the device when *ha_device* is not supplied.
            ha_device: an already-registered DeviceEntry; when given,
                registration in the Device Registry is skipped.

        Returns:
            Device: the wrapper produced by :class:`DeviceFactory`.
        """
        device_entry = ha_device if ha_device else await self.register_in_dr(dev_info)
        device = DeviceFactory.get_device(device_entry, type)
        self._devices.update({device_entry.id: device})
        return device

    async def async_get_by_registry_id(self, device_id) -> Device:
        """ Get device by HA Device Registry id """
        return self._devices.get(device_id)
|
ruediste/laf
|
framework/src/main/java/com/github/ruediste/rise/core/security/authorization/AuthorizationInspector.java
|
package com.github.ruediste.rise.core.security.authorization;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import com.github.ruediste.rise.util.Pair;
import com.google.common.collect.MapMaker;
/**
* Checks if a method calls {@link Authz#doAuthChecks(Runnable)}
*/
/**
 * Checks if a method calls {@link Authz#doAuthChecks(Runnable)}.
 *
 * <p>The bytecode of the declaring class is scanned with ASM; results are
 * cached per class in a weak-keyed concurrent map.
 */
public class AuthorizationInspector {

    /**
     * Class visitor collecting all (methodName, methodDescriptor) pairs whose
     * body contains a call to {@code Authz.doAuthChecks}.
     */
    private static class AuthorizeCallsVisitor extends ClassVisitor {
        Set<Pair<String, String>> authorizeCallingMethods = new HashSet<>();

        private static final String OWNER = Type.getInternalName(Authz.class);
        private static final String NAME = "doAuthChecks";

        public AuthorizeCallsVisitor() {
            super(Opcodes.ASM5);
        }

        @Override
        public MethodVisitor visitMethod(int access, String methodName, String methodDesc, String signature,
                String[] exceptions) {
            return new MethodVisitor(Opcodes.ASM5) {
                @Override
                public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
                    if (OWNER.equals(owner) && NAME.equals(name)) {
                        authorizeCallingMethods.add(Pair.of(methodName, methodDesc));
                    }
                }
            };
        }
    }

    /**
     * Determine whether the implementation of {@code method} found on
     * {@code clazz} (or an ancestor thereof) calls
     * {@link Authz#doAuthChecks(Runnable)}.
     *
     * @throws RuntimeException if no implementation of the method exists on
     *             the class or its ancestors
     */
    public static boolean callsDoAuthChecks(Class<?> clazz, Method method) {
        Method impl = MethodImplementationFinder.findImplementation(clazz, method);
        if (impl == null)
            throw new RuntimeException(
                    "No implementation of " + method + " found on " + clazz + " or ancestors thereof");
        return getAuthorizeCallingMethods(impl.getDeclaringClass())
                .contains(Pair.of(method.getName(), Type.getMethodDescriptor(method)));
    }

    // Weak keys let scanned classes (and their class loaders) be collected.
    private static ConcurrentMap<Class<?>, Set<Pair<String, String>>> cache = new MapMaker().weakKeys().makeMap();

    private static Set<Pair<String, String>> getAuthorizeCallingMethods(Class<?> clazz) {
        return cache.computeIfAbsent(clazz, x -> {
            AuthorizeCallsVisitor cv;
            try (InputStream is = clazz.getClassLoader().getResourceAsStream(Type.getInternalName(clazz) + ".class")) {
                if (is == null)
                    // getResourceAsStream returns null (it does not throw) when
                    // the class file cannot be located; fail with a clear message
                    // instead of an NPE inside ClassReader.
                    throw new RuntimeException("class file of " + clazz.getName() + " not found");
                ClassReader cr = new ClassReader(is);
                cv = new AuthorizeCallsVisitor();
                // Bit flags are combined with | (the original used +, which
                // only works because the flags are distinct bits).
                cr.accept(cv, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
            } catch (IOException e) {
                // Preserve the underlying I/O failure as the cause (the
                // original dropped it).
                throw new RuntimeException("error while reading class " + clazz.getName(), e);
            }
            return cv.authorizeCallingMethods;
        });
    }
}
|
1stmateusz/motech
|
platform/mds/mds/src/test/java/org/motechproject/mds/MdsDataProviderTest.java
|
<reponame>1stmateusz/motech<filename>platform/mds/mds/src/test/java/org/motechproject/mds/MdsDataProviderTest.java<gh_stars>10-100
package org.motechproject.mds;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.motechproject.mds.dto.EntityDto;
import org.motechproject.mds.dto.FieldDto;
import org.motechproject.mds.dto.LookupDto;
import org.motechproject.mds.dto.LookupFieldDto;
import org.motechproject.mds.javassist.MotechClassPool;
import org.motechproject.mds.service.EntityService;
import org.motechproject.mds.service.DefaultMotechDataService;
import org.motechproject.mds.tasks.MDSDataProvider;
import org.motechproject.mds.testutil.FieldTestHelper;
import org.motechproject.mds.testutil.records.Record;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.springframework.core.io.ResourceLoader;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code MDSDataProvider}: verifies that task lookups are
 * dispatched to the generated MDS service (mocked here by the inner
 * {@link LookupService}) for single-result, multi-result, by-id and
 * related-field lookups.
 */
@RunWith(MockitoJUnitRunner.class)
public class MdsDataProviderTest {

    // Fixture values asserted inside the stub LookupService below.
    private static final String PARAM_VALUE = "param";
    private static final long ENTITY_ID = 4;
    private static final Long INSTANCE_ID = 1L;
    private static final String VALUE = "sample value";
    // Built-in MDS data-provider lookup/field names for finding by instance id.
    private static final String FIND_BY_ID_LOOKUP = "mds.dataprovider.byinstanceid";
    private static final String ID_LOOKUP_FIELD = "mds.dataprovider.instanceid";

    @Mock
    private EntityService entityService;

    @Mock
    private BundleContext bundleContext;

    @Mock
    private ServiceReference serviceReference;

    @Mock
    private ResourceLoader resourceLoader;

    @Mock
    private EntityDto entity;

    // Object under test, wired with the mocks in setUp().
    private MDSDataProvider dataProvider;

    @Before
    public void setUp() {
        // Entity metadata: the Record entity with a single String field.
        when(entityService.getEntityByClassName(Record.class.getName())).thenReturn(entity);
        when(entity.getId()).thenReturn(ENTITY_ID);
        FieldDto fieldDto = FieldTestHelper.fieldDto(1L, "field", String.class.getName(), "disp", null);
        when(entityService.getEntityFields(ENTITY_ID)).thenReturn(asList(fieldDto));
        // Three lookups: single-result, multi-result, and one through a
        // related entity's field.
        LookupFieldDto lookupField = FieldTestHelper.lookupFieldDto(1L, "field");
        LookupDto singleLookup = new LookupDto("singleLookup", true, false, asList(lookupField), false);
        LookupDto multiLookup = new LookupDto("multiLookup", false, false, asList(lookupField), false);
        lookupField = FieldTestHelper.lookupFieldDto(2L, "related");
        lookupField.setRelatedName("stringVar");
        LookupDto relatedLookup = new LookupDto("relatedLookup", false, false, asList(lookupField), false);
        when(entityService.getLookupByName(ENTITY_ID, "singleLookup")).thenReturn(singleLookup);
        when(entityService.getLookupByName(ENTITY_ID, "multiLookup")).thenReturn(multiLookup);
        when(entityService.getLookupByName(ENTITY_ID, "relatedLookup")).thenReturn(relatedLookup);
        // Lookup-field mappings used by the provider to resolve parameters.
        Map<String, FieldDto> mapping = new HashMap<>();
        mapping.put("field", fieldDto);
        when(entityService.getLookupFieldsMapping(ENTITY_ID, "singleLookup")).thenReturn(mapping);
        when(entityService.getLookupFieldsMapping(ENTITY_ID, "multiLookup")).thenReturn(mapping);
        mapping = new HashMap<>();
        mapping.put("related.stringVar", fieldDto);
        when(entityService.getLookupFieldsMapping(ENTITY_ID, "relatedLookup")).thenReturn(mapping);
        // OSGi wiring: resolving the generated service returns the stub below.
        when(bundleContext.getServiceReference(LookupService.class.getName())).thenReturn(serviceReference);
        when(bundleContext.getService(serviceReference)).thenReturn(new LookupService());
        dataProvider = new MDSDataProvider(resourceLoader);
        dataProvider.setEntityService(entityService);
        dataProvider.setBundleContext(bundleContext);
        // Register the stub as the service interface for the Record entity.
        MotechClassPool.registerServiceInterface(Record.class.getName(), LookupService.class.getName());
    }

    @Test
    public void testSingleResultLookup() {
        Map<String, String> lookupMap = new HashMap<>();
        lookupMap.put("field", PARAM_VALUE);
        Object result = dataProvider.lookup(Record.class.getName(), "singleLookup", lookupMap);
        assertNotNull(result);
        assertTrue("Wrong type returned", result instanceof Record);
        Record record = (Record) result;
        assertEquals("single", record.getValue());
    }

    @Test
    public void testMultiResultLookup() {
        Map<String, String> lookupMap = new HashMap<>();
        lookupMap.put("field", PARAM_VALUE);
        Object result = dataProvider.lookup(Record.class.getName(), "multiLookup", lookupMap);
        // we expect it will return the record as result if there is only item
        assertNotNull(result);
        assertTrue("Wrong type returned", result instanceof Record);
        Record record = (Record) result;
        assertEquals("multi", record.getValue());
    }

    @Test
    public void testFindByInstanceId() {
        Map<String, String> lookupMap = new HashMap<>();
        lookupMap.put(ID_LOOKUP_FIELD, INSTANCE_ID.toString());
        Object result = dataProvider.lookup(Record.class.getName(), FIND_BY_ID_LOOKUP, lookupMap);
        assertNotNull(result);
        assertTrue("Wrong type returned", result instanceof Record);
        Record record = (Record) result;
        assertEquals("found by id", record.getValue());
    }

    @Test
    public void shouldFindByRelatedField() {
        Map<String, String> lookupMap = new HashMap<>();
        lookupMap.put("related.stringVar", VALUE);
        Object result = dataProvider.lookup(Record.class.getName(), "relatedLookup", lookupMap);
        assertNotNull(result);
        assertTrue("Wrong type returned", result instanceof Record);
        Record record = (Record) result;
        assertEquals("found by related field", record.getValue());
    }

    /**
     * Stub MDS data service: each lookup asserts the parameter it received
     * and returns a Record tagged with a value the tests above check.
     */
    public static class LookupService extends DefaultMotechDataService<Record> {
        public Record singleLookup(String field) {
            assertEquals(PARAM_VALUE, field);
            Record record = new Record();
            record.setValue("single");
            return record;
        }

        public List<Record> multiLookup(String field) {
            assertEquals(PARAM_VALUE, field);
            Record record = new Record();
            record.setValue("multi");
            return asList(record);
        }

        @Override
        public Record findById(Long id) {
            assertEquals(INSTANCE_ID, id);
            Record record = new Record();
            record.setValue("found by id");
            return record;
        }

        public List<Record> relatedLookup(String value) {
            assertEquals(VALUE, value);
            Record record = new Record();
            record.setValue("found by related field");
            return asList(record);
        }

        @Override
        public Class<Record> getClassType() {
            return Record.class;
        }
    }
}
|
mitchellolsthoorn/ASE-NIER-2020-Replication
|
3-data-analysis/data/raw/evosuite_vanilla_120/tests/s16/fastjson/evosuite-tests/com/alibaba/fastjson/JSONReader_ESTest.java
|
<filename>3-data-analysis/data/raw/evosuite_vanilla_120/tests/s16/fastjson/evosuite-tests/com/alibaba/fastjson/JSONReader_ESTest.java
/*
* This file was automatically generated by EvoSuite
* Thu May 21 21:23:20 GMT 2020
*/
package com.alibaba.fastjson;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.evosuite.runtime.EvoAssertions.*;
import com.alibaba.fastjson.JSONReader;
import com.alibaba.fastjson.TypeReference;
import com.alibaba.fastjson.parser.DefaultJSONParser;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.parser.JSONLexer;
import com.alibaba.fastjson.parser.JSONReaderScanner;
import com.alibaba.fastjson.parser.JSONScanner;
import com.alibaba.fastjson.parser.ParserConfig;
import com.alibaba.fastjson.util.GenericArrayTypeImpl;
import java.io.PipedReader;
import java.io.StringReader;
import java.lang.reflect.Type;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import org.evosuite.runtime.EvoRunner;
import org.evosuite.runtime.EvoRunnerParameters;
import org.junit.runner.RunWith;
@RunWith(EvoRunner.class) @EvoRunnerParameters(mockJVMNonDeterminism = true, useVFS = true, useVNET = true, resetStaticState = true, separateClassLoader = true, useJEE = true)
public class JSONReader_ESTest extends JSONReader_ESTest_scaffolding {
@Test(timeout = 4000)
public void test00() throws Throwable {
JSONScanner jSONScanner0 = new JSONScanner("com.alibaba.fastjson.JSONReader", 2415);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
TimeZone timeZone0 = jSONReader0.getTimzeZone();
jSONReader0.setTimzeZone(timeZone0);
assertEquals(1, jSONReader0.peek());
}
@Test(timeout = 4000)
public void test01() throws Throwable {
JSONScanner jSONScanner0 = new JSONScanner("com.alibaba.fastjson.util.IOUtils");
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
Feature feature0 = Feature.DisableSpecialKeyDetect;
jSONReader0.config(feature0, true);
assertEquals(33757, jSONScanner0.getFeatures());
}
@Test(timeout = 4000)
public void test02() throws Throwable {
char[] charArray0 = new char[7];
JSONReaderScanner jSONReaderScanner0 = new JSONReaderScanner(charArray0, 0, (-414));
JSONReader jSONReader0 = new JSONReader(jSONReaderScanner0);
jSONReader0.close();
assertEquals(20, jSONReader0.peek());
}
@Test(timeout = 4000)
public void test03() throws Throwable {
StringReader stringReader0 = new StringReader("3l)coK@JIORE2G");
JSONReader jSONReader0 = new JSONReader(stringReader0);
Long long0 = jSONReader0.readLong();
assertEquals(3L, (long)long0);
}
@Test(timeout = 4000)
public void test04() throws Throwable {
StringReader stringReader0 = new StringReader("5{?BG/@bu:c");
JSONReader jSONReader0 = new JSONReader(stringReader0);
Integer integer0 = jSONReader0.readInteger();
assertEquals(5, (int)integer0);
}
@Test(timeout = 4000)
public void test05() throws Throwable {
JSONScanner jSONScanner0 = new JSONScanner("", 0);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
Integer integer0 = jSONReader0.readInteger();
assertNull(integer0);
}
@Test(timeout = 4000)
public void test06() throws Throwable {
ParserConfig parserConfig0 = ParserConfig.getGlobalInstance();
DefaultJSONParser defaultJSONParser0 = new DefaultJSONParser("", parserConfig0, (-398));
JSONReader jSONReader0 = new JSONReader(defaultJSONParser0);
// Undeclared exception!
try {
jSONReader0.startObject();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect {, actual EOF
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test07() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.startArray();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test08() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
Locale locale0 = Locale.KOREAN;
// Undeclared exception!
try {
jSONReader0.setLocale(locale0);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test09() throws Throwable {
JSONReaderScanner jSONReaderScanner0 = new JSONReaderScanner("", 107);
JSONReader jSONReader0 = new JSONReader(jSONReaderScanner0);
// Undeclared exception!
try {
jSONReader0.readString();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// unterminated json string,
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test10() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.readString();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test11() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
ConcurrentHashMap<Object, Integer> concurrentHashMap0 = new ConcurrentHashMap<Object, Integer>();
// Undeclared exception!
try {
jSONReader0.readObject((Map) concurrentHashMap0);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test12() throws Throwable {
StringReader stringReader0 = new StringReader("5{?BG/@bu:c");
JSONReader jSONReader0 = new JSONReader(stringReader0);
// Undeclared exception!
try {
jSONReader0.readObject((Object) stringReader0);
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect {, actual int
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test13() throws Throwable {
StringReader stringReader0 = new StringReader("~JHep5_dO%~ose8r,");
JSONReader jSONReader0 = new JSONReader(stringReader0);
Class<Object> class0 = Object.class;
// Undeclared exception!
try {
jSONReader0.readObject(class0);
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect {, actual error, pos 0, fastjson-version 1.2.68
//
verifyException("com.alibaba.fastjson.parser.deserializer.JavaBeanDeserializer", e);
}
}
@Test(timeout = 4000)
public void test14() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
Class<Object> class0 = Object.class;
// Undeclared exception!
try {
jSONReader0.readObject(class0);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test15() throws Throwable {
StringReader stringReader0 = new StringReader("com.alibaba.fastjson.JSONReader");
JSONReader jSONReader0 = new JSONReader(stringReader0);
// Undeclared exception!
try {
jSONReader0.readObject();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error,
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test16() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.readObject();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test17() throws Throwable {
DefaultJSONParser defaultJSONParser0 = new DefaultJSONParser("illegal jsonp : ");
JSONReader jSONReader0 = new JSONReader(defaultJSONParser0);
// Undeclared exception!
try {
jSONReader0.readLong();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, pos 1, line 1, column 2illegal jsonp :
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test18() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.readLong();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test19() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.readInteger();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test20() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.getTimzeZone();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test21() throws Throwable {
DefaultJSONParser defaultJSONParser0 = new DefaultJSONParser("Wk*L413m6;V8l&E3ix5");
JSONReader jSONReader0 = new JSONReader(defaultJSONParser0);
// Undeclared exception!
try {
jSONReader0.endObject();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect }, actual error
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test22() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.endArray();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test23() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.close();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test24() throws Throwable {
PipedReader pipedReader0 = new PipedReader();
JSONReader jSONReader0 = null;
try {
jSONReader0 = new JSONReader(pipedReader0, (Feature[]) null);
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// Pipe not connected
//
verifyException("com.alibaba.fastjson.parser.JSONReaderScanner", e);
}
}
@Test(timeout = 4000)
public void test25() throws Throwable {
PipedReader pipedReader0 = new PipedReader();
JSONReader jSONReader0 = null;
try {
jSONReader0 = new JSONReader(pipedReader0);
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// Pipe not connected
//
verifyException("com.alibaba.fastjson.parser.JSONReaderScanner", e);
}
}
@Test(timeout = 4000)
public void test26() throws Throwable {
JSONReaderScanner jSONReaderScanner0 = new JSONReaderScanner("nQ])", 4152);
JSONReader jSONReader0 = null;
try {
jSONReader0 = new JSONReader(jSONReaderScanner0);
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// error parse new
//
verifyException("com.alibaba.fastjson.parser.JSONLexerBase", e);
}
}
@Test(timeout = 4000)
public void test27() throws Throwable {
JSONReader jSONReader0 = null;
try {
jSONReader0 = new JSONReader((JSONLexer) null);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test28() throws Throwable {
StringReader stringReader0 = new StringReader("");
JSONReader jSONReader0 = new JSONReader(stringReader0);
// Undeclared exception!
try {
jSONReader0.endArray();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect ], actual EOF
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test29() throws Throwable {
StringReader stringReader0 = new StringReader("~JHep5_dO~ose8r,");
JSONReader jSONReader0 = new JSONReader(stringReader0);
ConcurrentHashMap<Object, Integer> concurrentHashMap0 = new ConcurrentHashMap<Object, Integer>();
// Undeclared exception!
try {
jSONReader0.readObject((Map) concurrentHashMap0);
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect {, actual error,
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test30() throws Throwable {
char[] charArray0 = new char[5];
JSONScanner jSONScanner0 = new JSONScanner(charArray0, 0);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
Object object0 = jSONReader0.readObject();
assertNull(object0);
}
@Test(timeout = 4000)
public void test31() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.readObject((Object) null);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test32() throws Throwable {
char[] charArray0 = new char[5];
JSONScanner jSONScanner0 = new JSONScanner(charArray0, 0);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
Class<String> class0 = String.class;
String string0 = jSONReader0.readObject(class0);
assertNull(string0);
}
@Test(timeout = 4000)
public void test33() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
ParserConfig parserConfig0 = new ParserConfig();
Class<String> class0 = String.class;
Class<?> class1 = parserConfig0.checkAutoType(class0);
GenericArrayTypeImpl genericArrayTypeImpl0 = new GenericArrayTypeImpl(class1);
// Undeclared exception!
try {
jSONReader0.readObject((Type) genericArrayTypeImpl0);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test34() throws Throwable {
char[] charArray0 = new char[5];
JSONScanner jSONScanner0 = new JSONScanner(charArray0, 0);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
String string0 = jSONReader0.readString();
assertNull(string0);
}
@Test(timeout = 4000)
public void test35() throws Throwable {
char[] charArray0 = new char[5];
JSONScanner jSONScanner0 = new JSONScanner(charArray0, 0);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
Long long0 = jSONReader0.readLong();
assertNull(long0);
}
@Test(timeout = 4000)
public void test36() throws Throwable {
StringReader stringReader0 = new StringReader("");
JSONReader jSONReader0 = new JSONReader(stringReader0);
// Undeclared exception!
try {
jSONReader0.readInteger();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// unterminated json string,
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test37() throws Throwable {
char[] charArray0 = new char[5];
JSONScanner jSONScanner0 = new JSONScanner(charArray0, 0);
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
// Undeclared exception!
try {
jSONReader0.hasNext();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// context is null
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test38() throws Throwable {
StringReader stringReader0 = new StringReader("XJHep5dO~ose8r");
JSONReader jSONReader0 = new JSONReader(stringReader0);
// Undeclared exception!
try {
jSONReader0.startArray();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// syntax error, expect [, actual error
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
@Test(timeout = 4000)
public void test39() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.startObject();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test40() throws Throwable {
ClassLoader classLoader0 = ClassLoader.getSystemClassLoader();
ParserConfig parserConfig0 = new ParserConfig(classLoader0);
DefaultJSONParser defaultJSONParser0 = new DefaultJSONParser("", parserConfig0, 3288);
JSONReader jSONReader0 = new JSONReader(defaultJSONParser0);
Locale locale0 = jSONReader0.getLocal();
assertEquals("", locale0.getISO3Country());
}
@Test(timeout = 4000)
public void test41() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.readObject((TypeReference<Object>) null);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test42() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
JSONReaderScanner jSONReaderScanner0 = new JSONReaderScanner("y|R:/M5d7/?w");
TimeZone timeZone0 = jSONReaderScanner0.getTimeZone();
// Undeclared exception!
try {
jSONReader0.setTimzeZone(timeZone0);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test43() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
Feature feature0 = Feature.DisableCircularReferenceDetect;
// Undeclared exception!
try {
jSONReader0.config(feature0, true);
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test44() throws Throwable {
StringReader stringReader0 = new StringReader("");
JSONReader jSONReader0 = new JSONReader(stringReader0);
int int0 = jSONReader0.peek();
assertEquals(20, int0);
}
@Test(timeout = 4000)
public void test45() throws Throwable {
JSONReader jSONReader0 = new JSONReader((DefaultJSONParser) null);
// Undeclared exception!
try {
jSONReader0.endObject();
fail("Expecting exception: NullPointerException");
} catch(NullPointerException e) {
//
// no message in exception (getMessage() returned null)
//
verifyException("com.alibaba.fastjson.JSONReader", e);
}
}
@Test(timeout = 4000)
public void test46() throws Throwable {
StringReader stringReader0 = new StringReader("classame");
JSONReader jSONReader0 = new JSONReader(stringReader0);
Locale locale0 = Locale.CANADA;
jSONReader0.setLocale(locale0);
assertEquals(1, jSONReader0.peek());
}
@Test(timeout = 4000)
public void test47() throws Throwable {
JSONScanner jSONScanner0 = new JSONScanner("className");
JSONReader jSONReader0 = new JSONReader(jSONScanner0);
// Undeclared exception!
try {
jSONReader0.close();
fail("Expecting exception: RuntimeException");
} catch(RuntimeException e) {
//
// not close json text, token : error
//
verifyException("com.alibaba.fastjson.parser.DefaultJSONParser", e);
}
}
}
|
yzx66-net/dubbo_haoke
|
haoke-im/src/main/java/haoke/im/pojo/UserData.java
|
package haoke.im.pojo;
import java.util.HashMap;
import java.util.Map;
public class UserData {

    // In-memory stand-in for a user table, keyed by user id.
    public static final Map<Long, User> USER_MAP = new HashMap<>();

    static {
        // Seed five demo users with consecutive ids starting at 1001.
        String[] names = {"zhangsan", "lisi", "wangwu", "zhaoliu", "sunqi"};
        long id = 1001L;
        for (String name : names) {
            USER_MAP.put(id, User.builder().id(id).username(name).build());
            id++;
        }
    }
}
|
prathimacode-hub/PythonScripts
|
Network Operation/Socket_Programming.py
|
<filename>Network Operation/Socket_Programming.py
import socket
import sys
import time
import tkinter as tk
from tkinter import *
from tkinter.scrolledtext import ScrolledText
# Main application window: fixed-size 500x500 chat UI.
root=Tk()
root.geometry("500x500")
root.title("Ash-Chat")
def Host():
    """Run the hosting side of the chat: bind on port 5005, accept one
    client, then alternate sending typed lines and printing replies."""
    s = socket.socket()
    host = socket.gethostname()
    print("Server will start on host:", host)
    port = 5005
    s.bind((host, port))
    print("Server is bind successfully")
    s.listen(5)
    conn, addr = s.accept()
    print(addr, "has connected")
    while True:
        # Fix: the original wrapped the prompt in a redundant str() call.
        msg = input("You:>>")
        conn.send(msg.encode())
        incoming_msg = conn.recv(1024)
        if not incoming_msg:
            # recv() returning b"" means the peer closed the connection;
            # the original spun forever printing empty lines here.
            print("Client disconnected")
            break
        print("Client:>>", incoming_msg.decode())
    conn.close()
    s.close()
def server():
    """Run the client side of the chat: connect to the entered host on
    port 5005, then alternate printing incoming lines and sending typed
    replies."""
    s = socket.socket()
    host = input("Please enter host name:")
    port = 5005
    try:
        s.connect((host, port))
        print("Connected to server")
    except OSError:
        # Fix: the original used a bare except and then fell through into
        # the recv() loop on an unconnected socket, crashing immediately.
        print("Connection to server is failed:(")
        return
    while True:
        incoming_msg = s.recv(1024)
        if not incoming_msg:
            # Peer closed the connection; stop instead of spinning.
            print("Server disconnected")
            break
        print("Server:>>", incoming_msg.decode())
        msg = input("You:>>")
        s.send(msg.encode())
    s.close()
# Build the chat window widgets and enter the Tk event loop.
root.config(bg="pink")
l = Label(root, text="Chat with Me..", font=('verdana', 15, 'bold'), bg="black", fg="white")
l.place(x=180, y=10)
# ScrolledText lives in tkinter.scrolledtext (see imports); the wildcard
# tkinter import alone does not provide it, so the original raised NameError.
text = ScrolledText(root, width=40, height=10)
text['font'] = ('verdana', 10, 'bold')
text.place(x=50, y=30)
# NOTE(review): Host()/server() are defined but never wired to any widget or
# thread here — presumably buttons were intended; confirm with the author.
root.mainloop()
|
afnanenayet/go-sdk
|
db/migration/guard_predicates.go
|
package migration
import (
"database/sql"
"fmt"
"github.com/blend/go-sdk/db"
"strings"
)
// TableExists returns a guard that ensures a table exists before the step runs.
func TableExists(tableName string) GuardFunc {
	desc := fmt.Sprintf("Check table exists: %s", tableName)
	return guardPredicate(desc, PredicateTableExists, tableName)
}
// TableNotExists returns a guard that ensures a table does not exist.
func TableNotExists(tableName string) GuardFunc {
	desc := fmt.Sprintf("Check table does not exist: %s", tableName)
	return guardNotPredicate(desc, PredicateTableExists, tableName)
}
// TableExistsInSchema returns a guard that ensures a table exists in the given schema.
func TableExistsInSchema(schemaName, tableName string) GuardFunc {
	desc := fmt.Sprintf("Check table exists: %s.%s", schemaName, tableName)
	return guardPredicate2(desc, PredicateTableExistsInSchema, schemaName, tableName)
}
// TableNotExistsInSchema returns a guard that ensures a table does not exist
// in the given schema. (The original comment wrongly said "exists".)
func TableNotExistsInSchema(schemaName, tableName string) GuardFunc {
	desc := fmt.Sprintf("Check table does not exist: %s.%s", schemaName, tableName)
	return guardNotPredicate2(desc, PredicateTableExistsInSchema, schemaName, tableName)
}
// ColumnExists returns a guard that ensures a column exists on the given table.
func ColumnExists(tableName, columnName string) GuardFunc {
	desc := fmt.Sprintf("Check column exists: %s.%s", tableName, columnName)
	return guardPredicate2(desc, PredicateColumnExists, tableName, columnName)
}
// ColumnNotExists returns a guard that ensures a column does not exist on the given table.
func ColumnNotExists(tableName, columnName string) GuardFunc {
	desc := fmt.Sprintf("Check column does not exist: %s.%s", tableName, columnName)
	return guardNotPredicate2(desc, PredicateColumnExists, tableName, columnName)
}
// ColumnExistsInSchema returns a guard that ensures a column exists, schema-qualified.
func ColumnExistsInSchema(schemaName, tableName, columnName string) GuardFunc {
	desc := fmt.Sprintf("Check column exists: %s.%s.%s", schemaName, tableName, columnName)
	return guardPredicate3(desc, PredicateColumnExistsInSchema, schemaName, tableName, columnName)
}
// ColumnNotExistsInSchema returns a guard that ensures a column does not exist, schema-qualified.
func ColumnNotExistsInSchema(schemaName, tableName, columnName string) GuardFunc {
	desc := fmt.Sprintf("Check column does not exist: %s.%s.%s", schemaName, tableName, columnName)
	return guardNotPredicate3(desc, PredicateColumnExistsInSchema, schemaName, tableName, columnName)
}
// ConstraintExists returns a guard that ensures a constraint exists on the given table.
func ConstraintExists(tableName, constraintName string) GuardFunc {
	desc := fmt.Sprintf("Check constraint %s exists on table %s", constraintName, tableName)
	return guardPredicate2(desc, PredicateConstraintExists, tableName, constraintName)
}
// ConstraintNotExists returns a guard that ensures a constraint does not exist on the given table.
func ConstraintNotExists(tableName, constraintName string) GuardFunc {
	desc := fmt.Sprintf("Check constraint %s does not exist on table %s", constraintName, tableName)
	return guardNotPredicate2(desc, PredicateConstraintExists, tableName, constraintName)
}
// ConstraintExistsInSchema returns a guard that ensures a constraint exists, schema-qualified.
func ConstraintExistsInSchema(schemaName, tableName, constraintName string) GuardFunc {
	desc := fmt.Sprintf("Check constraint %s exists on table %s.%s", constraintName, schemaName, tableName)
	return guardPredicate3(desc, PredicateConstraintExistsInSchema, schemaName, tableName, constraintName)
}
// ConstraintNotExistsInSchema returns a guard that ensures a constraint does not exist, schema-qualified.
func ConstraintNotExistsInSchema(schemaName, tableName, constraintName string) GuardFunc {
	desc := fmt.Sprintf("Check constraint %s does not exist on table %s.%s", constraintName, schemaName, tableName)
	return guardNotPredicate3(desc, PredicateConstraintExistsInSchema, schemaName, tableName, constraintName)
}
// IndexExists returns a guard that ensures an index exists on the given table.
func IndexExists(tableName, indexName string) GuardFunc {
	desc := fmt.Sprintf("Check index %s exists on table %s", indexName, tableName)
	return guardPredicate2(desc, PredicateIndexExists, tableName, indexName)
}
// IndexNotExists returns a guard that ensures an index does not exist on the given table.
func IndexNotExists(tableName, indexName string) GuardFunc {
	desc := fmt.Sprintf("Check index %s does not exist on table %s", indexName, tableName)
	return guardNotPredicate2(desc, PredicateIndexExists, tableName, indexName)
}
// IndexExistsInSchema returns a guard that ensures an index exists, schema-qualified.
func IndexExistsInSchema(schemaName, tableName, indexName string) GuardFunc {
	desc := fmt.Sprintf("Check index %s exists on table %s.%s", indexName, schemaName, tableName)
	return guardPredicate3(desc, PredicateIndexExistsInSchema, schemaName, tableName, indexName)
}
// IndexNotExistsInSchema returns a guard that ensures an index does not exist, schema-qualified.
func IndexNotExistsInSchema(schemaName, tableName, indexName string) GuardFunc {
	desc := fmt.Sprintf("Check index %s does not exist on table %s.%s", indexName, schemaName, tableName)
	return guardNotPredicate3(desc, PredicateIndexExistsInSchema, schemaName, tableName, indexName)
}
// RoleExists returns a guard that ensures a role (user) exists.
func RoleExists(roleName string) GuardFunc {
	desc := fmt.Sprintf("Check Role Exists: %s", roleName)
	return guardPredicate(desc, PredicateRoleExists, roleName)
}
// RoleNotExists returns a guard that ensures a role (user) does not exist.
func RoleNotExists(roleName string) GuardFunc {
	desc := fmt.Sprintf("Check Role Not Exists: %s", roleName)
	return guardNotPredicate(desc, PredicateRoleExists, roleName)
}
// SchemaExists is a guard function for asserting that a schema exists.
func SchemaExists(schemaName string) GuardFunc {
	check := func(c *db.Connection, tx *sql.Tx) (bool, error) {
		return predicateSchemaExists(c, tx, schemaName)
	}
	// NOTE(review): unlike the other guards, the description names the
	// guarded action ("drop schema") rather than the check — confirm intended.
	return Guard(fmt.Sprintf("drop schema `%s`", schemaName), check)
}
// SchemaNotExists is a guard function for asserting that a schema does not exist.
func SchemaNotExists(schemaName string) GuardFunc {
	check := func(c *db.Connection, tx *sql.Tx) (bool, error) {
		return Not(predicateSchemaExists(c, tx, schemaName))
	}
	return Guard(fmt.Sprintf("create schema `%s`", schemaName), check)
}
// predicateSchemaExists reports whether a schema with the given name exists;
// the name is lower-cased to match information_schema's storage convention.
func predicateSchemaExists(c *db.Connection, tx *sql.Tx, schemaName string) (bool, error) {
	name := strings.ToLower(schemaName)
	const query = `SELECT 1 FROM information_schema.schemata WHERE schema_name = $1`
	return c.Invoke(db.OptTx(tx)).Query(query, name).Any()
}
|
fancylou/o2oa
|
x_okr_assemble_control/src/main/java/com/x/okr/assemble/control/jaxrs/okrworkchat/WorkChatFilterException.java
|
package com.x.okr.assemble.control.jaxrs.okrworkchat;
import com.x.base.core.exception.PromptException;
// Prompt-style exception raised when a conditional query over the work-chat
// record list fails; wraps the underlying cause with a fixed message
// (Chinese: "an exception occurred while the system queried the work-chat
// record list by condition").
class WorkChatFilterException extends PromptException {
	private static final long serialVersionUID = 1859164370743532895L;
	WorkChatFilterException( Throwable e ) {
		super("系统条件查询工作交流信息记录列表时发生异常。", e );
	}
}
|
stas-vilchik/bdd-ml
|
data/361.js
|
<gh_stars>0
{
forEach(b, function assignValue(val, key) {
if (thisArg && typeof val === "function") {
a[key] = bind(val, thisArg);
} else {
a[key] = val;
}
});
return a;
}
|
gnikolaidis/qookery
|
qookery/source/class/qookery/internal/components/HtmlComponent.js
|
<reponame>gnikolaidis/qookery<filename>qookery/source/class/qookery/internal/components/HtmlComponent.js
/*
Qookery - Declarative UI Building for Qooxdoo
Copyright (c) Ergobyte Informatics S.A., www.ergobyte.gr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Qookery component that embeds raw HTML markup via qx.ui.embed.Html.
qx.Class.define("qookery.internal.components.HtmlComponent", {

	extend: qookery.internal.components.Component,

	construct: function(parentComponent) {
		this.base(arguments, parentComponent);
	},

	members: {

		// Creates the embedded HTML widget and applies overflow/CSS attributes.
		_createWidgets: function() {
			var widget = new qx.ui.embed.Html(this.getAttribute("html", null));
			this._applyAttribute("overflow-x", widget, "overflowX");
			this._applyAttribute("overflow-y", widget, "overflowY");
			this._applyAttribute("css-class", widget, "cssClass");
			this._applyWidgetAttributes(widget);
			return [ widget ];
		},

		// Inline XHTML child elements are serialized and used as the markup.
		parseXmlElement: function(elementName, xmlElement) {
			if(elementName.indexOf("{http://www.w3.org/1999/xhtml}") !== 0) return false;
			var markup = qx.xml.Element.serialize(xmlElement);
			this.setHtml(markup);
			return true;
		},

		getHtml: function() {
			return this.getMainWidget().getHtml();
		},

		setHtml: function(html) {
			this.getMainWidget().setHtml(html);
		},

		// Returns the low-level DOM element backing the widget, if rendered.
		getDomElement: function() {
			return this.getMainWidget().getContentElement().getDomElement();
		},

		updateAppearance: function() {
			this.getMainWidget().updateAppearance();
		}
	}
});
|
twogoods/AsyncDao
|
asyncdao-core/src/main/java/com/tg/async/annotation/Select.java
|
package com.tg.async.annotation;
import com.tg.async.constant.SqlMode;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Created by twogoods on 2018/4/12.
*/
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Select {
    // Column list for the generated SELECT; empty by default — presumably
    // meaning "all mapped columns", confirm against the SQL generator.
    String columns() default "";
    // SQL generation mode; defaults to SELECTIVE (see SqlMode for semantics).
    SqlMode sqlMode() default SqlMode.SELECTIVE;
}
|
UPbook-innovations/nlu
|
nlu/components/matchers/regex_matcher/regex_matcher.py
|
<gh_stars>1-10
import sparknlp
class RegexMatcher:
    """Factory for Spark NLP RegexMatcherModel annotators used by nlu."""

    @staticmethod
    def get_default_model():
        """Build a RegexMatcherModel reading document/token, writing regex_entity."""
        # BUG FIX: the original ended the setOutputCol line with a trailing
        # backslash, splicing the following "@staticmethod" line into this
        # expression as a matrix-multiplication and leaving the next method
        # undecorated.
        return sparknlp.annotator.RegexMatcherModel() \
            .setInputCols("document", "token") \
            .setOutputCol("regex_entity")

    @staticmethod
    def get_pretrained_model(name, language):
        """Fall back to the default model; name/language are accepted only
        for interface parity with other component factories."""
        return RegexMatcher.get_default_model()
        # sparknlp.annotator.TextMatcherModel.pretrained(name,language) \
        #     .setInputCols("document") \
        #     .setOutputCol("entity") \
|
bobbrow/cpp-docs
|
docs/parallel/concrt/codesnippet/CPP/how-to-use-parallel-invoke-to-write-a-parallel-sort-routine_8.cpp
|
// parallel-bitonic-sort.cpp
// compile with: /EHsc
#include <windows.h>
#include <algorithm>
#include <iostream>
#include <random>
#include <ppl.h>
using namespace concurrency;
using namespace std;
// Calls the provided work function and returns the number of milliseconds
// that it takes to call that function (Windows GetTickCount resolution).
template <class Function>
__int64 time_call(Function&& f)
{
   const __int64 start = GetTickCount();
   f();
   const __int64 finish = GetTickCount();
   return finish - start;
}
const bool INCREASING = true;   // sort/merge direction: ascending
const bool DECREASING = false;  // sort/merge direction: descending
// Comparator step of the bitonic network: swaps items[i] and items[j]
// when they are out of order with respect to direction `dir`
// (dir == INCREASING sorts ascending).
template <class T>
void compare(T* items, int i, int j, bool dir)
{
   const bool outOfOrder = (items[i] > items[j]) == dir;
   if (outOfOrder)
   {
      swap(items[i], items[j]);
   }
}
// Sorts a bitonic sequence of length n starting at `lo` into the
// specified order by recursive halving.
template <class T>
void bitonic_merge(T* items, int lo, int n, bool dir)
{
   if (n <= 1)
      return;
   const int half = n / 2;
   for (int i = lo; i < lo + half; ++i)
      compare(items, i, i + half, dir);
   bitonic_merge(items, lo, half, dir);
   bitonic_merge(items, lo + half, half, dir);
}
// Sorts n items starting at `lo` into the specified order.
template <class T>
void bitonic_sort(T* items, int lo, int n, bool dir)
{
   if (n <= 1)
      return;
   // Sort the two halves in opposite directions so their concatenation
   // forms a bitonic sequence, then merge it into the requested order.
   const int half = n / 2;
   bitonic_sort(items, lo, half, INCREASING);
   bitonic_sort(items, lo + half, half, DECREASING);
   bitonic_merge(items, lo, n, dir);
}
// Sorts the given sequence in increasing order.
// Note: the bitonic network requires `size` to be a power of two.
template <class T>
void bitonic_sort(T* items, int size)
{
    bitonic_sort(items, 0, size, INCREASING);
}
// Sorts a bitonic sequence in the specified order.
template <class T>
void parallel_bitonic_merge(T* items, int lo, int n, bool dir)
{
   // Merge the sequences concurrently if there is sufficient work to do.
   // The 500-element threshold keeps task-creation overhead from
   // dominating on small sub-ranges.
   if (n > 500)
   {
      int m = n / 2;
      for (int i = lo; i < lo + m; ++i)
      {
         compare(items, i, i + m, dir);
      }
      // Use the parallel_invoke algorithm to merge the sequences in parallel.
      // The two recursive calls touch disjoint index ranges, so no
      // synchronization is needed.
      parallel_invoke(
         [&items,lo,m,dir] { parallel_bitonic_merge(items, lo, m, dir); },
         [&items,lo,m,dir] { parallel_bitonic_merge(items, lo + m, m, dir); }
      );
   }
   // Otherwise, perform the work serially.
   else if (n > 1)
   {
      bitonic_merge(items, lo, n, dir);
   }
}
// Sorts the given sequence in the specified order.
template <class T>
void parallel_bitonic_sort(T* items, int lo, int n, bool dir)
{
   if (n > 1)
   {
      // Divide the array into two partitions and then sort
      // the partitions in different directions, making the whole range
      // a bitonic sequence for the merge step below.
      int m = n / 2;
      // Sort the partitions in parallel.
      parallel_invoke(
         [&items,lo,m] { parallel_bitonic_sort(items, lo, m, INCREASING); },
         [&items,lo,m] { parallel_bitonic_sort(items, lo + m, m, DECREASING); }
      );
      // Merge the results.
      parallel_bitonic_merge(items, lo, n, dir);
   }
}
// Sorts the given sequence in increasing order.
// Note: `size` must be a power of two, as for the serial version.
template <class T>
void parallel_bitonic_sort(T* items, int size)
{
   parallel_bitonic_sort(items, 0, size, INCREASING);
}
// Benchmark driver: sorts identical random data with the serial and
// parallel bitonic sorts and prints the wall-clock time of each.
int wmain()
{
   // For this example, the size must be a power of two.
   const int size = 0x200000;
   // Create two large arrays and fill them with random values.
   // Both arrays get identical contents (fixed seed 42) so both runs
   // sort the same data.
   int* a1 = new int[size];
   int* a2 = new int[size];
   mt19937 gen(42);
   for(int i = 0; i < size; ++i)
   {
      a1[i] = a2[i] = gen();
   }
   __int64 elapsed;
   // Perform the serial version of the sort.
   elapsed = time_call([&] { bitonic_sort(a1, size); });
   wcout << L"serial time: " << elapsed << endl;
   // Now perform the parallel version of the sort.
   elapsed = time_call([&] { parallel_bitonic_sort(a2, size); });
   wcout << L"parallel time: " << elapsed << endl;
   delete[] a1;
   delete[] a2;
}
|
zzhmark/vaa3d_tools
|
released_plugins/v3d_plugins/neurontracing_neutube/src_neutube/neurolabi/gui/zswcdisttrunkanalyzer.cpp
|
#include "zswcdisttrunkanalyzer.h"
#include <vector>
#include "zswctree.h"
using namespace std;
// Default weighting of the trunk-length score: geodesic (along-tree)
// distance contributes 20% and Euclidean (straight-line) distance 80%.
ZSwcDistTrunkAnalyzer::ZSwcDistTrunkAnalyzer() : m_geodesicWeight(0.2),
  m_euclideanWeight(0.8)
{
}
// Finds the main trunk of the tree: the path between the pair of candidate
// endpoints (all leaves plus the regular root) whose weighted length — a
// blend of geodesic and Euclidean distance — is maximal. O(L^2) in the
// number of leaves.
ZSwcPath ZSwcDistTrunkAnalyzer::extractMainTrunk(ZSwcTree *tree)
{
  tree->updateIterator(SWC_TREE_ITERATOR_LEAF);
  vector<Swc_Tree_Node*> leafArray;
  // The regular root is a candidate endpoint as well.
  leafArray.push_back(tree->firstRegularRoot());
  for (Swc_Tree_Node *tn = tree->begin(); tn != tree->end();
       tn = tree->next()) {
    leafArray.push_back(tn);
  }
  Swc_Tree_Node *leaf1 = NULL;
  Swc_Tree_Node *leaf2 = NULL;
  double maxLength = -1.0;
  //Calculate the distance of all the node to the regular root
  tree->updateIterator(SWC_TREE_ITERATOR_DEPTH_FIRST, true);
  double *distanceArray = NULL;
  // Accumulated root distances are only needed when the geodesic term
  // actually contributes to the score.
  if (m_geodesicWeight != 0.0) {
    distanceArray = Swc_Tree_Accm_Length(tree->data(), NULL);
  }
  //Calculate distances for each pair of leaves
  for (size_t i = 0; i < leafArray.size(); i++) {
#ifdef _DEBUG_2
    cout << i << " / " << leafArray.size() << endl;
#endif
    for (size_t j = 0; j < leafArray.size(); j++) {
      if (leafArray[i] != leafArray[j]) {
        double geodesicDistance = 0.0;
        if (distanceArray != NULL) {
          //Find the common ancestor of the leaves
          Swc_Tree_Node *ancestor = SwcTreeNode::commonAncestor(leafArray[i],
                                                                leafArray[j]);
          // Geodesic distance via the LCA identity:
          // d(u,v) = d(root,u) + d(root,v) - 2*d(root,lca(u,v)).
          geodesicDistance = distanceArray[SwcTreeNode::index(leafArray[i])] +
              distanceArray[SwcTreeNode::index(leafArray[j])] -
              2.0 * distanceArray[SwcTreeNode::index(ancestor)];
        }
        double length = geodesicDistance * m_geodesicWeight +
            SwcTreeNode::distance(leafArray[i], leafArray[j]) * m_euclideanWeight;
        if (length > maxLength) {
          maxLength = length;
          leaf1 = leafArray[i];
          leaf2 = leafArray[j];
        }
      }
    }
  }
  // distanceArray was malloc'ed by the C helper, hence free(), not delete.
  if (distanceArray != NULL) {
    free(distanceArray);
  }
  /*
  if (SwcTreeNode::z(leaf1) > SwcTreeNode::z(leaf2)) {
    return ZSwcPath(leaf2, leaf1);
  }
  */
  return ZSwcPath(leaf1, leaf2);
}
// Finds the best trunk anchored at <start>: the leaf whose path back to
// <start> maximizes the weighted geodesic/Euclidean length. Nodes in
// m_blocker act as walls — candidate paths crossing them are discarded.
ZSwcPath ZSwcDistTrunkAnalyzer::extractTrunk(ZSwcTree *tree,
                                             Swc_Tree_Node *start)
{
  tree->updateIterator(SWC_TREE_ITERATOR_LEAF, start, m_blocker);
  vector<Swc_Tree_Node*> leafArray;
  for (Swc_Tree_Node *tn = tree->begin(); tn != tree->end();
       tn = tree->next()) {
    leafArray.push_back(tn);
  }
  Swc_Tree_Node *leaf1 = start;
  Swc_Tree_Node *leaf2 = NULL;
  double maxLength = -1.0;
  for (size_t i = 0; i < leafArray.size(); i++) {
    double length = 0.0;
    Swc_Tree_Node *tn = leafArray[i];
    // Walk up from the candidate leaf toward <start>, accumulating segment
    // lengths; tn is set to NULL to mark the path invalid if it crosses a
    // blocked node.
    while (tn != start) {
      if (m_blocker.count(tn) > 0) {
        tn = NULL;
        break;
      } else {
        length += Swc_Tree_Node_Length(tn);
      }
      tn = tn->parent;
    }
    length = length * m_geodesicWeight +
        SwcTreeNode::distance(start, leafArray[i]) * m_euclideanWeight;
    if (tn != NULL) {
      if (length > maxLength) {
        leaf2 = leafArray[i];
        maxLength = length;
      }
    }
  }
  return ZSwcPath(leaf1, leaf2);
}
/*
void ZSwcDistTrunkAnalyzer::labelTraffic(ZSwcTree *tree)
{
ZSwcTrunkAnalyzer::labelTraffic(tree);
}
*/
|
zaceno/hyperapp-form
|
src/form.js
|
<gh_stars>1-10
import { h } from 'hyperapp'
import { Submit, SetValues, SetErrors } from './actions.js'
import provide from './provide.js'
// Curried helper: given the form-state accessors, produces a wrapper that
// turns a hyperapp action into a dispatchable tuple whose payload is the
// original payload (via getData) augmented with getFormState/setFormState.
const _formActions = (getFormState, setFormState) => (
    action,
    getData
) => (_, data) => [action, { ...getData(data), getFormState, setFormState }]
// Builds the three form actions, each pre-bound to the state accessors
// and (for Submit) to the submit handler and validator list.
const formActions = (getter, setter, onsubmit, validators) => {
    const wrap = _formActions(getter, setter)
    const actions = {}
    actions.Submit = wrap(Submit, (event) => ({ event, onsubmit, validators }))
    actions.SetValues = wrap(SetValues, (x) => ({ values: x }))
    actions.SetErrors = wrap(SetErrors, (x) => ({ errors: x }))
    return actions
}
// Form component: renders a <form> whose submit is intercepted by the bound
// Submit action, and provides form state, the actions, and a `register`
// hook (used by child inputs to contribute validators) to all descendants.
export default ({ state, getFormState, setFormState, onsubmit }, content) => {
    const validators = []
    // Child inputs call register(fn); Submit later runs every validator.
    const register = (f) => validators.push(f)
    const {Submit, SetValues, SetErrors} = formActions(getFormState, setFormState, onsubmit, validators)
    return h(
        'form',
        {
            onsubmit: Submit,
        },
        provide(
            {
                ...state,
                register,
                SetValues,
                SetErrors,
            },
            content
        )
    )
}
|
DeanHwd/rally
|
tests/unit/plugins/openstack/scenarios/ceilometer/test_resources.py
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally import exceptions
from rally.plugins.openstack.scenarios.ceilometer import resources
from tests.unit import test
BASE = "rally.plugins.openstack.scenarios.ceilometer"
class CeilometerResourcesTestCase(test.ScenarioTestCase):
    """Unit tests for the ceilometer resource-listing rally scenarios."""

    @mock.patch("%s.resources.ListMatchedResources.run" % BASE)
    def test_all_resource_list_queries(
            self, mock_list_matched_resources_run):
        # Every supported query parameter must be forwarded to
        # ListMatchedResources.run as a separate call.
        metadata_query = {"a": "test"}
        start_time = "fake start time"
        end_time = "fake end time"
        limit = 100
        scenario = resources.ListResources(self.context)
        scenario.run(metadata_query, start_time, end_time, limit)
        mock_list_matched_resources_run.assert_any_call(limit=100)
        mock_list_matched_resources_run.assert_any_call(start_time=start_time,
                                                        end_time=end_time)
        mock_list_matched_resources_run.assert_any_call(end_time=end_time)
        mock_list_matched_resources_run.assert_any_call(start_time=start_time)
        mock_list_matched_resources_run.assert_any_call(
            metadata_query=metadata_query)
        mock_list_matched_resources_run.assert_any_call(
            filter_by_user_id=True)
        mock_list_matched_resources_run.assert_any_call(
            filter_by_project_id=True)
        mock_list_matched_resources_run.assert_any_call(
            filter_by_resource_id=True)

    def test_list_matched_resources(self):
        # run() must translate its filter flags/arguments into the
        # ceilometer query-filter list passed to _list_resources.
        scenario = resources.ListMatchedResources(self.context)
        scenario._list_resources = mock.MagicMock()
        context = {"user": {"tenant_id": "fake", "id": "fake_id"},
                   "tenant": {"id": "fake_id",
                              "resources": ["fake_resource"]}}
        scenario.context = context
        metadata_query = {"a": "test"}
        start_time = "2015-09-09T00:00:00"
        end_time = "2015-09-10T00:00:00"
        limit = 100
        scenario.run(True, True, True, metadata_query,
                     start_time, end_time, limit)
        scenario._list_resources.assert_called_once_with(
            [{"field": "user_id", "value": "fake_id", "op": "eq"},
             {"field": "project_id", "value": "fake_id", "op": "eq"},
             {"field": "resource_id", "value": "fake_resource", "op": "eq"},
             {"field": "metadata.a", "value": "test", "op": "eq"},
             {"field": "timestamp", "value": "2015-09-09T00:00:00",
              "op": ">="},
             {"field": "timestamp", "value": "2015-09-10T00:00:00",
              "op": "<="}
             ],
            100)

    def test_get_tenant_resources(self):
        # Each resource id present in the tenant context must be fetched.
        scenario = resources.GetTenantResources(self.context)
        resource_list = ["id1", "id2", "id3", "id4"]
        context = {"user": {"tenant_id": "fake"},
                   "tenant": {"id": "fake", "resources": resource_list}}
        scenario.context = context
        scenario._get_resource = mock.MagicMock()
        scenario.run()
        for resource_id in resource_list:
            scenario._get_resource.assert_any_call(resource_id)

    @mock.patch("%s.resources.ListMatchedResources.run" % BASE)
    def test_resource_list_queries_without_limit_and_metadata(
            self, mock_list_matched_resources_run):
        # With no limit/metadata/time arguments, only the three filter-by
        # variants should be issued, in this exact order.
        scenario = resources.ListResources()
        scenario.run()
        expected_call_args_list = [
            mock.call(filter_by_project_id=True),
            mock.call(filter_by_user_id=True),
            mock.call(filter_by_resource_id=True)
        ]
        self.assertSequenceEqual(
            expected_call_args_list,
            mock_list_matched_resources_run.call_args_list)

    def test_get_tenant_resources_with_exception(self):
        # An empty resource list in the tenant context must surface as a
        # RallyAssertionError rather than silently doing nothing.
        scenario = resources.GetTenantResources(self.context)
        resource_list = []
        context = {"user": {"tenant_id": "fake"},
                   "tenant": {"id": "fake", "resources": resource_list}}
        scenario.context = context
        self.assertRaises(exceptions.RallyAssertionError, scenario.run)
|
uqlibrary/fez-frontend
|
cypress/integration/admin-edit/audio.spec.js
|
<reponame>uqlibrary/fez-frontend<gh_stars>1-10
import { default as recordList } from '../../../src/mock/data/records/publicationTypeListAudio';
// Admin-edit tests for the Audio publication type: loads the first mock
// audio record and verifies tab layout plus per-section field rendering.
context('Audio admin edit', () => {
    // The first record of the mock publication-type list drives all checks.
    const record = recordList.data[0];
    beforeEach(() => {
        cy.loadRecordForAdminEdit(record.rek_pid);
    });
    afterEach(() => {
        cy.adminEditCleanup();
    });
    it('should load expected tabs', () => {
        cy.adminEditCountCards(8);
        cy.adminEditVerifyAlerts(1, ['Publication date is required']);
        cy.adminEditTabbedView();
        cy.adminEditCheckDefaultTab('Bibliographic');
        cy.adminEditCheckTabErrorBadge('bibliographic');
    });
    it('should render the different sections as expected', () => {
        // ------------------------------------------- IDENTIFIERS TAB -----------------------------------------------
        cy.log('Identifiers tab');
        cy.get('[data-testid=identifiers-section-content]').within(() => {
            cy.get('h4').should('contain', 'Manage links');
            // Each record link row must show both the URL and its description.
            const links = [
                {
                    url: record.fez_record_search_key_link[0].rek_link,
                    description: record.fez_record_search_key_link_description[0].rek_link_description,
                },
            ];
            links.forEach((link, index) => {
                cy.get(`[data-testid=rek-link-list-row-${index}]`)
                    .find('p')
                    .should('have.text', `Link: ${link.url}`)
                    .siblings('span')
                    .should('have.text', `Description: ${link.description}`);
            });
        });
        // ------------------------------------------ BIBLIOGRAPHIC TAB ----------------------------------------------
        cy.log('Bibliographic tab');
        cy.get('[data-testid=bibliographic-section-content]')
            .as('bibliographicCard')
            .within(() => {
                cy.get('h4').should('contain', 'Bibliographic');
                cy.checkPartialDateFromRecordValue(
                    'rek-date-recorded',
                    record.fez_record_search_key_date_recorded.rek_date_recorded,
                );
                cy.get('[data-testid=rek-acknowledgements-input]').should(
                    'have.text',
                    record.fez_record_search_key_acknowledgements.rek_acknowledgements,
                );
                cy.get('[data-testid=rek-length-input]').should(
                    'have.value',
                    record.fez_record_search_key_length.rek_length,
                );
                cy.get('[data-testid=rek-source-input]').should(
                    'have.text',
                    record.fez_record_search_key_source.rek_source,
                );
                cy.get('[data-testid=rek-rights-input]').should(
                    'have.text',
                    record.fez_record_search_key_rights.rek_rights,
                );
                // Transcript is edited in a CKEditor instance, so its content
                // is read via the custom readCKEditor command.
                cy.get('div:nth-child(14) span span')
                    .eq(0)
                    .should('have.text', 'Transcript');
                cy.get('#cke_rek-transcript-editor').should('exist');
                cy.readCKEditor('rek-transcript').should(text => {
                    expect(text).to.contain(record.fez_record_search_key_transcript.rek_transcript);
                });
                cy.get('[data-testid=rek-alternate-genre-input]')
                    .should(
                        'have.value',
                        record.fez_record_search_key_alternate_genre.map(item => item.rek_alternate_genre).join(','),
                    )
                    .siblings('[role=button]')
                    .invoke('text')
                    .should(
                        'eq',
                        record.fez_record_search_key_alternate_genre
                            .map(item => item.rek_alternate_genre_lookup)
                            .join(','),
                    );
                cy.get('[data-testid=rek-location-input]').should(
                    'have.value',
                    record.fez_record_search_key_location.map(item => item.rek_location).join(''), // only has a single entry
                );
            });
        // Filling in the missing publication year clears the field error and
        // the page-level alert.
        cy.get('@bibliographicCard')
            .find('[id=rek-date]')
            .as('pubDateBlock')
            .find('p')
            .should('exist')
            .should('have.text', 'Year required');
        cy.setPartialDate('rek-date', { day: 1, month: 1, year: 2020 });
        cy.get('@pubDateBlock')
            .find('p')
            .should('not.exist');
        cy.adminEditNoAlerts();
        // ---------------------------------------------- FILES TAB --------------------------------------------------
        cy.log('Files tab');
        cy.get('[data-testid=files-section-content]').as('filesTab');
        // start: check embargo date can be cleared
        cy.get('@filesTab')
            .find('#embargoDateButton-UQFL173_b57_R298B_2579510-mp3')
            .within(() => {
                cy.get('div > div > input').should('have.value', '01/01/2099');
                cy.get('div > div > div > button').click(); // date picker popup appears
            });
        cy.get('[role="presentation"] > div:nth-child(3) > div').within(() => {
            cy.get('div > button:nth-child(1) > span > h6').should('have.text', '2099');
        });
        cy.get('[role="presentation"] > div:nth-child(1)').click();
        cy.get('@filesTab')
            .find('#embargoDateButton-UQFL173_b57_R298B_2579510-mp3')
            .within(() => {
                cy.get('div > div > input').clear();
            });
        cy.get('@filesTab')
            .find('.StandardCard svg + span')
            .should('have.text', 'Embargo date removed - review security policy on Security tab');
        // end: check embargo date can be cleared
        cy.get('@filesTab')
            .find('.AdminCard')
            .eq(1)
            .within(() => {
                cy.get('h4').should('contain', 'Advisory statement');
                cy.get('span span')
                    .eq(0)
                    .should('contain', 'Advisory statement');
                cy.get('#cke_rek-advisory-statement-editor').should('exist');
                cy.readCKEditor('rek-advisory-statement').should(text => {
                    expect(text).to.contain(record.fez_record_search_key_advisory_statement.rek_advisory_statement);
                });
            });
    });
});
|
greensnow25/javaaz
|
chapter9/testTask/src/main/java/com/greensnow25/servlet/Create.java
|
<filename>chapter9/testTask/src/main/java/com/greensnow25/servlet/Create.java
package com.greensnow25.servlet;
import com.greensnow25.repository.dao.AddressDAOImpl;
import com.greensnow25.repository.dao.UserDAOImpl;
import com.greensnow25.dataBase.CreateConnection;
import com.greensnow25.entity.Address;
import com.greensnow25.entity.User;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Public class Create.
*
* @author greensnow25.
* @version 1.
* @since 18.10.2017.
*/
public class Create extends HttpServlet {
    /**
     * connection pool.
     */
    private CreateConnection connection;

    /**
     * Creates the connection pool once for the servlet's lifetime.
     *
     * @throws ServletException on container initialization failure.
     */
    @Override
    public void init() throws ServletException {
        this.connection = new CreateConnection();
    }

    /**
     * Creates a new user and its address from the request parameters, then
     * forwards to the table view.
     * <p>
     * Runs two manual commits on one connection: the user row is committed
     * first so its generated id can be looked up for the address row.
     *
     * @param req  request carrying newName, newPassword, newCountry, newCity.
     * @param resp response forwarded to /showTable on success.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        String newName = req.getParameter("newName");
        String newPassword = req.getParameter("newPassword");
        String country = req.getParameter("newCountry");
        String city = req.getParameter("newCity");
        try (Connection connection = this.connection.getConnection()) {
            // Manual transaction control: the user must be committed before
            // its id can be read back for the address insert.
            connection.setAutoCommit(false);
            UserDAOImpl userDAO = new UserDAOImpl(connection);
            AddressDAOImpl addressDAO = new AddressDAOImpl(connection);
            userDAO.create(new User(newName, newPassword, 0));
            connection.commit();
            int id = userDAO.getOneByName(newName).getId();
            addressDAO.create(new Address(country, city, id));
            connection.commit();
            req.getRequestDispatcher("/showTable").forward(req, resp);
        } catch (SQLException e) {
            // NOTE(review): failure is only logged — the client receives no
            // error response and no explicit rollback is issued; confirm this
            // best-effort behaviour is intended.
            e.printStackTrace();
        }
    }
}
|
KhronosGroup/COLLADA-CTS
|
Core/Common/FCOLLADAParser.py
|
<reponame>KhronosGroup/COLLADA-CTS
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
import os.path
import xml.sax.expatreader as Expat
import xml.sax.handler as XMLHandler
import Core.Common.FUtils as FUtils
def IsCOLLADADocument(filename):
    """ IsCOLLADADocument(filename) -> Boolean
    Retrieves whether the file identified by the given filename contains
    a COLLADA document. The extension of the filename is used for this purpose.
    'DAE' and 'XML' are the supported extensions.
    arguments:
        filename
            string corresponding to a filename
    returns:
        Boolean indicated whether the given filename contains a COLLADA document.
    """
    # Case-insensitive extension check; a direct membership test replaces the
    # previous if/return True/return False chain.
    extension = FUtils.GetExtension(filename)
    return extension.upper() in ("DAE", "XML")
def GetCOLLADAAssetInformation(filename):
    """GetCOLLADAAssetInformation(filename) -> (str, str, str)
    Retrieves the <title>, <subject> and <keywords> content from the
    top-level <asset> element of a COLLADA document.
    Returns ("", "", "") if the filename is invalid or the information
    was not found.
    This implementation is quick, dirty, not XML-aware and is not meant
    to be used in critical situations.
    arguments:
        filename
            string corresponding to a COLLADA document filename.
    returns:
        (string, string, string) where the first string corresponds to
        the <title> element content, the second string corresponds to
        the <subject> element content and the third string corresponds to
        the <keywords> element content.
    """
    keyword = ""
    title = ""
    subject = ""
    # Small (4KB) read buffer: only the document prologue is needed; the parse
    # is aborted via EarlyExitException once <asset> has been processed.
    reader = Expat.ExpatParser(0, 4*1024)
    try:
        contentHandler = COLLADAAssetProcessor()
        reader.setContentHandler(contentHandler)
        reader.parse(filename)
        # In theory, you should never get here:
        # the EarlyExitException should always be triggered.
        keyword = contentHandler.GetKeyword()
        title = contentHandler.GetTitle()
        subject = contentHandler.GetSubject()
        reader.close()
    except EarlyExitException, e:
        # Expected control flow: the handler found (or ruled out) the data.
        keyword = contentHandler.GetKeyword()
        title = contentHandler.GetTitle()
        subject = contentHandler.GetSubject()
    except Exception, e:
        # Any other parse error silently yields the ("", "", "") defaults.
        pass
    return (title, subject, keyword)
class EarlyExitException(Exception):
    """ [INTERNAL] Raised to abort the SAX parse as soon as the top-level
    <asset> element has been fully processed (or ruled out), so that the
    rest of the document is never read. """
class COLLADAAssetProcessor(XMLHandler.ContentHandler):
    """ [INTERNAL] SAX content handler that extracts the <title>, <subject>
    and <keywords> strings from the top-level <asset> element of a COLLADA
    document, raising EarlyExitException as soon as parsing can stop. """

    def __init__(self):
        self.__title = ""
        self.__subject = ""
        self.__keyword = ""
        # Names of the currently open elements, root first.
        self.__openElements = []

    def GetKeyword(self): return self.__keyword
    def GetSubject(self): return self.__subject
    def GetTitle(self): return self.__title

    # ContentHandler methods
    def startElement(self, name, attrs):
        depth = len(self.__openElements)
        if (depth == 0 and
                name != "COLLADA"):
            raise EarlyExitException("Not a COLLADA document..")
        elif (depth == 1 and
                name != "asset"):
            raise EarlyExitException("The <asset> element that we are interested in should always be first. Otherwise, it is considered missing.")
        self.__openElements.append(name)

    def endElement(self, name):
        if self.__openElements:
            # Early exit once the top-level <asset> element closes: nothing
            # further in the document is of interest.
            if self.__openElements == ["COLLADA", "asset"]:
                raise EarlyExitException("Correctly closing the <asset> element. We don't care about the rest of the information.")
            self.__openElements.pop()

    def characters(self, content):
        if not content:
            return
        # Only text directly inside <COLLADA><asset><...> is captured.
        if self.__openElements == ["COLLADA", "asset", "title"]:
            # Retrieve the one title.
            self.__title = content.strip(' \t')
        elif self.__openElements == ["COLLADA", "asset", "subject"]:
            # Retrieve the one subject.
            self.__subject = content.strip(' \t')
        elif self.__openElements == ["COLLADA", "asset", "keywords"]:
            # The 1.4.1 schema is vague about this: assume there may be
            # zero or one <keywords> element in the top-level <asset>.
            self.__keyword = content.strip(' \t')

    # The remaining ContentHandler callbacks carry no information we need.
    def startDocument(self): pass
    def startPrefixMapping(self, prefix, uri): pass
    def endPrefixMapping(self, prefix): pass
    def ignorableWhitespace(self, content): pass
    def processingInstruction(self, target, data): pass
    def startElementNS(self, name, qname, attrs): self.startElement(name, attrs)
    def endElementNS(self, name, qname): self.endElement(name)
|
Testiduk/gitlabhq
|
spec/controllers/concerns/page_limiter_spec.rb
|
<reponame>Testiduk/gitlabhq
# frozen_string_literal: true
require 'spec_helper'
# Minimal test-only controller: mixes in PageLimiter and caps pagination at
# 200 pages for every action, so the concern can be exercised in isolation.
class PageLimiterSpecController < ApplicationController
  include PageLimiter

  before_action do
    limit_pages 200
  end

  def index
    head :ok
  end
end
RSpec.describe PageLimiter do
  let(:controller_class) do
    PageLimiterSpecController
  end

  let(:instance) do
    controller_class.new
  end

  before do
    # Stub routing params and a bot-like user agent so that
    # record_page_limit_interception reports the request as coming from a bot.
    allow(instance).to receive(:params) do
      {
        controller: "explore/projects",
        action: "index"
      }
    end
    allow(instance).to receive(:request) do
      double(:request, user_agent: "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
    end
  end

  describe "#limit_pages" do
    using RSpec::Parameterized::TableSyntax

    # max_page: configured limit; actual_page: requested ?page=;
    # result: nil when the request is allowed, otherwise the expected error class.
    where(:max_page, :actual_page, :result) do
      2 | 1 | nil
      2 | 2 | nil
      2 | 3 | PageLimiter::PageOutOfBoundsError
      nil | 1 | PageLimiter::PageLimitNotANumberError
      0 | 1 | PageLimiter::PageLimitNotSensibleError
      -1 | 1 | PageLimiter::PageLimitNotSensibleError
    end

    with_them do
      subject { instance.limit_pages(max_page) }

      before do
        allow(instance).to receive(:params) { { page: actual_page.to_s } }
      end

      it "returns the expected result" do
        if result == PageLimiter::PageOutOfBoundsError
          # Out-of-bounds requests must be recorded as well as raised.
          expect(instance).to receive(:record_page_limit_interception)
          expect { subject }.to raise_error(result)
        elsif result&.superclass == PageLimiter::PageLimiterError
          expect { subject }.to raise_error(result)
        else
          expect(subject).to eq(result)
        end
      end
    end
  end

  describe "#default_page_out_of_bounds_response" do
    subject { instance.send(:default_page_out_of_bounds_response) }

    it "returns a bad_request header" do
      expect(instance).to receive(:head).with(:bad_request)
      subject
    end
  end

  describe "#record_page_limit_interception" do
    subject { instance.send(:record_page_limit_interception) }

    let(:counter) { double("counter", increment: true) }

    before do
      allow(Gitlab::Metrics).to receive(:counter) { counter }
    end

    it "creates a metric counter" do
      expect(Gitlab::Metrics).to receive(:counter).with(
        :gitlab_page_out_of_bounds,
        controller: "explore/projects",
        action: "index",
        bot: true
      )
      subject
    end

    it "increments the counter" do
      expect(counter).to receive(:increment)
      subject
    end
  end
end
|
scalest/scalest
|
Scala/scalest-meta/src/main/scala/scalest/meta/ShowMacro.scala
|
<gh_stars>10-100
package scalest.meta
import scala.annotation.StaticAnnotation
import scala.collection.immutable
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
/**
 * Annotation macro that overrides `toString` on a case class, rendering it as
 * `ClassName(field1 = v1, field2 = v2, ...)`.
 *
 * @param exclude names of fields to omit from the generated `toString`.
 */
class show(exclude: Set[String] = Set.empty) extends StaticAnnotation {
  def macroTransform(annottees: Any*): Any = macro ShowMacro.impl
}
object ShowMacro {
  /**
   * Macro implementation backing the `@show` annotation: re-emits the
   * annotated case class with an `override def toString` that prints every
   * primary-constructor field (minus the excluded ones).
   */
  def impl(c: blackbox.Context)(annottees: c.Tree*): c.Tree = {
    import c.universe._
    // Single failure path: the annotation is only valid on a case class.
    def abortShowMacro() = c.abort(c.enclosingPosition, "Invalid annotation target: must be a case class")
    // Builds the `override def toString` tree from the constructor params.
    def overrideToString(clsDef: ClassDef, clsParams: Seq[Tree]): Tree = {
      // Evaluate the `exclude` set passed to the annotation at expansion time.
      val exclude: Set[String] = c.prefix.tree match {
        case q"new show($exclude) " => c.eval(c.Expr[Set[String]](exclude))
        case _ => abortShowMacro()
      }
      // Primary-constructor parameter names.
      val params = clsParams.map {
        case valDef: ValDef => valDef.name.decodedName.toString
        case _ => abortShowMacro()
      }
      // Fold the remaining fields into `"f1 = " + f1 + ", " + "f2 = " + f2 ...`.
      val body: Tree = params
        .filter(!exclude.contains(_))
        .map(fieldName => q"""${Literal(Constant(fieldName))} + " = " + ${TermName(fieldName)}""")
        .reduce((a, b) => q"""$a + ", " + $b""")
      q"""override def toString(): String = ${Literal(Constant(clsDef.name.decodedName.toString))} + "(" + $body + ")" """
    }
    // The annottees are either the class alone, or the class plus companion.
    val (clsDef: ClassDef, objDef: immutable.Seq[ModuleDef]) = annottees match {
      case List(clsDef: ClassDef) => (clsDef, List.empty)
      case List(clsDef: ClassDef, objDef: ModuleDef) => (clsDef, List(objDef))
      case _ => abortShowMacro()
    }
    // Re-emit the case class with the generated toString appended; the
    // companion object (if any) is passed through untouched.
    val tree = clsDef match {
      case q"case class $clsName(..$clsParams) extends { ..$clsEarlyDefs } with ..$clsParents { $clsSelf => ..$clsDefs }" =>
        q"""
        case class $clsName(..$clsParams) extends { ..$clsEarlyDefs } with ..$clsParents { $clsSelf =>
        ..$clsDefs
        ${overrideToString(clsDef, clsParams)}
        }
        ..$objDef
        """
      case _ => abortShowMacro()
    }
    MacroDebug.logGeneratedCode(c)("SHOW MACRO", tree)
    tree
  }
}
|
gbraad/openshift-origin
|
pkg/build/builder/common_test.go
|
package builder
import (
"math/rand"
"reflect"
"strings"
"testing"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
kapi "k8s.io/kubernetes/pkg/api"
buildapi "github.com/openshift/origin/pkg/build/apis/build"
"github.com/openshift/origin/pkg/generate/git"
)
// TestBuildInfo verifies that buildInfo assembles the expected
// OPENSHIFT_BUILD_* key/value pairs from a build object, taking the commit
// from the source info when present and from spec.Revision otherwise.
func TestBuildInfo(t *testing.T) {
	build := &buildapi.Build{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "sample-app",
			Namespace: "default",
		},
		Spec: buildapi.BuildSpec{
			CommonSpec: buildapi.CommonSpec{
				Source: buildapi.BuildSource{
					Git: &buildapi.GitBuildSource{
						URI: "github.com/openshift/sample-app",
						Ref: "master",
					},
				},
				Strategy: buildapi.BuildStrategy{
					SourceStrategy: &buildapi.SourceBuildStrategy{
						Env: []kapi.EnvVar{
							{Name: "RAILS_ENV", Value: "production"},
						},
					},
				},
			},
		},
	}
	srcInfo := &git.SourceInfo{}
	srcInfo.CommitID = "1575a90c569a7cc0eea84fbd3304d9df37c9f5ee"
	want := []KeyValue{
		{"OPENSHIFT_BUILD_NAME", "sample-app"},
		{"OPENSHIFT_BUILD_NAMESPACE", "default"},
		{"OPENSHIFT_BUILD_SOURCE", "github.com/openshift/sample-app"},
		{"OPENSHIFT_BUILD_REFERENCE", "master"},
		{"OPENSHIFT_BUILD_COMMIT", "1575a90c569a7cc0eea84fbd3304d9df37c9f5ee"},
		{"RAILS_ENV", "production"},
	}
	if got := buildInfo(build, srcInfo); !reflect.DeepEqual(got, want) {
		t.Errorf("buildInfo(%+v) = %+v; want %+v", build, got, want)
	}
	// Without source info, the commit must be taken from spec.Revision.
	build.Spec.Revision = &buildapi.SourceRevision{
		Git: &buildapi.GitSourceRevision{
			Commit: "1575a90c569a7cc0eea84fbd3304d9df37c9f5ee",
		},
	}
	if got := buildInfo(build, nil); !reflect.DeepEqual(got, want) {
		t.Errorf("buildInfo(%+v) = %+v; want %+v", build, got, want)
	}
}
// TestRandomBuildTag checks randomBuildTag output for both short and overlong
// repository names; the random source is reseeded per case for determinism.
func TestRandomBuildTag(t *testing.T) {
	cases := []struct {
		namespace, name string
		want            string
	}{
		{"test", "build-1", "test/build-1:f1f85ff5"},
		// For long build namespace + build name, the returned random build tag
		// would be longer than the limit of reference.NameTotalLengthMax (255
		// chars). We do not truncate the repository name because it could create an
		// invalid repository name (e.g., namespace=abc, name=d, repo=abc/d,
		// trucated=abc/ -> invalid), so we simply take a SHA1 hash of the
		// repository name (which is guaranteed to be a valid repository name) and
		// preserve the random tag.
		{
			"namespace" + strings.Repeat(".namespace", 20),
			"name" + strings.Repeat(".name", 20),
			"47c1d5c686ce4563521c625457e79ca23c07bc27:f1f85ff5",
		},
	}
	for _, tc := range cases {
		rand.Seed(0)
		if got := randomBuildTag(tc.namespace, tc.name); !reflect.DeepEqual(got, tc.want) {
			t.Errorf("randomBuildTag(%q, %q) = %q, want %q", tc.namespace, tc.name, got, tc.want)
		}
	}
}
// TestRandomBuildTagNoDupes ensures repeated calls with the same inputs do
// not produce a tag that was recently returned.
func TestRandomBuildTagNoDupes(t *testing.T) {
	rand.Seed(0)
	seen := map[string]struct{}{}
	for i := 0; i < 100; i++ {
		tag := randomBuildTag("test", "build-1")
		if _, dup := seen[tag]; dup {
			t.Errorf("randomBuildTag returned a recently seen tag: %q", tag)
		}
		seen[tag] = struct{}{}
	}
}
// TestContainerName pins the deterministic container name produced for a
// fixed random seed.
func TestContainerName(t *testing.T) {
	rand.Seed(0)
	const want = "openshift_test-strategy-build_my-build_ns_hook_f1f85ff5"
	if got := containerName("test-strategy", "my-build", "ns", "hook"); got != want {
		t.Errorf("got %v, want %v", got, want)
	}
}
|
yu9824/AtCoder
|
ABC/184/E.py
|
<gh_stars>0
# list(map(int, input().split()))
# int(input())
import sys
sys.setrecursionlimit(10 ** 9)
# Breadth-first search.
# TLE under plain CPython; passed when run with PyPy.
from collections import deque
# Constants: ordinal of 'a', used to index teleporter letters.
code_a = ord('a')
def main(*args):
    """Solve ABC184 E: shortest path on a grid with one-shot teleporters.

    args = (H, W, A, S, G, tele) where A holds the grid rows, S/G are the
    start/goal coordinates and tele[i] lists the cells carrying teleporter
    letter chr(ord('a') + i).  Prints the minimum number of steps from S
    to G, or -1 when the goal is unreachable.
    """
    H, W, A, S, G, tele = args
    # Four axis-aligned moves.
    moves = ((0, 1), (1, 0), (0, -1), (-1, 0))
    dist = [[float('inf')] * W for _ in range(H)]
    dist[S[0]][S[1]] = 0
    frontier = deque([S])
    while frontier:
        r, c = frontier.popleft()
        step = dist[r][c] + 1
        for dr, dc in moves:
            nr, nc = r + dr, c + dc
            # Skip cells outside the grid and walls.
            if not (0 <= nr < H and 0 <= nc < W) or A[nr][nc] == '#':
                continue
            if dist[nr][nc] > step:  # unvisited, or reachable more cheaply
                dist[nr][nc] = step
                frontier.append((nr, nc))
        if A[r][c].islower():
            # Standing on a teleporter: jump to every same-letter cell.
            idx = ord(A[r][c]) - code_a
            for nr, nc in tele[idx]:
                if dist[nr][nc] > step:
                    dist[nr][nc] = step
                    frontier.append((nr, nc))
            # A teleporter already used can only lead backwards on a shortest
            # route, so its destination list is cleared and never rescanned.
            tele[idx].clear()
    best = dist[G[0]][G[1]]
    if best == float('inf'):
        best = -1
    print(best)
if __name__ == '__main__':
    # Read the grid size, then scan the grid once to record the start (S),
    # the goal (G) and every teleporter cell grouped by its letter.
    H, W = list(map(int, input().split()))
    args = [H, W]
    A = []
    tele = [[] for i in range(26)]
    for h in range(H):
        a = input()
        for w in range(W):
            if a[w] == 'S':
                S = [h, w]
            elif a[w] == 'G':
                G = [h, w]
            elif a[w].islower():
                # Group teleporter cells by letter index (0 for 'a').
                tele[ord(a[w])-code_a].append((h, w))
        A.append(a)
    args.extend([A, S, G, tele])
    main(*args)
|
PresidentWarfield/SpiCall_Artemide_Exodus
|
spicall_artemide_spartacus_exodus/android/support/v4/content/ContextCompat.java
|
<gh_stars>1-10
package android.support.v4.content;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.content.res.ColorStateList;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.os.Build.VERSION;
import android.os.Bundle;
import android.os.Process;
import android.util.Log;
import android.util.TypedValue;
import java.io.File;
/**
 * Decompiled copy of the support-v4 ContextCompat helper: version-aware
 * wrappers around {@code Context} APIs that were added in later Android SDK
 * levels, falling back to older equivalents where possible.
 * <p>
 * NOTE(review): this is machine-decompiled source. Some constructs look like
 * decompiler artifacts (e.g. {@code buildPath} returning the loop-scoped
 * {@code localFile}, and {@code getDataDir} reassigning a Context-typed
 * variable from a String field) — verify against the original AOSP source
 * before relying on this file compiling or behaving as intended.
 */
public class ContextCompat
{
  private static final String TAG = "ContextCompat";
  // Guards the lazy initialization of sTempValue in getDrawable().
  private static final Object sLock = new Object();
  // Scratch TypedValue reused to resolve drawable resource ids pre-API-16.
  private static TypedValue sTempValue;
  // Joins the given path segments onto the base file, skipping null segments.
  private static File buildPath(File paramFile, String... paramVarArgs)
  {
    int i = paramVarArgs.length;
    int j = 0;
    for (File localFile = paramFile; j < i; localFile = paramFile)
    {
      String str = paramVarArgs[j];
      if (localFile == null)
      {
        paramFile = new File(str);
      }
      else
      {
        paramFile = localFile;
        if (str != null) {
          paramFile = new File(localFile, str);
        }
      }
      j++;
    }
    // NOTE(review): localFile is declared in the for-initializer; returning it
    // here is a decompiler artifact and would not compile as written.
    return localFile;
  }
  // Checks the permission for the calling pid/uid; rejects a null permission.
  public static int checkSelfPermission(Context paramContext, String paramString)
  {
    if (paramString != null) {
      return paramContext.checkPermission(paramString, Process.myPid(), Process.myUid());
    }
    throw new IllegalArgumentException("permission is null");
  }
  // API 24+: device-protected storage context; null on older platforms.
  public static Context createDeviceProtectedStorageContext(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 24) {
      return paramContext.createDeviceProtectedStorageContext();
    }
    return null;
  }
  // Ensures the directory exists, logging and returning null on failure.
  private static File createFilesDir(File paramFile)
  {
    try
    {
      if ((!paramFile.exists()) && (!paramFile.mkdirs()))
      {
        // Re-check: another thread/process may have created it concurrently.
        boolean bool = paramFile.exists();
        if (bool) {
          return paramFile;
        }
        StringBuilder localStringBuilder = new java/lang/StringBuilder;
        localStringBuilder.<init>();
        localStringBuilder.append("Unable to create files subdir ");
        localStringBuilder.append(paramFile.getPath());
        Log.w("ContextCompat", localStringBuilder.toString());
        return null;
      }
      return paramFile;
    }
    finally {}
  }
  // API 21+: platform code-cache dir; otherwise <dataDir>/code_cache.
  public static File getCodeCacheDir(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 21) {
      return paramContext.getCodeCacheDir();
    }
    return createFilesDir(new File(paramContext.getApplicationInfo().dataDir, "code_cache"));
  }
  // API 23+: theme-aware Context.getColor; otherwise Resources.getColor.
  public static final int getColor(Context paramContext, int paramInt)
  {
    if (Build.VERSION.SDK_INT >= 23) {
      return paramContext.getColor(paramInt);
    }
    return paramContext.getResources().getColor(paramInt);
  }
  // API 23+: theme-aware color state list; otherwise the Resources variant.
  public static final ColorStateList getColorStateList(Context paramContext, int paramInt)
  {
    if (Build.VERSION.SDK_INT >= 23) {
      return paramContext.getColorStateList(paramInt);
    }
    return paramContext.getResources().getColorStateList(paramInt);
  }
  // API 24+: Context.getDataDir; otherwise derived from ApplicationInfo.
  public static File getDataDir(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 24) {
      return paramContext.getDataDir();
    }
    // NOTE(review): reassigning the Context parameter from a String field is
    // a decompiler artifact; the original uses a separate String local.
    paramContext = paramContext.getApplicationInfo().dataDir;
    if (paramContext != null) {
      paramContext = new File(paramContext);
    } else {
      paramContext = null;
    }
    return paramContext;
  }
  // Resolves a drawable, falling back to a manual TypedValue lookup pre-API-16.
  public static final Drawable getDrawable(Context paramContext, int paramInt)
  {
    if (Build.VERSION.SDK_INT >= 21) {
      return paramContext.getDrawable(paramInt);
    }
    if (Build.VERSION.SDK_INT >= 16) {
      return paramContext.getResources().getDrawable(paramInt);
    }
    synchronized (sLock)
    {
      if (sTempValue == null)
      {
        TypedValue localTypedValue = new android/util/TypedValue;
        localTypedValue.<init>();
        sTempValue = localTypedValue;
      }
      // Resolve the raw resource id behind the (possibly themed) reference.
      paramContext.getResources().getValue(paramInt, sTempValue, true);
      paramInt = sTempValue.resourceId;
      return paramContext.getResources().getDrawable(paramInt);
    }
  }
  // API 19+: all external cache dirs; otherwise just the primary one.
  public static File[] getExternalCacheDirs(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 19) {
      return paramContext.getExternalCacheDirs();
    }
    return new File[] { paramContext.getExternalCacheDir() };
  }
  // API 19+: all external files dirs of the given type; otherwise just one.
  public static File[] getExternalFilesDirs(Context paramContext, String paramString)
  {
    if (Build.VERSION.SDK_INT >= 19) {
      return paramContext.getExternalFilesDirs(paramString);
    }
    return new File[] { paramContext.getExternalFilesDir(paramString) };
  }
  // API 21+: platform no-backup dir; otherwise <dataDir>/no_backup.
  public static final File getNoBackupFilesDir(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 21) {
      return paramContext.getNoBackupFilesDir();
    }
    return createFilesDir(new File(paramContext.getApplicationInfo().dataDir, "no_backup"));
  }
  // API 19+: all OBB dirs; otherwise just the primary one.
  public static File[] getObbDirs(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 19) {
      return paramContext.getObbDirs();
    }
    return new File[] { paramContext.getObbDir() };
  }
  // API 24+: whether this context uses device-protected storage.
  public static boolean isDeviceProtectedStorage(Context paramContext)
  {
    if (Build.VERSION.SDK_INT >= 24) {
      return paramContext.isDeviceProtectedStorage();
    }
    return false;
  }
  // Starts multiple activities without launch options.
  public static boolean startActivities(Context paramContext, Intent[] paramArrayOfIntent)
  {
    return startActivities(paramContext, paramArrayOfIntent, null);
  }
  // API 16+: passes the options bundle through; dropped on older platforms.
  public static boolean startActivities(Context paramContext, Intent[] paramArrayOfIntent, Bundle paramBundle)
  {
    if (Build.VERSION.SDK_INT >= 16) {
      paramContext.startActivities(paramArrayOfIntent, paramBundle);
    } else {
      paramContext.startActivities(paramArrayOfIntent);
    }
    return true;
  }
  // API 16+: passes the options bundle through; dropped on older platforms.
  public static void startActivity(Context paramContext, Intent paramIntent, Bundle paramBundle)
  {
    if (Build.VERSION.SDK_INT >= 16) {
      paramContext.startActivity(paramIntent, paramBundle);
    } else {
      paramContext.startActivity(paramIntent);
    }
  }
  // API 26+: uses startForegroundService; plain startService before that.
  public static void startForegroundService(Context paramContext, Intent paramIntent)
  {
    if (Build.VERSION.SDK_INT >= 26) {
      paramContext.startForegroundService(paramIntent);
    } else {
      paramContext.startService(paramIntent);
    }
  }
}
/* Location: ~/android/support/v4/content/ContextCompat.class
*
* Reversed by: J
*/
|
nfreire/Open-Data-Acquisition-Lab
|
opaf-data/src/main/java/inescid/opaf/data/convert/rdf/converter/SchemaOrgToEdmConversionSpecification.java
|
<reponame>nfreire/Open-Data-Acquisition-Lab
package inescid.opaf.data.convert.rdf.converter;
import java.util.Set;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import inescid.opaf.data.RdfReg;
/**
 * Static definition of the mapping used to convert Schema.org RDF metadata
 * into the Europeana Data Model (EDM). All rules are registered once, in the
 * static initializer, into the shared {@link RdfConversionSpecification}.
 */
public class SchemaOrgToEdmConversionSpecification {
    public static final RdfConversionSpecification spec;
    // public SchemaOrgToEdmConversionSpecification() {
    static {
        spec=new RdfConversionSpecification();
        // Root resource types: each supported schema.org work type is split
        // into an edm:ProvidedCHO plus its ore:Aggregation.
        spec.setRootResourceTypeMapping(RdfReg.SCHEMAORG_CREATIVE_WORK, RdfReg.EDM_PROVIDED_CHO, RdfReg.ORE_AGGREGATION);
        spec.setRootResourceTypeMapping(RdfReg.SCHEMAORG_VISUAL_ARTWORK, RdfReg.EDM_PROVIDED_CHO, RdfReg.ORE_AGGREGATION);
        spec.setRootResourceTypeMapping(RdfReg.SCHEMAORG_BOOK, RdfReg.EDM_PROVIDED_CHO, RdfReg.ORE_AGGREGATION);
        // Schema.org CreativeWork subtypes not (yet) registered as roots:
        // Article
        // Blog
        // Book
        // Clip
        // Comment
        // Conversation
        // Course
        // CreativeWorkSeason
        // CreativeWorkSeries
        // DataCatalog
        // Dataset
        // DigitalDocument
        // Episode
        // Game
        // Map
        // MediaObject
        // Menu
        // MenuSection
        // Message
        // Movie
        // MusicComposition
        // MusicPlaylist
        // MusicRecording
        // Painting
        // Photograph
        // PublicationIssue
        // PublicationVolume
        // Question
        // Recipe
        // Review
        // Sculpture
        // Series
        // SoftwareApplication
        // SoftwareSourceCode
        // TVSeason
        // TVSeries
        // VisualArtwork
        // WebPage
        // WebPageElement
        // WebSite
        // Contextual entity classes: schema.org type -> EDM/SKOS/FOAF class.
        spec.setTypeMapping(RdfReg.SCHEMAORG_THING, RdfReg.SKOS_CONCEPT);
        spec.setTypeMapping(RdfReg.SCHEMAORG_ORGANIZATION, RdfReg.FOAF_ORGANIZATION);
        spec.setTypeMapping(RdfReg.SCHEMAORG_PERSON, RdfReg.EDM_AGENT);
        spec.setTypeMapping(RdfReg.SCHEMAORG_AUDIO_OBJECT, RdfReg.EDM_WEB_RESOURCE);
        spec.setTypeMapping(RdfReg.SCHEMAORG_IMAGE_OBJECT, RdfReg.EDM_WEB_RESOURCE);
        spec.setTypeMapping(RdfReg.SCHEMAORG_WEB_PAGE, RdfReg.EDM_WEB_RESOURCE);
        spec.setTypeMapping(RdfReg.SCHEMAORG_MEDIA_OBJECT, RdfReg.EDM_WEB_RESOURCE);
        spec.setTypeMapping(RdfReg.SCHEMAORG_PLACE, RdfReg.EDM_PLACE);
        spec.setTypeMapping(RdfReg.SCHEMAORG_POSTAL_ADDRESS, RdfReg.VCARD_ADDRESS);
        // Property mappings applied to resources typed as edm:ProvidedCHO.
        ResourceTypeConversionSpecification pchoMapping = spec.getTypePropertiesMapping(RdfReg.EDM_PROVIDED_CHO);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_ABOUT, RdfReg.DC_SUBJECT);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_KEYWORDS, RdfReg.DC_SUBJECT);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_IS_PART_OF, RdfReg.DCTERMS_IS_PART_OF);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_NAME, RdfReg.DC_TITLE);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_IN_LANGUAGE, RdfReg.DC_LANGUAGE);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_DATE_CREATED, RdfReg.DCTERMS_CREATED);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_DESCRIPTION, RdfReg.DC_DESCRIPTION);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_CONTENT_LOCATION, RdfReg.DC_SUBJECT);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_CREATOR, RdfReg.DC_CREATOR);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_AUTHOR, RdfReg.DC_CREATOR);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_CONTRIBUTOR, RdfReg.DC_CONTRIBUTOR);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_SAME_AS, RdfReg.OWL_SAME_AS);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_LOCATION_CREATED, RdfReg.DC_DESCRIPTION);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_EXAMPLE_OF_WORK, RdfReg.EDM_REALIZES);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_MAIN_ENTITY_OF_PAGE, RdfReg.DCTERMS_TABLE_OF_CONTENTS);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_COPYRIGHT_HOLDER, RdfReg.DCTERMS_RIGHTS);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_GENRE, RdfReg.DC_TYPE);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_HEIGHT, RdfReg.DCTERMS_EXTENT);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_WIDTH, RdfReg.DCTERMS_EXTENT);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_SAME_AS, RdfReg.OWL_SAME_AS);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_ART_MEDIUM, RdfReg.DC_DESCRIPTION);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_ARTWORK_SURFACE, RdfReg.DCTERMS_MEDIUM);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_ARTFORM, RdfReg.DC_TYPE);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_SPATIAL_COVERAGE, RdfReg.EDM_CURRENT_LOCATION);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_TEMPORAL_COVERAGE, RdfReg.DCTERMS_TEMPORAL_COVERAGE);
        // Dimensions expressed as schema:Distance resources: take their name.
        pchoMapping.putPropertyMappingFromReferencedResource(RdfReg.SCHEMAORG_HEIGHT, RdfReg.SCHEMAORG_DISTANCE, RdfReg.SCHEMAORG_NAME, RdfReg.DCTERMS_EXTENT);
        pchoMapping.putPropertyMappingFromReferencedResource(RdfReg.SCHEMAORG_WIDTH, RdfReg.SCHEMAORG_DISTANCE, RdfReg.SCHEMAORG_NAME, RdfReg.DCTERMS_EXTENT);
        pchoMapping.putPropertyMapping(RdfReg.RDF_TYPE, RdfReg.EDM_HAS_TYPE);
        pchoMapping.putPropertyMapping(RdfReg.SCHEMAORG_ADDITIONAL_TYPE, RdfReg.EDM_HAS_TYPE);
        // Property mappings applied to the ore:Aggregation.
        ResourceTypeConversionSpecification aggregationMapping = spec.getTypePropertiesMapping(RdfReg.ORE_AGGREGATION);
        aggregationMapping.putPropertyMapping(RdfReg.SCHEMAORG_PROVIDER, RdfReg.EDM_PROVIDER);
        aggregationMapping.putPropertyMapping(RdfReg.SCHEMAORG_ASSOCIATED_MEDIA, RdfReg.EDM_IS_SHOWN_BY);
        aggregationMapping.putPropertyMapping(RdfReg.SCHEMAORG_AUDIO, RdfReg.EDM_IS_SHOWN_BY);
        aggregationMapping.putPropertyMapping(RdfReg.SCHEMAORG_URL, RdfReg.EDM_IS_SHOWN_AT);
        aggregationMapping.putPropertyMapping(RdfReg.SCHEMAORG_IMAGE, RdfReg.EDM_IS_SHOWN_BY);
        aggregationMapping.putPropertyMapping(RdfReg.SCHEMAORG_THUMBNAIL_URL, RdfReg.EDM_OBJECT);
        // ResourceTypeConversionSpecification aggregationMapping = spec.getTypeMapping(RdfReg.EDM_AGENT);
        // Property mappings applied to edm:WebResource instances.
        ResourceTypeConversionSpecification webResourceMapping = spec.getTypePropertiesMapping(RdfReg.EDM_WEB_RESOURCE);
        webResourceMapping.putPropertyMapping(RdfReg.SCHEMAORG_ENCODING_FORMAT, RdfReg.DC_FORMAT);
        webResourceMapping.putPropertyMapping(RdfReg.SCHEMAORG_NAME, RdfReg.DC_DESCRIPTION);
        webResourceMapping.putPropertyMapping(RdfReg.SCHEMAORG_DESCRIPTION, RdfReg.DC_DESCRIPTION);
        webResourceMapping.putPropertyMapping(RdfReg.SCHEMAORG_HEIGHT, RdfReg.DCTERMS_EXTENT);
        webResourceMapping.putPropertyMapping(RdfReg.SCHEMAORG_WIDTH, RdfReg.DCTERMS_EXTENT);
        webResourceMapping.putPropertyMapping(RdfReg.SCHEMAORG_FILE_FORMAT, RdfReg.DC_FORMAT);
        webResourceMapping.addPropertyMappingToUri(RdfReg.SCHEMAORG_CONTENT_URL );
        webResourceMapping.putPropertyMapping(RdfReg.RDF_TYPE, RdfReg.DC_TYPE);
        // Property mappings applied to foaf:Organization instances.
        ResourceTypeConversionSpecification organizationMapping = spec.getTypePropertiesMapping(RdfReg.FOAF_ORGANIZATION);
        organizationMapping.putPropertyMapping(RdfReg.SCHEMAORG_NAME, RdfReg.SKOS_PREF_LABEL);
        organizationMapping.putPropertyMapping(RdfReg.SCHEMAORG_ALTERNATE_NAME, RdfReg.SKOS_ALT_LABEL);
        organizationMapping.putPropertyMapping(RdfReg.SCHEMAORG_DESCRIPTION, RdfReg.SKOS_NOTE);
        organizationMapping.putPropertyMapping(RdfReg.SCHEMAORG_SAME_AS, RdfReg.OWL_SAME_AS);
        // Property mappings applied to edm:Agent (persons).
        ResourceTypeConversionSpecification agentMapping = spec.getTypePropertiesMapping(RdfReg.EDM_AGENT);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_NAME, RdfReg.SKOS_PREF_LABEL);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_ALTERNATE_NAME, RdfReg.SKOS_ALT_LABEL);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_SAME_AS, RdfReg.OWL_SAME_AS);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_JOB_TITLE, RdfReg.RDAGR2_PROFESSION_OR_OCCUPATION);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_BIRTH_DATE, RdfReg.RDAGR2_BIRTH_DATE);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_DEATH_DATE, RdfReg.RDAGR2_DEATH_DATE);
        agentMapping.putPropertyMapping(RdfReg.SCHEMAORG_GIVEN_NAME, RdfReg.SKOS_PREF_LABEL);
        // Given and family name are merged into a single label value.
        agentMapping.putPropertyMerge(RdfReg.SCHEMAORG_GIVEN_NAME, RdfReg.SCHEMAORG_FAMILY_NAME);
        // Property mappings applied to skos:Concept instances.
        ResourceTypeConversionSpecification conceptMapping = spec.getTypePropertiesMapping(RdfReg.SKOS_CONCEPT);
        conceptMapping.putPropertyMapping(RdfReg.SCHEMAORG_NAME, RdfReg.SKOS_PREF_LABEL);
        conceptMapping.putPropertyMapping(RdfReg.SCHEMAORG_ALTERNATE_NAME, RdfReg.SKOS_ALT_LABEL);
        conceptMapping.putPropertyMapping(RdfReg.SCHEMAORG_SAME_AS, RdfReg.OWL_SAME_AS);
        // Property mappings applied to edm:Place, including geo coordinates
        // unpacked from a referenced schema:GeoCoordinates resource.
        ResourceTypeConversionSpecification placeMapping = spec.getTypePropertiesMapping(RdfReg.EDM_PLACE);
        placeMapping.putPropertyMapping(RdfReg.SCHEMAORG_NAME, RdfReg.SKOS_PREF_LABEL);
        placeMapping.putPropertyMapping(RdfReg.SCHEMAORG_ALTERNATE_NAME, RdfReg.SKOS_ALT_LABEL);
        placeMapping.putPropertyMapping(RdfReg.SCHEMAORG_SAME_AS, RdfReg.OWL_SAME_AS);
        placeMapping.putPropertyMappingFromReferencedResource(RdfReg.SCHEMAORG_GEO, RdfReg.SCHEMAORG_GEO_COORDINATES, RdfReg.SCHEMAORG_LATITUDE, RdfReg.WGS84_LAT);
        placeMapping.putPropertyMappingFromReferencedResource(RdfReg.SCHEMAORG_GEO, RdfReg.SCHEMAORG_GEO_COORDINATES, RdfReg.SCHEMAORG_LONGITUDE, RdfReg.WGS84_lONG);
        // placeMapping.putPropertyMappingFromReferencedResource(RdfReg.SCHEMAORG_ADDRESS, RdfReg.SCHEMAORG_POSTAL_ADDRESS, RdfReg.SCHEMAORG_ADDRESS_REGION, RdfReg.WGS84_lONG);
        // Property mappings applied to vcard:Address instances.
        ResourceTypeConversionSpecification addressMapping = spec.getTypePropertiesMapping(RdfReg.VCARD_ADDRESS);
        addressMapping.putPropertyMapping(RdfReg.SCHEMAORG_ADDRESS_REGION, RdfReg.VCARD_REGION);
    }
}
|
tb-soft/databus
|
databus-bootstrap-server/databus-bootstrap-server-impl/src/main/java/com/linkedin/databus/bootstrap/server/BootstrapHttpServer.java
|
package com.linkedin.databus.bootstrap.server;
/*
*
* Copyright 2013 LinkedIn Corp. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.IOException;
import java.nio.ByteOrder;
import java.sql.SQLException;
import java.util.Properties;
import org.apache.log4j.Logger;
import com.linkedin.databus.bootstrap.common.BootstrapHttpStatsCollector;
import com.linkedin.databus.core.util.ConfigLoader;
import com.linkedin.databus.core.util.InvalidConfigException;
import com.linkedin.databus2.core.DatabusException;
import com.linkedin.databus2.core.container.monitoring.mbean.DatabusComponentAdmin;
import com.linkedin.databus2.core.container.netty.ServerContainer;
import com.linkedin.databus2.core.container.request.ConfigRequestProcessor;
import com.linkedin.databus2.core.container.request.ContainerOperationProcessor;
import com.linkedin.databus2.core.container.request.RequestProcessorRegistry;
/**
 * Netty-based HTTP server container serving Databus V2 bootstrap requests:
 * bootstrap data, start-SCN, target-SCN, config inspection and container operations.
 */
public class BootstrapHttpServer extends ServerContainer
{
  public static final String MODULE = BootstrapHttpServer.class.getName();
  public static final Logger LOG = Logger.getLogger(MODULE);

  /** Static (build-time) configuration for this bootstrap server. */
  private final BootstrapServerStaticConfig _bootstrapServerConfig;
  /** Collector for outbound bootstrap HTTP statistics, registered with the MBean server. */
  private final BootstrapHttpStatsCollector _bootstrapHttpStatsCollector;

  public BootstrapHttpServer(BootstrapServerConfig config)
      throws IOException, InvalidConfigException, DatabusException
  {
    this(config.build());
  }

  /**
   * Creates a bootstrap server with an explicit byte order.
   * V2 bootstrap servers use BIG_ENDIAN; v3 (which extends this class) uses LITTLE_ENDIAN.
   *
   * @param bootstrapServerConfig  static configuration (container, DB, etc.)
   * @param byteOrder              byte order for the wire protocol
   */
  public BootstrapHttpServer(BootstrapServerStaticConfig bootstrapServerConfig, ByteOrder byteOrder)
      throws IOException, InvalidConfigException, DatabusException
  {
    super(bootstrapServerConfig.getDb().getContainer(), byteOrder);
    _bootstrapServerConfig = bootstrapServerConfig;
    // Cleanup: the original declared the collector as null and then guarded its
    // construction with an always-true (null == httpStatsColl) check; construct it
    // unconditionally instead. The two boolean flags retain the original hard-coded
    // values (the second was once meant to come from runtime config -- see VCS history).
    _bootstrapHttpStatsCollector =
        new BootstrapHttpStatsCollector(getContainerStaticConfig().getId(),
                                        "bootstrapHttpOutbound",
                                        true,
                                        true,
                                        getMbeanServer());
    initializeBootstrapServerCommandProcessors();
  }

  /**
   * The default constructor. If the byte order is not explicitly set, we use BIG_ENDIAN
   * (preserving existing v2 bootstrap behaviour).
   *
   * @param bootstrapServerConfig  static configuration
   */
  public BootstrapHttpServer(BootstrapServerStaticConfig bootstrapServerConfig)
      throws DatabusException, IOException
  {
    this(bootstrapServerConfig, ByteOrder.BIG_ENDIAN);
  }

  public BootstrapHttpStatsCollector getBootstrapStatsCollector() {
    return _bootstrapHttpStatsCollector;
  }

  @Override
  protected DatabusComponentAdmin createComponentAdmin()
  {
    return new DatabusComponentAdmin(this,
                                     getMbeanServer(),
                                     BootstrapHttpServer.class.getSimpleName());
  }

  /**
   * Entry point: loads configuration from the command line ("databus.bootstrap." prefix),
   * starts the server and blocks until shutdown.
   */
  public static void main(String[] args) throws Exception
  {
    // use server container to parse the command line into properties
    Properties startupProps = ServerContainer.processCommandLineArgs(args);
    BootstrapServerConfig config = new BootstrapServerConfig();
    ConfigLoader<BootstrapServerStaticConfig> configLoader =
        new ConfigLoader<BootstrapServerStaticConfig>("databus.bootstrap.", config);
    BootstrapServerStaticConfig staticConfig = configLoader.loadConfig(startupProps);
    BootstrapHttpServer bootstrapServer = new BootstrapHttpServer(staticConfig);
    // Bind and start to accept incoming connections.
    try
    {
      bootstrapServer.registerShutdownHook();
      bootstrapServer.startAndBlock();
    }
    catch (Exception e)
    {
      LOG.error("Error starting the bootstrap server", e);
    }
    LOG.info("Exiting bootstrap server");
  }

  @Override
  public void pause()
  {
    getComponentStatus().pause();
  }

  @Override
  public void resume()
  {
    // tell all processors to resume serving client requests
    getComponentStatus().resume();
  }

  @Override
  public void suspendOnError(Throwable cause)
  {
    getComponentStatus().suspendOnError(cause);
  }

  /**
   * Registers the bootstrap request processors (config, bootstrap, startSCN, targetSCN,
   * container operations) with the container's processor registry.
   *
   * @throws DatabusException if any processor fails to register (wraps the original cause)
   */
  protected void initializeBootstrapServerCommandProcessors() throws DatabusException
  {
    LOG.info("Initializing Bootstrap HTTP Server");
    LOG.info("Config=" + _bootstrapServerConfig);
    try
    {
      RequestProcessorRegistry processorRegistry = getProcessorRegistry();
      processorRegistry.register(ConfigRequestProcessor.COMMAND_NAME,
                                 new ConfigRequestProcessor(null, this));
      processorRegistry.register(BootstrapRequestProcessor.COMMAND_NAME,
                                 new BootstrapRequestProcessor(null, _bootstrapServerConfig, this));
      processorRegistry.register(StartSCNRequestProcessor.COMMAND_NAME,
                                 new StartSCNRequestProcessor(null, _bootstrapServerConfig, this));
      processorRegistry.register(TargetSCNRequestProcessor.COMMAND_NAME,
                                 new TargetSCNRequestProcessor(null, _bootstrapServerConfig, this));
      processorRegistry.register(ContainerOperationProcessor.COMMAND_NAME,
                                 new ContainerOperationProcessor(null, this));
    }
    catch (SQLException sqle)
    {
      throw new DatabusException("command registration failed", sqle);
    }
    catch (InstantiationException e)
    {
      throw new DatabusException("command registration failed", e);
    }
    catch (IllegalAccessException e)
    {
      throw new DatabusException("command registration failed", e);
    }
    catch (ClassNotFoundException e)
    {
      throw new DatabusException("command registration failed", e);
    }
    LOG.info("Done Initializing Bootstrap HTTP Server");
  }
}
|
Kait-tt/tacowassa
|
addons/evaluation/public/models/solver.js
|
const ko = require('knockout');
const moment = require('moment');
// Client-side view model for a single evaluation "solver" check.
class Solver {
    constructor (params) {
        const { name, title, description, isSolved, updatedAt, logs } = params;

        // Static descriptive fields.
        this.name = name;
        this.title = title;
        this.description = description;

        // Observable state synchronized from the server.
        this.isSolved = ko.observable(isSolved);
        this.relatedProblems = ko.observableArray();
        this.updatedAt = ko.observable(updatedAt);
        this.updatedAtMoment = ko.pureComputed(() => moment(this.updatedAt()));

        this.logs = ko.observableArray(logs);
        // Only the log entries that carry a memo.
        this.solveMemos = ko.pureComputed(() => this.logs().filter(entry => entry.memo));
    }
}

module.exports = Solver;
|
nambach/Excel4J
|
src/main/java/io/github/nambach/excelutil/core/RowError.java
|
<gh_stars>10-100
package io.github.nambach.excelutil.core;
import io.github.nambach.excelutil.validator.ObjectError;
import lombok.Getter;
import lombok.Setter;
import java.util.List;
/**
 * Holds validation errors for a single spreadsheet row: either a free-form
 * custom error, a field-level {@link ObjectError}, or both.
 */
@Getter
public class RowError {
    // Zero-based row index in the sheet.
    private final int index;

    @Setter
    private String customError;

    @Setter
    private ObjectError objectError;

    public RowError(int index, Class<?> clazz) {
        this.index = index;
        if (clazz != null) {
            this.objectError = new ObjectError(clazz);
        }
    }

    void appendError(String field, List<String> messages) {
        objectError.appendError(field, messages);
    }

    /** One-based (Excel-style) row number. */
    public int getExcelIndex() {
        return index + 1;
    }

    public String getRowString() {
        return "Row " + getExcelIndex();
    }

    /** Custom error if set, otherwise the object error's message, otherwise "". */
    public String getMessage() {
        if (customError != null) {
            return customError;
        }
        return objectError == null ? "" : objectError.getMessage();
    }

    /** Custom error if set, otherwise the object error's inline message, otherwise "". */
    public String getInlineMessage() {
        if (customError != null) {
            return customError;
        }
        return objectError == null ? "" : objectError.getInlineMessage();
    }

    @Override
    public String toString() {
        return getRowString() + ": " + getInlineMessage();
    }
}
|
goldos24/CloneCraft-Legacy
|
CloneCraft/mobs/Sheep.h
|
<filename>CloneCraft/mobs/Sheep.h
#pragma once
#include "../entity/Entity.h"
// A sheep mob: an Entity with a legged model whose legs swing while it moves.
struct Sheep : Entity
{
    Sheep();

    // Direction the leg-swing animation is currently playing in.
    enum class LegMovementMode : char
    {
        Forward,
        Backward,
        None
    };

    void renderModel();
    // Draws a single leg at the given model-space position.
    void drawLeg(maths::Vec3<float> position);

    // Current rotation of the legs (animation state).
    maths::Vec3<float> legRotation;

    // Per-frame behaviour update (movement, animation); second arg is the time delta.
    void update(Game&, float);

    LegMovementMode legMoveMode = LegMovementMode::Forward;

    // Reads one Sheep-specific property from the entity parser stream.
    void parseSpecialProperty(std::string propertyName, std::istream& entityParserStream);

    // Walking speed -- units presumably blocks per second; TODO confirm against update().
    float movementSpeed = 2.f;
};
|
monciego/TypeScript
|
tests/baselines/reference/classFunctionMerging.js
|
//// [classFunctionMerging.ts]
// NOTE(review): this is a generated TypeScript compiler test baseline (input .ts
// followed by expected emitted .js). If the expected output changes, regenerate it
// via the compiler test harness rather than editing by hand.
// We allow ambient classes and functions to merge, this way callable classes
// which are also namespaces can be represented in declaration files
declare function Foo (x: number): Foo.Inst;
declare class Foo {
constructor(x: string);
}
declare namespace Foo {
export type Inst = number;
}
const a = new Foo("");
const b = Foo(12);
//// [classFunctionMerging.js]
var a = new Foo("");
var b = Foo(12);
|
dgant/PurpleWave
|
src/Information/Fingerprinting/ZergStrategies/Fingerprint12HatchHatch.scala
|
package Information.Fingerprinting.ZergStrategies
import Information.Fingerprinting.Generic.FingerprintCompleteBy
import Information.Fingerprinting.Strategies.ZergTimings
import Planning.UnitMatchers.MatchHatchlike
import Utilities.Time.Seconds
/**
 * Fingerprints a Zerg 12 Hatch / 11 Pool / 13 Hatch (double-expand) opening:
 * fires when hatchery-like buildings are seen completed by the expected timing,
 * with a 3-second tolerance subtracted. The trailing `3` is presumably the
 * required building count -- confirm against FingerprintCompleteBy's signature.
 */
class Fingerprint12HatchHatch extends FingerprintCompleteBy(MatchHatchlike, ZergTimings.TwelveHatch11Pool13Hatch_HatchCompleteBy - Seconds(3), 3)
|
cxsper/saleor
|
saleor/lib/python3.7/site-packages/braintree/oauth_gateway.py
|
import braintree
from braintree.error_result import ErrorResult
from braintree.successful_result import SuccessfulResult
from braintree.exceptions.not_found_error import NotFoundError
from braintree.oauth_credentials import OAuthCredentials
import sys
if sys.version_info[0] == 2:
from urllib import quote_plus
else:
from urllib.parse import quote_plus
from functools import reduce
class OAuthGateway(object):
    """Gateway for Braintree OAuth flows.

    Exchanges authorization codes / refresh tokens for access tokens, revokes
    access tokens, and builds "Connect with Braintree" URLs.
    """

    def __init__(self, gateway):
        self.gateway = gateway
        self.config = gateway.config

    def create_token_from_code(self, params):
        """Exchange an OAuth authorization code for an access token."""
        params["grant_type"] = "authorization_code"
        return self._create_token(params)

    def create_token_from_refresh_token(self, params):
        """Exchange a refresh token for a new access token."""
        params["grant_type"] = "refresh_token"
        return self._create_token(params)

    def revoke_access_token(self, access_token):
        """Revoke the given access token.

        Returns a SuccessfulResult on success, or an ErrorResult when the
        server reports failure.
        """
        self.config.assert_has_client_credentials()
        response = self.config.http().post("/oauth/revoke_access_token", {
            "token": access_token
        })
        if "result" in response and response["result"]["success"]:
            # Bug fix: the original returned the SuccessfulResult *class* object
            # rather than an instance, so callers got a type instead of a result.
            return SuccessfulResult()
        else:
            return ErrorResult(self.gateway, "could not revoke access token")

    def _create_token(self, params):
        """POST the credential params and wrap the response in a result object."""
        self.config.assert_has_client_credentials()
        response = self.config.http().post("/oauth/access_tokens", {
            "credentials": params
        })
        if "credentials" in response:
            return SuccessfulResult({"credentials": OAuthCredentials(self.gateway, response["credentials"])})
        else:
            return ErrorResult(self.gateway, response["api_error_response"])

    def connect_url(self, raw_params):
        """Build the OAuth connect URL for the given query parameters.

        Nested "user" and "business" dicts are flattened into user[...]=... and
        business[...]=... query parameters; list values become repeated "key[]"
        parameters. All keys and values are URL-encoded.
        """
        params = {"client_id": self.config.client_id}
        params.update(raw_params)
        user_params = self._sub_query(params, "user")
        business_params = self._sub_query(params, "business")

        def clean_values(accumulator, kv_pair):
            # Expand list values into repeated "key[]" pairs.
            key, value = kv_pair
            if isinstance(value, list):
                accumulator += [(key + "[]", v) for v in value]
            else:
                accumulator += [(key, value)]
            return accumulator

        params = reduce(clean_values, params.items(), [])
        query = params + user_params + business_params
        query_string = "&".join(quote_plus(key) + "=" + quote_plus(value) for key, value in query)
        return self.config.environment.base_url + "/oauth/connect?" + query_string

    def _sub_query(self, params, root):
        """Pop ``params[root]`` (a dict) and flatten it into (root[key], str(value)) pairs.

        Mutates ``params`` by removing ``root``; returns [] when ``root`` is absent.
        """
        if root in params:
            sub_query = params.pop(root)
        else:
            sub_query = {}
        query = [(root + "[" + key + "]", str(value)) for key, value in sub_query.items()]
        return query
|
pborawski/visallo
|
web/plugins/ingest-cloud-s3/src/main/java/org/visallo/web/ingest/cloud/s3/routes/S3DirectoryListing.java
|
<filename>web/plugins/ingest-cloud-s3/src/main/java/org/visallo/web/ingest/cloud/s3/routes/S3DirectoryListing.java
package org.visallo.web.ingest.cloud.s3.routes;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.visallo.webster.ParameterizedHandler;
import org.visallo.webster.annotations.Handle;
import org.visallo.webster.annotations.Optional;
import org.visallo.webster.annotations.Required;
import org.visallo.core.user.User;
import org.visallo.web.ingest.cloud.s3.AmazonS3ClientFactory;
import org.visallo.web.ingest.cloud.s3.ClientApiBuckets;
import java.util.stream.Collectors;
/**
 * Route that lists Amazon S3 content for the cloud-ingest UI: with no path it
 * returns the account's buckets; with a "bucket/dir/..." path it returns the
 * directories (common prefixes) followed by the files under that prefix.
 */
@Singleton
public class S3DirectoryListing implements ParameterizedHandler {
    // S3 key separator used to emulate a directory hierarchy.
    // Fix: was a mutable, non-final "Delimiter" that the code only sometimes used
    // (string literals "/" elsewhere); now a proper constant used consistently.
    private static final String DELIMITER = "/";
    private final AmazonS3ClientFactory amazonS3ClientFactory;

    @Inject
    public S3DirectoryListing(AmazonS3ClientFactory amazonS3ClientFactory) {
        this.amazonS3ClientFactory = amazonS3ClientFactory;
    }

    /**
     * @param providerClass  class name selecting the S3 client implementation
     * @param credentials    optional serialized credentials
     * @param path           optional "bucket/dir/..." path; empty or "/" lists buckets
     */
    @Handle
    public ClientApiBuckets handle(
            User user,
            @Required(name = "providerClass") String providerClass,
            @Optional(name = "credentials") String credentials,
            @Optional(name = "path") String path
    ) throws Exception {
        AmazonS3 s3 = amazonS3ClientFactory.getClient(providerClass, credentials);
        if (path == null || path.equals(DELIMITER) || path.isEmpty()) {
            return getBuckets(s3);
        }
        return getItems(s3, path);
    }

    /**
     * Lists directories then files under the given "bucket/keyPrefix/" path,
     * each group sorted case-insensitively by name. S3 errors are reported via
     * {@code errorMessage} rather than thrown.
     */
    private ClientApiBuckets getItems(AmazonS3 s3, String path) {
        if (!path.endsWith(DELIMITER)) path = path + DELIMITER;
        ClientApiBuckets bucketsResponse = new ClientApiBuckets();
        try {
            // Split "bucket/dir/sub/" into bucket name and key prefix.
            int firstDelimiter = path.indexOf(DELIMITER);
            String bucketName = "";
            if (firstDelimiter >= 0) {
                bucketName = path.substring(0, firstDelimiter);
            }
            String directoryKey = path.substring(firstDelimiter + 1);
            String prefix = directoryKey.length() > 0 ? directoryKey : null;
            ListObjectsRequest request = new ListObjectsRequest()
                    .withBucketName(bucketName)
                    .withDelimiter(DELIMITER)
                    .withPrefix(prefix);
            ObjectListing objectListing = s3.listObjects(request);
            // Add all files under the prefix, excluding the placeholder object
            // whose key equals the directory itself.
            bucketsResponse.items = objectListing.getObjectSummaries().stream()
                    .filter(s3ObjectSummary -> !directoryKey.equals(s3ObjectSummary.getKey()))
                    .map(item -> {
                        ClientApiBuckets.ClientApiBucket b = new ClientApiBuckets.ClientApiBucket();
                        b.date = item.getLastModified();
                        // Show only the last path segment when listing inside a prefix.
                        b.name = prefix == null ? item.getKey() :
                                item.getKey().substring(item.getKey().lastIndexOf(DELIMITER) + 1);
                        b.type = "file";
                        b.size = item.getSize();
                        return b;
                    })
                    .sorted((o1, o2) -> o1.name.compareToIgnoreCase(o2.name))
                    .collect(Collectors.toList());
            // TODO: Check isTruncated and support pagination
            // Prepend all directories (common prefixes) under the prefix.
            bucketsResponse.items.addAll(0,
                    objectListing.getCommonPrefixes().stream().map(dir -> {
                        ClientApiBuckets.ClientApiBucket b = new ClientApiBuckets.ClientApiBucket();
                        dir = dir.replaceAll("\\/$", "");
                        int slashIndex = dir.lastIndexOf(DELIMITER);
                        if (slashIndex >= 0) {
                            dir = dir.substring(slashIndex + 1);
                        }
                        b.name = dir;
                        b.type = "dir";
                        return b;
                    })
                    .sorted((o1, o2) -> o1.name.compareToIgnoreCase(o2.name))
                    .collect(Collectors.toList())
            );
        } catch (AmazonServiceException e) {
            bucketsResponse.errorMessage = "An error occurred while listing items from Amazon S3: " + e.getErrorMessage();
        }
        return bucketsResponse;
    }

    /**
     * Lists the account's buckets, sorted case-insensitively by name.
     * S3 errors are reported via {@code errorMessage} rather than thrown.
     */
    private ClientApiBuckets getBuckets(AmazonS3 s3) {
        ClientApiBuckets bucketsResponse = new ClientApiBuckets();
        try {
            bucketsResponse.items = s3.listBuckets()
                    .stream()
                    .map(bucket -> {
                        ClientApiBuckets.ClientApiBucket b = new ClientApiBuckets.ClientApiBucket();
                        b.date = bucket.getCreationDate();
                        b.name = bucket.getName();
                        b.type = "bucket";
                        return b;
                    })
                    .sorted((o1, o2) -> o1.name.compareToIgnoreCase(o2.name))
                    .collect(Collectors.toList());
        } catch (AmazonServiceException e) {
            bucketsResponse.errorMessage = "An error occurred while retrieving buckets from Amazon S3: " + e.getErrorMessage();
        }
        return bucketsResponse;
    }
}
|
luciVuc/openui5
|
src/sap.ui.webc.common/src/sap/ui/webc/common/thirdparty/icons/toaster-top.js
|
<gh_stars>0
// Icon module "toaster-top": exposes the SVG path data for the icon, selecting the
// theme-appropriate variant at module load time.
sap.ui.define(['sap/ui/webc/common/thirdparty/base/config/Theme', './v5/toaster-top', './v4/toaster-top'], function (Theme, toasterTop$2, toasterTop$1) { 'use strict';
	// Horizon themes get the v4 artwork (toasterTop$1); all other themes get v5 (toasterTop$2).
	const pathData = Theme.isTheme("sap_horizon") ? toasterTop$1 : toasterTop$2;
	var toasterTop = { pathData };
	return toasterTop;
});
|
parkin/CU-Bus-Guide
|
main/src/main/java/com/teamparkin/mtdapp/dataclasses/Vehicle.java
|
<gh_stars>0
package com.teamparkin.mtdapp.dataclasses;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.android.gms.maps.model.LatLng;
/**
 * Snapshot of a transit vehicle's current trip context and position,
 * Parcelable so it can be passed between Android components.
 */
public class Vehicle extends IdBasedData implements Parcelable {
    private static final String TAG = Vehicle.class.getSimpleName();

    // Trip this vehicle is currently serving.
    Trip trip;
    // Current geographic position.
    LatLng latLng;
    String previousStopId;
    String nextStopId;
    String originStopId;
    String destinationStopId;
    // Timestamp of the last update -- format defined by the upstream API; confirm before parsing.
    String lastUpdated;

    public Vehicle(String id, Trip trip, LatLng latLng, String previousStopId,
            String nextStopId, String originStopId, String destinationStopId,
            String lastUpdated) {
        super(id);
        this.trip = trip;
        this.latLng = latLng;
        this.previousStopId = previousStopId;
        this.nextStopId = nextStopId;
        this.originStopId = originStopId;
        this.destinationStopId = destinationStopId;
        this.lastUpdated = lastUpdated;
    }

    /**
     * Reconstructs a Vehicle from a Parcel. Read order must stay in sync with
     * {@link #writeToParcel(Parcel, int)}.
     */
    public Vehicle(Parcel source) {
        super(source.readString()); // id is written first by writeToParcel
        this.trip = source.readParcelable(Trip.class.getClassLoader());
        this.latLng = source.readParcelable(LatLng.class.getClassLoader());
        this.previousStopId = source.readString();
        this.nextStopId = source.readString();
        this.originStopId = source.readString();
        this.destinationStopId = source.readString();
        this.lastUpdated = source.readString();
    }

    public static final Parcelable.Creator<Vehicle> CREATOR = new Parcelable.Creator<Vehicle>() {
        @Override
        public Vehicle createFromParcel(Parcel source) {
            return new Vehicle(source);
        }

        @Override
        public Vehicle[] newArray(int size) {
            return new Vehicle[size];
        }
    };

    @Override
    public int describeContents() {
        // No special contents (e.g. no file descriptors) in this parcel.
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Write order must stay in sync with Vehicle(Parcel).
        dest.writeString(id);
        dest.writeParcelable(trip, 0);
        dest.writeParcelable(latLng, 0);
        dest.writeString(previousStopId);
        dest.writeString(nextStopId);
        dest.writeString(originStopId);
        dest.writeString(destinationStopId);
        dest.writeString(lastUpdated);
    }
}
|
jstormes/wp-search-with-algolia
|
js/instantsearch.js/dist-es5-module/src/decorators/__tests__/headerFooter-test.js
|
'use strict';

// Babel helper: like Object.assign -- shallow-merges own enumerable properties of
// each source argument into target, preferring the native implementation when present.
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; /* eslint-env mocha */
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _expect = require('expect');
var _expect2 = _interopRequireDefault(_expect);
var _enzyme = require('enzyme');
var _reactAddonsTestUtils = require('react-addons-test-utils');
var _reactAddonsTestUtils2 = _interopRequireDefault(_reactAddonsTestUtils);
var _TestComponent = require('./TestComponent');
var _TestComponent2 = _interopRequireDefault(_TestComponent);
var _headerFooter = require('../headerFooter');
var _headerFooter2 = _interopRequireDefault(_headerFooter);
var _Template = require('../../components/Template');
var _Template2 = _interopRequireDefault(_Template);
var _expectJsx = require('expect-jsx');
var _expectJsx2 = _interopRequireDefault(_expectJsx);
// Babel helper: wraps a CommonJS export so it can be consumed like an ES module
// namespace (i.e. always usable via `.default`). ES modules pass through untouched.
function _interopRequireDefault(obj) {
    if (obj && obj.__esModule) {
        return obj;
    }
    return { default: obj };
}
_expect2.default.extend(_expectJsx2.default);

// Unit tests for the headerFooter decorator: it wraps a widget component in
// ais-root/ais-body containers and optionally renders header/footer templates.
// NOTE(review): this file is Babel-transpiled dist output; prefer editing the
// ES6 source and rebuilding over changing this file directly.
describe('headerFooter', function () {
  var renderer = void 0;
  var defaultProps = void 0;

  // Renders the decorated TestComponent with the given props via React's shallow renderer.
  function render() {
    var props = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    var HeaderFooter = (0, _headerFooter2.default)(_TestComponent2.default);
    renderer.render(_react2.default.createElement(HeaderFooter, props));
    return renderer.getRenderOutput();
  }

  // Same idea, but returns an enzyme shallow wrapper so tests can query by props.
  function shallowRender() {
    var extraProps = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    var props = _extends({
      templateProps: {}
    }, extraProps);
    var componentWrappedInHeaderFooter = (0, _headerFooter2.default)(_TestComponent2.default);
    return (0, _enzyme.shallow)(_react2.default.createElement(componentWrappedInHeaderFooter, props));
  }

  beforeEach(function () {
    var createRenderer = _reactAddonsTestUtils2.default.createRenderer;
    defaultProps = {
      cssClasses: {
        root: 'root',
        body: 'body'
      },
      collapsible: false,
      templateProps: {}
    };
    renderer = createRenderer();
  });

  it('should render the component in a root and body', function () {
    var out = render(defaultProps);
    (0, _expect2.default)(out).toEqualJSX(_react2.default.createElement(
      'div',
      { className: 'ais-root root' },
      _react2.default.createElement(
        'div',
        { className: 'ais-body body' },
        _react2.default.createElement(_TestComponent2.default, defaultProps)
      )
    ));
  });

  it('should add a header if such a template is passed', function () {
    // Given
    defaultProps.templateProps.templates = {
      header: 'HEADER'
    };
    // When
    var out = render(defaultProps);
    // Then
    var templateProps = {
      data: undefined,
      templateKey: 'header',
      transformData: null,
      templates: {
        header: 'HEADER'
      }
    };
    (0, _expect2.default)(out).toEqualJSX(_react2.default.createElement(
      'div',
      { className: 'ais-root root' },
      _react2.default.createElement(_Template2.default, _extends({ rootProps: { className: 'ais-header', onClick: null } }, templateProps)),
      _react2.default.createElement(
        'div',
        { className: 'ais-body body' },
        _react2.default.createElement(_TestComponent2.default, defaultProps)
      )
    ));
  });

  it('should add a footer if such a template is passed', function () {
    // Given
    defaultProps.templateProps.templates = {
      footer: 'FOOTER'
    };
    // When
    var out = render(defaultProps);
    // Then
    var templateProps = {
      data: undefined,
      templateKey: 'footer',
      transformData: null,
      templates: {
        footer: 'FOOTER'
      }
    };
    (0, _expect2.default)(out).toEqualJSX(_react2.default.createElement(
      'div',
      { className: 'ais-root root' },
      _react2.default.createElement(
        'div',
        { className: 'ais-body body' },
        _react2.default.createElement(_TestComponent2.default, defaultProps)
      ),
      _react2.default.createElement(_Template2.default, _extends({ rootProps: { className: 'ais-footer', onClick: null } }, templateProps))
    ));
  });

  // Collapsible widgets get extra CSS classes and a click handler on the header.
  describe('collapsible', function () {
    var templateProps = void 0;
    var headerTemplateProps = void 0;
    var footerTemplateProps = void 0;

    beforeEach(function () {
      defaultProps.templateProps.templates = {
        header: 'yo header',
        footer: 'yo footer'
      };
      templateProps = {
        data: undefined,
        transformData: null,
        templates: {
          header: 'yo header',
          footer: 'yo footer'
        }
      };
      headerTemplateProps = _extends({
        templateKey: 'header'
      }, templateProps);
      footerTemplateProps = _extends({
        templateKey: 'footer'
      }, templateProps);
    });

    it('when true', function () {
      defaultProps.collapsible = true;
      var out = render(defaultProps);
      (0, _expect2.default)(out).toEqualJSX(_react2.default.createElement(
        'div',
        { className: 'ais-root root ais-root__collapsible' },
        _react2.default.createElement(_Template2.default, _extends({ rootProps: { className: 'ais-header', onClick: function onClick() {} } }, headerTemplateProps)),
        _react2.default.createElement(
          'div',
          { className: 'ais-body body' },
          _react2.default.createElement(_TestComponent2.default, defaultProps)
        ),
        _react2.default.createElement(_Template2.default, _extends({ rootProps: { className: 'ais-footer', onClick: null } }, footerTemplateProps))
      ));
    });

    it('when collapsed', function () {
      defaultProps.collapsible = { collapsed: true };
      var out = render(defaultProps);
      (0, _expect2.default)(out).toEqualJSX(_react2.default.createElement(
        'div',
        { className: 'ais-root root ais-root__collapsible ais-root__collapsed' },
        _react2.default.createElement(_Template2.default, _extends({ rootProps: { className: 'ais-header', onClick: function onClick() {} } }, headerTemplateProps)),
        _react2.default.createElement(
          'div',
          { className: 'ais-body body' },
          _react2.default.createElement(_TestComponent2.default, defaultProps)
        ),
        _react2.default.createElement(_Template2.default, _extends({ rootProps: { className: 'ais-footer', onClick: null } }, footerTemplateProps))
      ));
    });
  });

  describe('headerFooterData', function () {
    it('should call the header and footer template with the given data', function () {
      // Given
      var props = {
        headerFooterData: {
          header: {
            foo: 'bar'
          },
          footer: {
            foo: 'baz'
          }
        },
        templateProps: {
          templates: {
            header: 'header',
            footer: 'footer'
          }
        }
      };
      // When
      var actual = shallowRender(props);
      var header = actual.find({ templateKey: 'header' });
      var footer = actual.find({ templateKey: 'footer' });
      // Then
      (0, _expect2.default)(header.props().data.foo).toEqual('bar');
      (0, _expect2.default)(footer.props().data.foo).toEqual('baz');
    });
  });
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.