repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
threerings/game-gardens
|
client/src/main/java/com/threerings/gardens/lobby/MapViewer.java
|
//
// Game Gardens - a platform for hosting simple multiplayer Java games
// Copyright (c) 2005-2013, Three Rings Design, Inc. - All rights reserved.
// https://github.com/threerings/game-gardens/blob/master/LICENSE
package com.threerings.gardens.lobby;
import java.util.HashMap;
import java.util.Map;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Widget;
import com.threerings.nexus.distrib.DMap;
/**
 * Keeps a {@link FlowPanel} in sync with a {@link DMap}: every map entry is rendered as one
 * widget (via {@link #createView}), added on put and removed on remove.
 */
public abstract class MapViewer<K,V> {

    /** Mirrors additions and removals on the backing map into the target panel. */
    public final DMap.Listener<K,V> listener = new DMap.Listener<K,V>() {
        @Override public void onPut (K key, V value) {
            Widget widget = createView(key, value);
            _widgets.put(key, widget);
            _target.add(widget); // TODO: sorted
        }
        @Override public void onRemove (K key) {
            Widget removed = _widgets.remove(key);
            if (removed != null) _target.remove(removed);
        }
    };

    /** Creates a viewer that renders map entries into {@code target}. */
    public MapViewer (FlowPanel target) {
        _target = target;
    }

    /**
     * Subscribes {@link #listener} to {@code map} and immediately renders a view for every
     * entry the map already contains.
     */
    public void connect (DMap<K,V> map) {
        map.connect(listener);
        for (Map.Entry<K,V> entry : map.entrySet()) {
            listener.onPut(entry.getKey(), entry.getValue());
        }
    }

    /** Produces the widget that visualizes a single map entry. */
    protected abstract Widget createView (K key, V value);

    protected FlowPanel _target;
    protected Map<K,Widget> _widgets = new HashMap<K,Widget>();
}
|
jpintoduarte76/FarLands
|
src/main/java/net/farlands/sanctuary/mechanic/AFK.java
|
package net.farlands.sanctuary.mechanic;
import com.kicas.rp.util.Pair;
import net.farlands.sanctuary.FarLands;
import net.farlands.sanctuary.command.FLCommandEvent;
import net.farlands.sanctuary.command.player.CommandMessage;
import net.farlands.sanctuary.command.staff.CommandVanish;
import net.farlands.sanctuary.data.Cooldown;
import net.farlands.sanctuary.data.FLPlayerSession;
import net.farlands.sanctuary.discord.DiscordChannel;
import net.farlands.sanctuary.util.ComponentColor;
import net.farlands.sanctuary.util.FLUtils;
import net.farlands.sanctuary.util.Logging;
import net.md_5.bungee.api.ChatMessageType;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.event.player.PlayerEditBookEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import static com.kicas.rp.util.TextUtils.sendFormatted;
/**
* Handles players going afk, including afk check and kicking.
*/
public class AFK extends Mechanic {
    // Players currently being AFK-checked: player UUID -> (action-bar prompt text, expected answer).
    private final Map<UUID, Pair<String, Integer>> afkCheckList;
    // Set in onStartup(); lets the static cooldown callback in setAFKCooldown reach the live map.
    private static AFK instance;

    public AFK() {
        this.afkCheckList = new HashMap<>();
    }

    /**
     * Starts a repeating async task (every 40 ticks) that re-displays the pending AFK-check
     * prompt on the action bar for every player who still has an unanswered check.
     */
    @Override
    public void onStartup() {
        instance = this;
        Bukkit.getScheduler().runTaskTimerAsynchronously(FarLands.getInstance(), () -> {
            Bukkit.getOnlinePlayers().forEach(player -> {
                if (afkCheckList.containsKey(player.getUniqueId())) {
                    // "MM" runs are styled red/bold/magic as decoration around the readable prompt.
                    sendFormatted(player, ChatMessageType.ACTION_BAR, "&(red,bold,magic)MM {&(reset)%0} MM",
                            afkCheckList.get(player.getUniqueId()).getFirst());
                }
            });
        }, 0L, 40L);
    }

    /** Arms the AFK-check timer as soon as a player joins. */
    @Override
    public void onPlayerJoin(Player player, boolean isNew) {
        setAFKCooldown(player);
    }

    /** Clears any pending check and cancels both cooldowns when a player leaves. */
    @Override
    public void onPlayerQuit(Player player) {
        afkCheckList.remove(player.getUniqueId());
        FLPlayerSession session = FarLands.getDataHandler().getSession(player);
        if (session.afkCheckInitializerCooldown != null)
            session.afkCheckInitializerCooldown.cancel();
        session.afkCheckCooldown.cancel();
        session.afk = false;
    }

    // Any of the following activities counts as evidence the player is present and
    // pushes back the AFK-check initializer timer.
    @EventHandler
    public void onBlockBroken(BlockBreakEvent event) {
        resetInitializerCooldown(event.getPlayer());
    }

    @EventHandler
    public void onBlockPlaced(BlockPlaceEvent event) {
        resetInitializerCooldown(event.getPlayer());
    }

    @EventHandler
    public void onBookEdit(PlayerEditBookEvent event) {
        resetInitializerCooldown(event.getPlayer());
    }

    @EventHandler
    public void onFLCommand(FLCommandEvent event) {
        // Only /msg counts as activity among FL commands.
        if (CommandMessage.class.equals(event.getCommand()) && event.getSender() instanceof Player)
            resetInitializerCooldown((Player) event.getSender());
    }

    /**
     * Chat doubles as the answer channel for an active AFK check: if the player has a pending
     * check, the message is consumed as their answer (wrong answer => scheduled kick on the
     * main thread, since this event fires async). Otherwise chat is ordinary activity.
     */
    @EventHandler(priority = EventPriority.LOW)
    public void onChat(AsyncPlayerChatEvent event) {
        FLPlayerSession session = FarLands.getDataHandler().getSession(event.getPlayer());
        if (afkCheckList.containsKey(event.getPlayer().getUniqueId())) {
            event.setCancelled(true);
            int answer;
            try {
                answer = Integer.parseInt(event.getMessage());
            } catch (NumberFormatException ex) {
                // Sentinel that can never equal a valid a±b answer (|a|,|b| <= 16).
                answer = Integer.MAX_VALUE;
            }
            int actualAnswer = afkCheckList.get(event.getPlayer().getUniqueId()).getSecond();
            if (answer != actualAnswer) {
                // kickPlayer must run on the server thread; this event handler is async.
                Bukkit.getScheduler().runTask(FarLands.getInstance(),
                        () -> event.getPlayer().kickPlayer(ChatColor.RED + "The correct answer was: " + actualAnswer));
            } else {
                setAFKCooldown(event.getPlayer());
                event.getPlayer().sendMessage(ChatColor.GREEN + "Correct.");
            }
            session.afkCheckCooldown.cancel();
            afkCheckList.remove(event.getPlayer().getUniqueId());
            return;
        }
        if (session.afk)
            setNotAFK(session);
        resetInitializerCooldown(session);
    }

    /** Only looking around (yaw/pitch change) counts as activity; positional drift alone does not. */
    @EventHandler
    public void onPlayerMove(PlayerMoveEvent event) {
        if (event.getFrom().getYaw() != event.getTo().getYaw() || event.getFrom().getPitch() != event.getTo().getPitch()) {
            FLPlayerSession session = FarLands.getDataHandler().getSession(event.getPlayer());
            if (session.afk)
                setNotAFK(session);
            resetInitializerCooldown(session);
        }
    }

    /** Clears the AFK flag and, unless the player is vanished, announces the return. */
    public static void setNotAFK(FLPlayerSession session) {
        session.afk = false;
        if (!session.handle.vanished)
            Logging.broadcast(flp -> !flp.handle.getIgnoreStatus(session.player).includesChat(), " * %0 is no longer AFK.", session.handle.username);
    }

    /**
     * (Re)arms the per-player AFK-check timer. When it fires: gliding or event participants get
     * a free reset; staff are auto-vanished instead of checked; already-flagged players are
     * kicked; everyone else receives a simple a±b math prompt and a short answer window
     * (afkCheckCooldown) before being kicked.
     */
    public static void setAFKCooldown(Player player) {
        FLPlayerSession session = FarLands.getDataHandler().getSession(player);
        if (session.afkCheckInitializerCooldown == null)
            session.afkCheckInitializerCooldown = new Cooldown(session.handle.rank.getAfkCheckInterval() * 60L * 20L);
        session.afkCheckInitializerCooldown.reset(() -> {
            if (player.isOnline()) {
                // TODO: reinstate AFK bypass after 1.18
                if (/* "farlands".equals(player.getWorld().getName()) || */ player.isGliding() || session.isInEvent) {
                    setAFKCooldown(player);
                    return;
                }
                if (session.handle.rank.isStaff()) {
                    // Put the player into vanish
                    if (!session.handle.vanished) {
                        FarLands.getCommandHandler().getCommand(CommandVanish.class).execute(session.player, new String[]{"on"});
                        Logging.broadcastStaff(
                                ComponentColor.red("%s has gone AFK and is now vanished.", session.handle.username),
                                DiscordChannel.ALERTS
                        );
                        // Roll back the playtime accrued while idle during this interval.
                        session.handle.secondsPlayed -= session.handle.rank.getAfkCheckInterval() * 60L;
                    }
                    // Reset the timer
                    setAFKCooldown(player);
                    return;
                }
                if (session.afk) {
                    kickAFK(player);
                    return;
                }
                int a = FLUtils.RNG.nextInt(17), b = FLUtils.RNG.nextInt(17);
                boolean op = FLUtils.RNG.nextBoolean(); // true: +, false: -
                String check = ChatColor.RED.toString() + ChatColor.BOLD + "AFK Check: " + a + (op ? " + " : " - ") + b;
                player.sendMessage(check);
                player.sendTitle(check, "", 20, 120, 60);
                FarLands.getDebugger().echo("Sent AFK check to " + player.getName());
                instance.afkCheckList.put(player.getUniqueId(), new Pair<>(check, op ? a + b : a - b));
                session.afkCheckCooldown.reset(() -> kickAFK(player));
            }
        });
    }

    private static void resetInitializerCooldown(Player player) {
        resetInitializerCooldown(FarLands.getDataHandler().getSession(player));
    }

    // Only push the initializer back while no check is actively pending
    // (afkCheckCooldown complete) and the initializer has been created.
    private static void resetInitializerCooldown(FLPlayerSession session) {
        if (session.afkCheckCooldown != null && session.afkCheckCooldown.isComplete() &&
                session.afkCheckInitializerCooldown != null) {
            session.afkCheckInitializerCooldown.resetCurrentTask();
        }
    }

    /** Kicks the player (if still online) and notifies staff. */
    private static void kickAFK(Player player) {
        if (player.isOnline()) {
            FarLands.getDebugger().echo("Kicking " + player.getName() + " for being AFK or answering the question incorrectly.");
            player.kick(ComponentColor.red("Kicked for being AFK."));
            Logging.broadcastStaff(ComponentColor.red(player.getName() + " was kicked for being AFK."));
        }
    }
}
|
SkyCraft78/DiSky3
|
src/main/java/info/itsthesky/disky3/core/skript/properties/member/MemberNickname.java
|
<reponame>SkyCraft78/DiSky3
package info.itsthesky.disky3.core.skript.properties.member;
import ch.njol.skript.classes.Changer;
import ch.njol.skript.lang.Expression;
import ch.njol.skript.lang.SkriptParser;
import ch.njol.util.Kleenean;
import ch.njol.util.coll.CollectionUtils;
import info.itsthesky.disky3.DiSky;
import info.itsthesky.disky3.api.Utils;
import info.itsthesky.disky3.api.bot.Bot;
import info.itsthesky.disky3.api.changers.ChangeablePropertyExpression;
import info.itsthesky.disky3.api.skript.NodeInformation;
import net.dv8tion.jda.api.entities.Member;
import org.bukkit.event.Event;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class MemberNickname extends ChangeablePropertyExpression<Member, String> {
static {
register(
MemberNickname.class,
String.class,
"[discord] nick[( |-)]name",
"member"
);
}
private NodeInformation info;
@Override
public Class<?>[] acceptChange(Changer.ChangeMode mode, boolean diskyChanger) {
if (mode == Changer.ChangeMode.SET)
return CollectionUtils.array(String.class);
return CollectionUtils.array();
}
@Override
public void change(Event e, Object[] delta, Bot bot, Changer.ChangeMode mode) {
if (delta == null || delta.length == 0 || delta[0] == null) return;
Member member = Utils.verifyVar(e, getExpr(), null);
final String value = delta[0].toString();
if (value == null || member == null) return;
member = bot.getCore().getGuildById(member.getGuild().getId()).getMemberById(member.getId());
member.modifyNickname(value).queue(null, ex -> DiSky.exception(ex, info));
}
@Override
protected String @NotNull [] get(@NotNull Event e, Member @NotNull [] source) {
return new String[] {source[0].getEffectiveName()};
}
@Override
public @NotNull Class<? extends String> getReturnType() {
return String.class;
}
@Override
public @NotNull String toString(@Nullable Event e, boolean debug) {
return "nick name of " + getExpr().toString(e, debug);
}
@Override
public boolean init(Expression<?> @NotNull [] exprs, int matchedPattern, @NotNull Kleenean isDelayed, SkriptParser.@NotNull ParseResult parseResult) {
setExpr((Expression<? extends Member>) exprs[0]);
info = new NodeInformation();
return true;
}
}
|
LukasGasior1/crypto-tool
|
src/main/scala/cryptotool/ext/bittrex/api/model.scala
|
package cryptotool.ext.bittrex.api
import java.util.Date
/** One wallet balance row from the Bittrex account API.
  * NOTE(review): field names presumably mirror the Bittrex JSON keys used by the
  * deserializer, so they must not be renamed without checking the mapping.
  */
case class BalanceItem(
  currency: String,
  balance: Double,
  available: Double,
  pending: Double,
  cryptoAddress: Option[String],
  requested: Option[Boolean],
  uuid: Option[String])
/** One entry from the Bittrex order-history API.
  * NOTE(review): `osConditional` and `ommediateOrCancel` look like typos of
  * `isConditional` / `immediateOrCancel`, but the names presumably track the wire
  * format used by the JSON mapper — confirm before renaming.
  */
case class OrderHistoryItem(
  orderUuid : String,
  exchange : String,
  timeStamp : Date,
  orderType : String,
  limit : Double,
  quantity : Double,
  quantityRemaining : Double,
  commission : Double,
  price : Double,
  pricePerUnit : Option[Double],
  osConditional : Boolean,
  condition : Option[String],
  conditionTarget : Option[String],
  ommediateOrCancel : Boolean)
/** 24h market summary for one trading pair, as returned by the Bittrex public API.
  * Timestamps are kept as raw strings here; parsing is left to callers.
  */
case class MarketSummary(
  marketName: String,
  high: Double,
  low: Double,
  volume: Double,
  last: Double,
  baseVolume: Double,
  timeStamp: String,
  bid: Double,
  ask: Double,
  openBuyOrders: Long,
  openSellOrders: Long,
  prevDay: Double,
  created: String,
  displayMarketName: Option[String])
|
sqreen/dd-trace-java
|
test-published-dependencies/src/main/java/test/published/dependencies/App.java
|
package test.published.dependencies;
/** Smoke-test application: prints a fixed marker line so packaging can be verified. */
public class App {

    /** The marker line emitted on standard output. */
    private static final String MESSAGE = "Test published dependencies!";

    /** Entry point; prints {@link #MESSAGE} and returns. */
    public static void main(String[] args) {
        System.out.println(MESSAGE);
    }
}
|
rnarla123/aliyun-openapi-java-sdk
|
aliyun-java-sdk-cloudapi/src/main/java/com/aliyuncs/cloudapi/transform/v20160714/DescribeApiTrafficControlsResponseUnmarshaller.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.cloudapi.transform.v20160714;
import java.util.ArrayList;
import java.util.List;
import com.aliyuncs.cloudapi.model.v20160714.DescribeApiTrafficControlsResponse;
import com.aliyuncs.cloudapi.model.v20160714.DescribeApiTrafficControlsResponse.ApiTrafficControlItem;
import com.aliyuncs.transform.UnmarshallerContext;
public class DescribeApiTrafficControlsResponseUnmarshaller {

    /**
     * Populates {@code describeApiTrafficControlsResponse} from the parsed payload held by
     * {@code _ctx} and returns the same instance: top-level paging fields first, then one
     * {@link ApiTrafficControlItem} per element of the ApiTrafficControls array.
     */
    public static DescribeApiTrafficControlsResponse unmarshall(DescribeApiTrafficControlsResponse describeApiTrafficControlsResponse, UnmarshallerContext _ctx) {
        describeApiTrafficControlsResponse.setRequestId(_ctx.stringValue("DescribeApiTrafficControlsResponse.RequestId"));
        describeApiTrafficControlsResponse.setTotalCount(_ctx.integerValue("DescribeApiTrafficControlsResponse.TotalCount"));
        describeApiTrafficControlsResponse.setPageSize(_ctx.integerValue("DescribeApiTrafficControlsResponse.PageSize"));
        describeApiTrafficControlsResponse.setPageNumber(_ctx.integerValue("DescribeApiTrafficControlsResponse.PageNumber"));

        // Hoist the array length and per-element key prefix instead of rebuilding them per field.
        int itemCount = _ctx.lengthValue("DescribeApiTrafficControlsResponse.ApiTrafficControls.Length");
        List<ApiTrafficControlItem> apiTrafficControls = new ArrayList<ApiTrafficControlItem>(itemCount);
        for (int i = 0; i < itemCount; i++) {
            String prefix = "DescribeApiTrafficControlsResponse.ApiTrafficControls[" + i + "]";
            ApiTrafficControlItem item = new ApiTrafficControlItem();
            item.setApiId(_ctx.stringValue(prefix + ".ApiId"));
            item.setApiName(_ctx.stringValue(prefix + ".ApiName"));
            item.setTrafficControlId(_ctx.stringValue(prefix + ".TrafficControlId"));
            item.setTrafficControlName(_ctx.stringValue(prefix + ".TrafficControlName"));
            item.setBoundTime(_ctx.stringValue(prefix + ".BoundTime"));
            apiTrafficControls.add(item);
        }
        describeApiTrafficControlsResponse.setApiTrafficControls(apiTrafficControls);
        return describeApiTrafficControlsResponse;
    }
}
|
Voloshch/storage
|
src/Components/GeneralComponents/contentPage.js
|
import React from 'react';
import { Grid, Loader } from 'semantic-ui-react';
import messages from '../../Messages';
import PropTypes from 'prop-types';
import NoContent from './noContent';
import GeneralHeader from './generalHeader';
const ContentPage = ({ status, pageData, title, noContentMessage, componentDataList, componentModal }) => {
const checkStatus = () => {
if (status === 'pending' && pageData.length === 0) { return 'loader'; }
if (status === 'rejected') { return 'error'; }
if (pageData.length > 0) { return 'content'; }
};
const content = <>
<Grid>
<Grid.Row>
<GeneralHeader title={title} />
<Grid.Column textAlign='right' width={10}>
{React.createElement(componentModal)}
</Grid.Column>
</Grid.Row>
</Grid>
{React.createElement(componentDataList, { items: pageData })}
</>;
const contentPage = () => {
switch (checkStatus()) {
case 'loader':
return <Loader active inline='centered' />;
case 'error':
return <NoContent icon="frown" textMessage={messages.wrong} />;
case 'content':
return <>{content}</>;
default:
return <NoContent icon="meh" textMessage={noContentMessage} noContent componentModal={componentModal} />;
}
};
return <>
{ contentPage() }
</>;
};
ContentPage.propTypes = {
status: PropTypes.string,
pageData: PropTypes.array,
title: PropTypes.any,
noContentMessage: PropTypes.any,
componentDataList: PropTypes.any,
componentModal: PropTypes.any
};
export default ContentPage;
|
ksmonkey123/moba2
|
core/src/main/java/ch/awae/moba2/core/control/ProcessorService.java
|
<filename>core/src/main/java/ch/awae/moba2/core/control/ProcessorService.java<gh_stars>0
package ch.awae.moba2.core.control;
import ch.awae.moba2.common.LogHelper;
import ch.awae.moba2.core.lights.LightModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
@Service
public class ProcessorService {

    private final static Logger LOG = LogHelper.getLogger();

    private final LightModel lightModel;
    private final List<Processor> processors;

    /**
     * Collects every {@link Processor} bean from the application context and logs the roster.
     */
    @Autowired
    public ProcessorService(ApplicationContext context, LightModel lightModel) {
        this.lightModel = lightModel;
        this.processors = new ArrayList<>(context.getBeansOfType(Processor.class).values());
        LOG.info("loaded " + this.processors.size() + " processors");
        this.processors.forEach(processor -> LOG.fine(" * " + processor.getClass()));
    }

    /** Runs every processor once, then flushes any resulting light changes, every 50ms. */
    @Scheduled(fixedDelay = 50)
    public void tick() {
        for (Processor processor : processors) {
            processor.tick();
        }
        lightModel.flushChanges();
    }
}
|
mymong/FlexboxCanvas
|
FlexboxCanvas/Foundation/Variable/FCVariableListener.h
|
<filename>FlexboxCanvas/Foundation/Variable/FCVariableListener.h
//
// FCVariableListener.h
// FlexboxCanvas
//
// Created by <NAME> on 2021/7/28.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Observer of named-variable changes. Conformers are notified with the variable's
/// name only; looking up the new value is left to the listener.
@protocol FCVariableListener <NSObject>
/// Called after the variable identified by @c name has changed.
- (void)onChangeVariableForName:(NSString *)name;
@end
NS_ASSUME_NONNULL_END
|
DAC-KOJI-EGUCHI/Prebid.js
|
test/spec/modules/googleAnalyticsAdapter_spec.js
|
import ga from 'modules/googleAnalyticsAdapter';

const assert = require('assert');

// Verifies that enabling the GA analytics adapter with a custom tracker name
// prefixes the send string with that tracker.
describe('Ga', function () {
  describe('enableAnalytics', function () {
    it('should accept a tracker name option and output prefixed send string', function () {
      const config = { options: { trackerName: 'foo' } };
      ga.enableAnalytics(config);
      assert.equal(ga.getTrackerSend(), 'foo.send');
    });
  });
});
|
GeorgeDeac/project-athena
|
unpublishedScripts/DomainContent/Toybox/basketball/createRack.js
|
<filename>unpublishedScripts/DomainContent/Toybox/basketball/createRack.js<gh_stars>10-100
//
// createRack.js
//
// Created by <NAME> @imgntn on 10/5/2015
// Copyright 2015 High Fidelity, Inc.
//
// This is a script that creates a persistent basketball rack.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
Script.include("../libraries/utils.js");

// Asset URLs for the ball/rack models and the bounce sound.
var basketballURL ="http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/basketball/basketball2.fbx";
var collisionSoundURL = "http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/basketball/basketball.wav";
var rackURL = "http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/basketball/basketball_rack.fbx";
var rackCollisionHullURL ="http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/basketball/rack_collision_hull.obj";

var NUMBER_OF_BALLS = 4;
var DIAMETER = 0.30;           // ball diameter in meters
var RESET_DISTANCE = 1;        // meters a ball may drift before it is a reset candidate
var MINIMUM_MOVE_LENGTH = 0.05; // below this, a drifted ball is considered "at rest"

// Place the rack 2m in front of the running avatar, at load time.
var rackStartPosition =
    Vec3.sum(MyAvatar.position,
        Vec3.multiplyQbyV(MyAvatar.orientation, {
            x: 0,
            y: 0.0,
            z: -2
        }));

// The persistent rack entity; `rack` is referenced later by deleteEntity() to detect
// when the rack itself gets deleted.
var rack = Entities.addEntity({
    name: 'Basketball Rack',
    type: "Model",
    modelURL: rackURL,
    position: rackStartPosition,
    shapeType: 'compound',
    gravity: {
        x: 0,
        y: -9.8,
        z: 0
    },
    damping: 1,
    dimensions: {
        x: 0.4,
        y: 1.37,
        z: 1.73
    },
    dynamic: true,
    collisionless: false,
    collisionSoundURL: collisionSoundURL,
    compoundShapeURL: rackCollisionHullURL,
    userData: JSON.stringify({
        grabbableKey: {
            grabbable: false
        }
    })
});

// Spawned ball entity IDs and the position each ball was spawned at (parallel arrays).
var balls = [];
var originalBallPositions = [];
// Spawns NUMBER_OF_BALLS basketballs in a row above the rack and records each ball's
// own spawn position so testBallDistanceFromStart() can measure drift per ball.
function createBalls() {
    var position = rackStartPosition;
    var i;
    for (i = 0; i < NUMBER_OF_BALLS; i++) {
        var ballPosition = {
            x: position.x,
            y: position.y + DIAMETER * 2,
            z: position.z + (DIAMETER) - (DIAMETER * i)
        };
        var ball = Entities.addEntity({
            type: "Model",
            name: 'Hifi-Basketball',
            shapeType: 'Sphere',
            position: ballPosition,
            dimensions: {
                x: DIAMETER,
                y: DIAMETER,
                z: DIAMETER
            },
            restitution: 1.0,
            damping: 0.00001,
            gravity: {
                x: 0,
                y: -9.8,
                z: 0
            },
            dynamic: true,
            collisionless: false,
            modelURL: basketballURL,
            userData: JSON.stringify({
                grabbableKey: {
                    invertSolidWhileHeld: true
                }
            })
        });
        balls.push(ball);
        // BUG FIX: previously pushed the shared rack `position` object, so every entry in
        // originalBallPositions was the same rack-level point instead of the ball's actual
        // spawn position, skewing the drift measurement in testBallDistanceFromStart().
        originalBallPositions.push(ballPosition);
    }
}
// Periodic check (see distanceCheckInterval): if every ball has drifted beyond
// RESET_DISTANCE from its recorded start and has come to rest, delete and respawn
// the whole set. `resetCount` is shared across the per-ball timeouts of one sweep,
// so all NUMBER_OF_BALLS balls must qualify in the same sweep to trigger a reset.
function testBallDistanceFromStart() {
    var resetCount = 0;
    balls.forEach(function(ball, index) {
        var currentPosition = Entities.getEntityProperties(ball, "position").position;
        var originalPosition = originalBallPositions[index];
        var distance = Vec3.subtract(originalPosition, currentPosition);
        var length = Vec3.length(distance);
        if (length > RESET_DISTANCE) {
            // Re-sample after 200ms: only count the ball if it has effectively stopped
            // moving, so balls still in flight don't trigger a reset.
            Script.setTimeout(function() {
                var newPosition = Entities.getEntityProperties(ball, "position").position;
                var moving = Vec3.length(Vec3.subtract(currentPosition, newPosition));
                if (moving < MINIMUM_MOVE_LENGTH) {
                    resetCount++;
                    if (resetCount === NUMBER_OF_BALLS) {
                        deleteBalls();
                        createBalls();
                    }
                }
            }, 200)
        }
    });
}
// Fired for every entity deletion in the domain; we only react when the rack itself
// is deleted, tearing down the balls, the drift timer, and this hook.
function deleteEntity(entityID) {
    if (entityID !== rack) {
        return;
    }
    deleteBalls();
    Script.clearInterval(distanceCheckInterval);
    Entities.deletingEntity.disconnect(deleteEntity);
}
// Deletes every spawned basketball (last-spawned first) and leaves `balls` empty.
function deleteBalls() {
    for (var ballID = balls.pop(); ballID !== undefined; ballID = balls.pop()) {
        Entities.deleteEntity(ballID);
    }
}
// Spawn the initial set of balls, then wire up lifecycle hooks: rack deletion tears
// everything down, and a 1s timer watches for drifted balls to respawn.
createBalls();
Entities.deletingEntity.connect(deleteEntity);
var distanceCheckInterval = Script.setInterval(testBallDistanceFromStart, 1000);

// Stop the drift timer when the script unloads (entities persist; they are only
// cleaned up via deleteEntity when the rack itself is removed).
function atEnd() {
    Script.clearInterval(distanceCheckInterval);
}
Script.scriptEnding.connect(atEnd);
|
yukihiko-shinoda/zaim-csv-converter
|
convert.py
|
<gh_stars>1-10
#!/usr/bin/env python
"""This module implements only calling Zaim CSV converter package."""
from zaimcsvconverter.zaim_csv_converter import ZaimCsvConverter
def main() -> None:
    """Entry point: delegate the entire CSV conversion run to the converter package."""
    ZaimCsvConverter.execute()

if __name__ == "__main__":
    main()
|
thucnt93/MailCoreStaticLib
|
mailcore2/src/java/native/com_libmailcore_SMTPCheckAccountOperation.cpp
|
<gh_stars>1000+
#include "com_libmailcore_SMTPCheckAccountOperation.h"
#include "MCBaseTypes.h"
#include "JavaHandle.h"
#include "TypesUtils.h"
#include "MCSMTPCheckAccountOperation.h"

using namespace mailcore;

// Bind the generic JNI bridge macro below to the SMTP account-check operation type.
#define nativeType SMTPCheckAccountOperation
#define javaType nativeType
// Expands to the shared JNI glue (native-handle accessors, setup/teardown) for
// com.libmailcore.SMTPCheckAccountOperation — see the MC_JAVA_BRIDGE definition.
MC_JAVA_BRIDGE
|
test-java-code/pmd-test
|
javaparser-master/javaparser-core/src/main/java/com/github/javaparser/ast/type/Type.java
|
<gh_stars>0
/*
* Copyright (C) 2007-2010 <NAME>.
* Copyright (C) 2011, 2013-2016 The JavaParser Team.
*
* This file is part of JavaParser.
*
* JavaParser can be used either under the terms of
* a) the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* b) the terms of the Apache License
*
* You should have received a copy of both licenses in LICENCE.LGPL and
* LICENCE.APACHE. Please refer to those files for details.
*
* JavaParser is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
package com.github.javaparser.ast.type;
import com.github.javaparser.ast.AllFieldsConstructor;
import com.github.javaparser.ast.DataKey;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.expr.AnnotationExpr;
import com.github.javaparser.ast.observer.ObservableProperty;
import com.github.javaparser.ast.visitor.CloneVisitor;
import com.github.javaparser.metamodel.JavaParserMetaModel;
import com.github.javaparser.metamodel.TypeMetaModel;
import static com.github.javaparser.utils.Utils.assertNotNull;
import javax.annotation.Generated;
import com.github.javaparser.TokenRange;
import com.github.javaparser.resolution.Resolvable;
import com.github.javaparser.resolution.SymbolResolver;
import com.github.javaparser.resolution.types.ResolvedType;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static com.github.javaparser.utils.CodeGenerationUtils.f;
import java.util.Optional;
/**
* Base class for types.
*
* @author <NAME>
*/
public abstract class Type extends Node implements Resolvable<ResolvedType> {

    // Annotations placed directly on this type usage (never null; may be empty).
    private NodeList<AnnotationExpr> annotations;

    /**
     * Several sub classes do not support annotations.
     * This is a support constructor for them.
     */
    protected Type(TokenRange range) {
        this(range, new NodeList<>());
    }

    @AllFieldsConstructor
    public Type(NodeList<AnnotationExpr> annotations) {
        this(null, annotations);
    }

    /**
     * This constructor is used by the parser and is considered private.
     */
    @Generated("com.github.javaparser.generator.core.node.MainConstructorGenerator")
    public Type(TokenRange tokenRange, NodeList<AnnotationExpr> annotations) {
        super(tokenRange);
        setAnnotations(annotations);
        customInitialization();
    }

    @Generated("com.github.javaparser.generator.core.node.PropertyGenerator")
    public NodeList<AnnotationExpr> getAnnotations() {
        return annotations;
    }

    // Convenience index access; bounds-checked by the underlying NodeList.
    public AnnotationExpr getAnnotation(int i) {
        return getAnnotations().get(i);
    }

    // Replaces the annotation list, keeping parent links and observers consistent.
    @Generated("com.github.javaparser.generator.core.node.PropertyGenerator")
    public Type setAnnotations(final NodeList<AnnotationExpr> annotations) {
        assertNotNull(annotations);
        if (annotations == this.annotations) {
            return (Type) this;
        }
        notifyPropertyChange(ObservableProperty.ANNOTATIONS, this.annotations, annotations);
        if (this.annotations != null)
            this.annotations.setParentNode(null);
        this.annotations = annotations;
        setAsParentNodeOf(annotations);
        return this;
    }

    /**
     * Finds the element type, meaning: the type without ArrayTypes around it.
     * <p>
     * In "<code>int[] a[];</code>", the element type is int.
     */
    public Type getElementType() {
        if (this instanceof ArrayType) {
            return ((ArrayType) this).getComponentType().getElementType();
        }
        return this;
    }

    // Number of array dimensions wrapped around this type (0 for non-arrays).
    public int getArrayLevel() {
        if (this instanceof ArrayType) {
            return 1 + ((ArrayType) this).getComponentType().getArrayLevel();
        } else {
            return 0;
        }
    }

    // Removes `node` from the annotation list by identity (==), else defers to super.
    @Override
    @Generated("com.github.javaparser.generator.core.node.RemoveMethodGenerator")
    public boolean remove(Node node) {
        if (node == null)
            return false;
        for (int i = 0; i < annotations.size(); i++) {
            if (annotations.get(i) == node) {
                annotations.remove(i);
                return true;
            }
        }
        return super.remove(node);
    }

    // Source-code representation of this type (without annotations).
    public abstract String asString();

    @Override
    @Generated("com.github.javaparser.generator.core.node.CloneGenerator")
    public Type clone() {
        return (Type) accept(new CloneVisitor(), null);
    }

    @Override
    @Generated("com.github.javaparser.generator.core.node.GetMetaModelGenerator")
    public TypeMetaModel getMetaModel() {
        return JavaParserMetaModel.typeMetaModel;
    }

    // Identity-based (==) replacement within the annotation list, else defers to super.
    @Override
    @Generated("com.github.javaparser.generator.core.node.ReplaceMethodGenerator")
    public boolean replace(Node node, Node replacementNode) {
        if (node == null)
            return false;
        for (int i = 0; i < annotations.size(); i++) {
            if (annotations.get(i) == node) {
                annotations.set(i, (AnnotationExpr) replacementNode);
                return true;
            }
        }
        return super.replace(node, replacementNode);
    }

    // ---------------------------------------------------------------------------
    // Generated kind helpers. For each concrete subtype T the base class offers:
    //   isT()  -> false here, overridden to true in T
    //   asT()  -> throws IllegalStateException here, overridden to return `this` in T
    //   ifT(c) -> no-op here, overridden in T to run the consumer
    //   toT()  -> Optional.empty() here, overridden in T to Optional.of(this)
    // ---------------------------------------------------------------------------

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isArrayType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public ArrayType asArrayType() {
        throw new IllegalStateException(f("%s is not an ArrayType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isClassOrInterfaceType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public ClassOrInterfaceType asClassOrInterfaceType() {
        throw new IllegalStateException(f("%s is not an ClassOrInterfaceType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isIntersectionType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public IntersectionType asIntersectionType() {
        throw new IllegalStateException(f("%s is not an IntersectionType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isPrimitiveType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public PrimitiveType asPrimitiveType() {
        throw new IllegalStateException(f("%s is not an PrimitiveType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isReferenceType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public ReferenceType asReferenceType() {
        throw new IllegalStateException(f("%s is not an ReferenceType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isTypeParameter() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public TypeParameter asTypeParameter() {
        throw new IllegalStateException(f("%s is not an TypeParameter", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isUnionType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public UnionType asUnionType() {
        throw new IllegalStateException(f("%s is not an UnionType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isUnknownType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public UnknownType asUnknownType() {
        throw new IllegalStateException(f("%s is not an UnknownType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isVoidType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public VoidType asVoidType() {
        throw new IllegalStateException(f("%s is not an VoidType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isWildcardType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public WildcardType asWildcardType() {
        throw new IllegalStateException(f("%s is not an WildcardType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifArrayType(Consumer<ArrayType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifClassOrInterfaceType(Consumer<ClassOrInterfaceType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifIntersectionType(Consumer<IntersectionType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifPrimitiveType(Consumer<PrimitiveType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifReferenceType(Consumer<ReferenceType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifTypeParameter(Consumer<TypeParameter> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifUnionType(Consumer<UnionType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifUnknownType(Consumer<UnknownType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifVoidType(Consumer<VoidType> action) {
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifWildcardType(Consumer<WildcardType> action) {
    }

    // Symbol resolution to the resolved-type model; implemented by concrete subtypes.
    @Override
    public abstract ResolvedType resolve();

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<ArrayType> toArrayType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<ClassOrInterfaceType> toClassOrInterfaceType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<IntersectionType> toIntersectionType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<PrimitiveType> toPrimitiveType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<ReferenceType> toReferenceType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<TypeParameter> toTypeParameter() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<UnionType> toUnionType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<UnknownType> toUnknownType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<VoidType> toVoidType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<WildcardType> toWildcardType() {
        return Optional.empty();
    }

    // VarType ("var") support follows the same generated pattern as the kinds above.
    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public boolean isVarType() {
        return false;
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public VarType asVarType() {
        throw new IllegalStateException(f("%s is not an VarType", this));
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public Optional<VarType> toVarType() {
        return Optional.empty();
    }

    @Generated("com.github.javaparser.generator.core.node.TypeCastingGenerator")
    public void ifVarType(Consumer<VarType> action) {
    }
}
|
zealoussnow/chromium
|
ppapi/cpp/private/x509_certificate_private.cc
|
<reponame>zealoussnow/chromium<filename>ppapi/cpp/private/x509_certificate_private.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ppapi/cpp/private/x509_certificate_private.h"
#include "ppapi/cpp/module_impl.h"
#include "ppapi/cpp/pass_ref.h"
#include "ppapi/cpp/var.h"
namespace pp {

namespace {

// Binds this C++ wrapper to the 0.1 revision of the
// PPB_X509Certificate_Private C interface; consumed by the
// has_interface<>/get_interface<> helpers used below.
template <> const char* interface_name<PPB_X509Certificate_Private_0_1>() {
  return PPB_X509CERTIFICATE_PRIVATE_INTERFACE_0_1;
}

}  // namespace

// Creates a null certificate resource.
X509CertificatePrivate::X509CertificatePrivate() : Resource() {}

// Adopts an existing PP_Resource without taking an extra reference.
X509CertificatePrivate::X509CertificatePrivate(PassRef, PP_Resource resource)
    : Resource(PASS_REF, resource) {
}

// Creates a new certificate resource bound to `instance`. If the browser
// does not expose the 0.1 interface, the resource stays null.
X509CertificatePrivate::X509CertificatePrivate(const InstanceHandle& instance) {
  if (has_interface<PPB_X509Certificate_Private_0_1>()) {
    PassRefFromConstructor(get_interface<PPB_X509Certificate_Private_0_1>()->
        Create(instance.pp_instance()));
  }
}

// static
// Returns true when the browser exposes PPB_X509Certificate_Private 0.1.
bool X509CertificatePrivate::IsAvailable() {
  return has_interface<PPB_X509Certificate_Private_0_1>();
}

// Parses `length` bytes of certificate data (presumably DER-encoded —
// confirm against the PPB_X509Certificate_Private API docs) into this
// resource. Returns false when the interface is unavailable or the data
// is rejected by the browser.
bool X509CertificatePrivate::Initialize(const char* bytes, uint32_t length) {
  if (!has_interface<PPB_X509Certificate_Private_0_1>())
    return false;
  PP_Bool result = get_interface<PPB_X509Certificate_Private_0_1>()->Initialize(
      pp_resource(),
      bytes,
      length);
  return PP_ToBool(result);
}

// Returns the requested certificate field; the returned Var adopts the
// reference handed back by the browser. Returns an undefined Var when the
// interface is unavailable.
Var X509CertificatePrivate::GetField(
    PP_X509Certificate_Private_Field field) const {
  if (!has_interface<PPB_X509Certificate_Private_0_1>())
    return Var();
  return Var(PassRef(),
      get_interface<PPB_X509Certificate_Private_0_1>()->GetField(pp_resource(),
          field));
}

}  // namespace pp
|
mirwansyahs/siabanks
|
assets/ckeditor/tests/core/editable/keystrokes/delbackspacequirks/_helpers/tools.js
|
/* exported quirksTools */

// Helpers for bender tests of DEL/BACKSPACE handling quirks in CKEditor.
// Each helper returns a test function (executed with the bender test case as
// `this`) that loads HTML+selection into the editor, fires a keydown, and
// asserts both the resulting DOM state and whether the keystroke was left to
// the browser's native handling.
var quirksTools = ( function() {
	'use strict';

	var DEL = 46,
		BACKSPACE = 8,
		// Key code -> human-readable name used in assertion messages.
		keyNames = {
			46: 'DEL',
			8: 'BACKSPACE'
		};

	// Builds a test function for one keystroke.
	//   key          - key code (DEL or BACKSPACE),
	//   keyModifiers - bitmask of CKEDITOR.CTRL / CKEDITOR.SHIFT,
	//   handled      - expected number of native (uncancelled) handlings,
	//   html         - input HTML with selection markers ('@' = bogus filler),
	//   expected     - expected HTML with selection markers after the keystroke.
	function assertKeystroke( key, keyModifiers, handled, html, expected ) {
		// Expands '@' placeholders to bogus <br> fillers on browsers that need
		// them (CKEDITOR.env.needsBrFiller), or strips them otherwise.
		function decodeBoguses( html ) {
			return html.replace( /@/g, CKEDITOR.env.needsBrFiller ? '<br />' : '' );
		}

		return function() {
			var bot = this.editorBot,
				editor = bot.editor,
				handledNatively = 0;

			html = decodeBoguses( html );

			bender.tools.selection.setWithHtml( editor, html );

			// Very low priority (999) so this runs after the editor's own key
			// handlers and only counts keystrokes they did not cancel.
			var listener = editor.on( 'key', function() {
				++handledNatively;
			}, null, null, 999 );

			editor.editable().fire( 'keydown', new CKEDITOR.dom.event( {
				keyCode: key,
				ctrlKey: keyModifiers & CKEDITOR.CTRL,
				shiftKey: keyModifiers & CKEDITOR.SHIFT
			} ) );

			var htmlWithSelection = bender.tools.selection.getWithHtml( editor );
			var message = '(' + keyNames[ key ] + ') Correct DOM state after the keystroke';

			assert.isInnerHtmlMatching(
				expected,
				htmlWithSelection,
				{ compareSelection: true, normalizeSelection: true },
				message
			);

			assert.areSame( handled, handledNatively, '(' + keyNames[ key ] + ') Keystroke handled by the browser' );

			listener.removeListener();
		};
	}

	// DEL keystroke the editor is expected to handle itself (0 native handlings).
	function d() {
		return assertKeystroke.apply( this, [ DEL, 0, 0 ].concat( [].slice.call( arguments ) ) );
	}

	// BACKSPACE keystroke the editor is expected to handle itself.
	function b() {
		return assertKeystroke.apply( this, [ BACKSPACE, 0, 0 ].concat( [].slice.call( arguments ) ) );
	}

	// We need expected param, because in some cases selection normalization will change the input
	// selection markers. Therefore, in some cases we can't compare the result after with the input HTML.

	// BACKSPACE expected to fall through to the browser (1 native handling).
	function bf( html, expected ) {
		if ( !expected ) {
			expected = html;
		}

		return assertKeystroke.apply( this, [ BACKSPACE, 0, 1, html, expected ] );
	}

	// DEL expected to fall through to the browser (1 native handling).
	function df( html, expected ) {
		if ( !expected ) {
			expected = html;
		}

		return assertKeystroke.apply( this, [ DEL, 0, 1, html, expected ] );
	}

	// Public API consumed by the quirks test suites.
	return {
		DEL: DEL,
		BACKSPACE: BACKSPACE,
		assertKeystroke: assertKeystroke,
		d: d,
		b: b,

		// Calls d() and b() for the same arguments.
		bd: function() {
			var bfn = b.apply( this, arguments ),
				dfn = d.apply( this, arguments );

			return function() {
				bfn.call( this );
				dfn.call( this );
			};
		},

		bf: bf,
		df: df,

		// Calls df() and bf() for the same arguments.
		bdf: function() {
			var bffn = bf.apply( this, arguments ),
				dffn = df.apply( this, arguments );

			return function() {
				bffn.call( this );
				dffn.call( this );
			};
		}
	};
} )();
|
karelskiy/react-app
|
src/components/example.js
|
import React, { Component } from 'react';
function Warning(props){
if(!props.warn){
return null;
}
return (
<div>
<h1>Warning</h1>
</div>
)
}
class Example extends Component {
constructor(props){
super(props);
this.state = {
showing: true
}
this.click = this.click.bind(this)
}
click(){
this.setState({
showing: !this.state.showing
})
}
render(){
return (
<div>
<Warning warn={this.state.showing} />
<button onClick={this.click}>{this.state.showing? 'Скрыть' : 'Показать'}</button>
</div>
)
}
}
export default Example;
|
kiiadi/smithy
|
aws/smithy-aws-traits/src/main/java/software/amazon/smithy/aws/traits/apigateway/Authorizer.java
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.smithy.aws.traits.apigateway;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import software.amazon.smithy.model.node.Node;
import software.amazon.smithy.model.node.NumberNode;
import software.amazon.smithy.model.node.ObjectNode;
import software.amazon.smithy.model.node.StringNode;
import software.amazon.smithy.model.node.ToNode;
import software.amazon.smithy.utils.ListUtils;
import software.amazon.smithy.utils.SmithyBuilder;
import software.amazon.smithy.utils.ToSmithyBuilder;
/**
* Represents an API Gateway authorizer.
*
* @see AuthorizersTrait
*/
public final class Authorizer implements ToNode, ToSmithyBuilder<Authorizer> {
    private static final String SCHEME_KEY = "scheme";
    private static final String TYPE_KEY = "type";
    private static final String URI_KEY = "uri";
    private static final String CREDENTIALS_KEY = "credentials";
    private static final String IDENTITY_SOURCE_KEY = "identitySource";
    private static final String IDENTITY_VALIDATION_EXPRESSION_KEY = "identityValidationExpression";
    private static final String RESULT_TTL_IN_SECONDS = "resultTtlInSeconds";

    // All recognized node member names; fromNode() warns about any others.
    private static final List<String> PROPERTIES = ListUtils.of(
            SCHEME_KEY, TYPE_KEY, URI_KEY, CREDENTIALS_KEY, IDENTITY_SOURCE_KEY,
            IDENTITY_VALIDATION_EXPRESSION_KEY, RESULT_TTL_IN_SECONDS);

    private final String scheme;
    private final String type;
    private final String uri;
    private final String credentials;
    private final String identitySource;
    private final String identityValidationExpression;
    private final Integer resultTtlInSeconds;

    // scheme, type, and uri are required; the remaining members are optional
    // and may be null.
    private Authorizer(Builder builder) {
        type = SmithyBuilder.requiredState(TYPE_KEY, builder.type);
        scheme = SmithyBuilder.requiredState(SCHEME_KEY, builder.scheme);
        uri = SmithyBuilder.requiredState(URI_KEY, builder.uri);
        credentials = builder.credentials;
        identitySource = builder.identitySource;
        identityValidationExpression = builder.identityValidationExpression;
        resultTtlInSeconds = builder.resultTtlInSeconds;
    }

    /**
     * Creates a builder for an Authorizer.
     *
     * @return Returns the created builder.
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Gets the Smithy scheme used as the client authentication type.
     *
     * @return Returns the optionally defined client authentication type.
     */
    public String getScheme() {
        return scheme;
    }

    /**
     * Gets the type of the authorizer.
     *
     * <p>This is a required property and the value must be "token",
     * for an authorizer with the caller identity embedded in an
     * authorization token, or "request", for an authorizer with the
     * caller identity contained in request parameters.
     *
     * @return Returns the authorizer type.
     */
    public String getType() {
        return type;
    }

    /**
     * Gets the Uniform Resource Identifier (URI) of the authorizer
     * Lambda function.
     *
     * @return Returns the Lambda URI.
     */
    public String getUri() {
        return uri;
    }

    /**
     * Gets the Credentials required for invoking the authorizer, if any, in
     * the form of an ARN of an IAM execution role.
     *
     * <p>For example, "arn:aws:iam::account-id:IAM_role".
     *
     * @return Returns the optional credential ARN.
     */
    public Optional<String> getCredentials() {
        return Optional.ofNullable(credentials);
    }

    /**
     * Gets the comma-separated list of mapping expressions of the request
     * parameters as the identity source.
     *
     * <p>This property is only applicable for the authorizer of the
     * "request" type only.
     *
     * @return Returns the optional identity source string.
     */
    public Optional<String> getIdentitySource() {
        return Optional.ofNullable(identitySource);
    }

    /**
     * Gets the regular expression for validating the token as the incoming
     * identity. For example, {@code "^x-[a-z]+"}.
     *
     * @return Returns the identity regular expression.
     */
    public Optional<String> getIdentityValidationExpression() {
        return Optional.ofNullable(identityValidationExpression);
    }

    /**
     * Gets the number of seconds during which the resulting IAM policy
     * is cached.
     *
     * @return Returns the cache amount in seconds.
     */
    public Optional<Integer> getResultTtlInSeconds() {
        return Optional.ofNullable(resultTtlInSeconds);
    }

    @Override
    public Builder toBuilder() {
        return builder()
                .scheme(scheme)
                .type(type)
                .uri(uri)
                .credentials(credentials)
                .identitySource(identitySource)
                .identityValidationExpression(identityValidationExpression)
                .resultTtlInSeconds(resultTtlInSeconds);
    }

    // Serializes this authorizer; optional members are omitted when unset.
    @Override
    public Node toNode() {
        return Node.objectNodeBuilder()
                .withMember(TYPE_KEY, Node.from(getType()))
                .withMember(SCHEME_KEY, Node.from(getScheme()))
                .withMember(URI_KEY, Node.from(getUri()))
                .withOptionalMember(CREDENTIALS_KEY, getCredentials().map(Node::from))
                .withOptionalMember(IDENTITY_SOURCE_KEY, getIdentitySource().map(Node::from))
                .withOptionalMember(IDENTITY_VALIDATION_EXPRESSION_KEY,
                                    getIdentityValidationExpression().map(Node::from))
                .withOptionalMember(RESULT_TTL_IN_SECONDS, getResultTtlInSeconds().map(Node::from))
                .build();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        } else if (!(o instanceof Authorizer)) {
            return false;
        }

        Authorizer that = (Authorizer) o;
        return Objects.equals(scheme, that.scheme)
               && type.equals(that.type)
               && uri.equals(that.uri)
               && Objects.equals(credentials, that.credentials)
               && Objects.equals(identitySource, that.identitySource)
               && Objects.equals(identityValidationExpression, that.identityValidationExpression)
               && Objects.equals(resultTtlInSeconds, that.resultTtlInSeconds);
    }

    @Override
    public int hashCode() {
        // Hashing only the required fields is still consistent with equals()
        // (equal objects necessarily agree on them); the optional fields are
        // left out, trading a few extra collisions for a cheaper hash.
        return Objects.hash(scheme, type, uri);
    }

    /**
     * Deserializes an Authorizer from an object node.
     *
     * <p>{@code type} and {@code uri} are required members; all other known
     * members are optional, and unrecognized members only emit a warning.
     *
     * @param node Object node to deserialize.
     * @return Returns the created Authorizer.
     */
    static Authorizer fromNode(ObjectNode node) {
        node.warnIfAdditionalProperties(PROPERTIES);
        Builder builder = builder();
        node.getStringMember(SCHEME_KEY)
                .map(StringNode::getValue)
                .ifPresent(builder::scheme);
        builder.type(node.expectMember(TYPE_KEY).expectStringNode().getValue());
        builder.uri(node.expectMember(URI_KEY).expectStringNode().getValue());
        node.getStringMember(CREDENTIALS_KEY)
                .map(StringNode::getValue)
                .ifPresent(builder::credentials);
        node.getStringMember(IDENTITY_SOURCE_KEY)
                .map(StringNode::getValue)
                .ifPresent(builder::identitySource);
        node.getStringMember(IDENTITY_VALIDATION_EXPRESSION_KEY)
                .map(StringNode::getValue)
                .ifPresent(builder::identityValidationExpression);
        node.getNumberMember(RESULT_TTL_IN_SECONDS)
                .map(NumberNode::getValue)
                .map(Number::intValue)
                .ifPresent(builder::resultTtlInSeconds);
        return builder.build();
    }

    /**
     * Builder used to create an {@link Authorizer}.
     */
    public static final class Builder implements SmithyBuilder<Authorizer> {
        private String scheme;
        private String type;
        private String uri;
        private String credentials;
        private String identitySource;
        private String identityValidationExpression;
        private Integer resultTtlInSeconds;

        @Override
        public Authorizer build() {
            return new Authorizer(this);
        }

        /**
         * Sets the client authentication scheme name.
         *
         * @param scheme Client authentication scheme (e.g., aws.v4).
         * @return Returns the builder.
         */
        public Builder scheme(String scheme) {
            this.scheme = scheme;
            return this;
        }

        /**
         * Sets the type of the authorizer.
         *
         * <p>This is a required property and the value must be "token",
         * for an authorizer with the caller identity embedded in an
         * authorization token, or "request", for an authorizer with the
         * caller identity contained in request parameters.
         *
         * @param type authorizer type.
         * @return Returns the builder.
         */
        public Builder type(String type) {
            this.type = type;
            return this;
        }

        /**
         * Sets the Uniform Resource Identifier (URI) of the authorizer
         * Lambda function.
         *
         * <p>NOTE(review): the expected URI syntax (presumably of the form
         * "arn:aws:apigateway:{region}:lambda:path/{service_api}") was left
         * unstated in the original doc — confirm against the API Gateway
         * Lambda authorizer documentation.
         *
         * @param uri the Lambda URI to set.
         * @return Returns the builder.
         */
        public Builder uri(String uri) {
            this.uri = uri;
            return this;
        }

        /**
         * Sets the Credentials required for invoking the authorizer, if any, in
         * the form of an ARN of an IAM execution role.
         *
         * <p>For example, "arn:aws:iam::account-id:IAM_role".
         *
         * @param credentials Credentials ARN to set.
         * @return Returns the builder.
         */
        public Builder credentials(String credentials) {
            this.credentials = credentials;
            return this;
        }

        /**
         * Sets the comma-separated list of mapping expressions of the request
         * parameters as the identity source.
         *
         * <p>This property is only applicable for the authorizer of the
         * "request" type only.
         *
         * @param identitySource Identity source CSV to set.
         * @return Returns the builder.
         */
        public Builder identitySource(String identitySource) {
            this.identitySource = identitySource;
            return this;
        }

        /**
         * Sets the regular expression for validating the token as the incoming
         * identity. For example, {@code "^x-[a-z]+"}.
         *
         * @param identityValidationExpression Expression to set.
         * @return Returns the builder.
         */
        public Builder identityValidationExpression(String identityValidationExpression) {
            this.identityValidationExpression = identityValidationExpression;
            return this;
        }

        /**
         * Sets the number of seconds during which the resulting IAM policy
         * is cached.
         *
         * @param resultTtlInSeconds Number of seconds to cache.
         * @return Returns the builder.
         */
        public Builder resultTtlInSeconds(Integer resultTtlInSeconds) {
            this.resultTtlInSeconds = resultTtlInSeconds;
            return this;
        }
    }
}
|
SoftReaper/Mesa-Renoir-deb
|
src/gallium/targets/pipe-loader/pipe_kmsro.c
|
#include "target-helpers/inline_debug_helper.h"
#include "frontend/drm_driver.h"
#include "kmsro/drm/kmsro_drm_public.h"
#define GALLIUM_KMSRO_ONLY
#include "target-helpers/drm_helper.h"
|
figtools/figgy-cli
|
src/figcli/test/cli/data/put.py
|
import pexpect
from figcli.test.cli.config import *
from figcli.test.cli.data.delete import DataDelete, DeleteAction
from figcli.test.cli.figgy import FiggyTest
from figcli.config import *
from figcli.utils.utils import *
class DataPut(FiggyTest):
    """Drives `figgy config put` interactively and verifies its prompt flow."""

    def __init__(self, extra_args=""):
        super().__init__(pexpect.spawn(f'{CLI_NAME} config {put.name} --env {DEFAULT_ENV} '
                                       f'--skip-upgrade {extra_args}',
                                       timeout=20, encoding='utf-8'), extra_args=extra_args)
        self.step(f"Testing `{CLI_NAME} config {put.name} --env {DEFAULT_ENV}`")

    def _enter_param(self, key, value, desc, name_prompt, another_prompt):
        """Answer one full put sequence: name, value, description, secret? -> 'n',
        then wait for the trailing "add another?" prompt.

        The exact prompt regexes differ between CLI paths (first put vs.
        follow-up put), so callers pass the name/another patterns they expect.
        """
        self.expect(name_prompt)
        self.sendline(key)
        self.expect('.*Please input a value.*')
        self.sendline(value)
        self.expect('.*Please input an optional.*')
        self.sendline(desc)
        self.expect('.*secret?.*')
        self.sendline('n')
        self.expect(another_prompt)

    def run(self):
        # Happy path: store a parameter we have permissions for.
        self._enter_param(data_param_1, data_param_1_val, data_param_1_desc,
                          '.*Please input a PS Name.*', '.*Add another?.*')
        self.sendline('y')
        # Then attempt a name outside our permission boundary and expect the
        # CLI to report missing permissions instead of storing the value.
        self.expect('.*PS Name.*')
        self.sendline('/devops/test/invalid')
        self.expect('.*value.*')
        self.sendline(DELETE_ME_VALUE)
        self.expect('.*not have permissions.*another?.*')
        self.sendline('n')

    def add(self, key, value, desc, add_more=False):
        """Delete any existing value for `key`, then put a fresh one."""
        delete = DeleteAction(self.extra_args)
        delete.delete(key)
        self._enter_param(key, value, desc, '.*Please input a PS Name.*', '.*another.*')
        self.sendline('y' if add_more else 'n')

    def add_another(self, key, value, desc, add_more=True):
        """Put an additional parameter from an already-running put session."""
        print(f"Adding another: {key} -> {value}")
        self._enter_param(key, value, desc, '.*PS Name.*', '.*Add another.*')
        self.sendline('y' if add_more else 'n')
|
dbdxnuliba/IHMC-
|
ihmc-common-walking-control-modules/src/test/java/us/ihmc/commonWalkingControlModules/dynamicPlanning/slipJumping/costs/SLIPDesiredTrackingCostTest.java
|
<reponame>dbdxnuliba/IHMC-
package us.ihmc.commonWalkingControlModules.dynamicPlanning.slipJumping.costs;
import org.ejml.data.DenseMatrix64F;
import org.junit.Assert;
import org.junit.Test;
import us.ihmc.commonWalkingControlModules.dynamicPlanning.TrackingCostFunctionTest;
import us.ihmc.commonWalkingControlModules.dynamicPlanning.slipJumping.SLIPState;
import us.ihmc.continuousIntegration.ContinuousIntegrationAnnotations.ContinuousIntegrationTest;
import us.ihmc.robotics.random.RandomGeometry;
import us.ihmc.trajectoryOptimization.LQTrackingCostFunction;
import java.util.Random;
import static us.ihmc.commonWalkingControlModules.dynamicPlanning.slipJumping.SLIPState.*;
/**
 * Verifies {@link SLIPDesiredTrackingCost}: the cost value is checked against a
 * hand-computed weighted sum of squared tracking errors for both the FLIGHT and
 * STANCE hybrid states, and the gradients/Hessians are checked numerically by
 * the base class.
 */
public class SLIPDesiredTrackingCostTest extends TrackingCostFunctionTest<SLIPState>
{
   /** Two hybrid states are exercised: flight and stance. */
   public int getNumberOfStates()
   {
      return 2;
   }

   public int getStateVectorSize()
   {
      return SLIPState.stateVectorSize;
   }

   public int getControlVectorSize()
   {
      return SLIPState.controlVectorSize;
   }

   public int getConstantVectorSize()
   {
      return SLIPState.constantVectorSize;
   }

   /** Maps hybrid state index 0 to FLIGHT and everything else to STANCE. */
   public SLIPState getHybridState(int hybridStateIndex)
   {
      return hybridStateIndex == 0 ? SLIPState.FLIGHT : SLIPState.STANCE;
   }

   public LQTrackingCostFunction<SLIPState> getCostFunction()
   {
      return new SLIPDesiredTrackingCost();
   }

   /**
    * Squared tracking error of entry {@code index} between two vectors.
    * Factored out so each expected-cost term below pairs one weight with one
    * index on a single short line, which makes transposition typos obvious.
    */
   private static double errSq(DenseMatrix64F current, DenseMatrix64F desired, int index)
   {
      double delta = current.get(index) - desired.get(index);
      return delta * delta;
   }

   @Override
   @ContinuousIntegrationTest(estimatedDuration = 0.0)
   @Test(timeout = 30000)
   public void testCost()
   {
      LQTrackingCostFunction<SLIPState> costFunction = getCostFunction();

      Random random = new Random(1738L);
      DenseMatrix64F currentState = RandomGeometry.nextDenseMatrix64F(random, stateVectorSize, 1);
      DenseMatrix64F currentControl = RandomGeometry.nextDenseMatrix64F(random, controlVectorSize, 1);
      DenseMatrix64F desiredState = RandomGeometry.nextDenseMatrix64F(random, getStateVectorSize(), 1);
      DenseMatrix64F desiredControl = RandomGeometry.nextDenseMatrix64F(random, getControlVectorSize(), 1);
      DenseMatrix64F constants = RandomGeometry.nextDenseMatrix64F(random, getConstantVectorSize(), 1);

      // Flight: each state/control error is weighted by the flight gains.
      double cost = costFunction.getCost(SLIPState.FLIGHT, currentControl, currentState, desiredControl, desiredState, constants);

      double expectedCost = SLIPDesiredTrackingCost.qXFlight * errSq(currentState, desiredState, x);
      expectedCost += SLIPDesiredTrackingCost.qYFlight * errSq(currentState, desiredState, y);
      expectedCost += SLIPDesiredTrackingCost.qZFlight * errSq(currentState, desiredState, z);
      expectedCost += SLIPDesiredTrackingCost.qThetaXFlight * errSq(currentState, desiredState, thetaX);
      expectedCost += SLIPDesiredTrackingCost.qThetaYFlight * errSq(currentState, desiredState, thetaY);
      expectedCost += SLIPDesiredTrackingCost.qThetaZFlight * errSq(currentState, desiredState, thetaZ);
      expectedCost += SLIPDesiredTrackingCost.qXDotFlight * errSq(currentState, desiredState, xDot);
      expectedCost += SLIPDesiredTrackingCost.qYDotFlight * errSq(currentState, desiredState, yDot);
      expectedCost += SLIPDesiredTrackingCost.qZDotFlight * errSq(currentState, desiredState, zDot);
      expectedCost += SLIPDesiredTrackingCost.qThetaDotXFlight * errSq(currentState, desiredState, thetaXDot);
      expectedCost += SLIPDesiredTrackingCost.qThetaDotYFlight * errSq(currentState, desiredState, thetaYDot);
      expectedCost += SLIPDesiredTrackingCost.qThetaDotZFlight * errSq(currentState, desiredState, thetaZDot);

      expectedCost += SLIPDesiredTrackingCost.rFxFlight * errSq(currentControl, desiredControl, fx);
      expectedCost += SLIPDesiredTrackingCost.rFyFlight * errSq(currentControl, desiredControl, fy);
      expectedCost += SLIPDesiredTrackingCost.rFzFlight * errSq(currentControl, desiredControl, fz);
      expectedCost += SLIPDesiredTrackingCost.rTauXFlight * errSq(currentControl, desiredControl, tauX);
      expectedCost += SLIPDesiredTrackingCost.rTauYFlight * errSq(currentControl, desiredControl, tauY);
      expectedCost += SLIPDesiredTrackingCost.rTauZFlight * errSq(currentControl, desiredControl, tauZ);
      expectedCost += SLIPDesiredTrackingCost.rXfFlight * errSq(currentControl, desiredControl, xF);
      expectedCost += SLIPDesiredTrackingCost.rYfFlight * errSq(currentControl, desiredControl, yF);
      expectedCost += SLIPDesiredTrackingCost.rKFlight * errSq(currentControl, desiredControl, k);

      Assert.assertEquals(expectedCost, cost, 1e-7);

      // Stance: same structure, stance gains.
      cost = costFunction.getCost(SLIPState.STANCE, currentControl, currentState, desiredControl, desiredState, constants);

      expectedCost = SLIPDesiredTrackingCost.qXStance * errSq(currentState, desiredState, x);
      expectedCost += SLIPDesiredTrackingCost.qYStance * errSq(currentState, desiredState, y);
      expectedCost += SLIPDesiredTrackingCost.qZStance * errSq(currentState, desiredState, z);
      expectedCost += SLIPDesiredTrackingCost.qThetaXStance * errSq(currentState, desiredState, thetaX);
      expectedCost += SLIPDesiredTrackingCost.qThetaYStance * errSq(currentState, desiredState, thetaY);
      expectedCost += SLIPDesiredTrackingCost.qThetaZStance * errSq(currentState, desiredState, thetaZ);
      expectedCost += SLIPDesiredTrackingCost.qXDotStance * errSq(currentState, desiredState, xDot);
      expectedCost += SLIPDesiredTrackingCost.qYDotStance * errSq(currentState, desiredState, yDot);
      expectedCost += SLIPDesiredTrackingCost.qZDotStance * errSq(currentState, desiredState, zDot);
      expectedCost += SLIPDesiredTrackingCost.qThetaDotXStance * errSq(currentState, desiredState, thetaXDot);
      expectedCost += SLIPDesiredTrackingCost.qThetaDotYStance * errSq(currentState, desiredState, thetaYDot);
      expectedCost += SLIPDesiredTrackingCost.qThetaDotZStance * errSq(currentState, desiredState, thetaZDot);

      expectedCost += SLIPDesiredTrackingCost.rFxStance * errSq(currentControl, desiredControl, fx);
      expectedCost += SLIPDesiredTrackingCost.rFyStance * errSq(currentControl, desiredControl, fy);
      expectedCost += SLIPDesiredTrackingCost.rFzStance * errSq(currentControl, desiredControl, fz);
      expectedCost += SLIPDesiredTrackingCost.rTauXStance * errSq(currentControl, desiredControl, tauX);
      expectedCost += SLIPDesiredTrackingCost.rTauYStance * errSq(currentControl, desiredControl, tauY);
      expectedCost += SLIPDesiredTrackingCost.rTauZStance * errSq(currentControl, desiredControl, tauZ);
      expectedCost += SLIPDesiredTrackingCost.rXfStance * errSq(currentControl, desiredControl, xF);
      expectedCost += SLIPDesiredTrackingCost.rYfStance * errSq(currentControl, desiredControl, yF);
      expectedCost += SLIPDesiredTrackingCost.rKStance * errSq(currentControl, desiredControl, k);

      Assert.assertEquals(expectedCost, cost, 1e-7);
   }

   @Override
   @ContinuousIntegrationTest(estimatedDuration = 0.0)
   @Test(timeout = 30000)
   public void testCostStateGradientNumerically()
   {
      super.testCostStateGradientNumerically();
   }

   @Override
   @ContinuousIntegrationTest(estimatedDuration = 0.0)
   @Test(timeout = 30000)
   public void testCostControlGradientNumerically()
   {
      super.testCostControlGradientNumerically();
   }

   @Override
   @ContinuousIntegrationTest(estimatedDuration = 0.0)
   @Test(timeout = 30000)
   public void testCostStateHessianNumerically()
   {
      super.testCostStateHessianNumerically();
   }

   @Override
   @ContinuousIntegrationTest(estimatedDuration = 0.0)
   @Test(timeout = 30000)
   public void testCostControlHessianNumerically()
   {
      super.testCostControlHessianNumerically();
   }

   @Override
   @ContinuousIntegrationTest(estimatedDuration = 0.0)
   @Test(timeout = 30000)
   public void testCostStateControlHessianNumerically()
   {
      super.testCostStateControlHessianNumerically();
   }
}
|
tiagosm1/Python_Nilo_Ney
|
exercicios_resolvidos3/exercicios3/capitulo 04/exercicio-04-10.py
|
##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: <NAME>
# Editora Novatec (c) 2010-2020
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Terceira Edição - Janeiro/2019 - ISBN 978-85-7522-718-3
#
# Site: https://python.nilo.pro.br/
#
# Arquivo: exercicios3\capitulo 04\exercicio-04-10.py
##############################################################################
consumo = int(input("Consumo em kWh: "))
tipo = input("Tipo da instalação (R, C ou I): ")
if tipo == "R":
if consumo <= 500:
preço = 0.40
else:
preço = 0.65
elif tipo == "I":
if consumo <= 5000:
preço = 0.55
else:
preço = 0.60
elif tipo == "C":
if consumo <= 1000:
preço = 0.55
else:
preço = 0.60
else:
preço = 0
print("Erro ! Tipo de instalação desconhecido!")
custo = consumo * preço
print(f"Valor a pagar: R$ {custo:7.2f}")
|
macminix/MacMinix
|
src/fs/const.h
|
/* Minix file-system compile-time constants: in-core table sizes, on-disk
 * magic numbers and mode flags, and sizes derived from the disk layout.
 * The "Derived sizes" below must stay consistent with the on-disk format. */

/* Tables sizes */
#define NR_ZONE_NUMS 9 /* # zone numbers in an inode */
#define NR_FILPS 64 /* # slots in filp table */
#define I_MAP_SLOTS 8 /* max # of blocks in the inode bit map */
#define ZMAP_SLOTS 8 /* max # of blocks in the zone bit map */
#define NR_INODES 32 /* # slots in "in core" inode table */
#define NR_SUPERS 5 /* # slots in super block table */
#define FS_STACK_BYTES (272 * sizeof (char *)) /* size of file system stack */

/* Miscellaneous constants */
#define SUPER_MAGIC 0x137F /* magic number contained in super-block */
#define SU_UID (uid_t) 0 /* super_user's uid_t */
#define SYS_UID (uid_t) 0 /* uid_t for processes MM and INIT */
#define SYS_GID (gid_t) 0 /* gid_t for processes MM and INIT */
#define NORMAL 0 /* forces get_block to do disk read */
#define NO_READ 1 /* prevents get_block from doing disk read */
#define PREFETCH 2 /* tells get_block not to read or mark dev */
#define XPIPE -(NR_TASKS+1) /* used in fp_task when suspended on pipe */
#define XOPEN -(NR_TASKS+2) /* used in fp_task when suspended in open */
#define NO_BIT (bit_nr) 0 /* returned by alloc_bit() to signal failure */
#define DUP_MASK 0100 /* mask to distinguish dup2 from dup */

/* Modes for search_dir(). */
#define LOOK_UP 0 /* tells search_dir to lookup string */
#define ENTER 1 /* tells search_dir to make dir entry */
#define DELETE 2 /* tells search_dir to delete entry */

#define CLEAN 0 /* disk and memory copies identical */
#define DIRTY 1 /* disk and memory copies differ */

/* Inode time-update flag bits (octal). */
#define ATIME 002 /* set if atime field needs updating */
#define CTIME 004 /* set if ctime field needs updating */
#define MTIME 010 /* set if mtime field needs updating */

#define BOOT_BLOCK (block_nr)0 /* block number of boot block */
#define SUPER_BLOCK (block_nr)1 /* block number of super block */
#define ROOT_INODE (ino_t)1 /* inode number for root directory */
#define INFO 2 /* where in data_org is info from build */
#define END_OF_FILE -104 /* eof detected */

/* Derived sizes */
#define ZONE_NUM_SIZE sizeof(zone_nr) /* # bytes in zone nr */
#define NR_DZONE_NUM (NR_ZONE_NUMS-2) /* # zones in inode */
#define DIR_ENTRY_SIZE sizeof(dir_struct) /* # bytes/dir entry */
#define INODES_PER_BLOCK (BLOCK_SIZE/INODE_SIZE) /* # inodes/disk blk */
#define INODE_SIZE (sizeof (d_inode)) /* bytes in disk inode */
#define NR_DIR_ENTRIES (BLOCK_SIZE/DIR_ENTRY_SIZE) /* # dir entries/block */
#define NR_INDIRECTS (BLOCK_SIZE/ZONE_NUM_SIZE) /* # zones/indir block */
#define INTS_PER_BLOCK (BLOCK_SIZE/sizeof(int)) /* # integers/block */
#define SUPER_SIZE sizeof(struct super_block) /* super_block size */
#define PIPE_SIZE (NR_DZONE_NUM*BLOCK_SIZE) /* pipe size in bytes */
#define MAX_ZONES (NR_DZONE_NUM+NR_INDIRECTS+(long)NR_INDIRECTS*NR_INDIRECTS)
/* max zones in a file */

/* Redirect printf() to printk() — presumably so the FS does not pull in
 * user-space stdio; NOTE(review): confirm against the build's print setup. */
#define printf printk
|
iMats/Singularity
|
SingularityService/src/test/java/com/hubspot/singularity/auth/SingularityGroupsAuthorizerTest.java
|
package com.hubspot.singularity.auth;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;
import com.google.common.collect.ImmutableSet;
import com.hubspot.singularity.RequestState;
import com.hubspot.singularity.RequestType;
import com.hubspot.singularity.SingularityAuthorizationScope;
import com.hubspot.singularity.SingularityRequest;
import com.hubspot.singularity.SingularityRequestBuilder;
import com.hubspot.singularity.SingularityRequestWithState;
import com.hubspot.singularity.SingularityUser;
import com.hubspot.singularity.config.AuthConfiguration;
import com.hubspot.singularity.config.MesosConfiguration;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.RequestManager;
import com.squarespace.jersey2.guice.JerseyGuiceUtils;
import java.util.Collections;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import javax.ws.rs.WebApplicationException;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link SingularityGroupsAuthorizer}. Covers behavior with
 * auth disabled, auth enabled, required groups, admin groups, JITA groups,
 * and authorization of read-write-group changes on a request.
 */
public class SingularityGroupsAuthorizerTest {

  static {
    // Prevent jersey2-guice from resolving a real ServiceLocator in unit tests.
    JerseyGuiceUtils.install((s, serviceLocator) -> null);
  }

  /** Shorthand for the scope used by nearly every assertion below. */
  private static final SingularityAuthorizationScope READ =
    SingularityAuthorizationScope.READ;

  /** Builds a configuration with authentication switched off entirely. */
  public static SingularityConfiguration buildAuthDisabledConfig() {
    AuthConfiguration authConfiguration = new AuthConfiguration();
    authConfiguration.setEnabled(false);
    SingularityConfiguration configuration = new SingularityConfiguration();
    configuration.setAuthConfiguration(authConfiguration);
    configuration.setMesosConfiguration(new MesosConfiguration());
    return configuration;
  }

  /** Auth enabled, with no required, admin, or JITA groups. */
  public static SingularityConfiguration buildAuthEnabledConfig() {
    return buildAuthEnabledConfig(
      Collections.<String>emptySet(),
      Collections.<String>emptySet(),
      Collections.<String>emptySet()
    );
  }

  /** Auth enabled with the given required/admin/JITA groups. */
  public static SingularityConfiguration buildAuthEnabledConfig(
    Set<String> requiredGroups,
    Set<String> adminGroups,
    Set<String> jitaGroups
  ) {
    AuthConfiguration authConfiguration = new AuthConfiguration();
    authConfiguration.setEnabled(true);
    authConfiguration.setRequiredGroups(requiredGroups);
    authConfiguration.setAdminGroups(adminGroups);
    authConfiguration.setJitaGroups(jitaGroups);
    SingularityConfiguration configuration = new SingularityConfiguration();
    configuration.setAuthConfiguration(authConfiguration);
    configuration.setMesosConfiguration(new MesosConfiguration());
    return configuration;
  }

  public static final SingularityRequest REQUEST_WITH_NO_GROUP =
    new SingularityRequestBuilder("test", RequestType.SERVICE).build();

  public static final SingularityRequest REQUEST_WITH_GROUP_A =
    new SingularityRequestBuilder("test_a", RequestType.SERVICE)
      .setGroup(Optional.of("a"))
      .build();

  // Same request id as REQUEST_WITH_GROUP_A but with its group changed to "b";
  // used to exercise authorization of a group *change*.
  public static final SingularityRequest REQUEST_WITH_GROUP_A_CHANGED_TO_B =
    new SingularityRequestBuilder("test_a", RequestType.SERVICE)
      .setGroup(Optional.of("b"))
      .build();

  public static final SingularityRequest REQUEST_WITH_GROUP_B =
    new SingularityRequestBuilder("test_b", RequestType.SERVICE)
      .setGroup(Optional.of("b"))
      .build();

  public static final SingularityUser NOT_LOGGED_IN = SingularityUser.DEFAULT_USER;

  public static final SingularityUser USER_GROUP_A = new SingularityUser(
    "test1", Optional.of("test user1"), Optional.of("<EMAIL>"), ImmutableSet.of("a"));

  public static final SingularityUser USER_GROUP_AB = new SingularityUser(
    "test2", Optional.of("test user2"), Optional.of("<EMAIL>"), ImmutableSet.of("a", "b"));

  public static final SingularityUser USER_GROUP_B = new SingularityUser(
    "test3", Optional.of("test user3"), Optional.of("<EMAIL>"), ImmutableSet.of("b"));

  public static final SingularityUser USER_GROUP_ADMIN = new SingularityUser(
    "admin", Optional.of("admin user"), Optional.of("<EMAIL>"), ImmutableSet.of("admin"));

  private final RequestManager requestManager;

  public SingularityGroupsAuthorizerTest() {
    // Mocked RequestManager that knows the three canonical requests above.
    requestManager = mock(RequestManager.class);
    stubRequest(REQUEST_WITH_NO_GROUP);
    stubRequest(REQUEST_WITH_GROUP_A);
    stubRequest(REQUEST_WITH_GROUP_B);
  }

  /** Stubs the mock so the given request is returned as ACTIVE by its id. */
  private void stubRequest(SingularityRequest request) {
    when(requestManager.getRequest(request.getId()))
      .thenReturn(
        Optional.of(new SingularityRequestWithState(request, RequestState.ACTIVE, 0))
      );
  }

  private SingularityAuthorizer buildAuthorizationHelper(
    SingularityConfiguration configuration
  ) {
    return new SingularityGroupsAuthorizer(requestManager, configuration);
  }

  /** Helper: auth enabled with only an "admin" admin group configured. */
  private SingularityAuthorizer adminOnlyHelper() {
    return buildAuthorizationHelper(
      buildAuthEnabledConfig(
        Collections.<String>emptySet(),
        ImmutableSet.of("admin"),
        Collections.<String>emptySet()
      )
    );
  }

  /** Helper: request "test_c" in group "c" with the given read-write groups. */
  private static SingularityRequest readWriteRequest(Set<String> readWriteGroups) {
    return new SingularityRequestBuilder("test_c", RequestType.SERVICE)
      .setGroup(Optional.of("c"))
      .setReadWriteGroups(Optional.of(readWriteGroups))
      .build();
  }

  @Test
  public void testAuthDisabled() {
    SingularityAuthorizer helper = buildAuthorizationHelper(buildAuthDisabledConfig());
    // anyone is authorized for requests with no group
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, NOT_LOGGED_IN, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, USER_GROUP_A, READ));
    // users with matching group(s) are authorized
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_A, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_AB, READ));
    // because auth is DISABLED, even users without a matching group are authorized
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_B, READ));
  }

  @Test
  public void testAuth() {
    SingularityAuthorizer helper = buildAuthorizationHelper(buildAuthEnabledConfig());
    // unauthenticated users are never authorized
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, NOT_LOGGED_IN, READ));
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, NOT_LOGGED_IN, READ));
    // any authenticated user is authorized for requests with no group
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, USER_GROUP_A, READ));
    // users with matching group(s) are authorized
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_A, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_AB, READ));
    // users without matching group(s) are not authorized
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_B, USER_GROUP_A, READ));
  }

  @Test
  public void testAuthRequiredGroup() {
    SingularityAuthorizer helper = buildAuthorizationHelper(
      buildAuthEnabledConfig(
        ImmutableSet.of("a"),
        Collections.<String>emptySet(),
        Collections.<String>emptySet()
      )
    );
    // users not in the required group are unauthorized
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, NOT_LOGGED_IN, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, USER_GROUP_A, READ));
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, USER_GROUP_B, READ));
    // user must be part of both the required group(s) and the request group
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_B, USER_GROUP_AB, READ));
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_B, USER_GROUP_A, READ));
    assertFalse(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_B, USER_GROUP_B, READ));
  }

  @Test
  public void testAuthAdminGroup() {
    SingularityAuthorizer helper = adminOnlyHelper();
    // only users in the admin group have admin authorization
    assertFalse(helper.hasAdminAuthorization(NOT_LOGGED_IN));
    assertFalse(helper.hasAdminAuthorization(USER_GROUP_A));
    assertFalse(helper.hasAdminAuthorization(USER_GROUP_AB));
    assertTrue(helper.hasAdminAuthorization(USER_GROUP_ADMIN));
    // users in the admin group have access to everything
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, USER_GROUP_ADMIN, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_ADMIN, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_B, USER_GROUP_ADMIN, READ));
  }

  @Test
  public void testAuthJitaGroup() {
    SingularityAuthorizer helper = buildAuthorizationHelper(
      buildAuthEnabledConfig(
        Collections.<String>emptySet(),
        ImmutableSet.of("admin"),
        ImmutableSet.of("b")
      )
    );
    // users in JITA group(s) are authorized for all requests...
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_NO_GROUP, USER_GROUP_B, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_A, USER_GROUP_B, READ));
    assertTrue(helper.isAuthorizedForRequest(REQUEST_WITH_GROUP_B, USER_GROUP_B, READ));
    // ...but still are not admins
    assertFalse(helper.hasAdminAuthorization(USER_GROUP_B));
  }

  @Test
  public void testCheckAdminAuthorizationThrowsOnForbidden() {
    SingularityAuthorizer helper = adminOnlyHelper();
    Assertions.assertThrows(
      WebApplicationException.class,
      () -> helper.checkAdminAuthorization(USER_GROUP_A)
    );
  }

  @Test
  public void testCheckAdminAuthorizationDoesntThrowOnAuthorized() {
    adminOnlyHelper().checkAdminAuthorization(USER_GROUP_ADMIN);
  }

  @Test
  public void testCheckForAuthorizationByTaskIdDoesntThrowOnAuthorized() {
    adminOnlyHelper().checkForAuthorizationByRequestId(
      REQUEST_WITH_GROUP_A.getId(), USER_GROUP_A, READ);
  }

  @Test
  public void testCheckForAuthorizationByTaskIdThrowsOnForbidden() {
    SingularityAuthorizer helper = adminOnlyHelper();
    Assertions.assertThrows(
      WebApplicationException.class,
      () -> helper.checkForAuthorizationByRequestId(REQUEST_WITH_GROUP_A.getId(), USER_GROUP_B, READ)
    );
  }

  @Test
  public void testCheckForAuthorizationDoesntThrowOnAuthorized() {
    adminOnlyHelper().checkForAuthorization(REQUEST_WITH_GROUP_A, USER_GROUP_A, READ);
  }

  @Test
  public void testCheckForAuthorizationThrowsOnForbidden() {
    SingularityAuthorizer helper = adminOnlyHelper();
    Assertions.assertThrows(
      WebApplicationException.class,
      () -> helper.checkForAuthorization(REQUEST_WITH_GROUP_A, USER_GROUP_B, READ)
    );
  }

  @Test
  public void testCheckForAuthorizationDoesntThrowOnValidChange() {
    // a user in both the old ("a") and new ("b") group may change the group
    adminOnlyHelper().checkForAuthorization(REQUEST_WITH_GROUP_A_CHANGED_TO_B, USER_GROUP_AB, READ);
  }

  @Test
  public void testCheckForAuthorizationThrowsOnForbiddenChange() {
    SingularityAuthorizer helper = adminOnlyHelper();
    // a user only in the old group "a" may not move the request into group "b"
    Assertions.assertThrows(
      WebApplicationException.class,
      () -> helper.checkForAuthorization(REQUEST_WITH_GROUP_A_CHANGED_TO_B, USER_GROUP_A, READ)
    );
  }

  @Test
  public void itAllowsUserInReadWriteGroupsToUpdateReadWriteGroups() {
    SingularityAuthorizer helper = buildAuthorizationHelper(buildAuthEnabledConfig());
    Set<String> oldGroups = new HashSet<>();
    oldGroups.add("a");
    Set<String> newGroups = new HashSet<>(oldGroups);
    newGroups.add("b");
    // user "a" keeps access after the change, so the update is allowed
    helper.checkForAuthorizedChanges(readWriteRequest(newGroups), readWriteRequest(oldGroups), USER_GROUP_A);
  }

  @Test
  public void itRestrictsAUserFromUpdatingGroupsIfTheyWillNotHaveAccess() {
    SingularityAuthorizer helper = buildAuthorizationHelper(buildAuthEnabledConfig());
    Set<String> oldGroups = new HashSet<>();
    oldGroups.add("a");
    Set<String> newGroups = new HashSet<>();
    newGroups.add("b");
    // user "a" would lose access after the change, so the update must be rejected
    Assertions.assertThrows(
      WebApplicationException.class,
      () -> helper.checkForAuthorizedChanges(readWriteRequest(newGroups), readWriteRequest(oldGroups), USER_GROUP_A)
    );
  }
}
|
ilyazavadskij/BKP
|
ts-backend/voice/src/main/java/com/stc/trawl/voice/data/model/VoiceModelSearchRequest.java
|
package com.stc.trawl.voice.data.model;
import lombok.Builder;
import lombok.Data;
import java.net.URI;
import java.util.List;
/**
 * Request payload for a voice-model search.
 * Lombok generates getters/setters, equals/hashCode/toString ({@code @Data})
 * and a builder ({@code @Builder}).
 */
@Data
@Builder
public class VoiceModelSearchRequest {
    // URIs referencing segmentation results to search against
    // (presumed from the field name — confirm with the API caller).
    private List<URI> segmentationUris;
}
|
Sk3pper/AASSS-PoC
|
Scripts/Dealer/CustomThreads/Utils/Utils_function.py
|
<reponame>Sk3pper/AASSS-PoC<filename>Scripts/Dealer/CustomThreads/Utils/Utils_function.py
import os
import socket
import datetime
from CustomThreads.groups import parametres
from CustomThreads.PedersenUtilities.VSS import genRand
from CustomThreads.groups import MODP2048
# load group
# Module-wide singletons: the 2048-bit MODP group and the protocol parameters.
g2048 = MODP2048()
par = parametres()
# Protocol separators and on-disk layout, aliased locally for brevity.
CHAR_MSG_SPLIT = par.CHAR_MSG_SPLIT
PATH_DATA_USERS = par.PATH_DATA_USERS
CHAR_COORD_SPLIT = par.CHAR_COORD_SPLIT
WHICH_PHASE = par.WHICH_PHASE
FILE_NAME_COMM = par.FILE_NAME_COMM
FILE_NAME_SHARE = par.FILE_NAME_SHARE
CHAR_DATA_SPLIT = par.CHAR_DATA_SPLIT
FILE_NAME_NW_INFORMATION = par.FILE_NAME_NW_INFORMATION
FILE_NAME_MC = par.FILE_NAME_MC
# Network endpoints of the parties in the secret-sharing protocol.
IP_SHAREHOLDERS = par.IP_SHAREHOLDERS
PORT_SHAREHOLDERS = par.PORT_SHAREHOLDERS
PORT_EXTERNAL_SERVER = par.PORT_EXTERNAL_SERVER
IP_EXTERNAL_SERVER = par.IP_EXTERNAL_SERVER
IP_LOG = par.IP_LOG
PORT_LOG = par.PORT_LOG
CHAR_LOG_DATA_SPLIT = par.CHAR_LOG_DATA_SPLIT
CHAR_LOG_MSG_SPLIT = par.CHAR_LOG_MSG_SPLIT
DELIM_LOG = par. DELIM_LOG
# Protocol status codes (authoritative definitions live in parametres).
COD200 = par.COD200 # everything went well
COD300 = par.COD300 # saving did not go well
COD400 = par.COD400 # xi != h(xi)
COD450 = par.COD450 # (s_i,t_i) given by DEALER TO SHAREHOLDER is not consistent with commitments
COD500 = par.COD500 # (s_i,t_i) given by SHAREHOLDER TO DEALER is not consistent with commitments
COD550 = par.COD550 # S' != \overline(S')
COD999 = par.COD999 # error in SHA1-ExternalServer
COD888 = par.COD888 # error in REC1 ES
BUFFER_SIZE_REQUEST_MESSAGES = par.BUFFER_SIZE_REQUEST_MESSAGES
WHICH_LOG = par.WHICH_LOG
def write_data(path_file_name, data):
    """Append ``data`` to ``path_file_name`` and report the write on stdout.

    :type path_file_name: str
    :type data: str
    :returns: True once the data has been written
    """
    # Mode "a" creates the file when it does not exist, so the original
    # exists/not-exists branches (which were otherwise identical) collapse
    # into a single append.
    with open(path_file_name, "a") as f:
        f.write(data)
    # data[:-2] strips the 2-character record terminator before echoing
    print("    " + str(data[:-2]) + " are saved in path:" + path_file_name)
    return True
def check_dir(path_user):
    """Ensure the per-user directory exists, creating it when missing."""
    if os.path.isdir(path_user):
        return
    os.makedirs(path_user)
def send_data(IP, PORT, out_data):
    """Send ``out_data`` over a fresh TCP connection and return the raw reply.

    Blocks until up to BUFFER_SIZE_REQUEST_MESSAGES bytes are received.
    """
    print("    For " + IP + ": " + " out_data = " + out_data)
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client.connect((IP, PORT))
        client.sendall(bytes(out_data).encode("utf-8"))
        in_data = client.recv(BUFFER_SIZE_REQUEST_MESSAGES)
    finally:
        # The original never closed the socket, leaking one fd per call.
        client.close()
    print("    From " + IP + ": " + str(in_data.decode()))
    return in_data
def readPenultimeLine(pathFiletoRead):
    """Return the LAST line of the file.

    NOTE(review): despite the name suggesting "penultimate", this has always
    returned the final line (lineList[len-1]); the name is kept because other
    functions in this module call it.
    """
    with open(pathFiletoRead, "r") as fileHandle:
        lineList = fileHandle.readlines()
    return lineList[-1]
def readIntPenultimeLine(pathFiletoRead):
    """Read the last line of the file and parse it as a list of ints."""
    fields = split(readPenultimeLine(pathFiletoRead), CHAR_DATA_SPLIT)
    # drop the trailing newline character from the final field
    fields[-1] = fields[-1][:-1]
    return strToIntList(fields)
def computeCoordinate():
    """Draw g2048.n distinct random coordinates, sorted ascending, and
    return them joined by CHAR_COORD_SPLIT."""
    coordinate = []
    while len(coordinate) < g2048.n:
        r = genRand(g2048.MAX_COORDINATE)
        # rejection sampling: retry until the draw is unique
        if r not in coordinate:
            coordinate.append(r)
    coordinate.sort()
    return CHAR_COORD_SPLIT.join(str(xi) for xi in coordinate)
def split(data, char):
    # Thin wrapper around str.split used throughout the protocol code.
    return data.split(char)
def strToIntList(list):
    """Convert a list of numeric strings to ints; None entries become 0.

    (The parameter shadows the builtin ``list``; the name is kept so that
    existing keyword-style callers keep working.)
    """
    return [int(item) if item is not None else 0 for item in list]
def logMsg(From, To, Payload, Phase, id_user):
    """Send a MSG-type record to the log server (fire-and-forget, no reply).

    Record layout: MSG-tag | timestamp | From | To | Payload | Phase | id_user
    """
    timestamp = str(datetime.datetime.now())
    out_data = WHICH_LOG[0] + CHAR_LOG_MSG_SPLIT + \
               str(timestamp) + CHAR_LOG_DATA_SPLIT + \
               str(From) + CHAR_LOG_DATA_SPLIT + \
               str(To) + CHAR_LOG_DATA_SPLIT + \
               str(Payload) + CHAR_LOG_DATA_SPLIT + \
               str(Phase) + CHAR_LOG_DATA_SPLIT + \
               str(id_user) + DELIM_LOG
    print("    Send to " + IP_LOG + ": " + " out_data = " + out_data)
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client.connect((IP_LOG, PORT_LOG))
        client.sendall(bytes(out_data).encode("utf-8"))
    finally:
        # close even when connect/sendall raises (the original leaked the
        # socket on any exception)
        client.close()
def logError(Phase, Actor, CODError, Payload, id_user):
    """Send an ERROR-type record to the log server (fire-and-forget).

    Record layout: ERROR-tag | timestamp | Phase | Actor | CODError | Payload | id_user
    """
    timestamp = str(datetime.datetime.now())
    out_data = WHICH_LOG[1] + CHAR_LOG_MSG_SPLIT + \
               str(timestamp) + CHAR_LOG_DATA_SPLIT + \
               str(Phase) + CHAR_LOG_DATA_SPLIT + \
               str(Actor) + CHAR_LOG_DATA_SPLIT + \
               str(CODError) + CHAR_LOG_DATA_SPLIT + \
               str(Payload) + CHAR_LOG_DATA_SPLIT + \
               str(id_user) + DELIM_LOG
    print("    Send to " + IP_LOG + ": " + " out_data = " + out_data)
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client.connect((IP_LOG, PORT_LOG))
        client.sendall(bytes(out_data).encode("utf-8"))
    finally:
        # close even when connect/sendall raises (the original leaked the
        # socket on any exception)
        client.close()
|
smdb21/java-miape-api
|
src/main/java/org/proteored/miapeapi/xml/mzidentml_1_1/IdentifiedPeptideImpl.java
|
<gh_stars>1-10
package org.proteored.miapeapi.xml.mzidentml_1_1;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.proteored.miapeapi.cv.Accession;
import org.proteored.miapeapi.cv.ControlVocabularyManager;
import org.proteored.miapeapi.cv.ControlVocabularyTerm;
import org.proteored.miapeapi.cv.msi.Score;
import org.proteored.miapeapi.exceptions.IllegalMiapeArgumentException;
import org.proteored.miapeapi.interfaces.msi.IdentifiedPeptide;
import org.proteored.miapeapi.interfaces.msi.IdentifiedProtein;
import org.proteored.miapeapi.interfaces.msi.InputData;
import org.proteored.miapeapi.interfaces.msi.PeptideModification;
import org.proteored.miapeapi.interfaces.msi.PeptideScore;
import org.proteored.miapeapi.xml.mzidentml.util.Utils;
import org.proteored.miapeapi.xml.mzidentml_1_1.util.MzidentmlControlVocabularyXmlFactory;
import org.proteored.miapeapi.xml.util.MiapeXmlUtil;
import org.proteored.miapeapi.xml.util.parallel.MapSync;
import gnu.trove.set.hash.THashSet;
import uk.ac.ebi.jmzidml.model.mzidml.AbstractParam;
import uk.ac.ebi.jmzidml.model.mzidml.CvParam;
import uk.ac.ebi.jmzidml.model.mzidml.DBSequence;
import uk.ac.ebi.jmzidml.model.mzidml.Modification;
import uk.ac.ebi.jmzidml.model.mzidml.Peptide;
import uk.ac.ebi.jmzidml.model.mzidml.PeptideEvidence;
import uk.ac.ebi.jmzidml.model.mzidml.PeptideEvidenceRef;
import uk.ac.ebi.jmzidml.model.mzidml.ProteinDetectionHypothesis;
import uk.ac.ebi.jmzidml.model.mzidml.SpectrumIdentificationItem;
import uk.ac.ebi.jmzidml.model.mzidml.SubstitutionModification;
public class IdentifiedPeptideImpl implements IdentifiedPeptide {
private static final String CV_SEARCH_ENGINE_SCORE = "MS:1001153";
private final InputData inputData;
private final MzidentmlControlVocabularyXmlFactory cvUtil;
private final String spectrumRef;
private final int identifier;
private List<IdentifiedProtein> identifiedProteins;
private static Logger log = Logger.getLogger("log4j.logger.org.proteored");
private final Peptide peptide;
private final Set<PeptideModification> modifications;
private final Set<PeptideScore> scores;
private final String massDeviation;
private final String charge;
private final int rank;
private final String RT;
private final MapSync<String, ProteinDetectionHypothesis> proteinDetectionHypotesisWithPeptideEvidence;
private final Map<String, IdentifiedProtein> proteinHash;
public IdentifiedPeptideImpl(SpectrumIdentificationItem spectIdentItemXML, Peptide peptide, InputData inputData,
String spectrumRef, Integer peptideID, ControlVocabularyManager cvManager,
MapSync<String, ProteinDetectionHypothesis> proteinDetectionHypotesisWithPeptideEvidence2,
Map<String, IdentifiedProtein> proteinHash2, String RT) {
if (peptide == null || peptide.getPeptideSequence() == null || "".equals(peptide.getPeptideSequence()))
throw new IllegalMiapeArgumentException("The peptide should have a peptide sequence!");
this.peptide = peptide;
this.inputData = inputData;
cvUtil = new MzidentmlControlVocabularyXmlFactory(null, cvManager);
identifier = peptideID;
proteinDetectionHypotesisWithPeptideEvidence = proteinDetectionHypotesisWithPeptideEvidence2;
proteinHash = proteinHash2;
identifiedProteins = getProteinsFromThisPeptide(spectIdentItemXML);
// this.identifiedProteins =
// getProteinsFromThisPeptide2(spectIdentItemXML);
modifications = getModificationsFromThisPeptide(spectIdentItemXML);
scores = getScoresFromThisPeptide(spectIdentItemXML, peptide, cvManager);
massDeviation = getMassDesviationFromThisPeptide(spectIdentItemXML);
charge = getChargeFromThisPeptide(spectIdentItemXML);
rank = getRankFromThisPeptide(spectIdentItemXML);
this.RT = RT;
// In case of spectrumRef is null, get it from the ID of the peptide. If
// it is not null, no changes are made.
this.spectrumRef = getSpectrumRefFromThisPeptide(spectrumRef);
}
public void addProtein(IdentifiedProtein protein) {
if (identifiedProteins == null)
identifiedProteins = new ArrayList<IdentifiedProtein>();
identifiedProteins.add(protein);
}
private int getRankFromThisPeptide(SpectrumIdentificationItem spectIdentItemXML) {
return spectIdentItemXML.getRank();
}
private String getChargeFromThisPeptide(SpectrumIdentificationItem spectIdentItemXML) {
return String.valueOf(spectIdentItemXML.getChargeState());
}
private String getMassDesviationFromThisPeptide(SpectrumIdentificationItem spectrumItemXML) {
StringBuilder sb = new StringBuilder();
Double calculatedMassToCharge = spectrumItemXML.getCalculatedMassToCharge();
if (calculatedMassToCharge != null) {
sb.append(MiapeXmlUtil.CALCULATED_MZ + "=");
sb.append(calculatedMassToCharge);
sb.append(MiapeXmlUtil.TERM_SEPARATOR);
}
double experimentalMassToCharge = spectrumItemXML.getExperimentalMassToCharge();
if (experimentalMassToCharge > 0) {
sb.append(MiapeXmlUtil.EXPERIMENTAL_MZ + "=");
sb.append(experimentalMassToCharge);
}
return Utils.checkReturnedString(sb);
}
/**
* In case of spectrumRef is null, get it from the ID of the peptide
*
* @param spectrumRef
* @return
*/
private String getSpectrumRefFromThisPeptide(String spectrumRef) {
if (spectrumRef != null)
return spectrumRef;
String specRef = null;
if (peptide != null) {
String id = peptide.getId();
if (!id.equals("")) {
// TODO, support more formats?
String entireSpectrumRef = id;
// if the spectrumRef have the following format: peptide_x_y
// where x is the query number and y is the number of candidate
// of this query:
String regexp = "^peptide_(\\d+)_\\d+$";
if (Pattern.matches(regexp, entireSpectrumRef)) {
Pattern p = Pattern.compile(regexp);
Matcher m = p.matcher(entireSpectrumRef);
if (m.find()) {
specRef = m.group(1);
}
} else {
specRef = id;
}
// if the spectrumRef have the following format:
// IGGGEKLIVR%ACET_nterm:::::::::::%sample_0%cmpd_11555
// where the desired ref is the number after "cmpd_"
regexp = "^.*cmpd_(\\d+).*$";
if (Pattern.matches(regexp, entireSpectrumRef)) {
Pattern p = Pattern.compile(regexp);
Matcher m = p.matcher(entireSpectrumRef);
if (m.find()) {
specRef = m.group(1);
}
} else {
regexp = "^.*PEP_(\\d+).*$";
if (Pattern.matches(regexp, entireSpectrumRef)) {
Pattern p = Pattern.compile(regexp);
Matcher m = p.matcher(entireSpectrumRef);
if (m.find()) {
specRef = m.group(1);
}
} else {
// throw new IllegalMiapeArgumentException(
// "The spectrum ref is not recognized: "
// + entireSpectrumRef);
}
}
}
}
return specRef;
}
public static Set<PeptideScore> getScoresFromThisPeptide(SpectrumIdentificationItem spectrumItemXML,
Peptide peptideXML, ControlVocabularyManager cvManager) {
Set<PeptideScore> peptideScores = new THashSet<PeptideScore>();
// Scores in the SpectrumIdentificationItem
List<AbstractParam> params = spectrumItemXML.getParamGroup();
// Scores in the Peptide element
params.addAll(peptideXML.getParamGroup());
for (AbstractParam param : params) {
if (param instanceof CvParam) {
CvParam cvParam = (CvParam) param;
ControlVocabularyTerm cvTerm = cvManager.getCVTermByAccession(new Accession(cvParam.getAccession()),
Score.getInstance(cvManager));
if (cvTerm != null) {
peptideScores.add(new PeptideScoreImpl(cvTerm, param.getValue()));
}
} else {
final Accession controlVocabularyId = cvManager.getControlVocabularyId(param.getName(),
Score.getInstance(cvManager));
if (controlVocabularyId != null) {
final ControlVocabularyTerm cvTerm = cvManager.getCVTermByAccession(controlVocabularyId,
Score.getInstance(cvManager));
if (cvTerm != null) {
peptideScores.add(new PeptideScoreImpl(cvTerm, param.getValue()));
}
}
}
}
return peptideScores;
}
private Set<PeptideModification> getModificationsFromThisPeptide(SpectrumIdentificationItem spectrumItemXML) {
Peptide peptideXML = spectrumItemXML.getPeptide();
if (peptideXML == null) {
peptideXML = peptide;
}
if (peptideXML != null) {
Set<PeptideModification> modifications = new THashSet<PeptideModification>();
// Modifications
List<Modification> xmlModifications = peptideXML.getModification();
if (xmlModifications != null) {
for (Modification modification : xmlModifications) {
modifications.add(new PeptideModificationImpl(modification, cvUtil));
}
}
// Substitution modifications
List<SubstitutionModification> xmlSubstitutionModification = peptideXML.getSubstitutionModification();
if (xmlSubstitutionModification != null) {
for (SubstitutionModification substitutionModification : xmlSubstitutionModification) {
modifications.add(new PeptideModificationImpl(substitutionModification, cvUtil));
}
}
if (modifications.size() > 0)
return modifications;
}
return null;
}
private List<IdentifiedProtein> getProteinsFromThisPeptide(SpectrumIdentificationItem spectrumItemXML) {
if (spectrumItemXML.getId().equals(
"Hsilam-fmrKO1P17-ctx-p2-03_Spec_Hsilam-fmrKO1P17-ctx-p2-03-9493-TELEDTLDSTAAQQELR-2_TELEDTLDSTAAQQELR")) {
log.info(spectrumItemXML);
}
List<IdentifiedProtein> ret = new ArrayList<IdentifiedProtein>();
for (PeptideEvidenceRef peptideEvidenceRef : spectrumItemXML.getPeptideEvidenceRef()) {
final PeptideEvidence peptideEvidence = peptideEvidenceRef.getPeptideEvidence();
final DBSequence dbSequenceXML = peptideEvidence.getDBSequence();
ProteinDetectionHypothesis proteinHypotesisXML = proteinDetectionHypotesisWithPeptideEvidence
.get(peptideEvidence.getId());
// Only create the protein if has passed the threshold
if ((proteinHypotesisXML != null && proteinHypotesisXML.isPassThreshold()) || dbSequenceXML != null) {
Integer proteinID = MiapeXmlUtil.ProteinCounter.increaseCounter();
IdentifiedProtein protein = null;
// If the protein has been added previously, not add to the
// general proteinhash
if (!proteinHash.containsKey(dbSequenceXML.getAccession())) {
protein = new IdentifiedProteinImpl(dbSequenceXML, proteinHypotesisXML, proteinID,
cvUtil.getCvManager());
// log.info("adding protein " + protein.getAccession() + "/"
// +
// protein.getId()
// + " to the hash from peptide " + getSequence());
proteinHash.put(dbSequenceXML.getAccession(), protein);
} else {
protein = proteinHash.get(dbSequenceXML.getAccession());
}
// new 23-may-2013: add peptide to the protein
((IdentifiedProteinImpl) protein).addIdentifiedPeptide(this);
ret.add(protein);
}
}
return ret;
}
// private List<IdentifiedProtein> getProteinsFromThisPeptide2(
// SpectrumIdentificationItem spectrumItemXML) {
// List<IdentifiedProtein> ret = new ArrayList<IdentifiedProtein>();
// for (ProteinDetectionHypothesis proteinHypotesisXML :
// this.pdhFromPeptide) {
//
// final DBSequence dbSequenceXML = proteinHypotesisXML
// .getDBSequence();
//
// // Only create the protein if has passed the threshold
// if ((proteinHypotesisXML != null && proteinHypotesisXML
// .isPassThreshold()) || dbSequenceXML != null) {
// Integer proteinID = MiapeXmlUtil.ProteinCounter
// .increaseCounter();
// IdentifiedProtein protein = null;
// // If the protein has been added previously, not add to the
// // general proteinhash
// if (!this.proteinHash.containsKey(dbSequenceXML.getAccession())) {
// protein = new IdentifiedProteinImpl(dbSequenceXML,
// proteinHypotesisXML, proteinID,
// cvUtil.getCvManager());
// // log.info("adding protein " + protein.getAccession() + "/"
// // +
// // protein.getId()
// // + " to the hash from peptide " + getSequence());
// this.proteinHash.put(protein.getAccession(), protein);
// } else {
// protein = this.proteinHash
// .get(dbSequenceXML.getAccession());
// }
//
// // new 23-may-2013: add peptide to the protein
// ((IdentifiedProteinImpl) protein).addIdentifiedPeptide(this);
// ret.add(protein);
// }
//
// }
// return ret;
//
// }
/**
 * Renders this peptide as {@code "id-sequence(score)"}, where the reported
 * score is the value of the alphabetically-first score name so the output is
 * deterministic regardless of the iteration order of the underlying score set.
 * <p>
 * Fix: the previous implementation called {@code scoreNames.get(0)} and threw
 * {@link IndexOutOfBoundsException} when the peptide had no scores; it also
 * iterated the score set twice. Now a single pass tracks the minimal name, and
 * an empty score set renders as {@code "(null)"}.
 */
@Override
public String toString() {
    String firstName = null;
    String value = null;
    for (PeptideScore peptScore : getScores()) {
        final String name = peptScore.getName();
        if (firstName == null || name.compareTo(firstName) < 0) {
            firstName = name;
            value = peptScore.getValue();
        }
    }
    return getId() + "-" + getSequence() + "(" + value + ")";
}
/** @return the charge state of the identified peptide, as read from the source file. */
@Override
public String getCharge() {
    return charge;
}

/**
 * @return the mass deviation reported for this identification.
 * Note: the interface method name keeps the historical "Desviation" spelling,
 * so it cannot be renamed here without breaking callers.
 */
@Override
public String getMassDesviation() {
    return massDeviation;
}

/** @return the set of modifications attached to this peptide (may be empty). */
@Override
public Set<PeptideModification> getModifications() {
    return modifications;
}

/** @return the set of identification scores attached to this peptide (may be empty). */
@Override
public Set<PeptideScore> getScores() {
    return scores;
}
/**
 * Returns the amino-acid sequence of the underlying peptide element, or
 * {@code null} when no peptide is attached.
 */
@Override
public String getSequence() {
    return peptide == null ? null : peptide.getPeptideSequence();
}
/** @return the reference to the spectrum this identification was made from. */
@Override
public String getSpectrumRef() {
    return spectrumRef;
}

/** @return the input data (source file information) this identification came from. */
@Override
public InputData getInputData() {
    return inputData;
}

/** @return the rank of this peptide identification for its spectrum. */
@Override
public int getRank() {
    return rank;
}

/** @return the internal numeric identifier assigned to this peptide. */
@Override
public int getId() {
    return identifier;
}

/** @return the proteins this peptide has been mapped to. */
@Override
public List<IdentifiedProtein> getIdentifiedProteins() {
    return identifiedProteins;
}

/** @return the retention time in seconds, as a string as read from the source file. */
@Override
public String getRetentionTimeInSeconds() {
    return RT;
}
}
|
googleapis/googleapis-gen
|
google/cloud/retail/v2beta/google-cloud-retail-v2beta-java/proto-google-cloud-retail-v2beta-java/src/main/java/com/google/cloud/retail/v2beta/ImportProductsRequestOrBuilder.java
|
<gh_stars>1-10
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/import_config.proto
package com.google.cloud.retail.v2beta;
public interface ImportProductsRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.cloud.retail.v2beta.ImportProductsRequest)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Required.
* `projects/1234/locations/global/catalogs/default_catalog/branches/default_branch`
* If no updateMask is specified, requires products.create permission.
* If updateMask is specified, requires products.update permission.
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The parent.
*/
java.lang.String getParent();
/**
* <pre>
* Required.
* `projects/1234/locations/global/catalogs/default_catalog/branches/default_branch`
* If no updateMask is specified, requires products.create permission.
* If updateMask is specified, requires products.update permission.
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for parent.
*/
com.google.protobuf.ByteString
getParentBytes();
/**
* <pre>
* Unique identifier provided by client, within the ancestor
* dataset scope. Ensures idempotency and used for request deduplication.
* Server-generated if unspecified. Up to 128 characters long and must match
* the pattern: `[a-zA-Z0-9_]+`. This is returned as [Operation.name][] in
* [ImportMetadata][google.cloud.retail.v2beta.ImportMetadata].
* Only supported when
* [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2beta.ImportProductsRequest.reconciliation_mode]
* is set to `FULL`.
* </pre>
*
* <code>string request_id = 6;</code>
* @return The requestId.
*/
java.lang.String getRequestId();
/**
* <pre>
* Unique identifier provided by client, within the ancestor
* dataset scope. Ensures idempotency and used for request deduplication.
* Server-generated if unspecified. Up to 128 characters long and must match
* the pattern: `[a-zA-Z0-9_]+`. This is returned as [Operation.name][] in
* [ImportMetadata][google.cloud.retail.v2beta.ImportMetadata].
* Only supported when
* [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2beta.ImportProductsRequest.reconciliation_mode]
* is set to `FULL`.
* </pre>
*
* <code>string request_id = 6;</code>
* @return The bytes for requestId.
*/
com.google.protobuf.ByteString
getRequestIdBytes();
/**
* <pre>
* Required. The desired input location of the data.
* </pre>
*
* <code>.google.cloud.retail.v2beta.ProductInputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return Whether the inputConfig field is set.
*/
boolean hasInputConfig();
/**
* <pre>
* Required. The desired input location of the data.
* </pre>
*
* <code>.google.cloud.retail.v2beta.ProductInputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The inputConfig.
*/
com.google.cloud.retail.v2beta.ProductInputConfig getInputConfig();
/**
* <pre>
* Required. The desired input location of the data.
* </pre>
*
* <code>.google.cloud.retail.v2beta.ProductInputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
com.google.cloud.retail.v2beta.ProductInputConfigOrBuilder getInputConfigOrBuilder();
/**
* <pre>
* The desired location of errors incurred during the Import.
* </pre>
*
* <code>.google.cloud.retail.v2beta.ImportErrorsConfig errors_config = 3;</code>
* @return Whether the errorsConfig field is set.
*/
boolean hasErrorsConfig();
/**
* <pre>
* The desired location of errors incurred during the Import.
* </pre>
*
* <code>.google.cloud.retail.v2beta.ImportErrorsConfig errors_config = 3;</code>
* @return The errorsConfig.
*/
com.google.cloud.retail.v2beta.ImportErrorsConfig getErrorsConfig();
/**
* <pre>
* The desired location of errors incurred during the Import.
* </pre>
*
* <code>.google.cloud.retail.v2beta.ImportErrorsConfig errors_config = 3;</code>
*/
com.google.cloud.retail.v2beta.ImportErrorsConfigOrBuilder getErrorsConfigOrBuilder();
/**
* <pre>
* Indicates which fields in the provided imported 'products' to update. If
* not set, will by default update all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
* @return Whether the updateMask field is set.
*/
boolean hasUpdateMask();
/**
* <pre>
* Indicates which fields in the provided imported 'products' to update. If
* not set, will by default update all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
* @return The updateMask.
*/
com.google.protobuf.FieldMask getUpdateMask();
/**
* <pre>
* Indicates which fields in the provided imported 'products' to update. If
* not set, will by default update all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder();
/**
* <pre>
* The mode of reconciliation between existing products and the products to be
* imported. Defaults to
* [ReconciliationMode.INCREMENTAL][google.cloud.retail.v2beta.ImportProductsRequest.ReconciliationMode.INCREMENTAL].
* </pre>
*
* <code>.google.cloud.retail.v2beta.ImportProductsRequest.ReconciliationMode reconciliation_mode = 5;</code>
* @return The enum numeric value on the wire for reconciliationMode.
*/
int getReconciliationModeValue();
/**
* <pre>
* The mode of reconciliation between existing products and the products to be
* imported. Defaults to
* [ReconciliationMode.INCREMENTAL][google.cloud.retail.v2beta.ImportProductsRequest.ReconciliationMode.INCREMENTAL].
* </pre>
*
* <code>.google.cloud.retail.v2beta.ImportProductsRequest.ReconciliationMode reconciliation_mode = 5;</code>
* @return The reconciliationMode.
*/
com.google.cloud.retail.v2beta.ImportProductsRequest.ReconciliationMode getReconciliationMode();
/**
* <pre>
* Pub/Sub topic for receiving notification. If this field is set,
* when the import is finished, a notification will be sent to
* specified Pub/Sub topic. The message data will be JSON string of a
* [Operation][google.longrunning.Operation].
* Format of the Pub/Sub topic is `projects/{project}/topics/{topic}`.
* Only supported when
* [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2beta.ImportProductsRequest.reconciliation_mode]
* is set to `FULL`.
* </pre>
*
* <code>string notification_pubsub_topic = 7;</code>
* @return The notificationPubsubTopic.
*/
java.lang.String getNotificationPubsubTopic();
/**
* <pre>
* Pub/Sub topic for receiving notification. If this field is set,
* when the import is finished, a notification will be sent to
* specified Pub/Sub topic. The message data will be JSON string of a
* [Operation][google.longrunning.Operation].
* Format of the Pub/Sub topic is `projects/{project}/topics/{topic}`.
* Only supported when
* [ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2beta.ImportProductsRequest.reconciliation_mode]
* is set to `FULL`.
* </pre>
*
* <code>string notification_pubsub_topic = 7;</code>
* @return The bytes for notificationPubsubTopic.
*/
com.google.protobuf.ByteString
getNotificationPubsubTopicBytes();
}
|
talehm/frontity
|
node_modules/@material-ui/icons/esm/ShowChartTwoTone.js
|
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';

// Material-UI "ShowChart" icon, two-tone variant: a single SVG path wrapped
// into an SvgIcon component via the shared createSvgIcon helper.
const path = React.createElement("path", {
  d: "M13.5 13.48l-4-4L2 16.99l1.5 1.5 6-6.01 4 4L22 6.92l-1.41-1.41z"
});

export default createSvgIcon(path, 'ShowChartTwoTone');
|
antont/tundra
|
src/Core/AssetModule/AssetModule.h
|
<reponame>antont/tundra
// For conditions of distribution and use, see copyright notice in license.txt
#pragma once
#include "IModule.h"
#include <QObject>
#include "IAssetProvider.h"
#include "AssetModuleApi.h"
struct MsgAssetDiscovery;
struct MsgAssetDeleted;
struct UserConnectedResponseData;
class UserConnection;
namespace kNet
{
class MessageConnection;
typedef unsigned long message_id_t;
}
namespace Asset
{
/// Asset system module: exposes console commands for asset/storage management
/// and replicates asset discovery/deletion events between client and server
/// over the Kristalli protocol.
class ASSET_MODULE_API AssetModule : public IModule
{
    Q_OBJECT

public:
    AssetModule();
    virtual ~AssetModule();

    /// IModule override: performs module setup (registration of commands/handlers).
    virtual void Initialize();

public slots:
    /// Console command: requests the given asset ref (with optional type) from the asset system.
    void ConsoleRequestAsset(const QString &assetRef, const QString &assetType);
    /// Console command: adds a new asset storage parsed from a storage specifier string.
    void AddAssetStorage(const QString &storageString);
    /// Console command: lists all currently registered asset storages.
    void ListAssetStorages();
    /// Console command: triggers RefreshHttpStorages().
    void ConsoleRefreshHttpStorages();
    /// Console command: dumps the ongoing asset transfers.
    void ConsoleDumpAssetTransfers();
    /// Console command: dumps the currently loaded assets.
    void ConsoleDumpAssets();

    /// Loads from all the registered local storages all assets that have the given suffix.
    /// Type can also be optionally specified
    /// \todo Will be replaced with AssetStorage's GetAllAssetsRefs / GetAllAssets functionality
    void LoadAllLocalAssetsWithSuffix(const QString &suffix, const QString &assetType = "");

    /// Refreshes asset refs of all http storages
    void RefreshHttpStorages();

    /// If we are the server, this function gets called whenever a new connection is received. Populates the response data with the known asset storages in this server.
    void ServerNewUserConnected(int connectionID, UserConnection *connection, UserConnectedResponseData *responseData);

    /// If we are the client, this function gets called when we connect to a server. Adds all storages received from the server to our storage list.
    void ClientConnectedToServer(UserConnectedResponseData *responseData);

    /// If we are the client, this function gets called when we disconnected. Removes all storages received from the server from our storage list.
    void ClientDisconnectedFromServer();

private slots:
    /// Handles a Kristalli protocol message. Used for AssetDiscovery & AssetDeleted messages
    void HandleKristalliMessage(kNet::MessageConnection* source, kNet::message_id_t id, const char* data, size_t numBytes);

    /// Handle incoming asset discovery message.
    void HandleAssetDiscovery(kNet::MessageConnection* source, MsgAssetDiscovery& msg);

    /// Handle incoming asset deleted message.
    void HandleAssetDeleted(kNet::MessageConnection* source, MsgAssetDeleted& msg);

    /// Asset uploaded. Send AssetDiscovery network message
    void OnAssetUploaded(const QString& assetRef);

    /// Asset deleted from a storage. Send AssetDeleted network message
    void OnAssetDeleted(const QString& assetRef);

private:
    /// Applies asset-related command line options (called during initialization).
    void ProcessCommandLineOptions();

    /// When the client connects to the server, it adds to its list of known storages all the storages on the server side.
    /// To be able to also remove these storages from being used after we disconnect, we track all the server-originated storages here.
    std::vector<AssetStorageWeakPtr> storagesReceivedFromServer;
};
}
|
o-evin/glacier
|
source/renderer/modules/retrievals/reducer.js
|
import {listUpdate} from '../../helpers';
import {
RETRIEVAL_GET_SUCCESS,
RETRIEVAL_CREATE_SUCCESS,
RETRIEVAL_UPDATE_SUCCESS,
RETRIEVAL_LIST_SUCCESS,
RETRIEVAL_DELETE_SUCCESS,
} from '../../../contracts/enums/action_types';
import {Retrieval} from '../../../contracts/entities';
// Rehydrate a plain payload (single object or array of objects) into
// Retrieval entity instances.
function cast(data) {
  if (Array.isArray(data)) {
    return data.map((item) => new Retrieval(item));
  }
  return new Retrieval(data);
}
// Retrievals reducer: keeps `state.list` of Retrieval entities in sync with
// get/create/update/list/delete success actions. All updates are immutable.
export default function(state = {}, action) {
  const {type, payload} = action;

  if (
    type === RETRIEVAL_GET_SUCCESS ||
    type === RETRIEVAL_CREATE_SUCCESS ||
    type === RETRIEVAL_UPDATE_SUCCESS ||
    type === RETRIEVAL_LIST_SUCCESS
  ) {
    // Merge the rehydrated entity/entities into the existing list.
    return {...state, list: listUpdate(state.list, cast(payload))};
  }

  if (type === RETRIEVAL_DELETE_SUCCESS) {
    // Payload is the id of the removed retrieval.
    return {...state, list: state.list.filter((item) => item.id !== payload)};
  }

  return state;
}
|
Daz2345/dhPulse
|
packages/dh-ground/lib/ground.js
|
Ground.Collection(Posts);
|
pasmuss/cmssw
|
RecoLocalCalo/CaloTowersCreator/src/CaloTowersCreator.cc
|
<filename>RecoLocalCalo/CaloTowersCreator/src/CaloTowersCreator.cc<gh_stars>0
#include "RecoLocalCalo/CaloTowersCreator/src/CaloTowersCreator.h"
#include "Geometry/CaloGeometry/interface/CaloGeometry.h"
#include "Geometry/Records/interface/CaloGeometryRecord.h"
#include "FWCore/Framework/interface/ESHandle.h"
#include "Geometry/CaloTopology/interface/HcalTopology.h"
#include "Geometry/CaloTopology/interface/CaloTowerTopology.h"
// severity level for ECAL
#include "RecoLocalCalo/EcalRecAlgos/interface/EcalSeverityLevelAlgoRcd.h"
#include "CommonTools/Utils/interface/StringToEnumValue.h"
// Constructor: forwards all threshold/weight/grid and momentum-reconstruction
// parameters from the ParameterSet into the tower-building algorithm (algo_),
// registers the consumed rec-hit collections, caches the global energy scales,
// resolves the ECAL severity-name lists to enum values, and declares the
// produced CaloTowerCollection.
CaloTowersCreator::CaloTowersCreator(const edm::ParameterSet& conf) :
  algo_(conf.getParameter<double>("EBThreshold"),
        conf.getParameter<double>("EEThreshold"),
        conf.getParameter<bool>("UseEtEBTreshold"),
        conf.getParameter<bool>("UseEtEETreshold"),
        conf.getParameter<bool>("UseSymEBTreshold"),
        conf.getParameter<bool>("UseSymEETreshold"),
        conf.getParameter<double>("HcalThreshold"),
        conf.getParameter<double>("HBThreshold"),
        conf.getParameter<double>("HESThreshold"),
        conf.getParameter<double>("HEDThreshold"),
        conf.getParameter<double>("HOThreshold0"),
        conf.getParameter<double>("HOThresholdPlus1"),
        conf.getParameter<double>("HOThresholdMinus1"),
        conf.getParameter<double>("HOThresholdPlus2"),
        conf.getParameter<double>("HOThresholdMinus2"),
        conf.getParameter<double>("HF1Threshold"),
        conf.getParameter<double>("HF2Threshold"),
        conf.getParameter<std::vector<double> >("EBGrid"),
        conf.getParameter<std::vector<double> >("EBWeights"),
        conf.getParameter<std::vector<double> >("EEGrid"),
        conf.getParameter<std::vector<double> >("EEWeights"),
        conf.getParameter<std::vector<double> >("HBGrid"),
        conf.getParameter<std::vector<double> >("HBWeights"),
        conf.getParameter<std::vector<double> >("HESGrid"),
        conf.getParameter<std::vector<double> >("HESWeights"),
        conf.getParameter<std::vector<double> >("HEDGrid"),
        conf.getParameter<std::vector<double> >("HEDWeights"),
        conf.getParameter<std::vector<double> >("HOGrid"),
        conf.getParameter<std::vector<double> >("HOWeights"),
        conf.getParameter<std::vector<double> >("HF1Grid"),
        conf.getParameter<std::vector<double> >("HF1Weights"),
        conf.getParameter<std::vector<double> >("HF2Grid"),
        conf.getParameter<std::vector<double> >("HF2Weights"),
        conf.getParameter<double>("EBWeight"),
        conf.getParameter<double>("EEWeight"),
        conf.getParameter<double>("HBWeight"),
        conf.getParameter<double>("HESWeight"),
        conf.getParameter<double>("HEDWeight"),
        conf.getParameter<double>("HOWeight"),
        conf.getParameter<double>("HF1Weight"),
        conf.getParameter<double>("HF2Weight"),
        conf.getParameter<double>("EcutTower"),
        conf.getParameter<double>("EBSumThreshold"),
        conf.getParameter<double>("EESumThreshold"),
        conf.getParameter<bool>("UseHO"),
        // (for momentum reconstruction algorithm)
        conf.getParameter<int>("MomConstrMethod"),
        conf.getParameter<double>("MomHBDepth"),
        conf.getParameter<double>("MomHEDepth"),
        conf.getParameter<double>("MomEBDepth"),
        conf.getParameter<double>("MomEEDepth"),
        conf.getParameter<int>("HcalPhase")
  ),
  ecalLabels_(conf.getParameter<std::vector<edm::InputTag> >("ecalInputs")),
  allowMissingInputs_(conf.getParameter<bool>("AllowMissingInputs")),
  theHcalAcceptSeverityLevel_(conf.getParameter<unsigned int>("HcalAcceptSeverityLevel")),
  theRecoveredHcalHitsAreUsed_(conf.getParameter<bool>("UseHcalRecoveredHits")),
  theRecoveredEcalHitsAreUsed_(conf.getParameter<bool>("UseEcalRecoveredHits")),
  // parameters controlling the use of rejected hits
  useRejectedHitsOnly_(conf.getParameter<bool>("UseRejectedHitsOnly")),
  theHcalAcceptSeverityLevelForRejectedHit_(conf.getParameter<unsigned int>("HcalAcceptSeverityLevelForRejectedHit")),
  useRejectedRecoveredHcalHits_(conf.getParameter<bool>("UseRejectedRecoveredHcalHits")),
  useRejectedRecoveredEcalHits_(conf.getParameter<bool>("UseRejectedRecoveredEcalHits"))
{
  // register for data access (declare the collections this producer consumes)
  tok_hbhe_ = consumes<HBHERecHitCollection>(conf.getParameter<edm::InputTag>("hbheInput"));
  tok_ho_ = consumes<HORecHitCollection>(conf.getParameter<edm::InputTag>("hoInput"));
  tok_hf_ = consumes<HFRecHitCollection>(conf.getParameter<edm::InputTag>("hfInput"));

  const unsigned nLabels = ecalLabels_.size();
  for ( unsigned i=0; i != nLabels; i++ )
    toks_ecal_.push_back(consumes<EcalRecHitCollection>(ecalLabels_[i]));

  // cache the per-subdetector energy scales from the globally configured eScales_
  EBEScale=eScales_.EBScale;
  EEEScale=eScales_.EEScale;
  HBEScale=eScales_.HBScale;
  HESEScale=eScales_.HESScale;
  HEDEScale=eScales_.HEDScale;
  HOEScale=eScales_.HOScale;
  HF1EScale=eScales_.HF1Scale;
  HF2EScale=eScales_.HF2Scale;

  // get the Ecal severities to be excluded (resolved from string names to enum values)
  const std::vector<std::string> severitynames =
    conf.getParameter<std::vector<std::string> >("EcalRecHitSeveritiesToBeExcluded");
  theEcalSeveritiesToBeExcluded_ = StringToEnumValue<EcalSeverityLevel::SeverityLevel>(severitynames);

  // get the Ecal severities to be used for bad towers
  theEcalSeveritiesToBeUsedInBadTowers_ =
    StringToEnumValue<EcalSeverityLevel::SeverityLevel>(conf.getParameter<std::vector<std::string> >("EcalSeveritiesToBeUsedInBadTowers") );

  // product instance label is taken from the energy-scales configuration
  if (eScales_.instanceLabel=="") produces<CaloTowerCollection>();
  else produces<CaloTowerCollection>(eScales_.instanceLabel);

  /*
  std::cout << "VI Producer "
            << (useRejectedHitsOnly_ ? "use rejectOnly " : " ")
            << (allowMissingInputs_ ? "allowMissing " : " " )
            << nLabels << ' ' << severitynames.size()
            << std::endl;
  */
}
// Per-event entry point: fetches geometry/topology, channel-status and
// severity-level records from the EventSetup, configures the tower algorithm,
// feeds it the HBHE/HO/HF and ECAL rec-hit collections, and puts the finished
// CaloTowerCollection into the event. Missing inputs are tolerated only when
// allowMissingInputs_ is set.
void CaloTowersCreator::produce(edm::Event& e, const edm::EventSetup& c) {
  // get the necessary event setup objects...
  edm::ESHandle<CaloGeometry> pG;
  edm::ESHandle<HcalTopology> htopo;
  edm::ESHandle<CaloTowerTopology> cttopo;
  edm::ESHandle<CaloTowerConstituentsMap> ctmap;
  c.get<CaloGeometryRecord>().get(pG);
  c.get<HcalRecNumberingRecord>().get(htopo);
  c.get<HcalRecNumberingRecord>().get(cttopo);
  c.get<CaloGeometryRecord>().get(ctmap);

  // ECAL channel status map ****************************************
  edm::ESHandle<EcalChannelStatus> ecalChStatus;
  c.get<EcalChannelStatusRcd>().get( ecalChStatus );
  const EcalChannelStatus* dbEcalChStatus = ecalChStatus.product();

  // HCAL channel status map ****************************************
  edm::ESHandle<HcalChannelQuality> hcalChStatus;
  c.get<HcalChannelQualityRcd>().get( "withTopo", hcalChStatus );
  const HcalChannelQuality* dbHcalChStatus = hcalChStatus.product();

  // Assignment of severity levels **********************************
  edm::ESHandle<HcalSeverityLevelComputer> hcalSevLvlComputerHndl;
  c.get<HcalSeverityLevelComputerRcd>().get(hcalSevLvlComputerHndl);
  const HcalSeverityLevelComputer* hcalSevLvlComputer = hcalSevLvlComputerHndl.product();

  edm::ESHandle<EcalSeverityLevelAlgo> ecalSevLvlAlgoHndl;
  c.get<EcalSeverityLevelAlgoRcd>().get(ecalSevLvlAlgoHndl);
  const EcalSeverityLevelAlgo* ecalSevLvlAlgo = ecalSevLvlAlgoHndl.product();

  // push the cached configuration into the algorithm for this event
  algo_.setEBEScale(EBEScale);
  algo_.setEEEScale(EEEScale);
  algo_.setHBEScale(HBEScale);
  algo_.setHESEScale(HESEScale);
  algo_.setHEDEScale(HEDEScale);
  algo_.setHOEScale(HOEScale);
  algo_.setHF1EScale(HF1EScale);
  algo_.setHF2EScale(HF2EScale);
  algo_.setGeometry(cttopo.product(),ctmap.product(),htopo.product(),pG.product());

  // for treatment of problematic and anomalous cells
  algo_.setHcalChStatusFromDB(dbHcalChStatus);
  algo_.setEcalChStatusFromDB(dbEcalChStatus);
  algo_.setHcalAcceptSeverityLevel(theHcalAcceptSeverityLevel_);
  algo_.setEcalSeveritiesToBeExcluded(theEcalSeveritiesToBeExcluded_);
  algo_.setRecoveredHcalHitsAreUsed(theRecoveredHcalHitsAreUsed_);
  algo_.setRecoveredEcalHitsAreUsed(theRecoveredEcalHitsAreUsed_);
  algo_.setHcalSevLvlComputer(hcalSevLvlComputer);
  algo_.setEcalSevLvlAlgo(ecalSevLvlAlgo);
  algo_.setUseRejectedHitsOnly(useRejectedHitsOnly_);
  algo_.setHcalAcceptSeverityLevelForRejectedHit(theHcalAcceptSeverityLevelForRejectedHit_);
  algo_.SetEcalSeveritiesToBeUsedInBadTowers (theEcalSeveritiesToBeUsedInBadTowers_);
  algo_.setUseRejectedRecoveredHcalHits(useRejectedRecoveredHcalHits_);
  algo_.setUseRejectedRecoveredEcalHits(useRejectedRecoveredEcalHits_);

  /*
  std::cout << "VI Produce: "
            << (useRejectedHitsOnly_ ? "use rejectOnly " : " ")
            << (allowMissingInputs_ ? "allowMissing " : " " )
            << (theRecoveredEcalHitsAreUsed_ ? "use RecoveredEcal ": " " )
            << toks_ecal_.size()
            << ' ' << theEcalSeveritiesToBeExcluded_.size()
            << ' ' << theEcalSeveritiesToBeUsedInBadTowers_.size()
            << std::endl;
  */

  algo_.begin(); // clear the internal buffer

  // can't chain these in a big OR statement, or else it'll
  // get triggered for each of the first three events
  bool check1 = hcalSevLevelWatcher_.check(c);
  bool check2 = hcalChStatusWatcher_.check(c);
  bool check3 = caloTowerConstituentsWatcher_.check(c);
  if(check1 || check2 || check3)
  {
    algo_.makeHcalDropChMap();
  }

  // check ecal SevLev
  if (ecalSevLevelWatcher_.check(c)) algo_.makeEcalBadChs();

  // ----------------------------------------------------------
  // For ecal error handling need to
  // have access to the EB and EE collections at the end of
  // tower reconstruction.
  edm::Handle<EcalRecHitCollection> ebHandle;
  edm::Handle<EcalRecHitCollection> eeHandle;

  // the EB/EE handles are identified by inspecting the first hit's subdetector id;
  // empty collections are skipped, so an empty EB/EE input leaves its handle unset
  for (std::vector<edm::EDGetTokenT<EcalRecHitCollection> >::const_iterator i=toks_ecal_.begin();
       i!=toks_ecal_.end(); i++) {
    edm::Handle<EcalRecHitCollection> ec_tmp;
    if (! e.getByToken(*i,ec_tmp) ) continue;
    if (ec_tmp->size()==0) continue;
    // check if this is EB or EE
    if ( (ec_tmp->begin()->detid()).subdetId() == EcalBarrel ) {
      ebHandle = ec_tmp;
    }
    else if ((ec_tmp->begin()->detid()).subdetId() == EcalEndcap) {
      eeHandle = ec_tmp;
    }
  }

  algo_.setEbHandle(ebHandle);
  algo_.setEeHandle(eeHandle);

  //-----------------------------------------------------------

  bool present;

  // Step A/C: Get Inputs and process (repeatedly)
  edm::Handle<HBHERecHitCollection> hbhe;
  present=e.getByToken(tok_hbhe_,hbhe);
  if (present || !allowMissingInputs_) algo_.process(*hbhe);

  edm::Handle<HORecHitCollection> ho;
  present=e.getByToken(tok_ho_,ho);
  if (present || !allowMissingInputs_) algo_.process(*ho);

  edm::Handle<HFRecHitCollection> hf;
  present=e.getByToken(tok_hf_,hf);
  if (present || !allowMissingInputs_) algo_.process(*hf);

  std::vector<edm::EDGetTokenT<EcalRecHitCollection> >::const_iterator i;
  for (i=toks_ecal_.begin(); i!=toks_ecal_.end(); i++) {
    edm::Handle<EcalRecHitCollection> ec;
    present=e.getByToken(*i,ec);
    if (present || !allowMissingInputs_) algo_.process(*ec);
  }

  // Step B: Create empty output
  auto prod = std::make_unique<CaloTowerCollection>();

  // Step C: Process
  algo_.finish(*prod);

  /*
  int totc=0; float totE=0;
  reco::LeafCandidate::LorentzVector totP4;
  for (auto const & tw : (*prod) ) { totc += tw.constituents().size(); totE+=tw.energy(); totP4+=tw.p4();}
  std::cout << "VI " << (*prod).size() << " " << totc << " " << totE << " " << totP4 << std::endl;
  */

  // Step D: Put into the event
  if (eScales_.instanceLabel=="") e.put(std::move(prod));
  else e.put(std::move(prod),eScales_.instanceLabel);
}
// Declares the full set of configuration parameters of this producer together
// with their default values, so the framework can validate user configuration.
void CaloTowersCreator::fillDescriptions(edm::ConfigurationDescriptions& descriptions) {
  edm::ParameterSetDescription desc;
  desc.add<double>("EBSumThreshold", 0.2);
  desc.add<double>("HF2Weight", 1.0);
  desc.add<double>("EBWeight", 1.0);
  desc.add<double>("EESumThreshold", 0.45);
  desc.add<double>("HOThreshold0", 1.1);
  desc.add<double>("HOThresholdPlus1", 3.5);
  desc.add<double>("HOThresholdMinus1", 3.5);
  desc.add<double>("HOThresholdPlus2", 3.5);
  desc.add<double>("HOThresholdMinus2", 3.5);
  desc.add<double>("HBThreshold", 0.7);
  desc.add<double>("HF1Threshold", 0.5);
  desc.add<double>("HEDWeight", 1.0);
  desc.add<double>("EEWeight", 1.0);
  desc.add<double>("HESWeight", 1.0);
  desc.add<double>("HF1Weight", 1.0);
  desc.add<double>("HOWeight", 1.0);
  desc.add<double>("EBThreshold", 0.07);
  desc.add<double>("EEThreshold", 0.3);
  desc.add<double>("HcalThreshold", -1000.0);
  desc.add<double>("HF2Threshold", 0.85);
  desc.add<double>("HESThreshold", 0.8);
  desc.add<double>("HEDThreshold", 0.8);
  desc.add<double>("EcutTower", -1000.0);
  desc.add<double>("HBWeight", 1.0);
  desc.add<double>("MomHBDepth", 0.2);
  desc.add<double>("MomHEDepth", 0.4);
  desc.add<double>("MomEBDepth", 0.3);
  desc.add<double>("MomEEDepth", 0.0);
  desc.add<bool>("UseHO", true);
  desc.add<bool>("UseEtEBTreshold", false);
  desc.add<bool>("UseSymEBTreshold", true);
  desc.add<bool>("UseEtEETreshold", false);
  desc.add<bool>("UseSymEETreshold", true);
  desc.add<bool>("UseHcalRecoveredHits", true);
  desc.add<bool>("UseEcalRecoveredHits", false);
  desc.add<bool>("UseRejectedHitsOnly", false);
  desc.add<bool>("UseRejectedRecoveredHcalHits", true);
  desc.add<bool>("UseRejectedRecoveredEcalHits", false);
  desc.add<bool>("AllowMissingInputs", false);
  // grid/weight pairs define energy-dependent weighting per subdetector
  desc.add<std::vector<double> >("HBGrid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("EEWeights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HF2Weights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HOWeights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("EEGrid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("HBWeights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HF2Grid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("HEDWeights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HF1Grid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("EBWeights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HF1Weights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HESGrid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("HESWeights", {1.0, 1.0, 1.0, 1.0, 1.0});
  desc.add<std::vector<double> >("HEDGrid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("HOGrid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  desc.add<std::vector<double> >("EBGrid", {-1.0, 1.0, 10.0, 100.0, 1000.0});
  // default input collections
  desc.add<edm::InputTag>("hfInput", edm::InputTag("hfreco"));
  desc.add<edm::InputTag>("hbheInput", edm::InputTag("hbhereco"));
  desc.add<edm::InputTag>("hoInput", edm::InputTag("horeco"));
  desc.add<std::vector<edm::InputTag> >("ecalInputs", {edm::InputTag("ecalRecHit","EcalRecHitsEB"), edm::InputTag("ecalRecHit","EcalRecHitsEE")});
  desc.add<int>("MomConstrMethod", 1);
  desc.add<unsigned int>("HcalAcceptSeverityLevel", 9);
  desc.add<std::vector<std::string> >("EcalRecHitSeveritiesToBeExcluded", {"kTime","kWeird","kBad"});
  desc.add<unsigned int>("HcalAcceptSeverityLevelForRejectedHit", 9999);
  desc.add<std::vector<std::string> >("EcalSeveritiesToBeUsedInBadTowers", {});
  desc.add<int>("HcalPhase", 0);
  descriptions.addDefault(desc);
}
|
colvinco/apicurio-data-models
|
src/main/java/io/apicurio/datamodels/core/io/DataModelWriter.java
|
<reponame>colvinco/apicurio-data-models
/*
* Copyright 2019 Red Hat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apicurio.datamodels.core.io;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import io.apicurio.datamodels.compat.JsonCompat;
import io.apicurio.datamodels.core.Constants;
import io.apicurio.datamodels.core.models.Document;
import io.apicurio.datamodels.core.models.Extension;
import io.apicurio.datamodels.core.models.Node;
import io.apicurio.datamodels.core.models.ValidationProblem;
import io.apicurio.datamodels.core.models.common.Contact;
import io.apicurio.datamodels.core.models.common.ExternalDocumentation;
import io.apicurio.datamodels.core.models.common.IDefinition;
import io.apicurio.datamodels.core.models.common.Info;
import io.apicurio.datamodels.core.models.common.License;
import io.apicurio.datamodels.core.models.common.Operation;
import io.apicurio.datamodels.core.models.common.Parameter;
import io.apicurio.datamodels.core.models.common.Schema;
import io.apicurio.datamodels.core.models.common.SecurityRequirement;
import io.apicurio.datamodels.core.models.common.SecurityScheme;
import io.apicurio.datamodels.core.models.common.Tag;
import io.apicurio.datamodels.core.visitors.IVisitor;
/**
* Base class for all data model writers.
* @author <EMAIL>
*/
public class DataModelWriter implements IVisitor {
private Object _result;
private Map<Integer, Object> _modelIdToJS;
/**
 * Constructor.  Initializes the modelId -&gt; JSON index used while writing.
 */
public DataModelWriter() {
    this.reset();
}
/**
 * Resets the visitor by clearing the modelId -&gt; JSON index.
 * Note: does not clear {@code _result}; a writer instance is used for a single write.
 */
private void reset() {
    this._modelIdToJS = new HashMap<>();
}
/**
 * Gets the result of the writing: the root JSON object with all null-valued
 * properties stripped (placeholders written during visiting are removed).
 */
public Object getResult() {
    return JsonCompat.removeNullProperties(this._result);
}
/**
 * Records the JSON object produced for the given node, keyed by its model id,
 * so later visits can attach child output to it via lookup().
 */
protected void updateIndex(Node node, Object json) {
    this._modelIdToJS.put(node.modelId(), json);
    // Note: the first object created by the visitor is the result (we always traverse top-down).
    if (this._result == null) {
        this._result = json;
    }
}
/**
 * Copies all of the node's extra (unmodeled) properties onto the given
 * JSON object, preserving their original names and values.
 */
protected void writeExtraProperties(Object json, Node node) {
    for (String pname : node.getExtraPropertyNames()) {
        JsonCompat.setProperty(json, pname, node.getExtraProperty(pname));
    }
}
/**
 * Looks up the JSON object previously indexed for the given model id. When the
 * id is unknown (which can happen during a partial read of a document
 * subsection) the caller-supplied fallback object is returned instead.
 * <p>
 * TODO: the fallback assumption is not always correct — the parent node may be
 * an array rather than an object.
 *
 * @param modelId model id of the node whose JSON counterpart is wanted
 * @param jsonDefault non-null fallback returned when the id is not indexed
 */
protected Object lookup(int modelId, Object jsonDefault) {
    Objects.requireNonNull(jsonDefault);
    Object indexed = this._modelIdToJS.get(modelId);
    return indexed == null ? jsonDefault : indexed;
}
/**
 * Looks up the JSON object for the node's parent, falling back to a fresh
 * throwaway object when the parent has not been indexed.
 */
protected Object lookupParentJson(Node node) {
    return this.lookupParentJson(node, JsonCompat.objectNode());
}
/**
 * Looks up the JSON object for the node's parent, returning the given
 * (non-null) fallback object when the parent has not been indexed.
 */
protected Object lookupParentJson(Node node, Object jsonDefault) {
    return this.lookup(node.parent().modelId(), jsonDefault);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitDocument(io.apicurio.datamodels.core.models.Document)
 */
@Override
public void visitDocument(Document node) {
    // The document is the traversal root: the object created here is indexed
    // first and therefore becomes the writer result (see updateIndex).
    Object root = JsonCompat.objectNode();
    writeDocument(node, root);
    this.updateIndex(node, root);
}
/**
 * Writes the document node info into the given json object.
 * This base implementation is intentionally empty; subclasses for concrete
 * document types override it.
 * @param node
 * @param json
 */
protected void writeDocument(Document node, Object json) {
    // Subclasses should implement this.
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitExtension(io.apicurio.datamodels.core.models.Extension)
 */
@Override
public void visitExtension(Extension node) {
    // Vendor extensions are written verbatim under their original name.
    JsonCompat.setProperty(lookupParentJson(node), node.name, node.value);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitInfo(io.apicurio.datamodels.core.models.common.Info)
 */
@Override
public void visitInfo(Info node) {
    Object infoJson = JsonCompat.objectNode();
    JsonCompat.setPropertyString(infoJson, Constants.PROP_TITLE, node.title);
    JsonCompat.setPropertyString(infoJson, Constants.PROP_VERSION, node.version);
    JsonCompat.setPropertyString(infoJson, Constants.PROP_DESCRIPTION, node.description);
    JsonCompat.setPropertyString(infoJson, Constants.PROP_TERMS_OF_SERVICE, node.termsOfService);
    // Null placeholders keep property ordering stable; the contact/license
    // child visitors fill these slots in later.
    JsonCompat.setPropertyNull(infoJson, Constants.PROP_CONTACT);
    JsonCompat.setPropertyNull(infoJson, Constants.PROP_LICENSE);
    writeExtraProperties(infoJson, node);
    JsonCompat.setProperty(lookupParentJson(node), Constants.PROP_INFO, infoJson);
    updateIndex(node, infoJson);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitContact(io.apicurio.datamodels.core.models.common.Contact)
 */
@Override
public void visitContact(Contact node) {
    Object contactJson = JsonCompat.objectNode();
    JsonCompat.setPropertyString(contactJson, Constants.PROP_NAME, node.name);
    JsonCompat.setPropertyString(contactJson, Constants.PROP_URL, node.url);
    JsonCompat.setPropertyString(contactJson, Constants.PROP_EMAIL, node.email);
    writeExtraProperties(contactJson, node);
    JsonCompat.setProperty(lookupParentJson(node), Constants.PROP_CONTACT, contactJson);
    updateIndex(node, contactJson);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitLicense(io.apicurio.datamodels.core.models.common.License)
 */
@Override
public void visitLicense(License node) {
    Object licenseJson = JsonCompat.objectNode();
    JsonCompat.setPropertyString(licenseJson, Constants.PROP_NAME, node.name);
    JsonCompat.setPropertyString(licenseJson, Constants.PROP_URL, node.url);
    writeExtraProperties(licenseJson, node);
    JsonCompat.setProperty(lookupParentJson(node), Constants.PROP_LICENSE, licenseJson);
    updateIndex(node, licenseJson);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitTag(io.apicurio.datamodels.core.models.common.Tag)
 */
@Override
public void visitTag(Tag node) {
    Object tagJson = JsonCompat.objectNode();
    JsonCompat.setPropertyString(tagJson, Constants.PROP_NAME, node.name);
    JsonCompat.setPropertyString(tagJson, Constants.PROP_DESCRIPTION, node.description);
    // Placeholder; filled in by the externalDocs child visitor if present.
    JsonCompat.setPropertyNull(tagJson, Constants.PROP_EXTERNAL_DOCS);
    writeExtraProperties(tagJson, node);
    // Tags accumulate into an array on the parent, not a named property.
    JsonCompat.appendToArrayProperty(lookupParentJson(node), Constants.PROP_TAGS, tagJson);
    updateIndex(node, tagJson);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitSecurityRequirement(io.apicurio.datamodels.core.models.common.SecurityRequirement)
 */
@Override
public void visitSecurityRequirement(SecurityRequirement node) {
    Object reqJson = JsonCompat.objectNode();
    // Each entry maps a security scheme name to its list of required scopes.
    node.getSecurityRequirementNames().forEach(schemeName ->
            JsonCompat.setPropertyStringArray(reqJson, schemeName, node.getScopes(schemeName)));
    JsonCompat.appendToArrayProperty(lookupParentJson(node), Constants.PROP_SECURITY, reqJson);
    updateIndex(node, reqJson);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitExternalDocumentation(io.apicurio.datamodels.core.models.common.ExternalDocumentation)
 */
@Override
public void visitExternalDocumentation(ExternalDocumentation node) {
    Object docsJson = JsonCompat.objectNode();
    JsonCompat.setPropertyString(docsJson, Constants.PROP_DESCRIPTION, node.description);
    JsonCompat.setPropertyString(docsJson, Constants.PROP_URL, node.url);
    writeExtraProperties(docsJson, node);
    JsonCompat.setProperty(lookupParentJson(node), Constants.PROP_EXTERNAL_DOCS, docsJson);
    updateIndex(node, docsJson);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitOperation(io.apicurio.datamodels.core.models.common.Operation)
 */
@Override
public void visitOperation(Operation node) {
    Object opJson = JsonCompat.objectNode();
    writeOperation(opJson, node);
    writeExtraProperties(opJson, node);
    // Keyed on the parent by the operation's type name (presumably the
    // HTTP method, e.g. "get" — confirm against the Operation subclasses).
    JsonCompat.setProperty(lookupParentJson(node), node.getType(), opJson);
    updateIndex(node, opJson);
}
/**
 * Writes the version-independent {@link Operation} properties into the
 * given JSON object.  {@code externalDocs} is written as a null placeholder
 * and filled in by its own child visitor.
 *
 * @param json the target JSON object
 * @param node the operation being written
 */
protected void writeOperation(Object json, Operation node) {
    JsonCompat.setPropertyString(json, Constants.PROP_OPERATION_ID, node.operationId);
    JsonCompat.setPropertyString(json, Constants.PROP_SUMMARY, node.summary);
    JsonCompat.setPropertyString(json, Constants.PROP_DESCRIPTION, node.description);
    JsonCompat.setPropertyNull(json, Constants.PROP_EXTERNAL_DOCS);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitParameter(io.apicurio.datamodels.core.models.common.Parameter)
 */
@Override
public void visitParameter(Parameter node) {
    Object paramJson = JsonCompat.objectNode();
    writeParameter(paramJson, node);
    writeExtraProperties(paramJson, node);
    // Parameters accumulate into the parent's "parameters" array.
    JsonCompat.appendToArrayProperty(lookupParentJson(node), Constants.PROP_PARAMETERS, paramJson);
    updateIndex(node, paramJson);
}
/**
 * Writes the version-independent {@link Parameter} properties into the
 * given JSON object.  {@code schema} is written as a null placeholder and
 * filled in by the schema child visitor.
 *
 * @param json the target JSON object
 * @param node the parameter being written
 */
protected void writeParameter(Object json, Parameter node) {
    JsonCompat.setPropertyString(json, Constants.PROP_$REF, node.$ref);
    JsonCompat.setPropertyString(json, Constants.PROP_NAME, node.name);
    JsonCompat.setPropertyString(json, Constants.PROP_DESCRIPTION, node.description);
    JsonCompat.setPropertyNull(json, Constants.PROP_SCHEMA);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitSchema(io.apicurio.datamodels.core.models.common.Schema)
 */
@Override
public void visitSchema(Schema node) {
    // A plain schema hangs off its parent's singular "schema" property.
    doVisitSchema(node, Constants.PROP_SCHEMA, false);
}
/**
 * Writes a schema and attaches it to its parent, either appended to an
 * array property or set as a named property.
 *
 * @param node the schema being written
 * @param parentPropertyName property name (or array name) on the parent
 * @param isCollection true to append to an array, false to set directly
 */
protected void doVisitSchema(Schema node, String parentPropertyName, boolean isCollection) {
    Object schemaJson = JsonCompat.objectNode();
    writeSchema(schemaJson, node);
    writeExtraProperties(schemaJson, node);
    Object parent = lookupParentJson(node);
    if (isCollection) {
        JsonCompat.appendToArrayProperty(parent, parentPropertyName, schemaJson);
    } else {
        JsonCompat.setProperty(parent, parentPropertyName, schemaJson);
    }
    updateIndex(node, schemaJson);
}
/**
 * Writes the version-independent {@link Schema} properties (just the $ref
 * at this level) into the given JSON object.  Subclasses add the rest.
 */
protected void writeSchema(Object json, Schema node) {
    JsonCompat.setPropertyString(json, Constants.PROP_$REF, node.$ref);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitValidationProblem(io.apicurio.datamodels.core.models.ValidationProblem)
 */
@Override
public void visitValidationProblem(ValidationProblem problem) {
    // Validation problems are transient metadata, not part of the document,
    // so they are never written out.
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitSecurityScheme(io.apicurio.datamodels.core.models.common.SecurityScheme)
 */
@Override
public void visitSecurityScheme(SecurityScheme node) {
    Object schemeJson = JsonCompat.objectNode();
    writeSecurityScheme(schemeJson, node);
    writeExtraProperties(schemeJson, node);
    // Attachment is version-specific, so it is delegated to a hook.
    this.addSecuritySchemeToParent(lookupParentJson(node), schemeJson, node);
    this.updateIndex(node, schemeJson);
}
/**
 * Writes the version-independent {@link SecurityScheme} properties into the
 * given JSON object.
 *
 * @param json the target JSON object
 * @param node the security scheme being written
 */
protected void writeSecurityScheme(Object json, SecurityScheme node) {
    JsonCompat.setPropertyString(json, Constants.PROP_TYPE, node.type);
    JsonCompat.setPropertyString(json, Constants.PROP_DESCRIPTION, node.description);
    JsonCompat.setPropertyString(json, Constants.PROP_NAME, node.name);
    JsonCompat.setPropertyString(json, Constants.PROP_IN, node.in);
}
/**
 * Attaches a written security scheme to its parent, keyed by scheme name.
 * Subclasses may override to attach it elsewhere (version-specific layout).
 */
protected void addSecuritySchemeToParent(Object parent, Object json, SecurityScheme node) {
    JsonCompat.setProperty(parent, node.getSchemeName(), json);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitSchemaDefinition(io.apicurio.datamodels.core.models.common.IDefinition)
 */
@Override
public void visitSchemaDefinition(IDefinition node) {
    Schema schema = (Schema) node;
    Object defJson = JsonCompat.objectNode();
    writeSchema(defJson, schema);
    writeExtraProperties(defJson, schema);
    // Attachment location is version-specific; delegated to a hook.
    addSchemaDefinitionToParent(lookupParentJson(schema), defJson, node);
    updateIndex(schema, defJson);
}
/**
 * Attaches a written schema definition to its parent, keyed by definition
 * name.  Subclasses may override for version-specific layouts.
 */
protected void addSchemaDefinitionToParent(Object parent, Object json, IDefinition node) {
    JsonCompat.setProperty(parent, node.getName(), json);
}
/**
 * @see io.apicurio.datamodels.core.visitors.IVisitor#visitParameterDefinition(io.apicurio.datamodels.core.models.common.IDefinition)
 */
@Override
public void visitParameterDefinition(IDefinition node) {
    Parameter pdef = (Parameter) node;
    Object defJson = JsonCompat.objectNode();
    writeParameter(defJson, pdef);
    writeExtraProperties(defJson, pdef);
    // Attachment location is version-specific; delegated to a hook.
    addParameterDefinitionToParent(lookupParentJson(pdef), defJson, node);
    updateIndex(pdef, defJson);
}
/**
 * Attaches a written parameter definition to its parent, keyed by
 * definition name.  Subclasses may override for version-specific layouts.
 */
protected void addParameterDefinitionToParent(Object parent, Object json, IDefinition node) {
    JsonCompat.setProperty(parent, node.getName(), json);
}
}
|
sherlkk/NetLogo-GIS
|
src/org/myworldgis/netlogo/gui/GISDataView.java
|
//
// Copyright (c) 2007 <NAME>. All rights reserved.
//
package org.myworldgis.netlogo.gui;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.geom.util.GeometryTransformer;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseWheelListener;
import java.awt.geom.AffineTransform;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Point2D;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.event.MouseInputAdapter;
import javax.swing.event.MouseInputListener;
import org.myworldgis.io.shapefile.ESRIShapefileReader;
import org.myworldgis.netlogo.GISExtension;
import org.myworldgis.projection.Projection;
import org.myworldgis.projection.ProjectionFormat;
/**
*
*/
/**
 * Swing component that renders projected GIS geometry (points, lines,
 * polygons) and supports pan (drag) and zoom (mouse wheel).  Intended as a
 * debugging/preview view; {@link #main} loads a shapefile and shows it.
 */
public final strictfp class GISDataView extends JComponent {

    //--------------------------------------------------------------------------
    // Inner classes
    //--------------------------------------------------------------------------

    /**
     * Affine transform mapping projected (GIS) coordinates to pixel space.
     * Composition: translate to the pixel center, scale (negating Y so that
     * projected "north" points up on screen), then translate the projected
     * center to the origin.
     */
    private static class ProjectedSpaceToPixelSpaceTransform extends AffineTransform {

        static final long serialVersionUID = 1L;

        // Projected-space point shown at the center of the component.
        private Point2D _gisCenter;
        // Pixels per projected unit.
        private double _scale;
        // Pixel-space point the projected center maps to.
        private Point2D _pixelCenter;

        public ProjectedSpaceToPixelSpaceTransform (Point2D gisCenter,
                                                    double scale,
                                                    Point2D pixelCenter) {
            _gisCenter = gisCenter;
            _scale = scale;
            _pixelCenter = pixelCenter;
            recomputeTransform();
        }

        /** Returns a defensive copy of the projected-space center. */
        public Point2D getProjectedCenter () {
            return (Point2D)_gisCenter.clone();
        }

        /** Re-centers the view on the given projected-space point. */
        public void setProjectedCenter (Point2D newCenter) {
            _gisCenter.setLocation(newCenter.getX(), newCenter.getY());
            recomputeTransform();
        }

        /** Returns the current scale in pixels per projected unit. */
        public double getScale () {
            return(_scale);
        }

        /** Sets the scale; a no-op when unchanged. */
        public void setScale (double newScale) {
            if (newScale != _scale) {
                _scale = newScale;
                recomputeTransform();
            }
        }

        /** Returns a defensive copy of the pixel-space center. */
        @SuppressWarnings("unused")
        public Point2D getPixelCenter () {
            return (Point2D)_pixelCenter.clone();
        }

        /** Moves the pixel-space anchor (called when the component resizes). */
        public void setPixelCenter (Point2D newCenter) {
            _pixelCenter.setLocation(newCenter.getX(), newCenter.getY());
            recomputeTransform();
        }

        /** Rebuilds this AffineTransform from center/scale state. */
        private void recomputeTransform () {
            setToTranslation(_pixelCenter.getX(), _pixelCenter.getY());
            // Negative Y scale flips screen coordinates (Y-down) to map
            // coordinates (Y-up).
            scale(_scale, -_scale);
            translate(-_gisCenter.getX(), -_gisCenter.getY());
        }
    }

    /**
     * Pans the view while the mouse is dragged; also echoes the unprojected
     * coordinate under the cursor on mouse-move.
     */
    private class DragListener extends MouseInputAdapter {

        // Last drag position; null when no button is down.
        private java.awt.Point _last;

        public void mousePressed (MouseEvent evt) {
            _last = evt.getPoint();
        }

        public void mouseReleased (MouseEvent evt) {
            _last = null;
        }

        public void mouseDragged (MouseEvent evt) {
            if (_last != null) {
                int dx = evt.getX() - _last.x;
                int dy = evt.getY() - _last.y;
                // Convert the pixel delta into a projected-space delta and
                // move the view center the opposite way (dy sign differs
                // because screen Y is inverted relative to map Y).
                Point2D oldGISCenter = _transform.getProjectedCenter();
                double newX = oldGISCenter.getX() - (dx / _transform.getScale());
                double newY = oldGISCenter.getY() + (dy / _transform.getScale());
                _transform.setProjectedCenter(new Point2D.Double(newX, newY));
                _last = evt.getPoint();
                repaint();
            }
        }

        public void mouseMoved (MouseEvent evt) {
            // NOTE(review): debug output — prints the unprojected cursor
            // location to stdout on every mouse move; consider removing or
            // routing to a status bar.
            if (_proj != null) {
                try {
                    Point2D p = _transform.inverseTransform(evt.getPoint(), null);
                    System.out.println(_proj.getInverseTransformer().transform(FACTORY.createPoint(new Coordinate(p.getX(), p.getY()))));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /** Zooms in/out by a fixed factor per wheel notch (0.875 ~= 1/1.142857). */
    private class ZoomListener implements MouseWheelListener {
        public void mouseWheelMoved (MouseWheelEvent evt) {
            if (evt.getUnitsToScroll() > 0) {
                _transform.setScale(_transform.getScale() * 0.875);
            } else {
                _transform.setScale(_transform.getScale() * 1.142857);
            }
            repaint();
        }
    }

    //--------------------------------------------------------------------------
    // Class variables
    //--------------------------------------------------------------------------

    /** Serialization version. */
    static final long serialVersionUID = 1L;

    /** Hairline stroke; width is in projected units since we draw under _transform. */
    static final Stroke STROKE = new BasicStroke(0.001f, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_BEVEL);

    /** Shared JTS geometry factory with full floating-point precision. */
    static final GeometryFactory FACTORY = new GeometryFactory(new PrecisionModel(PrecisionModel.FLOATING));

    //--------------------------------------------------------------------------
    // Class methods
    //--------------------------------------------------------------------------

    /**
     * Standalone demo: loads a projection and a countries shapefile from
     * hard-coded paths, projects the shapes, and shows them in a frame.
     */
    public static void main (String[] args) {
        try {
            GISDataView dataView = new GISDataView();
            //Projection proj = ProjectionFormat.getInstance().parseProjection("PROJCS[\"Orthographic\",GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137,298.257223563]],PRIMEM[\"Greenwich\",0],UNIT[\"Degree\",0.017453292519943295]],PROJECTION[\"Orthographic\"],PARAMETER[\"False_Easting\",0],PARAMETER[\"False_Northing\",0],PARAMETER[\"Longitude_Of_Center\",-98],PARAMETER[\"Latitude_Of_Center\",36],UNIT[\"Meter\",1]]");
            //Projection proj = ProjectionFormat.getInstance().parseProjection("PROJCS[\"World_Equidistant_Conic\",GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137,298.257223563]],PRIMEM[\"Greenwich\",0],UNIT[\"Degree\",0.017453292519943295]],PROJECTION[\"Equidistant_Conic\"],PARAMETER[\"False_Easting\",0],PARAMETER[\"False_Northing\",0],PARAMETER[\"Central_Meridian\",0],PARAMETER[\"Standard_Parallel_1\",60],PARAMETER[\"Standard_Parallel_2\",60],PARAMETER[\"Latitude_Of_Origin\",0],UNIT[\"Meter\",1]]");
            Projection proj = ProjectionFormat.getInstance().parseProjection(new BufferedReader(new FileReader("c:/java/gis/data/Lambert_Conformal_Conic.prj")));
            dataView.setProjection(proj);
            GeometryTransformer xform = proj.getForwardTransformer();
            Envelope env = new Envelope();
            // Prefer the extension's shared factory when running inside NetLogo.
            GeometryFactory factory = null;
            if (GISExtension.getState() != null) {
                factory = GISExtension.getState().factory();
            } else {
                factory = FACTORY;
            }
            ESRIShapefileReader shp = new ESRIShapefileReader(new FileInputStream(new File("data/countries.shp")),
                                                             Projection.DEGREES_TO_RADIANS,
                                                             factory);
            // Read shapes until EOF (getNextShape returns null), projecting
            // each one and growing the overall envelope.
            while (true) {
                try {
                    Geometry geom = shp.getNextShape();
                    if (geom == null) {
                        break;
                    } else {
                        Geometry xGeom = xform.transform(geom);
                        env.expandToInclude(xGeom.getEnvelopeInternal());
                        dataView.addGeometry(xGeom);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            dataView.zoomToEnvelope(env);
            JFrame window = new JFrame("GIS Data View");
            window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            window.getContentPane().add(dataView);
            window.pack();
            window.setVisible(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Appends a JTS LineString's coordinates to an AWT path. */
    private static void addToPath (LineString line, GeneralPath p) {
        Coordinate c = line.getCoordinateN(0);
        p.moveTo((float)c.x, (float)c.y);
        for (int i = 1; i < line.getNumPoints(); i += 1) {
            c = line.getCoordinateN(i);
            p.lineTo((float)c.x, (float)c.y);
        }
    }

    //--------------------------------------------------------------------------
    // Instance variables
    //--------------------------------------------------------------------------

    /** AWT shapes to draw, in projected coordinates. */
    private List<Shape> _shapes;

    /** Projected-space to pixel-space viewing transform. */
    private ProjectedSpaceToPixelSpaceTransform _transform;

    /** Projection used only to unproject cursor positions; may be null. */
    private Projection _proj;

    //--------------------------------------------------------------------------
    // Constructors
    //--------------------------------------------------------------------------

    /** Creates an empty 300x300 view centered on the projected origin. */
    public GISDataView () {
        setOpaque(true);
        setPreferredSize(new Dimension(300, 300));
        setBackground(Color.WHITE);
        _shapes = new ArrayList<Shape>();
        _transform = new ProjectedSpaceToPixelSpaceTransform(new Point2D.Float(0.0f, 0.0f),
                                                             180.0 / 300.0,
                                                             new Point2D.Float(150f, 150f));
        _proj = null;
        MouseInputListener dragListener = new DragListener();
        addMouseListener(dragListener);
        addMouseMotionListener(dragListener);
        addMouseWheelListener(new ZoomListener());
        setSize(getPreferredSize());
    }

    //--------------------------------------------------------------------------
    // Instance methods
    //--------------------------------------------------------------------------

    /**
     * Centers and scales the view to show the given projected-space envelope.
     * NaN centers fall back to the origin; degenerate (zero-size) envelopes
     * get an effectively unbounded scale.
     */
    public void zoomToEnvelope (Envelope env) {
        double cx = env.getMinX() + ((env.getMaxX() - env.getMinX()) / 2.0);
        double cy = env.getMinY() + ((env.getMaxY() - env.getMinY()) / 2.0);
        _transform.setProjectedCenter(new Point2D.Double(Double.isNaN(cx) ? 0.0 : cx,
                                                         Double.isNaN(cy) ? 0.0 : cy));
        double xScale = getWidth() / env.getWidth();
        if (xScale == 0) {
            xScale = Double.MAX_VALUE;
        }
        double yScale = getHeight() / env.getHeight();
        if (yScale == 0) {
            yScale = Double.MAX_VALUE;
        }
        _transform.setScale(StrictMath.min(xScale, yScale));
    }

    /**
     * Converts a JTS geometry to AWT shapes and queues them for painting.
     * Collections are flattened recursively.
     */
    public void addGeometry (Geometry geom) {
        if (geom instanceof Point) {
            // NOTE(review): a point is rendered as four concentric circles at
            // wildly different radii — presumably so it is visible at several
            // zoom levels regardless of the projection's units; confirm intent.
            Coordinate c = ((Point)geom).getCoordinate();
            _shapes.add(new Ellipse2D.Double(c.x - 0.01, c.y - 0.01, 0.02, 0.02));
            _shapes.add(new Ellipse2D.Double(c.x - 1000, c.y - 1000, 2000, 2000));
            _shapes.add(new Ellipse2D.Double(c.x - 10000, c.y - 10000, 20000, 20000));
            _shapes.add(new Ellipse2D.Double(c.x - 100000, c.y - 100000, 200000, 200000));
        } else if (geom instanceof LineString) {
            GeneralPath p = new GeneralPath(GeneralPath.WIND_EVEN_ODD);
            addToPath((LineString)geom, p);
            _shapes.add(p);
        } else if (geom instanceof Polygon) {
            // Exterior ring plus interior rings (holes) in one non-zero
            // winding path.
            GeneralPath p = new GeneralPath(GeneralPath.WIND_NON_ZERO);
            Polygon poly = (Polygon)geom;
            addToPath(poly.getExteriorRing(), p);
            for (int i = 0; i < poly.getNumInteriorRing(); i += 1) {
                addToPath(poly.getInteriorRingN(i), p);
            }
            _shapes.add(p);
        } else if (geom instanceof GeometryCollection) {
            GeometryCollection gc = (GeometryCollection)geom;
            for (int i = 0; i < gc.getNumGeometries(); i += 1) {
                addGeometry(gc.getGeometryN(i));
            }
        }
    }

    /** Returns the projection used to unproject cursor positions (may be null). */
    public Projection getProjection () {
        return _proj;
    }

    /** Sets the projection used to unproject cursor positions. */
    public void setProjection (Projection proj) {
        _proj = proj;
    }

    /**
     * Keeps the pixel-space anchor at the component's visual center when the
     * component is resized.  Overrides the deprecated reshape(), which all
     * the setBounds/setSize variants funnel through.
     */
    @SuppressWarnings("deprecation")
    public void reshape (int x, int y, int newWidth, int newHeight) {
        // Clamp to at least 1x1 to keep the transform finite.
        super.reshape(x, y, (newWidth > 0) ? newWidth : 1, (newHeight > 0) ? newHeight : 1);
        Dimension size = getSize();
        Insets insets = getInsets();
        _transform.setPixelCenter(new Point2D.Float(insets.left + ((size.width-(insets.left+insets.right)) / 2.0f),
                                                    insets.top + ((size.height-(insets.top+insets.bottom)) / 2.0f)));
    }

    /** Paints all queued shapes through the current viewing transform. */
    public void paintComponent (Graphics g) {
        g.setColor(getBackground());
        g.fillRect(0, 0, getWidth(),getHeight());
        super.paintComponent(g);
        Graphics2D g2d = (Graphics2D)g;
        g2d.setColor(Color.BLACK);
        g2d.setStroke(STROKE);
        // Draw in projected coordinates; the transform maps them to pixels.
        g2d.setTransform(_transform);
        for (int i = 0; i < _shapes.size(); i += 1) {
            g2d.draw(_shapes.get(i));
        }
    }
}
|
muminkoykiran/computervision-recipes
|
utils_cv/segmentation/data.py
|
<filename>utils_cv/segmentation/data.py
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List
from urllib.parse import urljoin
class Urls:
    """URLs of the image-segmentation datasets used by the notebooks."""

    # Root of the dataset storage account.
    base = "https://cvbp-secondary.z19.web.core.windows.net/datasets/image_segmentation/"

    # Individual dataset archives.
    fridge_objects_path = urljoin(base, "segFridgeObjects.zip")
    fridge_objects_tiny_path = urljoin(base, "segFridgeObjectsTiny.zip")

    @classmethod
    def all(cls) -> List[str]:
        """Return every dataset URL defined on this class (``*_path`` attributes)."""
        return [url for attr, url in cls.__dict__.items() if attr.endswith("_path")]
|
TheBadZhang/OJ
|
hdu/1000/12/1205.cpp
|
<reponame>TheBadZhang/OJ
#include<cstdio>
#include<cstring>
#include<algorithm>
#include<iostream>
using namespace std;
typedef long long LL;

// HDU 1205-style feasibility check: an arrangement avoiding two equal
// adjacent items exists iff the largest count is at most one more than the
// sum of all the others, i.e. max - 1 <= total - max.
//
// Improvement over the original: the inputs were stored into an unused
// 8 MB global array (LL a[1000005]); only the running sum and maximum are
// actually needed, so values are now consumed as they are read.
int main() {
    int t;
    std::cin >> t;
    while (t--) {
        int n;
        std::cin >> n;
        LL largest = 0;   // maximum single count seen so far
        LL total = 0;     // sum of all counts
        for (int i = 1; i <= n; i++) {
            LL v;
            std::cin >> v;
            total += v;
            largest = std::max(v, largest);
        }
        // Output is byte-identical to the original program.
        if (largest - 1 > total - largest) std::cout << "No" << std::endl;
        else std::cout << "Yes" << std::endl;
    }
}
|
muiz6/android-music-player
|
app/src/main/java/com/muiz6/musicplayer/ui/main/home/library/genres/GenreViewModel.java
|
package com.muiz6.musicplayer.ui.main.home.library.genres;
import android.support.v4.media.MediaBrowserCompat;
import androidx.annotation.NonNull;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModel;
import com.muiz6.musicplayer.data.MusicRepository;
import com.muiz6.musicplayer.media.MediaRunnable;
import com.muiz6.musicplayer.media.MusicServiceConnection;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
/**
 * ViewModel backing the genre list screen.  Subscribes to the media
 * browser's "genres" node once the service connection is established and
 * exposes the loaded genres as LiveData.
 */
public class GenreViewModel extends ViewModel {

    // Backing list for the UI; starts empty until children are loaded.
    private final MutableLiveData<List<GenreItemModel>> _genreList =
            new MutableLiveData<>(Collections.<GenreItemModel>emptyList());

    // Receives the genre children from the media browser and converts them
    // off the main thread; postValue is safe from a worker thread.
    private final MediaBrowserCompat.SubscriptionCallback _subscriptionCallback =
            new MediaBrowserCompat.SubscriptionCallback() {

        @Override
        public void onChildrenLoaded(@NonNull String parentId,
                @NonNull List<MediaBrowserCompat.MediaItem> children) {
            super.onChildrenLoaded(parentId, children);
            _genreMediaList = children;
            new Thread(new MediaRunnable(children) {

                @Override
                public void run() {
                    _genreList.postValue(GenreUtil.getGenreList(getMediaItemList()));
                }
            }).start();
        }
    };

    // Subscribes to the genres node as soon as the service reports connected.
    private final Observer<Boolean> _connectionObserver = new Observer<Boolean>() {

        @Override
        public void onChanged(Boolean state) {
            if (state) {
                _connection.subscribe(MusicRepository.MEDIA_ID_GENRES, _subscriptionCallback);
            }
        }
    };

    private final MusicServiceConnection _connection;

    // Raw media items from the last onChildrenLoaded call.
    // NOTE(review): null until the first load completes — getGenreId/
    // getGenreTitle will NPE if called before then; verify callers only
    // invoke them after the list LiveData has emitted.
    private List<MediaBrowserCompat.MediaItem> _genreMediaList;

    @Inject
    public GenreViewModel(MusicServiceConnection connection) {
        _connection = connection;
        // observeForever is paired with removeObserver in onCleared().
        _connection.isConnected().observeForever(_connectionObserver);
    }

    @Override
    protected void onCleared() {
        super.onCleared();
        // Drop both registrations to avoid leaking this ViewModel.
        _connection.isConnected().removeObserver(_connectionObserver);
        _connection.unsubscribe(MusicRepository.MEDIA_ID_GENRES, _subscriptionCallback);
    }

    /** Observable list of genres for the UI. */
    public LiveData<List<GenreItemModel>> getGenreList() {
        return _genreList;
    }

    /** Media id of the genre at the given adapter position. */
    public String getGenreId(int index) {
        return _genreMediaList.get(index).getMediaId();
    }

    /** Display title of the genre at the given adapter position. */
    public String getGenreTitle(int index) {
        return String.valueOf(_genreMediaList.get(index).getDescription().getTitle());
    }
}
|
CMP-Studio/cmoa-app-cms
|
config/environment.rb
|
# Load the Rails application defined in config/application.rb.
require File.expand_path('../application', __FILE__)

# Initialize the Rails application (runs config/initializers/* and boots the app).
CMOA::Application.initialize!
|
iznauy/MyCourseBackEnd
|
src/main/java/top/nju/iznauy/dao/daoImpl/user/TeacherDaoImpl.java
|
<reponame>iznauy/MyCourseBackEnd<gh_stars>0
package top.nju.iznauy.dao.daoImpl.user;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import top.nju.iznauy.dao.TeacherDao;
import top.nju.iznauy.po.user.TeacherPO;
import top.nju.iznauy.po.uservalidation.TeacherMailValidationPO;
import java.util.Collection;
import java.util.List;
/**
 * Spring Data JPA backed DAO for teacher accounts and their e-mail
 * validation codes.  Thin delegation layer over two repositories.
 *
 * Created on 06/02/2019.
 *
 * @author iznauy
 */
@Repository
public class TeacherDaoImpl implements TeacherDao {

    // Both collaborators are setter-injected by Spring (see setters below).
    private TeacherRepository teacherRepository;

    private TeacherEmailValidationRepository emailValidationRepository;

    /** Looks up a teacher by e-mail (the entity's primary key); null if absent. */
    @Override
    public TeacherPO getTeacherByEmail(String email) {
        return teacherRepository.findById(email).orElse(null);
    }

    /** Inserts or updates a teacher record. */
    @Override
    public void saveTeacher(TeacherPO teacherPO) {
        teacherRepository.save(teacherPO);
    }

    /** Inserts or updates an e-mail validation code record. */
    @Override
    public void saveCode(TeacherMailValidationPO mailValidationPO) {
        emailValidationRepository.save(mailValidationPO);
    }

    /** Fetches the validation code record for an e-mail; null if absent. */
    @Override
    public TeacherMailValidationPO getTeacherMailValidationPO(String email) {
        return emailValidationRepository.findById(email).orElse(null);
    }

    /** Batch lookup of teachers whose e-mail is in the given collection. */
    @Override
    public List<TeacherPO> getTeachersByEmailCollection(Collection<String> collection) {
        return teacherRepository.findAllByMailIn(collection);
    }

    /** Counts teachers that have completed e-mail validation. */
    @Override
    public int countHasValidatedTeacher() {
        return teacherRepository.countByHasValidated(true);
    }

    @Autowired
    public void setTeacherRepository(TeacherRepository teacherRepository) {
        this.teacherRepository = teacherRepository;
    }

    @Autowired
    public void setEmailValidationRepository(TeacherEmailValidationRepository emailValidationRepository) {
        this.emailValidationRepository = emailValidationRepository;
    }
}
|
Uvacoder/html-demo-code-and-experiments
|
wc3/libwww/Library/src/HTMulti.c
|
/*
** CONTENT NEGOTIATION
**
** (c) COPYRIGHT MIT 1995.
** Please first read the full copyright statement in the file COPYRIGH.
** @(#) $Id$
**
** History:
** March 94 AL Separated from HTFile.c because
** multiformat handling would be a mess in VMS.
*/
/* Library include files */
#include "wwwsys.h"
#include "WWWUtil.h"
#include "WWWCore.h"
#include "HTMulti.h"
#include "HTBind.h"
#include "HTFile.h"
#define MULTI_SUFFIX ".multi"	/* Extension for scanning formats */
#define MAX_SUFF 15		/* Maximum number of suffixes for a file */
#define VARIANTS 4		/* We start with this array size */

/*
** Description of one candidate variant file considered during content
** negotiation.  quality starts as the variant's own q value and is scaled
** by the client's type/language/encoding preferences in HTRank().
*/
typedef struct _HTContentDescription {
    char * filename;
    HTFormat content_type;
    HTLanguage content_language;
    HTEncoding content_encoding;
    HTEncoding content_transfer;
    int content_length;
    double quality;
} HTContentDescription;

/* Registered welcome-page names (Welcome.html, index.html etc.) */
PRIVATE HTList * welcome_names = NULL;
/* ------------------------------------------------------------------------- */
/*
**	Sort the q values in descending order.  NULL descriptions (if any)
**	sort after all non-NULL ones.
*/
PRIVATE int VariantSort (const void * a, const void * b)
{
    HTContentDescription * aa = *(HTContentDescription **) a;
    HTContentDescription * bb = *(HTContentDescription **) b;
    if (aa && bb) {
	/* Compare explicitly so equal qualities yield 0 — the original
	** returned 1 for both orderings, an inconsistent comparator. */
	if (aa->quality > bb->quality) return -1;
	if (aa->quality < bb->quality) return 1;
	return 0;
    }
    /* The original "return bb - aa;" subtracted unrelated pointers
    ** (undefined behavior) and truncated ptrdiff_t to int.  Keep the
    ** intended sign semantics (NULLs last) with explicit checks. */
    if (aa == bb) return 0;
    return aa ? -1 : 1;
}
/*
** Language tag matcher.  Tags of the form "en_US" match exactly; when one
** side has no "_" subtag, only the primary parts are compared ("en"
** matches "en_US").
**
** Added by <EMAIL> (94/04/08)
*/
PRIVATE BOOL lang_match (HTAtom * tmplate, HTAtom * actual)
{
    const char *t, *a;
    char *st, *sa;
    BOOL match = NO;
    if (tmplate && actual &&
	(t = HTAtom_name(tmplate)) && (a = HTAtom_name(actual))) {
	/* NOTE(review): st/sa alias the const atom-name buffers; the code
	** below temporarily writes a NUL into them and then restores it.
	** This mutates shared atom storage and is not thread-safe — verify
	** the library is only used single-threaded here. */
	st = strchr(t, '_');
	sa = strchr(a, '_');
	if ((st != NULL) && (sa != NULL)) {
	    /* Both tags carry a subtag: compare the full strings. */
	    if (!strcasecomp(t, a))
		match = YES;
	    else
		match = NO;
	}
	else {
	    /* At least one side is primary-only: truncate at '_' for the
	    ** comparison, then restore the original characters. */
	    if (st != NULL) *st = 0;
	    if (sa != NULL) *sa = 0;
	    if (!strcasecomp(t, a))
		match = YES;
	    else
		match = NO;
	    if (st != NULL) *st = '_';
	    if (sa != NULL) *sa = '_';
	}
    }
    return match;
}
/*
** Quality factor for a content type against the accept list: exact match
** wins, otherwise the last wildcard (MIME pattern) match; 0 if nothing
** matches, 1 when there is no type or no accept list at all.
*/
PRIVATE double type_value (HTAtom * content_type, HTList * accepted)
{
    HTList * cur;
    HTPresentation * pres;
    HTPresentation * wild = NULL;

    if (!content_type) return 1.0;
    if (!accepted) return 1.0;		/* We accept all types */
    cur = accepted;
    while ((pres = (HTPresentation *) HTList_nextObject(cur)) != NULL) {
	if (pres->rep == content_type) return pres->quality;
	if (HTMIMEMatch(pres->rep, content_type)) wild = pres;
    }
    return wild ? wild->quality : 0.0;	/* 0.0 when nothing matched */
}
/*
** Quality factor for a content language against the accept list: exact
** atom match wins, otherwise the last partial tag match (lang_match);
** 0 if nothing matches, 1 when there is no language or no accept list.
*/
PRIVATE double lang_value (HTAtom * language, HTList * accepted)
{
    if (!language) return (1.0);
    if (accepted) {
	HTList * cur = accepted;
	HTAcceptNode * node;
	HTAcceptNode * wild = NULL;
	while ((node = (HTAcceptNode *) HTList_nextObject(cur))) {
	    if (node->atom == language)
		return node->quality;
	    /*
	     * patch by <EMAIL> (94/04/08)
	     * the original line was
	     *	else if (HTMIMEMatch(node->atom, language)) {
	     * and the new line is
	     */
	    else if (lang_match(node->atom, language))
		wild = node;
	}
	if (wild) return wild->quality;
	else return (0.0);	/* Nothing matched */
    }
    return (1.0);	/* We accept all languages */
}
/*
** Quality factor for a content encoding against the accept list.  The
** identity transfer encodings (7bit/8bit/binary) are always accepted at
** full quality; otherwise exact match wins, then the last MIME-pattern
** match; 0 if nothing matches, 1 when there is no encoding or accept list.
*/
PRIVATE double encoding_value (HTAtom * encoding, HTList * accepted)
{
    if (!encoding) return (1.0);
    if (accepted) {
	HTList * cur = accepted;
	HTAcceptNode * node;
	HTAcceptNode * wild = NULL;
	const char * e = HTAtom_name(encoding);
	if (!strcmp(e, "7bit") || !strcmp(e, "8bit") || !strcmp(e, "binary"))
	    return (1.0);
	while ((node = (HTAcceptNode*)HTList_nextObject(cur))) {
	    if (node->atom == encoding)
		return node->quality;
	    else if (HTMIMEMatch(node->atom, encoding))
		wild = node;
	}
	if (wild) return wild->quality;
	else return (0.0);	/* Nothing matched */
    }
    return (1.0);	/* We accept all encodings */
}
/*
** Scales each variant's quality by the request's local and the global type,
** language and encoding preferences, then sorts the variants in descending
** quality order.  Returns NO when there are no variants.
*/
PRIVATE BOOL HTRank (HTRequest * request, HTArray * variants)
{
    HTContentDescription * cd;
    /* NOTE(review): `data` looks uninitialized, but the HTArray_firstObject/
    ** HTArray_nextObject iteration assigns through it — verify they are the
    ** assigning macros from HTArray.h, not plain functions. */
    void ** data;
    if (!variants) {
	HTTRACE(PROT_TRACE, "Ranking..... No variants\n");
	return NO;
    }
    /*
    **  Walk through the list of local and global preferences and find the
    **  overall q factor for each variant
    */
    cd = (HTContentDescription *) HTArray_firstObject(variants, data);
    while (cd) {
	/* For each axis, the more permissive of the request-local and the
	** global preference wins (HTMAX below). */
	double ctq_local = type_value(cd->content_type, HTRequest_conversion(request));
	double ctq_global = type_value(cd->content_type, HTFormat_conversion());
	double clq_local = lang_value(cd->content_language, HTRequest_language(request));
	double clq_global = lang_value(cd->content_language, HTFormat_language());
	double ceq_local = encoding_value(cd->content_encoding, HTRequest_encoding(request));
	double ceq_global = encoding_value(cd->content_encoding, HTFormat_contentCoding());
	/* The `_` token is libwww's HTTRACE argument separator macro. */
	HTTRACE(PROT_TRACE, "Qualities... Content type: %.3f, Content language: %.3f, Content encoding: %.3f\n" _
		    HTMAX(ctq_local, ctq_global) _
		    HTMAX(clq_local, clq_global) _
		    HTMAX(ceq_local, ceq_global));
	cd->quality *= (HTMAX(ctq_local, ctq_global) *
			HTMAX(clq_local, clq_global) *
			HTMAX(ceq_local, ceq_global));
	cd = (HTContentDescription *) HTArray_nextObject(variants, data);
    }

    /* Sort the array of all our accepted preferences */
    HTArray_sort(variants, VariantSort);

    /* Write out the result */
#ifdef HTDEBUG
    if (PROT_TRACE) {
	int cnt = 1;
	cd = (HTContentDescription *) HTArray_firstObject(variants, data);
	HTTRACE(PROT_TRACE, "Ranking.....\n");
	HTTRACE(PROT_TRACE, "RANK QUALITY CONTENT-TYPE         LANGUAGE ENCODING   FILE\n");
	while (cd) {
	    HTTRACE(PROT_TRACE, "%d. %.4f %-20.20s %-8.8s %-10.10s %s\n" _
		    cnt++ _
		    cd->quality _
		    cd->content_type ? HTAtom_name(cd->content_type) : "-" _
		    cd->content_language?HTAtom_name(cd->content_language):"-" _
		    cd->content_encoding?HTAtom_name(cd->content_encoding):"-" _
		    cd->filename ? cd->filename :"-");
	    cd = (HTContentDescription *) HTArray_nextObject(variants, data);
	}
    }
#endif /* HTDEBUG */
    return YES;
}
/* PRIVATE HTSplitFilename()
**
**	Split the filename to an array of suffixes.
**	Return the number of parts placed to the array.
**	Array should have MAX_SUFF+1 items.  Each slot is (re)allocated via
**	StrAllocCopy, and the slot after the last part is freed/NULLed so the
**	array is NULL-terminated.
*/
PRIVATE int HTSplitFilename (char * s_str, char ** s_arr)
{
    const char *delimiters = HTBind_delimiters();
    char * start = s_str;
    char * end;
    char save;
    int i;

    if (!s_str || !s_arr) return 0;
    for (i=0; i < MAX_SUFF && *start; i++) {
	/* Find the end of the current part: the next delimiter (the
	** delimiter itself starts the following part). */
	for(end=start+1; *end && !strchr(delimiters, *end); end++);
	/* Temporarily NUL-terminate the part, copy it, then restore. */
	save = *end;
	*end = 0;
	StrAllocCopy(s_arr[i], start);	/* Frees the previous value */
	*end = save;
	start = end;
    }
    HT_FREE(s_arr[i]);		/* Terminating NULL */
    return i;
}
/*
**	Register an additional default file name (welcome page) to look for
**	in each directory, e.g. "Welcome.html" or "index.html".  The name is
**	copied; a NULL argument is ignored.
*/
PUBLIC void HTAddWelcome (char * name)
{
    char * mycopy = NULL;

    if (!name) return;
    StrAllocCopy(mycopy, name);
    if (!welcome_names)
	welcome_names = HTList_new();
    HTList_addObject(welcome_names, (void *) mycopy);
}
#ifdef HAVE_READDIR
/* PRIVATE multi_match()
**
**	Check if actual filename (split in parts) fulfills the requirements:
**	the first parts must agree exactly, and every remaining required
**	part must appear somewhere among the remaining actual parts (order
**	independent).  Comparisons are case-insensitive on VMS.
*/
PRIVATE BOOL multi_match (char ** required, int m, char ** actual, int n)
{
    int c;
    int i,j;

#ifdef VMS
    for(c=0; c<m && c<n && !strcasecomp(required[c], actual[c]); c++);
#else /* not VMS */
    for(c=0; c<m && c<n && !strcmp(required[c], actual[c]); c++);
#endif /* not VMS */

    if (!c) return NO;		/* Names differ right from start */

    for(i=c; i<m; i++) {
	BOOL found = NO;
	for(j=c; j<n; j++) {
#ifdef VMS
	    if (!strcasecomp(required[i], actual[j])) {
#else /* not VMS */
	    if (!strcmp(required[i], actual[j])) {
#endif /* not VMS */
		found = YES;
		break;
	    }
	}
	if (!found) return NO;
    }
    return YES;
}
/*
**	Get multi-match possibilities for a given file
**	----------------------------------------------
** On entry:
**	path	absolute path to one file in a directory,
**		may end in .multi.
** On exit:
**	returns	a list of ContentDescription structures
**		describing the matching files.
**
*/
PRIVATE HTArray * dir_matches (char * path)
{
    static char * required[MAX_SUFF+1];
    static char * actual[MAX_SUFF+1];
    int m,n;
    char * dirname = NULL;
    char * basename = NULL;	/* NOTE(review): shadows POSIX basename() if <libgen.h> is in scope */
    int baselen;
    char * multi = NULL;
    DIR * dp;
    struct dirent * dirbuf;
    HTArray * matches = NULL;
#ifdef HT_REENTRANT
    struct dirent result;				    /* For readdir_r */
#endif

    if (!path) return NULL;

    /* Split a private copy of "path" into directory and base name, and
    ** strip a trailing MULTI_SUFFIX from the base name if present. */
    StrAllocCopy(dirname, path);
    basename = (strrchr(dirname, '/'));
    if (!basename)
	goto dir_match_failed;
    *basename++ = 0;

    multi = strrchr(basename, MULTI_SUFFIX[0]);
    if (multi && !strcasecomp(multi, MULTI_SUFFIX))
	*multi = 0;
    baselen = strlen(basename);

    /* Parts of the requested name that directory entries must match. */
    m = HTSplitFilename(basename, required);

    dp = opendir(dirname);
    if (!dp) {
	HTTRACE(PROT_TRACE, "Warning..... Can't open directory %s\n" _ dirname);
	goto dir_match_failed;
    }

    matches = HTArray_new(VARIANTS);
    /* Three readdir flavours, selected at configure time. */
#ifdef HAVE_READDIR_R_2
	while ((dirbuf = (struct dirent *) readdir_r(dp, &result))) {
#elif defined(HAVE_READDIR_R_3)
	while (readdir_r(dp, &result, &dirbuf) == 0) {
#else
	while ((dirbuf = readdir(dp))) {
#endif /* HAVE_READDIR_R_2 */
	if (!dirbuf->d_ino) continue;		   	      /* Not in use */
	/* Skip ".", ".." and the directory-overview file. */
	if (!strcmp(dirbuf->d_name,".") ||
	    !strcmp(dirbuf->d_name,"..") ||
	    !strcmp(dirbuf->d_name, DEFAULT_DIR_FILE))
	    continue;
	/* Use of direct->namlen is only valid in BSD'ish system */
	/* Thanks to <EMAIL> (<NAME>) */
	/* if ((int)(dirbuf->d_namlen) >= baselen) { */
	if ((int) strlen(dirbuf->d_name) >= baselen) {
	    n = HTSplitFilename(dirbuf->d_name, actual);
	    if (multi_match(required, m, actual, n)) {
		HTContentDescription * cd;
		if ((cd = (HTContentDescription *)
		     HT_CALLOC(1, sizeof(HTContentDescription))) == NULL)
		    HT_OUTOFMEM("dir_matches");
		/* Only entries for which a content type can be bound are
		** kept; everything else is discarded immediately. */
		if (HTBind_getFormat(dirbuf->d_name,
				     &cd->content_type,
				     &cd->content_encoding,
				     &cd->content_transfer,
				     &cd->content_language,
				     &cd->quality)) {
		    if (cd->content_type) {
			if ((cd->filename = (char *) HT_MALLOC(strlen(dirname) + 2 + strlen(dirbuf->d_name))) == NULL)
			    HT_OUTOFMEM("dir_matches");
			sprintf(cd->filename, "%s/%s", dirname, dirbuf->d_name);
			HTArray_addObject(matches, (void *) cd);
		    } else {
			HT_FREE(cd);
		    }
		} else {
		    HT_FREE(cd);
		}
	    }
	}
    }
    closedir(dp);

  dir_match_failed:
    HT_FREE(dirname);
    return matches;
}
/*
**	Get the best match for a given file
**	-----------------------------------
** On entry:
**	req->conversions	accepted content-types
**	req->encodings		accepted content-transfer-encodings
**	req->languages		accepted content-languages
**	path		absolute pathname of the filename for
**			which the match is desired.
** On exit:
**	returns	a newly allocated absolute filepath, or NULL when no
**		readable variant matches.
*/
PRIVATE char * HTGetBest (HTRequest * req, char * path)
{
    HTArray * variants = NULL;
    char * representation = NULL;

    if (!path || !*path) return NULL;

    /* Collect all files in the directory whose names match "path". */
    if ((variants = dir_matches(path)) == NULL) {
	HTTRACE(PROT_TRACE, "No matches.. for \"%s\"\n" _ path);
	return NULL;
    }

#ifdef HTDEBUG
    if (PROT_TRACE) {
	/* FIX: initialize the iterator cursor (it was previously passed
	** uninitialized) and add the missing cast, matching the pattern
	** used in the ranking loop below. */
	void ** data = NULL;
	HTContentDescription * cd =
	    (HTContentDescription *) HTArray_firstObject(variants, data);
	HTTRACE(PROT_TRACE, "Multi....... Possibilities for \"%s\"\n" _ path);
	HTTRACE(PROT_TRACE, " QUALITY CONTENT-TYPE LANGUAGE ENCODING FILE\n");
	while (cd) {
	    HTTRACE(PROT_TRACE, " %.4f %-20.20s %-8.8s %-10.10s %s\n" _
		    cd->quality _
		    cd->content_type ?HTAtom_name(cd->content_type) :"-\t" _
		    cd->content_language?HTAtom_name(cd->content_language):"-" _
		    cd->content_encoding?HTAtom_name(cd->content_encoding):"-" _
		    cd->filename ?cd->filename :"-");
	    cd = (HTContentDescription *) HTArray_nextObject(variants, data);
	}
    }
#endif /* HTDEBUG */

    /*
    ** Finally get the best variant which is readable.  Every content
    ** description (and its filename) is freed as the array is walked.
    */
    if (HTRank(req, variants)) {
	void ** data = NULL;
	HTContentDescription * cd =
	    (HTContentDescription *) HTArray_firstObject(variants, data);
	while (cd) {
	    if (cd->filename) {
		if (access(cd->filename, R_OK) != -1)
		    StrAllocCopy(representation, cd->filename);
		else HTTRACE(PROT_TRACE, "Multi....... `%s\' is not readable\n" _
			     cd->filename);
	    }
	    HT_FREE(cd->filename);
	    HT_FREE(cd);
	    cd = (HTContentDescription *) HTArray_nextObject(variants, data);
	}
    }
    HTArray_delete(variants);
    return representation;
}
/*
**	Return the 1-based position of "name" in the global welcome_names
**	list, or 0 when the name is not a registered welcome file.
*/
PRIVATE int welcome_value (char * name)
{
    HTList * item = welcome_names;
    char * candidate;
    int rank = 0;
    for (;;) {
	candidate = (char *) HTList_nextObject(item);
	if (!candidate) break;
	rank++;
	if (strcmp(candidate, name) == 0)
	    return rank;
    }
    return 0;
}
/*
**	Scan the directory containing "path" and return a newly allocated
**	path to the welcome file with the highest welcome_value(), or NULL
**	when none is found.  The default welcome names are registered
**	lazily on the first call.  The caller owns the returned string.
*/
PRIVATE char * get_best_welcome (char * path)
{
    char * best_welcome = NULL;
    int best_value = 0;
    DIR * dp;
    struct dirent * dirbuf;
    char * last = strrchr(path, '/');

    if (!welcome_names) {
	HTAddWelcome("Welcome.html");
	HTAddWelcome("welcome.html");
#if 0
	HTAddWelcome("Index.html");
#endif
	HTAddWelcome("index.html");
    }
    /* Temporarily cut the path at the last '/' so opendir() sees only
    ** the directory part; the caller's string is restored right after. */
    if (last && last!=path) *last = 0;
    dp = opendir(path);
    if (last && last!=path) *last='/';
    if (!dp) {
	HTTRACE(PROT_TRACE, "Warning..... Can't open directory %s\n" _ path);
	return NULL;
    }
    while ((dirbuf = readdir(dp))) {
	/* Skip unused slots, ".", ".." and the directory-overview file. */
	if (!dirbuf->d_ino ||
	    !strcmp(dirbuf->d_name,".") ||
	    !strcmp(dirbuf->d_name,"..") ||
	    !strcmp(dirbuf->d_name, DEFAULT_DIR_FILE))
	    continue;
	else {
	    int v = welcome_value(dirbuf->d_name);
	    if (v > best_value) {	/* Highest welcome_value wins */
		best_value = v;
		StrAllocCopy(best_welcome, dirbuf->d_name);
	    }
	}
    }
    closedir(dp);

    if (best_welcome) {
	char * welcome;
	/* +2: one for a possible '/' separator, one for the NUL. */
	if ((welcome = (char *) HT_MALLOC(strlen(path) + strlen(best_welcome)+2)) == NULL)
	    HT_OUTOFMEM("get_best_welcome");
	sprintf(welcome, "%s%s%s", path, last ? "" : "/", best_welcome);
	HT_FREE(best_welcome);
	HTTRACE(PROT_TRACE, "Welcome..... \"%s\"\n" _ welcome);
	return welcome;
    }
    return NULL;
}
#endif /* HAVE_READDIR */
/*
**	Do multiformat handling
**	-----------------------
** On entry:
**	req->conversions	accepted content-types
**	req->encodings		accepted content-transfer-encodings
**	req->languages		accepted content-languages
**	path		absolute pathname of the filename for
**			which the match is desired.
**	stat_info	pointer to result space.
**
** On exit:
**	returns	a newly allocated absolute filepath of the best
**		match, or NULL if no match.
**	stat_info	will contain inode information as
**			returned by stat().
*/
PUBLIC char * HTMulti (HTRequest *	req,
		       char *		path,
		       struct stat *	stat_info)
{
    char * new_path = NULL;
    int stat_status = -1;

    if (!req || !path || !*path || !stat_info)
	return NULL;

#ifdef HAVE_READDIR
    if (*(path+strlen(path)-1) == '/') {	/* Find welcome page */
	new_path = get_best_welcome(path);
	if (new_path) path = new_path;
    } else{
	char * multi = strrchr(path, MULTI_SUFFIX[0]);
	/* Explicit negotiation: the path ends in MULTI_SUFFIX (".multi"). */
	if (multi && !strcasecomp(multi, MULTI_SUFFIX)) {
	    HTTRACE(PROT_TRACE, "Multi....... by %s suffix\n" _ MULTI_SUFFIX);
	    if (!(new_path = HTGetBest(req, path))) {
		HTTRACE(PROT_TRACE, "Multi....... failed -- giving up\n");
		return NULL;
	    }
	    path = new_path;
	} else {
	    /* Automatic negotiation: only attempted when the exact file
	    ** cannot be stat'ed. */
	    stat_status = HT_STAT(path, stat_info);
	    if (stat_status == -1) {
		HTTRACE(PROT_TRACE, "AutoMulti... can't stat \"%s\"(errno %d)\n" _
			    path _ errno);
		if (!(new_path = HTGetBest(req, path))) {
		    HTTRACE(PROT_TRACE, "AutoMulti... failed -- giving up\n");
		    return NULL;
		}
		path = new_path;
	    }
	}
    }
#endif /* HAVE_READDIR */

    /* Stat the (possibly substituted) path unless already done above. */
    if (stat_status == -1)
	stat_status = HT_STAT(path, stat_info);
    if (stat_status == -1) {
	HTTRACE(PROT_TRACE, "Stat fails.. on \"%s\" -- giving up (errno %d)\n" _
		path _ errno);
	return NULL;
    } else {
	/* Always return an allocation owned by the caller: either the one
	** produced above (path == new_path) or a fresh copy. */
	if (!new_path) {
	    StrAllocCopy(new_path, path);
	    return new_path;
	}
	else return path;
    }
}
|
ning1875/nightingale
|
models/chart.go
|
package models
import "github.com/toolkits/pkg/logger"
// Chart is the database model for a dashboard chart. Configs holds the
// chart definition as an opaque string; Weight controls display order
// within a group (ChartGets sorts by it).
type Chart struct {
	Id      int64  `json:"id"`
	GroupId int64  `json:"group_id"`
	Configs string `json:"configs"`
	Weight  int    `json:"weight"`
}

// TableName maps the struct to the "chart" table for the ORM.
func (c *Chart) TableName() string {
	return "chart"
}

// Add inserts the chart as a new row.
func (c *Chart) Add() error {
	return DBInsertOne(c)
}
// Update persists the listed columns of the chart row identified by c.Id.
// Database failures are logged and masked as internalServerError.
func (c *Chart) Update(cols ...string) error {
	_, err := DB.Where("id=?", c.Id).Cols(cols...).Update(c)
	if err != nil {
		logger.Errorf("mysql.error: update chart(id=%d) fail: %v", c.Id, err)
		return internalServerError
	}
	return nil
}

// Del removes the chart row identified by c.Id.
// Database failures are logged and masked as internalServerError.
func (c *Chart) Del() error {
	_, err := DB.Where("id=?", c.Id).Delete(new(Chart))
	if err != nil {
		logger.Errorf("mysql.error: delete chart(id=%d) fail: %v", c.Id, err)
		return internalServerError
	}
	return nil
}
// ChartGets returns all charts in the given group ordered by weight.
// A group with no charts yields an empty, non-nil slice; database
// failures are logged and masked as internalServerError.
func ChartGets(groupId int64) ([]Chart, error) {
	charts := make([]Chart, 0)
	if err := DB.Where("group_id=?", groupId).OrderBy("weight").Find(&charts); err != nil {
		logger.Errorf("mysql.error: ChartGets(groupId=%d) fail: %v", groupId, err)
		return nil, internalServerError
	}
	return charts, nil
}
// ChartGet fetches a single chart matching the given where clause and
// arguments. It returns (nil, nil) when no row matches; database
// failures are logged and masked as internalServerError.
func ChartGet(where string, args ...interface{}) (*Chart, error) {
	chart := new(Chart)
	has, err := DB.Where(where, args...).Get(chart)
	if err != nil {
		logger.Errorf("mysql.error: get chart(%s)%+v fail: %s", where, args, err)
		return nil, internalServerError
	}
	if !has {
		return nil, nil
	}
	return chart, nil
}
|
brian32768/map46
|
navbar/head.js
|
<filename>navbar/head.js
// head.js
import 'bootstrap/dist/css/bootstrap';
|
andrewt0301/ispras-microtesk
|
src/main/java/core/ru/ispras/microtesk/translator/nml/antlrex/MemoryFactory.java
|
<gh_stars>1-10
/*
* Copyright 2012-2018 ISP RAS (http://www.ispras.ru)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package ru.ispras.microtesk.translator.nml.antlrex;
import ru.ispras.castle.util.Logger;
import ru.ispras.microtesk.model.memory.Memory;
import ru.ispras.microtesk.translator.antlrex.SemanticException;
import ru.ispras.microtesk.translator.antlrex.symbols.Where;
import ru.ispras.microtesk.translator.nml.ir.expr.Expr;
import ru.ispras.microtesk.translator.nml.ir.shared.MemoryAlias;
import ru.ispras.microtesk.translator.nml.ir.shared.MemoryResource;
import ru.ispras.microtesk.translator.nml.ir.shared.Type;
import java.math.BigInteger;
/**
 * Factory that builds memory resource descriptions and aliases for the nML
 * translator, performing semantic validation and reporting errors through
 * {@code raiseError}.
 */
public final class MemoryFactory extends WalkerFactoryBase {
  public MemoryFactory(final WalkerContext context) {
    super(context);
  }

  /**
   * Creates a memory resource description.
   *
   * @param where source location used for diagnostics.
   * @param kind storage kind (e.g. MEM, REG, VAR).
   * @param name resource name.
   * @param type element type.
   * @param sizeExpr number of elements; {@code null} means a single element.
   * @param shared whether the resource is shared; ignored (with a warning) for VAR.
   * @param alias optional alias target whose bit size must equal this resource's.
   * @return the constructed {@link MemoryResource}.
   * @throws SemanticException if the alias bit size does not match the resource bit size.
   */
  public MemoryResource createMemory(
      final Where where,
      final Memory.Kind kind,
      final String name,
      final Type type,
      final Expr sizeExpr,
      final boolean shared,
      final MemoryAlias alias) throws SemanticException {
    if (shared && kind == Memory.Kind.VAR) {
      Logger.warning(
          "%s: Variable %s cannot be shared. The keyword will be ignored.", where, name);
    }

    final BigInteger size = sizeExpr != null ? sizeExpr.bigIntegerValue() : BigInteger.ONE;
    if (null == alias) {
      return new MemoryResource(kind, name, type, size, shared, null);
    }

    final BigInteger bitSize = size.multiply(BigInteger.valueOf(type.getBitSize()));

    // Bit size of the alias target: a single location, or a memory range of
    // (max - min + 1) elements.
    final int aliasBitSize;
    if (MemoryAlias.Kind.LOCATION == alias.getKind()) {
      aliasBitSize = alias.getLocation().getType().getBitSize();
    } else { // MemoryAlias.Kind.MEMORY == alias.getKind()
      aliasBitSize = (alias.getMax() - alias.getMin() + 1)
          * alias.getMemory().getType().getBitSize();
    }

    if (!bitSize.equals(BigInteger.valueOf(aliasBitSize))) {
      raiseError(where, String.format(
          "Size of the alias (%d) must be equal to the size of the defined memory (%d).",
          aliasBitSize,
          bitSize
          ));
    }

    return new MemoryResource(kind, name, type, size, shared, alias);
  }

  /**
   * Creates an alias to the element range {@code [min, max]} of an existing
   * memory resource.
   *
   * @param where source location used for diagnostics.
   * @param memoryId name of the aliased memory resource.
   * @param min lower bound expression (inclusive).
   * @param max upper bound expression (inclusive).
   * @return the constructed {@link MemoryAlias}.
   * @throws SemanticException if the memory is undefined or a bound is out of range.
   */
  public MemoryAlias createAlias(
      final Where where,
      final String memoryId,
      final Expr min,
      final Expr max) throws SemanticException {
    final MemoryResource memory = getIr().getMemory().get(memoryId);
    if (null == memory) {
      raiseError(where, memoryId + " is not defined or is not a memory storage.");
    }

    final int minIndex = min.integerValue();
    final int maxIndex = max.integerValue();

    // FIX: the original conditions were "!(0 <= idx) && (idx < size)", which
    // fired only for negative indexes and silently accepted out-of-range
    // upper bounds. The intended check is "idx NOT within [0, size)".
    if (!(0 <= minIndex && minIndex < memory.getSize().intValue())) {
      raiseError(where, String.format("min (%d) is out of bounds: [0, %d)",
          minIndex, memory.getSize()));
    }

    if (!(0 <= maxIndex && maxIndex < memory.getSize().intValue())) {
      raiseError(where, String.format("max (%d) is out of bounds: [0, %d)",
          maxIndex, memory.getSize()));
    }

    return MemoryAlias.forMemory(memoryId, memory, minIndex, maxIndex);
  }
}
|
NeoHuang/sphere-fedora
|
app/utils/PriceUtils.java
|
<reponame>NeoHuang/sphere-fedora
package utils;
import com.google.common.base.Optional;
import io.sphere.client.model.Money;
import io.sphere.client.shop.model.TaxRate;
import models.ShopCustomer;
/** Static helpers for converting between net and gross prices. */
public final class PriceUtils {

    private PriceUtils() {
        // Utility class: no instances.
    }

    /**
     * Gets the price in gross or net, according to the type of customer.
     * @param amount the amount to be converted.
     * @param taxRate the tax rate that applies to this price.
     * @param customer the customer to which the price is applied.
     * @return the calculated net price for B2B customers, or gross price otherwise.
     */
    public static Money customerPrice(Money amount, TaxRate taxRate, Optional<ShopCustomer> customer) {
        final boolean isB2bCustomer = customer.isPresent() && customer.get().isB2B();
        return isB2bCustomer ? netPrice(amount, taxRate) : grossPrice(amount, taxRate);
    }

    /**
     * Gets the gross price for the given price and applied tax rate.
     * @param amount the amount to be converted.
     * @param taxRate the tax rate that applies to this price.
     * @return the calculated gross price, or the same amount if taxes are included in the price.
     */
    public static Money grossPrice(Money amount, TaxRate taxRate) {
        return taxRate.isIncludedInPrice() ? amount : grossPrice(amount, taxRate.getAmount());
    }

    /**
     * Gets the net price for the given price and applied tax rate.
     * @param amount the amount to be converted.
     * @param taxRate the tax rate that applies to this price.
     * @return the calculated net price, or the same amount if taxes are not included in the price.
     */
    public static Money netPrice(Money amount, TaxRate taxRate) {
        return taxRate.isIncludedInPrice() ? netPrice(amount, taxRate.getAmount()) : amount;
    }

    /**
     * Gets the gross price for the given net price and tax rate amount applied.
     * @param netAmount the net amount to be converted.
     * @param taxRate the tax rate to be applied, e.g. 0.19 for a tax rate of 19%.
     * @return the calculated gross price.
     */
    public static Money grossPrice(Money netAmount, double taxRate) {
        return netAmount.plus(netAmount.multiply(taxRate));
    }

    /**
     * Gets the net price for the given gross price and tax rate amount applied.
     * @param grossAmount the gross amount to be converted.
     * @param taxRate the tax rate to be applied, e.g. 0.19 for a tax rate of 19%.
     * @return the calculated net price.
     */
    public static Money netPrice(Money grossAmount, double taxRate) {
        return grossAmount.multiply(1 / (1 + taxRate));
    }
}
|
SolovyovAlexander/reindexer
|
cpp_src/cmd/reindexer_tool/commandsexecutor.cc
|
<reponame>SolovyovAlexander/reindexer<gh_stars>0
#include "commandsexecutor.h"
#include <iomanip>
#include "client/cororeindexer.h"
#include "core/cjson/jsonbuilder.h"
#include "core/reindexer.h"
#include "coroutine/waitgroup.h"
#include "executorscommand.h"
#include "tableviewscroller.h"
#include "tools/fsops.h"
#include "tools/jsontools.h"
namespace reindexer_tool {

using reindexer::iequals;
using reindexer::WrSerializer;
using reindexer::NamespaceDef;
using reindexer::JsonBuilder;
using reindexer::Query;

// Persistent tool settings file (read from the user's home directory in runImpl).
const string kConfigFile = "rxtool_settings.txt";
// The single configurable variable and its accepted output modes.
const string kVariableOutput = "output";
const string kOutputModeJson = "json";
const string kOutputModeTable = "table";
const string kOutputModePretty = "pretty";
const string kOutputModePrettyCollapsed = "collapsed";
// Namespace/index used by the built-in benchmark command.
const string kBenchNamespace = "rxtool_bench";
const string kBenchIndex = "id";

// Number of worker coroutines used by fromFileImpl to process \upsert/\delete lines.
constexpr int kSingleThreadCoroCount = 200;
constexpr int kBenchItemsCount = 10000;
constexpr int kBenchDefaultTime = 5;
// Coroutine stack sizes, chosen per task weight.
constexpr size_t k64KStack = 64 * 1024;
constexpr size_t k24KStack = 24 * 1024;
constexpr size_t k8KStack = 8 * 1024;
// Run() for the embedded DB: start the executor thread, forwarding the
// connect arguments unchanged.
template <>
template <typename... Args>
Error CommandsExecutor<reindexer::Reindexer>::Run(const std::string& dsn, const Args&... args) {
	return runImpl(dsn, args...);
}

// Run() for the network client: additionally passes the event loop the
// client's coroutines run on.
template <>
template <typename... Args>
Error CommandsExecutor<reindexer::client::CoroReindexer>::Run(const std::string& dsn, const Args&... args) {
	return runImpl(dsn, std::ref(loop_), args...);
}
// Fetch autocompletion suggestions for `input`. Wrapped in an
// OutParamCommand so the DB is only touched from the executor thread.
template <typename DBInterface>
void CommandsExecutor<DBInterface>::GetSuggestions(const std::string& input, std::vector<std::string>& suggestions) {
	OutParamCommand<std::vector<std::string>> cmd(
		[this, &input](std::vector<std::string>& suggestions) {
			getSuggestions(input, suggestions);
			return errOK;
		},
		suggestions);
	execCommand(cmd);
}

// Stop the executor: send a terminating stop() command, then join the
// executor thread if the command succeeded.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::Stop() {
	GenericCommand cmd([this] { return stop(true); });
	auto err = execCommand(cmd);
	if (err.ok() && executorThr_.joinable()) {
		executorThr_.join();
	}
	return err;
}

// Execute a single command line on the executor thread.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::Process(const string& command) {
	GenericCommand cmd([this, &command] { return processImpl(command); });
	return execCommand(cmd);
}

// Execute every command read from the given stream (e.g. a dump file).
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::FromFile(std::istream& in) {
	GenericCommand cmd([this, &in] { return fromFileImpl(in); });
	return execCommand(cmd);
}

// Thread-safe snapshot of the executor status (guarded by mtx_).
template <typename DBInterface>
typename CommandsExecutor<DBInterface>::Status CommandsExecutor<DBInterface>::getStatus() {
	std::lock_guard<std::mutex> lck(mtx_);
	return status_;
}
// Process a command stream. \upsert and \delete lines are distributed over a
// pool of kSingleThreadCoroCount coroutines through a bounded channel; all
// other commands run inline, in order. The first error is kept in lastErr
// and returned; cancellation or a broken DB connection stops the whole run.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::fromFileImpl(std::istream& in) {
	using reindexer::coroutine::wait_group;
	using reindexer::coroutine::wait_group_guard;

	Error lastErr;
	reindexer::coroutine::channel<std::string> cmdCh(500);

	// Record an error; return false when processing must stop entirely
	// (the command was cancelled or the connection is gone).
	auto handleResultFn = [this, &lastErr](Error err) {
		try {
			if (!err.ok()) {
				if (err.code() == errCanceled || !db().Status().ok()) {
					if (lastErr.ok()) {
						lastErr = err;
						std::cerr << "ERROR: " << err.what() << std::endl;
					}
					return false;
				}
				lastErr = err;
				std::cerr << "ERROR: " << err.what() << std::endl;
			}
		} catch (...) {
			std::cout << "exc";
		}
		return true;
	};
	// Worker coroutine: drain the channel until it is closed; on a fatal
	// error, close the channel so the sibling workers stop too.
	auto workerFn = [this, &cmdCh](std::function<bool(Error)> handleResult, wait_group& wg) {
		wait_group_guard wgg(wg);
		for (;;) {
			auto cmdp = cmdCh.pop();
			if (cmdp.second) {
				auto err = processImpl(cmdp.first);
				if (!handleResult(err)) {
					if (cmdCh.opened()) {
						cmdCh.close();
					}
					return;
				}
			} else {
				return;
			}
		}
	};
	wait_group wg;
	wg.add(kSingleThreadCoroCount);
	for (size_t i = 0; i < kSingleThreadCoroCount; ++i) {
		loop_.spawn(std::bind(workerFn, handleResultFn, std::ref(wg)), k64KStack);
	}

	std::string line;
	while (std::getline(in, line)) {
		if (reindexer::checkIfStartsWith("\\upsert ", line) || reindexer::checkIfStartsWith("\\delete ", line)) {
			try {
				// push() may throw once a worker has closed the channel.
				cmdCh.push(line);
			} catch (std::exception&) {
				break;
			}
		} else {
			auto err = processImpl(line);
			if (!handleResultFn(err)) {
				break;
			}
		}
	}
	cmdCh.close();
	wg.wait();	// let every worker finish before returning

	return lastErr;
}
// Hand a command to the executor thread and block until it has executed.
// curCmd_ is a single-slot mailbox guarded by mtx_; cmdAsync_ wakes the
// event loop, and condVar_ signals completion back to this caller.
template <typename DBInterface>
reindexer::Error CommandsExecutor<DBInterface>::execCommand(IExecutorsCommand& cmd) {
	std::unique_lock<std::mutex> lck_(mtx_);
	curCmd_ = &cmd;
	cmdAsync_.send();
	condVar_.wait(lck_, [&cmd] { return cmd.IsExecuted(); });
	return cmd.Status();
}
// Start the executor thread: wire SIGINT to command cancellation, install
// the cross-thread command mailbox handler, load persisted settings,
// connect to the DB, then run the event loop until stopped. Blocks the
// caller until the thread reports either "running" or an error.
template <typename DBInterface>
template <typename... Args>
Error CommandsExecutor<DBInterface>::runImpl(const string& dsn, Args&&... args) {
	using reindexer::net::ev::sig;
	assert(!executorThr_.joinable());

	// Body of the executor thread.
	auto fn = [this](const string& dsn, Args&&... args) {
		sig sint;
		sint.set(loop_);
		sint.set([this](sig&) { cancelCtx_.Cancel(); });	// Ctrl-C cancels the running command
		sint.start(SIGINT);

		// Mailbox wake-up: take the pending command out of curCmd_ and run
		// it in a dedicated coroutine, then notify the blocked caller.
		cmdAsync_.set(loop_);
		cmdAsync_.set([this](reindexer::net::ev::async&) {
			loop_.spawn([this] {
				std::unique_lock<std::mutex> lck(mtx_);
				if (curCmd_) {
					auto cmd = curCmd_;
					curCmd_ = nullptr;
					lck.unlock();
					loop_.spawn(
						[this, cmd] {
							cmd->Execute();
							std::unique_lock<std::mutex> lck(mtx_);
							condVar_.notify_all();
						},
						k64KStack);
				}
			});
		});
		cmdAsync_.start();

		// NOTE(review): this inner lambda shadows the outer `fn`; only the
		// inner one is spawned on the loop below. Legal but confusing.
		auto fn = [this](const string& dsn, Args&&... args) {
			// Restore the "output" variable from the settings file, if any.
			string outputMode;
			if (reindexer::fs::ReadFile(reindexer::fs::JoinPath(reindexer::fs::GetHomeDir(), kConfigFile), outputMode) > 0) {
				gason::JsonParser jsonParser;
				gason::JsonNode value = jsonParser.Parse(reindexer::giftStr(outputMode));
				for (auto node : value) {
					WrSerializer ser;
					reindexer::jsonValueToString(node.value, ser, 0, 0, false);
					variables_[kVariableOutput] = string(ser.Slice());
				}
			}
			if (variables_.empty()) {
				variables_[kVariableOutput] = kOutputModeJson;
			}
			Error err;
			if (!uri_.parse(dsn)) {
				err = Error(errNotValid, "Cannot connect to DB: Not a valid uri");
			}
			if (err.ok()) err = db().Connect(dsn, std::forward<Args>(args)...);
			if (err.ok()) {
				loop_.spawn(
					[this] {
						// This coroutine should prevent loop from stopping for core::Reindexer
						stopCh_.pop();
					},
					k8KStack);
			}
			std::lock_guard<std::mutex> lck(mtx_);
			status_.running = err.ok();
			status_.err = std::move(err);
		};
		loop_.spawn(std::bind(fn, std::cref(dsn), std::forward<Args>(args)...));
		loop_.run();
	};

	status_ = Status();
	executorThr_ = std::thread(std::bind(fn, std::cref(dsn), std::forward<Args>(args)...));
	// Poll (1ms steps) until the thread reports running or an error.
	auto status = getStatus();
	while (!status.running && status.err.ok()) {
		std::this_thread::sleep_for(std::chrono::milliseconds(1));
		status = getStatus();
	}
	if (!status.err.ok()) {
		executorThr_.join();
		return status.err;
	}
	auto err = output_.Status();
	if (!err.ok()) {
		std::cerr << "Output error: " << err.what() << std::endl;
	}
	return err;
}
// Rebuild the DSN string from the parsed URI. Credentials are included only
// when both username and password are present; the path component is
// replaced by "/" unless withPath is set.
template <typename DBInterface>
string CommandsExecutor<DBInterface>::getCurrentDsn(bool withPath) const {
	string result = uri_.scheme() + "://";
	const bool haveCredentials = !uri_.password().empty() && !uri_.username().empty();
	if (haveCredentials) {
		result += uri_.username() + ":" + uri_.password() + "@";
	}
	result += uri_.hostname() + ":" + uri_.port() + (withPath ? uri_.path() : "/");
	return result;
}
// Serialize query results as JSON to `o`. Handles WAL queries (raw records
// are dumped via WALRecord), optional pretty-printing, and page-by-page
// scrolling on interactive terminals. Honors cancellation between items.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::queryResultsToJson(ostream& o, const typename DBInterface::QueryResultsT& r, bool isWALQuery,
														bool fstream) {
	if (cancelCtx_.IsCancelled()) return errOK;
	WrSerializer ser;
	size_t i = 0;
	// Scroll only when writing to a real terminal and the result set is
	// taller than the screen.
	bool scrollable = !fstream && !reindexer::isStdoutRedirected();
	reindexer::TerminalSize terminalSize;
	if (scrollable) {
		terminalSize = reindexer::getTerminalSize();
		scrollable = (int(r.Count()) > terminalSize.height);
	}
	bool prettyPrint = variables_[kVariableOutput] == kOutputModePretty;
	for (auto it : r) {
		if (cancelCtx_.IsCancelled()) break;
		if (isWALQuery) ser << '#' << it.GetLSN() << ' ';
		if (it.IsRaw()) {
			// Raw WAL record: dump it, resolving CJSON payloads through a
			// temporary item of the first namespace in the results.
			reindexer::WALRecord rec(it.GetRaw());
			rec.Dump(ser, [this, &r](string_view cjson) {
				auto item = db().NewItem(r.GetNamespaces()[0]);
				item.FromCJSON(cjson);
				return string(item.GetJSON());
			});
		} else {
			if (isWALQuery) ser << "WalItemUpdate ";
			Error err = it.GetJSON(ser, false);
			if (!err.ok()) return err;

			if (prettyPrint) {
				string json(ser.Slice());
				ser.Reset();
				prettyPrintJSON(reindexer::giftStr(json), ser);
			}
		}
		if ((++i != r.Count()) && !isWALQuery) ser << ',';
		ser << '\n';
		// Flush the buffer when it grows past 1MB, or per item when
		// pretty-printing or scrolling (pausing once per screenful).
		if ((ser.Len() > 0x100000) || prettyPrint || scrollable) {
			if (scrollable && (i % (terminalSize.height - 1) == 0)) {
				WaitEnterToContinue(o, terminalSize.width, [this]() -> bool { return cancelCtx_.IsCancelled(); });
			}
			o << ser.Slice();
			ser.Reset();
		}
	}
	if (!cancelCtx_.IsCancelled()) {
		o << ser.Slice();
	}
	return errOK;
}
// Enumerate the databases available on the server (network client only).
template <>
Error CommandsExecutor<reindexer::client::CoroReindexer>::getAvailableDatabases(vector<string>& dbList) {
	return db().EnumDatabases(dbList);
}

// The embedded reindexer has no database list; succeed with an empty result.
template <>
Error CommandsExecutor<reindexer::Reindexer>::getAvailableDatabases(vector<string>&) {
	return Error();
}
// Complete a backslash command line: depending on the command already typed,
// suggest namespace names, subcommand keywords, database names, or (when
// nothing matches) the top-level command names themselves.
template <typename DBInterface>
void CommandsExecutor<DBInterface>::addCommandsSuggestions(std::string const& cmd, std::vector<string>& suggestions) {
	LineParser parser(cmd);
	string_view token = parser.NextToken();

	// \upsert / \delete take a namespace as the next token.
	if ((token == "\\upsert") || (token == "\\delete")) {
		token = parser.NextToken();
		if (parser.End()) {
			checkForNsNameMatch(token, suggestions);
		}
	} else if ((token == "\\dump") && !parser.End()) {
		// \dump takes any number of namespaces.
		while (!parser.End()) {
			checkForNsNameMatch(parser.NextToken(), suggestions);
		}
	} else if (token == "\\namespaces") {
		token = parser.NextToken();
		if (token == "drop") {
			checkForNsNameMatch(parser.NextToken(), suggestions);
		} else {
			checkForCommandNameMatch(token, {"add", "list", "drop"}, suggestions);
		}
	} else if (token == "\\meta") {
		checkForCommandNameMatch(parser.NextToken(), {"put", "list"}, suggestions);
	} else if (token == "\\set") {
		token = parser.NextToken();
		if (token == "output") {
			checkForCommandNameMatch(parser.NextToken(), {"json", "pretty", "table"}, suggestions);
		} else {
			checkForCommandNameMatch(token, {"output"}, suggestions);
		}
	} else if (token == "\\subscribe") {
		// \subscribe accepts on/off or a namespace name.
		token = parser.NextToken();
		checkForCommandNameMatch(token, {"on", "off"}, suggestions);
		checkForNsNameMatch(token, suggestions);
	} else if (token == "\\databases") {
		token = parser.NextToken();
		if (token == "use") {
			// Suggest server-side database names (network client only).
			vector<string> dbList;
			Error err = getAvailableDatabases(dbList);
			if (err.ok()) {
				token = parser.NextToken();
				for (const string& dbName : dbList) {
					if (token.empty() || reindexer::isBlank(token) ||
						((token.length() < dbName.length()) && reindexer::checkIfStartsWith(token, dbName))) {
						suggestions.emplace_back(dbName);
					}
				}
			}
		} else {
			checkForCommandNameMatch(token, {"use", "list"}, suggestions);
		}
	} else {
		// No subcommand context: complete top-level command names
		// (suggested without their leading backslash).
		for (const commandDefinition& cmdDef : cmds_) {
			if (token.empty() || reindexer::isBlank(token) ||
				((token.length() < cmdDef.command.length()) && reindexer::checkIfStartsWith(token, cmdDef.command))) {
				suggestions.emplace_back(cmdDef.command[0] == '\\' ? cmdDef.command.substr(1) : cmdDef.command);
			}
		}
	}
}
// Append to `suggestions` every namespace whose name extends the typed
// prefix `str` (an empty/blank prefix matches everything). Enumeration
// errors are silently ignored — suggestions are best-effort.
template <typename DBInterface>
void CommandsExecutor<DBInterface>::checkForNsNameMatch(string_view str, std::vector<string>& suggestions) {
	vector<NamespaceDef> allNsDefs;
	Error err = db().EnumNamespaces(allNsDefs, reindexer::EnumNamespacesOpts().WithClosed());
	if (!err.ok()) return;
	for (auto& ns : allNsDefs) {
		if (str.empty() || reindexer::isBlank(str) || ((str.length() < ns.name.length()) && reindexer::checkIfStartsWith(str, ns.name))) {
			suggestions.emplace_back(ns.name);
		}
	}
}

// Append to `suggestions` every candidate keyword from `cmds` that extends
// the typed prefix `str` (an empty/blank prefix matches everything).
template <typename DBInterface>
void CommandsExecutor<DBInterface>::checkForCommandNameMatch(string_view str, std::initializer_list<string_view> cmds,
															 std::vector<string>& suggestions) {
	for (string_view cmd : cmds) {
		if (str.empty() || reindexer::isBlank(str) || ((str.length() < cmd.length()) && reindexer::checkIfStartsWith(str, cmd))) {
			suggestions.emplace_back(cmd);
		}
	}
}
// Dispatch one input line: blank lines and "--" comments are no-ops;
// otherwise the first token is matched case-insensitively against the
// command table and the handler invoked. A cancellation during the handler
// is translated into errCanceled, and the cancel flag is reset afterwards.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::processImpl(const std::string& command) {
	LineParser parser(command);
	auto token = parser.NextToken();

	if (!token.length() || token.substr(0, 2) == "--") return errOK;

	Error ret;
	for (auto& c : cmds_) {
		if (iequals(token, c.command)) {
			ret = (this->*(c.handler))(command);
			if (cancelCtx_.IsCancelled()) {
				ret = Error(errCanceled, "Canceled");
			}
			cancelCtx_.Reset();
			return ret;
		}
	}
	return Error(errParams, "Unknown command '%s'. Type '\\help' to list of available commands", token);
}
// stop() for the embedded DB: a terminating stop only closes stopCh_, which
// releases the keep-alive coroutine spawned in runImpl so the loop can end.
template <>
Error CommandsExecutor<reindexer::Reindexer>::stop(bool terminate) {
	if (terminate) {
		stopCh_.close();
	}
	return Error();
}

// stop() for the network client: additionally stops the client connection.
template <>
Error CommandsExecutor<reindexer::client::CoroReindexer>::stop(bool terminate) {
	if (terminate) {
		stopCh_.close();
	}
	return db().Stop();
}
// Build suggestions for the current input: SQL suggestions from the DB for
// plain queries, falling back to tool-command completion when the input is
// a backslash command or the DB produced nothing.
template <typename DBInterface>
void CommandsExecutor<DBInterface>::getSuggestions(const std::string& input, std::vector<std::string>& suggestions) {
	if (!input.empty() && input[0] != '\\') db().GetSqlSuggestions(input, input.length() - 1, suggestions);
	if (suggestions.empty()) {
		addCommandsSuggestions(input, suggestions);
	}
}
// Execute a SELECT SQL statement and print the results in the configured
// output mode (table or JSON), followed by the explain output, row counts
// and any aggregation results.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandSelect(const string& command) {
	typename DBInterface::QueryResultsT results(kResultsWithPayloadTypes | kResultsCJson | kResultsWithItemID | kResultsWithRaw);

	Query q;
	try {
		q.FromSQL(command);
	} catch (const Error& err) {
		return err;
	}

	auto err = db().Select(q, results);

	if (err.ok()) {
		if (results.Count()) {
			auto& outputType = variables_[kVariableOutput];
			if (outputType == kOutputModeTable) {
				// Table mode: scroll page-by-page on an interactive
				// terminal, otherwise print the whole table at once.
				auto isCanceled = [this]() -> bool { return cancelCtx_.IsCancelled(); };
				reindexer::TableViewBuilder<typename DBInterface::QueryResultsT> tableResultsBuilder(results);
				if (output_.IsCout() && !reindexer::isStdoutRedirected()) {
					TableViewScroller<typename DBInterface::QueryResultsT> resultsScroller(results, tableResultsBuilder,
																						   reindexer::getTerminalSize().height - 1);
					resultsScroller.Scroll(output_, isCanceled);
				} else {
					tableResultsBuilder.Build(output_(), isCanceled);
				}
			} else {
				// JSON mode: emit the rows as a JSON array.
				output_() << "[" << std::endl;
				err = queryResultsToJson(output_(), results, q.IsWALQuery(), !output_.IsCout());
				output_() << "]" << std::endl;
			}
		}

		// Explain output (pretty-printed when the output mode asks for it).
		string explain = results.GetExplainResults();
		if (!explain.empty() && !cancelCtx_.IsCancelled()) {
			output_() << "Explain: " << std::endl;
			if (variables_[kVariableOutput] == kOutputModePretty) {
				WrSerializer ser;
				prettyPrintJSON(reindexer::giftStr(explain), ser);
				output_() << ser.Slice() << std::endl;
			} else {
				output_() << explain << std::endl;
			}
		}
		output_() << "Returned " << results.Count() << " rows";
		if (results.TotalCount()) output_() << ", total count " << results.TotalCount();
		output_() << std::endl;

		// Aggregation results: facets as a fixed-width text table,
		// distincts as a value list, everything else as "fn(field) = value".
		auto& aggResults = results.GetAggregationResults();
		if (aggResults.size() && !cancelCtx_.IsCancelled()) {
			output_() << "Aggregations: " << std::endl;
			for (auto& agg : aggResults) {
				switch (agg.type) {
					case AggFacet: {
						assert(!agg.fields.empty());
						// Column widths: the wider of header and values, +3 padding.
						reindexer::h_vector<int, 1> maxW;
						maxW.reserve(agg.fields.size());
						for (const auto& field : agg.fields) {
							maxW.push_back(field.length());
						}
						for (auto& row : agg.facets) {
							assert(row.values.size() == agg.fields.size());
							for (size_t i = 0; i < row.values.size(); ++i) {
								maxW.at(i) = std::max(maxW.at(i), int(row.values[i].length()));
							}
						}
						int rowWidth = 8 + (maxW.size() - 1) * 2;
						for (auto& mW : maxW) {
							mW += 3;
							rowWidth += mW;
						}
						for (size_t i = 0; i < agg.fields.size(); ++i) {
							if (i != 0) output_() << "| ";
							output_() << std::left << std::setw(maxW.at(i)) << agg.fields[i];
						}
						output_() << "| count" << std::endl;
						// Separator rule between the header and data rows.
						output_() << std::left << std::setw(rowWidth) << std::setfill('-') << "" << std::endl << std::setfill(' ');
						for (auto& row : agg.facets) {
							for (size_t i = 0; i < row.values.size(); ++i) {
								if (i != 0) output_() << "| ";
								output_() << std::left << std::setw(maxW.at(i)) << row.values[i];
							}
							output_() << "| " << row.count << std::endl;
						}
					} break;
					case AggDistinct:
						assert(agg.fields.size() == 1);
						output_() << "Distinct (" << agg.fields.front() << ")" << std::endl;
						for (auto& v : agg.distincts) {
							output_() << v << std::endl;
						}
						output_() << "Returned " << agg.distincts.size() << " values" << std::endl;
						break;
					default:
						assert(agg.fields.size() == 1);
						output_() << agg.aggTypeToStr(agg.type) << "(" << agg.fields.front() << ") = " << agg.value << std::endl;
				}
			}
		}
	}
	return err;
}
// Handle "\upsert <namespace> <json>": parse the JSON document and upsert it
// into the namespace. Only JSON objects are accepted.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandUpsert(const string& command) {
	LineParser parser(command);
	parser.NextToken();	// skip the command token itself

	string nsName = reindexer::unescapeString(parser.NextToken());

	// FIX: reject top-level arrays BEFORE parsing. The original performed
	// this check after FromJSON(), so array input typically produced a
	// generic parse error instead of this dedicated message (and parsing
	// work was wasted when it did not).
	if (!parser.CurPtr().empty() && (parser.CurPtr())[0] == '[') {
		return Error(errParams, "Impossible to update entire item with array - only objects are allowed");
	}

	auto item = db().NewItem(nsName);

	Error status = item.Status();
	if (!status.ok()) {
		return status;
	}

	status = item.Unsafe().FromJSON(parser.CurPtr());
	if (!status.ok()) {
		return status;
	}

	return db().Upsert(nsName, item);
}
// Execute an UPDATE SQL statement and report how many documents changed.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandUpdateSQL(const string& command) {
	Query query;
	try {
		query.FromSQL(command);
	} catch (const Error& parseErr) {
		return parseErr;
	}
	typename DBInterface::QueryResultsT results;
	Error status = db().Update(query, results);
	if (status.ok()) {
		output_() << "Updated " << results.Count() << " documents" << std::endl;
	}
	return status;
}
// Handles "\delete <ns> <json>": deletes the item described by the JSON payload.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandDelete(const string& command) {
	LineParser parser(command);
	parser.NextToken();  // skip the command token
	const auto nsName = reindexer::unescapeString(parser.NextToken());

	auto item = db().NewItem(nsName);
	Error err = item.Status();
	if (!err.ok()) return err;

	err = item.Unsafe().FromJSON(parser.CurPtr());
	if (!err.ok()) return err;

	return db().Delete(nsName, item);
}
// Executes an SQL DELETE statement and reports the number of removed documents.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandDeleteSQL(const string& command) {
	Query query;
	try {
		query.FromSQL(command);
	} catch (const Error& parseErr) {
		return parseErr;
	}

	typename DBInterface::QueryResultsT results;
	const auto err = db().Delete(query, results);
	if (err.ok()) {
		output_() << "Deleted " << results.Count() << " documents" << std::endl;
	}
	return err;
}
// Dumps namespaces (all of them, or only the ones listed after the command) as
// a replayable sequence of tool commands: \NAMESPACES ADD, \META PUT, \UPSERT.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandDump(const string& command) {
	LineParser parser(command);
	parser.NextToken();

	vector<NamespaceDef> allNsDefs, doNsDefs;

	auto err = db().WithContext(&cancelCtx_).EnumNamespaces(allNsDefs, reindexer::EnumNamespacesOpts());
	if (err) return err;

	if (!parser.End()) {
		// build list of namespaces for dumped
		while (!parser.End()) {
			auto ns = parser.NextToken();
			auto nsDef = std::find_if(allNsDefs.begin(), allNsDefs.end(), [&ns](const NamespaceDef& nsDef) { return ns == nsDef.name; });
			if (nsDef != allNsDefs.end()) {
				doNsDefs.push_back(std::move(*nsDef));
				allNsDefs.erase(nsDef);
			} else {
				// Unknown names are reported but do not abort the dump.
				std::cerr << "Namespace '" << ns << "' - skipped. (not found in storage)" << std::endl;
			}
		}
	} else {
		// No explicit list given: dump everything.
		doNsDefs = std::move(allNsDefs);
	}

	reindexer::WrSerializer wrser;

	wrser << "-- Reindexer DB backup file" << '\n';
	wrser << "-- VERSION 1.0" << '\n';

	for (auto& nsDef : doNsDefs) {
		// skip system namespaces, except #config
		if (nsDef.name.length() > 0 && nsDef.name[0] == '#' && nsDef.name != "#config") continue;

		wrser << "-- Dumping namespace '" << nsDef.name << "' ..." << '\n';

		wrser << "\\NAMESPACES ADD " << reindexer::escapeString(nsDef.name) << " ";
		nsDef.GetJSON(wrser);
		wrser << '\n';

		// Namespace metadata is emitted before the data rows.
		vector<string> meta;
		err = db().WithContext(&cancelCtx_).EnumMeta(nsDef.name, meta);
		if (err) {
			return err;
		}
		for (auto& mkey : meta) {
			string mdata;
			err = db().WithContext(&cancelCtx_).GetMeta(nsDef.name, mkey, mdata);
			if (err) {
				return err;
			}
			wrser << "\\META PUT " << reindexer::escapeString(nsDef.name) << " " << reindexer::escapeString(mkey) << " "
				  << reindexer::escapeString(mdata) << '\n';
		}

		typename DBInterface::QueryResultsT itemResults;
		err = db().WithContext(&cancelCtx_).Select(Query(nsDef.name), itemResults);

		if (!err.ok()) return err;

		for (auto it : itemResults) {
			if (!it.Status().ok()) return it.Status();
			if (cancelCtx_.IsCancelled()) {
				return Error(errCanceled, "Canceled");
			}
			wrser << "\\UPSERT " << reindexer::escapeString(nsDef.name) << ' ';
			it.GetJSON(wrser, false);
			wrser << '\n';
			// Flush in ~1MB chunks to keep memory bounded on large namespaces.
			if (wrser.Len() > 0x100000) {
				output_() << wrser.Slice();
				wrser.Reset();
			}
		}
	}
	output_() << wrser.Slice();
	return errOK;
}
// Handles "\namespaces add|list|drop|truncate|rename ...".
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandNamespaces(const string& command) {
	LineParser parser(command);
	parser.NextToken();

	string_view subCommand = parser.NextToken();

	if (iequals(subCommand, "add")) {
		// NOTE(review): nsName is parsed but unused below -- the namespace name
		// is taken from the JSON definition (def.name). Confirm this is intended;
		// the token still has to be consumed to reach the JSON payload.
		auto nsName = reindexer::unescapeString(parser.NextToken());

		NamespaceDef def("");
		Error err = def.FromJSON(reindexer::giftStr(parser.CurPtr()));
		if (!err.ok()) {
			return Error(errParseJson, "Namespace structure is not valid - %s", err.what());
		}

		def.storage.DropOnFileFormatError(true);
		def.storage.CreateIfMissing(true);

		err = db().OpenNamespace(def.name);
		if (!err.ok()) {
			return err;
		}
		// Indexes and schema are applied one-by-one after the namespace is open.
		for (auto& idx : def.indexes) {
			err = db().AddIndex(def.name, idx);
			if (!err.ok()) {
				return err;
			}
		}
		err = db().SetSchema(def.name, def.schemaJson);
		if (!err.ok()) {
			return err;
		}
		return errOK;

	} else if (iequals(subCommand, "list")) {
		vector<NamespaceDef> allNsDefs;

		auto err = db().EnumNamespaces(allNsDefs, reindexer::EnumNamespacesOpts().WithClosed());
		for (auto& ns : allNsDefs) {
			output_() << ns.name << std::endl;
		}
		return err;

	} else if (iequals(subCommand, "drop")) {
		auto nsName = reindexer::unescapeString(parser.NextToken());
		return db().DropNamespace(nsName);
	} else if (iequals(subCommand, "truncate")) {
		auto nsName = reindexer::unescapeString(parser.NextToken());
		return db().TruncateNamespace(nsName);
	} else if (iequals(subCommand, "rename")) {
		auto nsName = reindexer::unescapeString(parser.NextToken());
		auto nsNewName = reindexer::unescapeString(parser.NextToken());
		return db().RenameNamespace(nsName, nsNewName);
	}
	return Error(errParams, "Unknown sub command '%s' of namespaces command", subCommand);
}
// Handles "\meta put|list ...": stores or enumerates per-namespace metadata.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandMeta(const string& command) {
	LineParser parser(command);
	parser.NextToken();
	string_view subCommand = parser.NextToken();

	if (iequals(subCommand, "put")) {
		string nsName = reindexer::unescapeString(parser.NextToken());
		string metaKey = reindexer::unescapeString(parser.NextToken());
		string metaData = reindexer::unescapeString(parser.NextToken());
		return db().PutMeta(nsName, metaKey, metaData);
	} else if (iequals(subCommand, "list")) {
		auto nsName = reindexer::unescapeString(parser.NextToken());
		vector<std::string> allMeta;
		auto err = db().EnumMeta(nsName, allMeta);
		for (auto& metaKey : allMeta) {
			string metaData;
			// FIX: GetMeta's result used to be ignored, silently printing an
			// empty value on failure; propagate read errors instead.
			auto readErr = db().GetMeta(nsName, metaKey, metaData);
			if (!readErr.ok()) {
				return readErr;
			}
			output_() << metaKey << " = " << metaData << std::endl;
		}
		return err;
	}
	return Error(errParams, "Unknown sub command '%s' of meta command", subCommand);
}
// Handles "\help [command]": lists all commands, or shows detail for one.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandHelp(const string& command) {
	LineParser parser(command);
	parser.NextToken();
	string_view subCommand = parser.NextToken();

	if (subCommand.length()) {
		auto found = std::find_if(cmds_.begin(), cmds_.end(),
								  [&subCommand](const commandDefinition& def) { return iequals(def.command, subCommand); });
		if (found == cmds_.end()) {
			return Error(errParams, "Unknown command '%s' to help. To list of available command type '\\help'", subCommand);
		}
		output_() << found->command << " - " << found->description << ":" << std::endl << found->help << std::endl;
	} else {
		output_() << "Available commands:\n\n";
		for (auto cmd : cmds_) {
			output_() << " " << std::left << std::setw(20) << cmd.command << "- " << cmd.description << std::endl;
		}
	}
	return errOK;
}
// Handles "\quit": closing the stop channel terminates the command loop.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandQuit(const string&) {
	stopCh_.close();
	return errOK;
}
// Handles "\set <name> <value>": updates an in-memory variable and persists the
// whole variable map as JSON into the user's config file.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandSet(const string& command) {
	LineParser parser(command);
	parser.NextToken();

	string_view variableName = parser.NextToken();
	string_view variableValue = parser.NextToken();

	variables_[string(variableName)] = string(variableValue);

	WrSerializer wrser;
	reindexer::JsonBuilder configBuilder(wrser);
	for (auto it = variables_.begin(); it != variables_.end(); ++it) {
		configBuilder.Put(it->first, it->second);
	}
	configBuilder.End();
	// NOTE(review): WriteFile's result is ignored -- looks like a deliberate
	// best-effort save, but confirm; a failed write is currently silent.
	reindexer::fs::WriteFile(reindexer::fs::JoinPath(reindexer::fs::GetHomeDir(), kConfigFile), wrser.Slice());
	return errOK;
}
// Handles "\bench [seconds]": recreates the bench namespace, seeds it, then
// hammers it with parallel SELECT workers and reports aggregate QPS.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandBench(const string& command) {
	LineParser parser(command);
	parser.NextToken();

	int benchTime = stoi(parser.NextToken());
	if (benchTime == 0) benchTime = kBenchDefaultTime;

	// Start from a clean namespace with a single integer PK index.
	db().DropNamespace(kBenchNamespace);

	NamespaceDef nsDef(kBenchNamespace);
	nsDef.AddIndex("id", "hash", "int", IndexOpts().PK());

	auto err = db().AddNamespace(nsDef);
	if (!err.ok()) return err;

	output_() << "Seeding " << kBenchItemsCount << " documents to bench namespace..." << std::endl;
	err = seedBenchItems();
	output_() << "done." << std::endl;
	if (!err.ok()) {
		return err;
	}
	output_() << "Running " << benchTime << "s benchmark..." << std::endl;
	// Brief pause to let the seeding settle before measuring.
	std::this_thread::sleep_for(std::chrono::seconds(1));

	auto deadline = std::chrono::system_clock::now() + std::chrono::seconds(benchTime);

	std::atomic<int> count(0), errCount(0);

	// numThreads_ workers total: numThreads_ - 1 spawned threads, plus the
	// current thread running the same worker before joining the rest.
	auto worker = std::bind(getBenchWorkerFn(count, errCount), deadline);
	auto threads = std::unique_ptr<std::thread[]>(new std::thread[numThreads_ - 1]);
	for (int i = 0; i < numThreads_ - 1; i++) threads[i] = std::thread(worker);
	worker();
	for (int i = 0; i < numThreads_ - 1; i++) threads[i].join();

	output_() << "Done. Got " << count / benchTime << " QPS, " << errCount << " errors" << std::endl;
	return err;
}
// Handles "\subscribe [on|off|<ns>...]": toggles the WAL-update subscription,
// and warns about requested namespaces that do not currently exist.
template <typename DBInterface>
Error CommandsExecutor<DBInterface>::commandSubscribe(const string& command) {
	LineParser parser(command);
	parser.NextToken();

	reindexer::UpdatesFilters filters;
	auto token = parser.NextToken();
	if (iequals(token, "off")) {
		return db().UnsubscribeUpdates(this);
	} else if (token.empty() || iequals(token, "on")) {
		// No namespace list: subscribe to everything (empty filter set).
		return db().SubscribeUpdates(this, filters);
	}

	// Explicit namespace list: one empty (match-all) filter per namespace.
	std::vector<std::string> nsInSubscription;
	while (!token.empty()) {
		filters.AddFilter(token, reindexer::UpdatesFilters::Filter());
		nsInSubscription.emplace_back(token);

		token = parser.NextToken();
	}

	auto err = db().SubscribeUpdates(this, filters);
	if (!err.ok()) {
		return err;
	}
	// Cross-check requested names against existing namespaces so typos are
	// surfaced; the subscription itself has already succeeded at this point.
	vector<NamespaceDef> allNsDefs;
	err = db().EnumNamespaces(allNsDefs, reindexer::EnumNamespacesOpts().WithClosed());
	if (!err.ok()) {
		return err;
	}
	for (auto& ns : allNsDefs) {
		for (auto it = nsInSubscription.begin(); it != nsInSubscription.end();) {
			if (*it == ns.name) {
				it = nsInSubscription.erase(it);
			} else {
				++it;
			}
		}
	}
	if (!nsInSubscription.empty()) {
		output_() << "WARNING: You have subscribed for non-existing namespace updates: ";
		for (auto it = nsInSubscription.begin(); it != nsInSubscription.end(); ++it) {
			if (it != nsInSubscription.begin()) {
				output_() << ", ";
			}
			output_() << *it;
		}
		output_() << std::endl;
	}
	return errOK;
}
// Handles "\databases list|use|create ..." for the cproto (network) client.
// "use" and "create" reconnect to the same host with the new database name.
template <>
Error CommandsExecutor<reindexer::client::CoroReindexer>::commandProcessDatabases(const string& command) {
	LineParser parser(command);
	parser.NextToken();
	string_view subCommand = parser.NextToken();
	assert(uri_.scheme() == "cproto");
	if (subCommand == "list") {
		vector<string> dbList;
		Error err = getAvailableDatabases(dbList);
		if (!err.ok()) return err;
		for (const string& dbName : dbList) output_() << dbName << std::endl;
		return Error();
	} else if (subCommand == "use") {
		string currentDsn = getCurrentDsn() + std::string(parser.NextToken());
		Error err = stop(false);
		if (!err.ok()) return err;
		err = db().Connect(currentDsn, loop_);
		if (err.ok()) err = db().Status();
		// FIX: user-facing message typo "Succesfully" -> "Successfully".
		if (err.ok()) output_() << "Successfully connected to " << currentDsn << std::endl;
		return err;
	} else if (subCommand == "create") {
		auto dbName = parser.NextToken();
		string currentDsn = getCurrentDsn() + std::string(dbName);
		Error err = stop(false);
		if (!err.ok()) return err;
		output_() << "Creating database '" << dbName << "'" << std::endl;
		err = db().Connect(currentDsn, loop_, reindexer::client::ConnectOpts().CreateDBIfMissing());
		if (!err.ok()) {
			std::cerr << "Error on database '" << dbName << "' creation" << std::endl;
			return err;
		}
		// Verify the database is actually visible after creation.
		std::vector<std::string> dbNames;
		err = db().EnumDatabases(dbNames);
		if (std::find(dbNames.begin(), dbNames.end(), std::string(dbName)) != dbNames.end()) {
			// FIX: user-facing message typo "Succesfully" -> "Successfully".
			output_() << "Successfully created database '" << dbName << "'" << std::endl;
		} else {
			std::cerr << "Error on database '" << dbName << "' creation" << std::endl;
		}
		return err;
	}
	return Error(errNotValid, "Invalid command");
}
// Builtin (embedded) mode has no server-side database catalog to manage.
template <>
Error CommandsExecutor<reindexer::Reindexer>::commandProcessDatabases(const string& command) {
	(void)command;	// intentionally unused
	return Error(errNotValid, "Database processing commands are not supported in builtin mode");
}
// Fills the bench namespace with kBenchItemsCount simple {id, data} documents.
template <>
Error CommandsExecutor<reindexer::client::CoroReindexer>::seedBenchItems() {
	for (int id = 0; id < kBenchItemsCount; ++id) {
		WrSerializer ser;
		JsonBuilder(ser).Put("id", id).Put("data", id);

		auto item = db().NewItem(kBenchNamespace);
		auto err = item.Unsafe().FromJSON(ser.Slice());
		if (!err.ok()) return err;

		err = db().Upsert(kBenchNamespace, item);
		if (!err.ok()) return err;
	}
	return errOK;
}
// Seeds bench items concurrently, using coroutines spawned on the executor's
// event loop, split into kSingleThreadCoroCount equal ranges.
// NOTE(review): this coroutine-based variant is specialized for the builtin
// Reindexer while the network client gets the plain sequential loop above --
// confirm the two specializations are not accidentally swapped.
template <>
Error CommandsExecutor<reindexer::Reindexer>::seedBenchItems() {
	using reindexer::coroutine::wait_group;
	Error err;
	// Upserts ids in [beg, end); the first failure is published to `err` and
	// stops this coroutine. Coroutines here are cooperative on one thread, so
	// the shared write to `err` is not a data race.
	auto upsertFn = [this, &err](size_t beg, size_t end, wait_group& wg) {
		reindexer::coroutine::wait_group_guard wgg(wg);
		for (size_t i = beg; i < end; ++i) {
			auto item = db().NewItem(kBenchNamespace);
			WrSerializer ser;
			JsonBuilder(ser).Put("id", i).Put("data", i);
			auto intErr = item.Unsafe().FromJSON(ser.Slice());
			if (intErr.ok()) intErr = db().Upsert(kBenchNamespace, item);
			if (!intErr.ok()) {
				err = intErr;
				return;
			}
			if (!err.ok()) {
				// Another coroutine already failed; bail out early.
				return;
			}
		}
	};
	auto itemsPerCoro = kBenchItemsCount / kSingleThreadCoroCount;
	wait_group wg;
	wg.add(kSingleThreadCoroCount);
	for (int i = 0; i < kBenchItemsCount; i += itemsPerCoro) {
		loop_.spawn(std::bind(upsertFn, i, std::min(i + itemsPerCoro, kBenchItemsCount), std::ref(wg)), k24KStack);
	}
	wg.wait();
	return err;
}
// Returns a per-thread bench worker: each worker runs its own event loop and
// its own connection, with kSingleThreadCoroCount select-coroutines looping
// until the deadline passes. count/errCount are shared atomics across workers.
template <>
std::function<void(std::chrono::system_clock::time_point)> CommandsExecutor<reindexer::client::CoroReindexer>::getBenchWorkerFn(
	std::atomic<int>& count, std::atomic<int>& errCount) {
	using reindexer::coroutine::wait_group;
	return [this, &count, &errCount](std::chrono::system_clock::time_point deadline) {
		reindexer::net::ev::dynamic_loop loop;
		loop.spawn([this, &loop, deadline, &count, &errCount] {
			reindexer::client::CoroReindexer rx;
			rx.Connect(getCurrentDsn(true), loop);
			auto selectFn = [&rx, deadline, &count, &errCount](wait_group& wg) {
				reindexer::coroutine::wait_group_guard wgg(wg);
				for (; std::chrono::system_clock::now() < deadline; ++count) {
					Query q(kBenchNamespace);
					q.Where(kBenchIndex, CondEq, count % kBenchItemsCount);
					reindexer::client::CoroReindexer::QueryResultsT results;
					auto err = rx.Select(q, results);
					if (!err.ok()) errCount++;
				}
			};
			wait_group wg;
			wg.add(kSingleThreadCoroCount);
			for (int i = 0; i < kSingleThreadCoroCount; ++i) {
				loop.spawn(std::bind(selectFn, std::ref(wg)), k24KStack);
			}
			wg.wait();
			rx.Stop();
		});
		loop.run();
	};
}
// Returns a bench worker issuing asynchronous SELECTs against the builtin DB.
template <>
std::function<void(std::chrono::system_clock::time_point)> CommandsExecutor<reindexer::Reindexer>::getBenchWorkerFn(
	std::atomic<int>& count, std::atomic<int>& errCount) {
	return [this, &count, &errCount](std::chrono::system_clock::time_point deadline) {
		// The deadline is only checked every 1000th query, keeping the hot loop cheap.
		for (; (count % 1000) || std::chrono::system_clock::now() < deadline; count++) {
			Query q(kBenchNamespace);
			q.Where(kBenchIndex, CondEq, count % kBenchItemsCount);
			// Heap-allocated results must outlive this iteration: the async
			// completion callback owns them and frees them when the select finishes.
			auto results = new typename reindexer::Reindexer::QueryResultsT;
			db().WithCompletion([results, &errCount](const Error& err) {
				delete results;
				if (!err.ok()) errCount++;
			})
			.Select(q, *results);
		}
	};
}
// Subscription callback: prints each WAL record prefixed with its LSNs. CJSON
// payloads are rendered to JSON through a temporary item of the target namespace.
template <typename DBInterface>
void CommandsExecutor<DBInterface>::OnWALUpdate(reindexer::LSNPair LSNs, string_view nsName, const reindexer::WALRecord& wrec) {
	WrSerializer ser;
	ser << "# LSN " << int64_t(LSNs.upstreamLSN_) << " originLSN " << int64_t(LSNs.originLSN_) << nsName << " ";
	wrec.Dump(ser, [this, nsName](string_view cjson) {
		auto item = db().NewItem(nsName);
		item.FromCJSON(cjson);
		return string(item.GetJSON());
	});
	output_() << ser.Slice() << std::endl;
}
// Connection-state callback from the update-subscription machinery.
template <typename DBInterface>
void CommandsExecutor<DBInterface>::OnConnectionState(const Error& err) {
	if (err.ok())
		output_() << "[OnConnectionState] connected" << std::endl;
	else
		output_() << "[OnConnectionState] closed, reason: " << err.what() << std::endl;
}
// Called when updates for a namespace were dropped (subscriber fell behind).
template <typename DBInterface>
void CommandsExecutor<DBInterface>::OnUpdatesLost(string_view nsName) {
	output_() << "[OnUpdatesLost] " << nsName << std::endl;
}
template class CommandsExecutor<reindexer::client::CoroReindexer>;
template class CommandsExecutor<reindexer::Reindexer>;
template Error CommandsExecutor<reindexer::Reindexer>::Run(const string& dsn, const ConnectOpts& opts);
template Error CommandsExecutor<reindexer::client::CoroReindexer>::Run(const string& dsn, const reindexer::client::ConnectOpts& opts);
} // namespace reindexer_tool
|
jnthn/intellij-community
|
plugins/stream-debugger/testData/chain/positive/termination/ForEach.java
|
<reponame>jnthn/intellij-community
import java.util.stream.Stream;
public class Baz {
public static void bar() {
<caret> Stream.of(1).forEach(x -> {});
}
}
|
XiaZhouZero/OPEC
|
STM32Cube_FW_F4_V1.25.0/Middlewares/Third_Party/mbedTLS/doxygen/input/doc_ssltls.h
|
/**
* \file doc_ssltls.h
*
* \brief SSL/TLS communication module documentation file.
*/
/*
*
* Copyright (C) 2006-2015, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of mbed TLS (https://tls.mbed.org)
*/
/**
* @addtogroup ssltls_communication_module SSL/TLS communication module
*
* The SSL/TLS communication module provides the means to create an SSL/TLS
* communication channel.
*
* The basic provisions are:
* - initialise an SSL/TLS context (see \c mbedtls_ssl_init()).
* - perform an SSL/TLS handshake (see \c mbedtls_ssl_handshake()).
* - read/write (see \c mbedtls_ssl_read() and \c mbedtls_ssl_write()).
* - notify a peer that connection is being closed (see \c mbedtls_ssl_close_notify()).
*
* Many aspects of such a channel are set through parameters and callback
* functions:
* - the endpoint role: client or server.
 * - the authentication mode, i.e. whether verification should take place.
 * - the host-to-host communication channel. A TCP/IP module is provided.
* - the random number generator (RNG).
* - the ciphers to use for encryption/decryption.
* - session control functions.
* - X.509 parameters for certificate-handling and key exchange.
*
* This module can be used to create an SSL/TLS server and client and to provide a basic
* framework to setup and communicate through an SSL/TLS communication channel.\n
* Note that you need to provide for several aspects yourself as mentioned above.
*/
|
peter-stoyanov/Python
|
Softuni Assignments/exam 11 March/train_system.py
|
#!/usr/bin/env python
"""Ticket train system"""
__author__ = "<NAME>"
class Ticket():
    """A train ticket; a discounted ticket costs half price and records the card used."""

    def __init__(self, destination, price, card=None, discounted=False):
        self.destination = destination
        if discounted:
            # Half price when bought with a discount card.
            self.price = price * 0.5
            self.card = card
        else:
            # Full price; the card attribute is intentionally blanked.
            self.price = price
            self.card = ''
class Client():
    """A passenger with zero or more discount cards and purchased tickets."""

    def __init__(self, name, card=None):
        self.name = name
        self.cards = [card] if card else []
        self.tickets = []

    def get_total(self):
        """Total price of all purchased tickets."""
        return sum(ticket.price for ticket in self.tickets)
def main():
    """Run the ticket office: read card owners, sell tickets, print the report."""
    clients = []

    # Seed clients that already own discount cards.
    existing_cards_count = int(input())
    for n in range(existing_cards_count):
        tokens = input().split(' ')
        name = tokens[0] + ' ' + tokens[1]
        card = tokens[2]
        existing_client = find_client_by_name(clients, name)
        if existing_client:
            existing_client.cards.append(card)
        else:
            clients.append(Client(name, card))

    # Sell tickets until the terminator line.
    while True:
        line = input()
        if line == 'time to leave!':
            break
        tokens = line.split(' ')
        name = tokens[1] + ' ' + tokens[2]
        destination = tokens[3]
        attempted_card = tokens[4]

        client = find_client_by_name(clients, name)
        is_new_client = client is None
        if is_new_client:
            client = Client(name)

        _sell_ticket(clients, client, destination, attempted_card)

        # A freshly created client is registered regardless of card outcome,
        # matching the original's behaviour in every branch.
        if is_new_client:
            clients.append(client)

    # Report: clients by descending total, each client's tickets by descending price.
    sorted_clients = sorted([c for c in clients if c.get_total() > 0], key=lambda x: x.get_total(), reverse=True)
    for client in sorted_clients:
        print(f'{client.name}:')
        sorted_tickets = sorted(client.tickets, key=lambda x: x.price, reverse=True)
        for ticket in sorted_tickets:
            addition = f' (using card {ticket.card})' if ticket.card != '' else ''
            print(f'--{ticket.destination}: {ticket.price:.2f}lv' + addition)
        print(f'total: {client.get_total():.2f}lv')


def _sell_ticket(clients, client, destination, attempted_card):
    """Issue one ticket, deduplicating the original's four near-identical branches.

    Behaviour preserved exactly: a card the client already owns gives a
    discount; an unowned-but-valid card is issued (with discount) unless
    another passenger owns it; an invalid card sells a full-price ticket.
    A brand-new client has no cards, so the ownership check is a no-op for it,
    just as in the original code.
    """
    price = get_price(destination)
    if attempted_card in client.cards:
        client.tickets.append(Ticket(destination, price, attempted_card, discounted=True))
    elif card_is_valid(attempted_card):
        if find_client_by_card(clients, attempted_card):
            print(f'card {attempted_card} already exists for another passenger!')
            client.tickets.append(Ticket(destination, price))
        else:
            print(f'issuing card {attempted_card}')
            client.cards.append(attempted_card)
            client.tickets.append(Ticket(destination, price, attempted_card, discounted=True))
    else:
        print(f'card {attempted_card} is not valid!')
        client.tickets.append(Ticket(destination, price))
def card_is_valid(attempted_card):
    """A card is valid when the sum of its digits is divisible by 7."""
    digit_total = sum(int(ch) for ch in attempted_card)
    return digit_total % 7 == 0
def get_price(destination):
    """Price is the sum of the destination's character codes, divided by 100."""
    return sum(ord(ch) for ch in destination) / 100
def find_client_by_card(clients, card):
    """Return the first client owning the given card, or None."""
    return next((c for c in clients if card in c.cards), None)
def find_client_by_name(clients, name):
    """Return the first client with the given name, or None."""
    return next((c for c in clients if c.name == name), None)
if __name__ == '__main__':
main()
|
CNAD666/TaoLibrary
|
matisse_go/src/main/java/com/yt/matisse/Constant.java
|
<gh_stars>1-10
package com.yt.matisse;
import android.os.Environment;
/**
 * Video-capture related constants: recording duration limits and output location.
 *
 * <p>Author: Yu Tao. Created 2020/6/3 10:56. (Comments translated from Chinese.)
 */
public class Constant {

    /** Maximum allowed recording duration, in milliseconds. */
    public static final int DURATION = 600 * 1000;

    /** Minimum allowed recording duration, in milliseconds. */
    public static final int MIN_DURATION = 3 * 1000;

    /** Directory where captured videos are saved. */
    public static final String VIDEO_PATH = Environment.getExternalStorageDirectory().getPath() + "/matisse/video";

    /** Non-instantiable constants holder (utility class). */
    private Constant() {
    }
}
|
mgorny/gentoopm
|
gentoopm/pkgcorepm/depend.py
|
#!/usr/bin/python
# vim:fileencoding=utf-8
# (c) 2011 <NAME> <<EMAIL>>
# Released under the terms of the 2-clause BSD license.
from pkgcore.ebuild.atom import atom
from pkgcore.restrictions.boolean import (OrRestriction, AndRestriction,
JustOneRestriction, AtMostOneOfRestriction)
from pkgcore.restrictions.packages import Conditional
from pkgcore.restrictions.values import ContainmentMatch
from ..basepm.depend import (PMPackageDepSet, PMConditionalDep,
PMAnyOfDep, PMAllOfDep, PMExactlyOneOfDep, PMAtMostOneOfDep,
PMBaseDep, PMRequiredUseAtom)
from .atom import PkgCoreAtom
class PkgCoreBaseDep(PMBaseDep):
    # Wraps a pkgcore restriction container, yielding gentoopm dependency
    # wrappers for each element on iteration.
    def __init__(self, deps, pkg):
        # deps: pkgcore restriction container; pkg: package providing USE state
        # for conditional evaluation.
        self._deps = deps
        self._pkg = pkg

    def __iter__(self):
        # NOTE(review): the isinstance order matters if pkgcore's restriction
        # classes subclass one another -- confirm JustOneRestriction and
        # AtMostOneOfRestriction are not subclasses of Or/AndRestriction, or
        # they would be swallowed by the earlier branches.
        for d in self._deps:
            if isinstance(d, atom):
                yield PkgCoreAtom(d)
            elif isinstance(d, ContainmentMatch):  # REQUIRED_USE
                assert(len(d.vals) == 1)
                yield PMRequiredUseAtom(next(iter(d.vals)))
            elif isinstance(d, OrRestriction):
                yield PkgCoreAnyOfDep(d, self._pkg)
            elif isinstance(d, AndRestriction):
                yield PkgCoreAllOfDep(d, self._pkg)
            elif isinstance(d, JustOneRestriction):
                yield PkgCoreExactlyOneOfDep(d, self._pkg)
            elif isinstance(d, AtMostOneOfRestriction):
                yield PkgCoreAtMostOneOfDep(d, self._pkg)
            elif isinstance(d, Conditional) and d.attr == 'use':
                yield PkgCoreConditionalUseDep(d, self._pkg)
            else:
                raise NotImplementedError('Parsing %s not implemented' \
                        % repr(d))
class PkgCoreAnyOfDep(PMAnyOfDep, PkgCoreBaseDep):
    """|| ( ... ) dependency group (any-of)."""
    pass
class PkgCoreAllOfDep(PMAllOfDep, PkgCoreBaseDep):
    """( ... ) dependency group (all-of)."""
    pass
class PkgCoreExactlyOneOfDep(PMExactlyOneOfDep, PkgCoreBaseDep):
    """^^ ( ... ) dependency group (exactly-one-of, REQUIRED_USE)."""
    pass
class PkgCoreAtMostOneOfDep(PMAtMostOneOfDep, PkgCoreBaseDep):
    """?? ( ... ) dependency group (at-most-one-of, REQUIRED_USE)."""
    pass
class PkgCoreConditionalUseDep(PMConditionalDep, PkgCoreBaseDep):
    """use? ( ... ) conditional dependency group."""

    @property
    def enabled(self):
        # True when the guarding USE condition matches the package's USE flags.
        return self._deps.restriction.match(self._pkg.use)
class PkgCorePackageDepSet(PMPackageDepSet, PkgCoreAllOfDep):
    """Whole-depset wrapper (the top level behaves as an implicit all-of)."""

    @property
    def without_conditionals(self):
        # Evaluate USE-conditionals against the package's USE flags, yielding
        # an unconditional dependency tree.
        return PkgCoreUncondAllOfDep(
                self._deps.evaluate_depset(self._pkg.use))
class PkgCoreUncondDep(PkgCoreBaseDep):
    """Conditional-free dependency tree (USE conditionals already evaluated)."""

    def __init__(self, deps, pkg=None):
        # BUGFIX: _pkg was never stored, yet __iter__ below forwards self._pkg
        # to the nested group wrappers -- iterating any nested group raised
        # AttributeError. pkg defaults to None to keep the existing
        # single-argument construction sites valid (e.g. in
        # PkgCorePackageDepSet.without_conditionals).
        self._deps = deps
        self._pkg = pkg

    @property
    def without_conditionals(self):
        # Already unconditional -- evaluating again would be a no-op.
        return self

    def __iter__(self):
        for d in self._deps:
            if isinstance(d, atom):
                yield PkgCoreAtom(d)
            elif isinstance(d, OrRestriction):
                yield PkgCoreUncondAnyOfDep(d)
            elif isinstance(d, AndRestriction):
                yield PkgCoreUncondAllOfDep(d, self._pkg)
            elif isinstance(d, JustOneRestriction):
                yield PkgCoreUncondExactlyOneOfDep(d, self._pkg)
            elif isinstance(d, AtMostOneOfRestriction):
                yield PkgCoreUncondAtMostOneOfDep(d, self._pkg)
            else:
                raise NotImplementedError('Parsing %s not implemented' \
                        % repr(d))
class PkgCoreUncondAnyOfDep(PMAnyOfDep, PkgCoreUncondDep):
    """Unconditional || ( ... ) group."""
    pass
class PkgCoreUncondAllOfDep(PMAllOfDep, PkgCoreUncondDep):
    """Unconditional ( ... ) all-of group."""
    pass
class PkgCoreUncondExactlyOneOfDep(PMExactlyOneOfDep, PkgCoreUncondDep):
    """Unconditional ^^ ( ... ) exactly-one-of group.

    BUGFIX: this class was misnamed PkgCoreUncondAllOfDep, shadowing the real
    all-of class above and leaving PkgCoreUncondDep.__iter__'s reference to
    PkgCoreUncondExactlyOneOfDep unresolved (NameError at iteration time).
    """
    pass
class PkgCoreUncondAtMostOneOfDep(PMAtMostOneOfDep, PkgCoreUncondDep):
    """Unconditional ?? ( ... ) at-most-one-of group.

    BUGFIX: this class was misnamed PkgCoreUncondAllOfDep (the third class of
    that name in a row); PkgCoreUncondDep.__iter__ references
    PkgCoreUncondAtMostOneOfDep, which otherwise did not exist.
    """
    pass
|
openharmony-gitee-mirror/aafwk_standard
|
services/abilitymgr/test/unittest/phone/pending_want_key_test/pending_want_key_test.cpp
|
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#define private public
#include "pending_want_key.h"
#undef private
#include "want.h"
#include "wants_info.h"
constexpr int PENDING_WANT_TYPE = 1;
const std::string PENDING_WANT_BUNDLENAME = "bundleName";
const std::string PENDING_WANT_WHO = "who";
constexpr int PENDING_WANT_REQUESTCODE = 2;
const std::string PENDING_WANT_REQUESTRESLOVEDTYPE = "requestResolvedType";
constexpr int PENDING_WANT_FLAGS = 10;
constexpr int PENDING_WANT_CODE = 20;
constexpr int PENDING_WANT_USERID = 123456;
using namespace testing::ext;
using namespace OHOS::AppExecFwk;
namespace OHOS {
namespace AAFwk {
class PendingWantKeyTest : public testing::Test {
public:
static void SetUpTestCase(void);
static void TearDownTestCase(void);
void SetUp();
void TearDown();
};
void PendingWantKeyTest::SetUpTestCase(void)
{}
void PendingWantKeyTest::TearDownTestCase(void)
{}
void PendingWantKeyTest::SetUp(void)
{}
void PendingWantKeyTest::TearDown(void)
{}
/*
* @tc.number : SetType_0100
* @tc.name : set type
* @tc.desc : Set type, use GetType to verify whether the type value is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetType_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetType(PENDING_WANT_TYPE);
EXPECT_EQ(PENDING_WANT_TYPE, amsPendingWantKey->GetType());
}
/*
* @tc.number : SetBundleName_0100
* @tc.name : set BundleName
* @tc.desc : Set BundleName, use GetBundleName to verify whether the BundleName is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetBundleName_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetBundleName(PENDING_WANT_BUNDLENAME);
EXPECT_EQ(PENDING_WANT_BUNDLENAME, amsPendingWantKey->GetBundleName());
}
/*
* @tc.number : SetRequestWho_0100
* @tc.name : set RequestWho
* @tc.desc : Set RequestWho, use GetWho to verify whether the RequestWho is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetRequestWho_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetRequestWho(PENDING_WANT_WHO);
EXPECT_EQ(PENDING_WANT_WHO, amsPendingWantKey->GetRequestWho());
}
/*
* @tc.number : SetRequestCode_0100
* @tc.name : set RequestCode
* @tc.desc : Set RequestCode, use GetRequestCode to verify whether the RequestCode is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetRequestCode_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetRequestCode(PENDING_WANT_REQUESTCODE);
EXPECT_EQ(PENDING_WANT_REQUESTCODE, amsPendingWantKey->GetRequestCode());
}
/*
* @tc.number : SetRequestWant_0100
* @tc.name : set RequestWant
* @tc.desc : Set RequestWant, use GetRequestWant to verify whether the RequestWant is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetRequestWant_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
Want requestWant;
OHOS::AAFwk::Operation operation;
std::string bundleName = "ohos.pending.want.key.test";
operation.SetBundleName(bundleName);
requestWant.SetOperation(operation);
amsPendingWantKey->SetRequestWant(requestWant);
Want requestWant_ = amsPendingWantKey->GetRequestWant();
EXPECT_EQ(bundleName, requestWant_.GetOperation().GetBundleName());
}
/*
* @tc.number : SetRequestResolvedType_0100
* @tc.name : set request resolved Type
* @tc.desc : Set RequestResolvedType, use GetRequestResolvedType to verify whether the RequestResolvedType is
* set successfully
*/
HWTEST_F(PendingWantKeyTest, SetRequestResolvedType_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetRequestResolvedType(PENDING_WANT_REQUESTRESLOVEDTYPE);
EXPECT_EQ(PENDING_WANT_REQUESTRESLOVEDTYPE, amsPendingWantKey->GetRequestResolvedType());
}
/*
* @tc.number : SetAllWantsInfos_0100
* @tc.name : set All WantsInfos
* @tc.desc : Set AllWantsInfos, use GetAllWantsInfos to verify whether the AllWantsInfos is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetAllWantsInfos_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->allWantsInfos_.clear();
std::vector<WantsInfo> allWantsInfos;
WantsInfo wantsInfo;
wantsInfo.resolvedTypes = "resolvedTypes";
allWantsInfos.push_back(wantsInfo);
amsPendingWantKey->SetAllWantsInfos(allWantsInfos);
std::vector<WantsInfo> wantsInfo_ = amsPendingWantKey->GetAllWantsInfos();
EXPECT_EQ(wantsInfo.resolvedTypes, wantsInfo_.front().resolvedTypes);
}
/*
* @tc.number : SetFlags_0100
* @tc.name : set Flags
* @tc.desc : Set Flags, use GetFlags to verify whether the Flags is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetFlags_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetFlags(PENDING_WANT_FLAGS);
EXPECT_EQ(PENDING_WANT_FLAGS, amsPendingWantKey->GetFlags());
}
/*
* @tc.number : SetCode_0100
* @tc.name : set Code
* @tc.desc : Set Code, use GetCode to verify whether the Code is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetCode_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetCode(PENDING_WANT_CODE);
EXPECT_EQ(PENDING_WANT_CODE, amsPendingWantKey->GetCode());
}
/*
* @tc.number : SetUserId_0100
* @tc.name : set UserId
* @tc.desc : Set UserId, use GetUserId to verify whether the UserId is set successfully
*/
HWTEST_F(PendingWantKeyTest, SetUserId_0100, TestSize.Level1)
{
std::unique_ptr<PendingWantKey> amsPendingWantKey = std::make_unique<PendingWantKey>();
amsPendingWantKey->SetUserId(PENDING_WANT_USERID);
EXPECT_EQ(PENDING_WANT_USERID, amsPendingWantKey->GetUserId());
}
} // namespace AAFwk
} // namespace OHOS
|
luiz158/Hibernate-SpringBoot
|
HibernateSpringBootDomainEvents/src/main/java/com/bookstore/entity/Book.java
|
package com.bookstore.entity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToMany;
@Entity
public class Book implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    private Long id;

    private String title;
    private String isbn;
    private String author;

    /** Reviews owned by this book; their lifecycle is fully cascaded from the parent. */
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "book", orphanRemoval = true)
    private List<BookReview> reviews = new ArrayList<>();

    /**
     * Attaches the given review to this book, keeping both sides of the
     * bidirectional association in sync.
     *
     * @param review the review to attach
     */
    public void addReview(BookReview review) {
        reviews.add(review);
        review.setBook(this);
    }

    /**
     * Detaches the given review from this book, clearing the owning side
     * first so orphan removal can take effect.
     *
     * @param review the review to detach
     */
    public void removeReview(BookReview review) {
        review.setBook(null);
        reviews.remove(review);
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getIsbn() {
        return isbn;
    }

    public void setIsbn(String isbn) {
        this.isbn = isbn;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public List<BookReview> getReviews() {
        return reviews;
    }

    public void setReviews(List<BookReview> reviews) {
        this.reviews = reviews;
    }
}
|
Lube-Project/ProgettoLube
|
ProgettoLube/frontend/dashboard/node_modules/rsuite/es/Picker/PickerToggleTrigger.js
|
<gh_stars>1-10
import _extends from "@babel/runtime/helpers/esm/extends";
import _objectWithoutPropertiesLoose from "@babel/runtime/helpers/esm/objectWithoutPropertiesLoose";
import _pick from "lodash/pick";
import React from 'react';
import OverlayTrigger from '../Overlay/OverlayTrigger';
import { placementPolyfill } from '../utils';
import IntlContext from '../IntlProvider/IntlContext';
// Names of the props that are picked off `pickerProps` and forwarded to the
// underlying OverlayTrigger instance.
var PickerToggleTriggerProps = ['onEntered', 'onExited', 'open', 'defaultOpen', 'disabled', 'onEnter', 'onEntering', 'onExit', 'onExiting', 'onHide', 'container', 'containerPadding', 'preventOverflow', 'positionRef'];
// Shared wrapper around OverlayTrigger used by picker components. It defaults
// the trigger event to 'click', resolves the configured placement against the
// current text direction (rtl flag from IntlContext), and forwards the
// whitelisted props above plus any remaining props to OverlayTrigger.
// (Babel-compiled output — kept byte-identical, comments only.)
var PickerToggleTrigger = React.forwardRef(function (props, ref) {
  var pickerProps = props.pickerProps,
      speaker = props.speaker,
      _props$trigger = props.trigger,
      trigger = _props$trigger === void 0 ? 'click' : _props$trigger,
      open = props.open,
      rest = _objectWithoutPropertiesLoose(props, ["pickerProps", "speaker", "trigger", "open"]);
  var placement = pickerProps.placement;
  return React.createElement(IntlContext.Consumer, null, function (context) {
    return React.createElement(OverlayTrigger, _extends({
      trigger: trigger,
      ref: ref,
      open: open,
      placement: placementPolyfill(placement, context === null || context === void 0 ? void 0 : context.rtl),
      speaker: speaker
    }, _pick(pickerProps, PickerToggleTriggerProps), rest));
  });
});
PickerToggleTrigger.displayName = 'PickerToggleTrigger';
export default PickerToggleTrigger;
|
Insouciant21/solution
|
Codeforces/CF585B.cpp
|
/*
Problem: CF585B
Time: 2020/10/01 14:01:13
Author: Insouciant21
Status: Accepted
*/
#include <bits/stdc++.h>
using namespace std;
// A position in the tunnel: x = row (1..3), y = column (1..n).
struct Node {
    int x, y;
};
// Row shifts the hero may take after his forced step right: stay, down, up.
int d[3] = {0, 1, -1};
bool mp[4][110];   // mp[row][col] is true where a train car ('A'..'Z') occupies the cell
bool vis[4][110];  // visited marker for BFS states
int n, k;          // n = tunnel length; k = number of trains (read but not used by the search)
int t;             // number of test cases
Node st;           // hero's starting cell (the 's' in the input)
// Breadth-first search over hero positions. Each expansion models one turn:
// the hero first steps one column right, may then shift a row, after which
// the trains advance two cells towards him — encoded here as the hero moving
// two extra columns right through cells that must be empty.
void bfs() {
    queue<Node> q;
    q.push(st);
    vis[st.x][st.y] = true;
    while (!q.empty()) {
        Node prs = q.front();
        q.pop();
        // Reaching (or passing) the last column means the hero escaped.
        if (prs.y >= n) {
            puts("YES");
            return;
        }
        // The forced step right must not start from or run into a train cell.
        if (mp[prs.x][prs.y + 1] || mp[prs.x][prs.y]) continue;
        for (int i : d) {
            Node ftr = prs;
            ftr.x += i, ftr.y++;
            if (ftr.x < 1 || ftr.x > 3) continue;  // row shift must stay within the 3 rows
            if (mp[ftr.x][ftr.y]) continue;
            ftr.y++;  // first cell swept by the trains this turn
            if (mp[ftr.x][ftr.y]) continue;
            ftr.y++;  // second cell swept by the trains this turn
            if (mp[ftr.x][ftr.y]) continue;
            if (vis[ftr.x][ftr.y]) continue;
            vis[ftr.x][ftr.y] = true;
            q.push(ftr);
        }
    }
    // Queue exhausted without reaching the end: every path is blocked.
    puts("NO");
}
int main() {
    // Reads t test cases; each is a 3-row tunnel of width n where 's' marks
    // the hero's start and uppercase letters mark train cars, then runs the
    // BFS and prints YES/NO.
    scanf("%d", &t);
    while (t--) {
        memset(vis, 0, sizeof(vis));
        memset(mp, 0, sizeof(mp));
        scanf("%d%d", &n, &k);
        for (int i = 1; i <= 3; i++) {
            // The row is stored 1-based: scanf writes n characters into
            // x[1..n] plus a terminator at x[n + 1], so the buffer needs
            // n + 2 bytes. (The original `char x[n + 1]` overflowed by one
            // byte when writing the NUL terminator.)
            char x[n + 2];
            scanf("%s", x + 1);
            for (int j = 1; j <= n; j++) {
                if (x[j] == 's') {
                    mp[i][j] = false;
                    st.x = i, st.y = j;
                }
                if (x[j] >= 'A' && x[j] <= 'Z') mp[i][j] = true;
            }
        }
        bfs();
    }
    return 0;
}
|
duchoangitt/SmsApp
|
app/src/main/java/com/android/messaging/ui/conversationlist/ArchivedConversationListActivity.java
|
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.messaging.ui.conversationlist;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import androidx.appcompat.app.ActionBar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.android.messaging.R;
import com.android.messaging.util.DebugUtils;
/**
 * Activity that displays the list of archived conversations. It reuses the
 * shared conversation-list UI with an archive-specific fragment, a darker
 * action bar and its own options menu.
 */
public class ArchivedConversationListActivity extends AbstractConversationListActivity {

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Host the archived conversation list as the activity's only content.
        final ConversationListFragment fragment =
                ConversationListFragment.createArchivedConversationListFragment();
        getFragmentManager().beginTransaction().add(android.R.id.content, fragment).commit();
        invalidateActionBar();
    }

    // Fix: this method overrides AbstractConversationListActivity.updateActionBar
    // (note the super call below) but was missing its @Override annotation.
    @Override
    protected void updateActionBar(ActionBar actionBar) {
        actionBar.setTitle(getString(R.string.archived_activity_title));
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setDisplayHomeAsUpEnabled(true);
        // A darker action bar visually distinguishes the archive from the
        // main conversation list.
        actionBar.setBackgroundDrawable(new ColorDrawable(
                getResources().getColor(
                        R.color.archived_conversation_action_bar_background_color_dark)));
        actionBar.show();
        super.updateActionBar(actionBar);
    }

    @Override
    public void onBackPressed() {
        // Back first leaves multi-select mode; only then does it close the screen.
        if (isInConversationListSelectMode()) {
            exitMultiSelectState();
        } else {
            super.onBackPressed();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (super.onCreateOptionsMenu(menu)) {
            return true;
        }
        getMenuInflater().inflate(R.menu.archived_conversation_list_menu, menu);
        // The debug entry is only shown/enabled when debugging is turned on.
        final MenuItem item = menu.findItem(R.id.action_debug_options);
        if (item != null) {
            final boolean enableDebugItems = DebugUtils.isDebugEnabled();
            item.setVisible(enableDebugItems).setEnabled(enableDebugItems);
        }
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem menuItem) {
        switch (menuItem.getItemId()) {
            case R.id.action_debug_options:
                onActionBarDebug();
                return true;
            case android.R.id.home:
                onActionBarHome();
                return true;
            default:
                return super.onOptionsItemSelected(menuItem);
        }
    }

    @Override
    public void onActionBarHome() {
        onBackPressed();
    }

    @Override
    public boolean isSwipeAnimatable() {
        // Rows in the archived list are not swipe-animatable.
        return false;
    }
}
|
woshiluo/oi
|
isij_train/d1/C/c_bf.cpp
|
#include <ctime>
#include <cstdio>
#include <cstdlib>
// Absolute value for int, kept local to avoid pulling in <cstdlib>.
inline int aabs( int a ) {
    if ( a < 0 ) {
        return -a;
    }
    return a;
}
const int N = 3e6 + 1e2;   // array capacity: max n plus slack
int n;                      // length of the input sequence
int a[N], b[N];             // a = candidate sequence, b = target sequence from input
bool vis[N];                // anchors already tried, so restarts do not repeat
long long ans = (long long)( 1LL << 61LL );  // best (minimal) cost found so far
// Randomized-restart brute force: repeatedly pick an unused anchor x, pin
// a[x] = b[x] and a[x+2] = b[x+2], derive the rest of the sequence from the
// relation a[j+1] = a[j] + a[j+2] (each middle element is the sum of its
// neighbors), and keep the minimal total deviation sum |b[i] - a[i]|.
int main() {
#ifdef woshiluo
    freopen( "c.in", "r", stdin );
    freopen( "c.out", "w", stdout );
#endif
    srand( time(0) );
    scanf( "%d", &n );
    for( int i = 1; i <= n; i ++ ) {
        scanf( "%d", &b[i] );
    }
    // Budget the restart count so total work stays near 7e7 element visits.
    int k = (int)(7e7) / n;
    if( k > n )
        k = n - 2;
    for( int i = 1; i <= k; i ++ ) {
        // NOTE(review): this inner loop variable shadows the outer `i`.
        for( int i = 1; i <= n; i ++ ) {
            a[i] = -(1e9) + 7;
        }
        // Draw a fresh anchor x with x+2 in range that has not been tried yet.
        int x = ( rand() % n ) + 1;
        int y = x + 2;
        long long res = 0;
        while( y > n || vis[x] ) {
            x = ( rand() % n ) + 1;
            y = x + 2;
        }
        vis[x] = true;
        a[x] = b[x];
        a[y] = b[y];
        a[ x + 1 ] = a[x] + a[y];
        res += aabs( b[ x + 1 ] - a[ x + 1 ] );
        // Propagate the recurrence leftwards from the anchor, accumulating cost.
        int cur = x - 1;
        while( cur >= 1 ) {
            a[cur] = a[ cur + 1 ] - a[ cur + 2 ];
            res += aabs( b[cur] - a[cur] );
            cur --;
        }
        // ...and rightwards past the anchor window.
        cur = x + 3;
        while( cur <= n ) {
            a[cur] = a[ cur - 1 ] - a[ cur - 2 ];
            res += aabs( b[cur] - a[cur] );
            cur ++;
        }
        if( res <= ans ) {
            ans = res;
        }
    }
    printf( "%lld\n", ans );
}
|
mekcone/mekcone-studio
|
excode-generator/src/main/java/com/vancone/excode/generator/config/property/SystemConfig.java
|
package com.vancone.excode.generator.config.property;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
 * Type-safe holder for application settings bound from properties under the
 * {@code system} prefix.
 *
 * @author <NAME>
 * @date 10/1/2020
 */
@Data
@Component
@ConfigurationProperties(prefix = "system")
public class SystemConfig {
    // URL patterns exempt from security checks — presumably consumed by the
    // security filter chain; confirm against the security configuration.
    private List<String> securityIgnoreUrls = new ArrayList<>();
    // Key pair used for password handling — assumed asymmetric encryption/decryption;
    // verify against the code that consumes it.
    private PasswordKeyConfig pwdKey;
    // Nested binding for the `system.pwd-key.*` properties.
    @Data
    public static class PasswordKeyConfig {
        private String publicKey;
        private String privateKey;
    }
}
|
peakhigh/tweb
|
templates/dashboard/pages/manageRequests/manageRequests.js
|
<gh_stars>1-10
// Page script for the "manage requests" dashboard view: renders the truck
// request grid with sorting/filtering and wires the per-row approve/reject
// actions to the menu navigation helper.
console.log('template data', UTILS.getCurrentTemplateData());
$(document).ready(function () {
    var moduleData = UTILS.getCurrentTemplateData();
    // Grid over trip requests; row option buttons appear on hover and get
    // their click handlers bound after each row renders.
    var grid = new GRID_HELPER.GRID('.truck-requests-content', {
        gridData: moduleData,
        gridId: 'gridTruckRequests',
        rowConfig: {
            template: 'grid-row-template-details',
            optionsTemplate: 'grid-row-options-template',
            optionsEvent: 'mouseover',
            optionsPostRender: function(rowElement, record) {
                $(rowElement).find('.approve-trip').click(function() {
                    var options = {
                        tripDetails: record,
                        extraHref: record._id
                    };
                    MENU_HELPER.menuClick('acceptTripReq', 'manageRequests', options);
                });
                $(rowElement).find('.reject-trip').click(function() {
                    MENU_HELPER.menuClick('rejectTripReq', 'manageRequests', {extraHref: record._id});
                });
            }
        },
        sortConfig: {
            multiple: false,
            options: [
                {
                    title: 'Pickup Date',
                    key: 'pickup.date',
                    selected: true,
                    order: 'desc'
                },
                {
                    title: 'Drop Date',
                    key: 'drop.date'
                },
                {
                    title: 'Material Value',
                    key: 'pickup.material.approximateCost'
                }
            ]
        },
        //drawFilters: true,
        filterConfig: {
            type: 'default',//hybrid/
            order: [],
            formOptions: {
                // Two-column filter layout: each date range is split across columns.
                bindings: {
                    'pickup.date_start': "column-1",
                    'pickup.date_end': "column-2",
                    'drop.date_start': "column-1",
                    'drop.date_end': "column-2",
                    status: "column-1"
                },
                schemaOverride: {
                    fields: {
                        'pickup.date': {
                            title: 'Pickup Date',
                            format: 'date',
                            rangeField: true
                        },
                        'drop.date': {
                            title: 'Drop Date',
                            format: 'date',
                            rangeField: true
                        },
                        'status': {
                            default: 'Running'
                            // default: 'None'
                        }
                    }
                },
                // Display order of the filter fields in the form.
                optionsOverride: {
                    fields: {
                        'pickup.date_start': {
                            order: 1
                        },
                        'pickup.date_end': {
                            order: 2
                        },
                        'drop.date_start': {
                            order: 3
                        },
                        'drop.date_end': {
                            order: 4
                        },
                        status: {
                            order: 5
                        }
                    }
                }
            }
        }
    });
    // NOTE(review): assigned without var/let/const, so this becomes an implicit
    // global — presumably intentional so markup/handlers outside this closure
    // can call it; confirm before tightening.
    statusChanged = function(newStatus){
        var options = {
            data : {
                status: newStatus
            }
        };
        if(newStatus === 'All'){
            options.data = {};
        }
        MENU_HELPER.menuClick('manageRequests',null,options);
    }
    // Maps a trip's status to the row action buttons rendered for it
    // (only pending trips get approve/reject).
    Handlebars.registerHelper('getGridOptions', function(status,id) {
        var options = [];
        var loggedInUser = API_HELPER.getLoggedInUser();
        switch (status){
            case 'Pending':
                options = [{'option':'ApproveTrip','_id':id,'id':'approve-trip'},
                    {'option':'RejectTrip','_id':id,'id':'reject-trip'}];
                break;
            case 'Approved':
                options = [];
                break;
            case 'Rejected':
                options = [];
                break;
        }
        return options;
    });
});
|
manchik85/Contact-Center
|
resources/js/admin/edit_form.js
|
import $ from "jquery";
import axios from "axios";
window.Vue = require('vue');
// import VueTheMask from 'vue-the-mask';
// Vue.use(VueTheMask);
// import Inputmask from 'inputmask';
// Vue.use(Inputmask);
/**
* Next, we will create a fresh Vue application instance and attach it to
* the page. Then, you may begin adding components to this application
* or customize the JavaScript scaffolding to fit your unique needs.
*/
// Register the admin-area form components globally so the server-rendered
// markup inside #app can reference them by tag name.
Vue.component('add-gov-role-form', require('../components/admin/AddGovRoleComponent.vue').default);
Vue.component('add-gov', require('../components/admin/AddGovComponent.vue').default);
Vue.component('add-form', require('../components/admin/AddFormComponent.vue').default);
Vue.component('add-name', require('../components/admin/AddNameComponent.vue').default);
Vue.component('prior-days', require('../components/admin/PriorDaysComponent.vue').default);
// Root Vue instance hosting the components above.
const app = new Vue({
    el: '#app'
});
const EditForm = {
    /**
     * Wires the delete flows for all four entity types. Each flow is
     * identical in shape: clicking a row's delete control stashes the record
     * id in a hidden input, and the confirmation modal's button reads it
     * back, calls the API and removes the row from the DOM.
     */
    init: function () {
        EditForm.bindDeleteFlow('.delete_gov', 'id-gov', '#gov_delete_id',
            '.del_gov_confirm', EditForm.deleteGov, 'gov_');
        EditForm.bindDeleteFlow('.delete_form', 'id-form', '#form_delete_id',
            '.del_form_confirm', EditForm.deleteForm, 'form_');
        EditForm.bindDeleteFlow('.delete_gov_role_form', 'id-form', '#gov_group_delete_id',
            '.del_gov_group_confirm', EditForm.deleteFormRole, 'role_');
        EditForm.bindDeleteFlow('.name_group_del', 'id-name', '#name_group_del_id',
            '.name_group_del_confirm', EditForm.deleteFormName, 'name_');
    },
    /**
     * Binds one delete flow.
     *
     * @param {string}   openSelector    control on the row that opens the modal
     * @param {string}   idAttr          attribute on that control holding the record id
     * @param {string}   hiddenField     hidden input carrying the id into the modal
     * @param {string}   confirmSelector modal button confirming the deletion
     * @param {Function} deleteFn        API call performing the deletion
     * @param {string}   rowPrefix       DOM id prefix of the row element to remove
     */
    bindDeleteFlow: function (openSelector, idAttr, hiddenField, confirmSelector, deleteFn, rowPrefix) {
        $(openSelector).on('click', function () {
            $(hiddenField).val($(this).attr(idAttr));
        });
        $(confirmSelector).on('click', function () {
            const id = $(hiddenField).val();
            deleteFn(id);
            // Dismiss the modal (its close button carries .btn-secondary) and
            // drop the deleted row without a page reload.
            $('.btn-secondary').click();
            $('#' + rowPrefix + id).remove();
        });
    },
    // Fire-and-forget POST; failures are only logged, matching the previous behavior.
    postDelete: function (apiUrl, dataObj) {
        axios.post(apiUrl, dataObj).catch(function (error) {
            console.log(error);
        });
    },
    deleteGov: function (idUser) {
        EditForm.postDelete('/gov_del', {id: idUser});
    },
    deleteForm: function (idUser) {
        EditForm.postDelete('/form_del', {id: idUser});
    },
    deleteFormRole: function (idUser) {
        // This endpoint expects the id under `gov_group_id`, not `id`.
        EditForm.postDelete('/gov_group_del', {gov_group_id: idUser});
    },
    deleteFormName: function (idUser) {
        EditForm.postDelete('/name_group_del', {id: idUser});
    },
};
$(document).ready(function () {
    EditForm.init();
});
|
gaoht/house
|
java/classes/com/alipay/android/a/a/a/x.java
|
package com.alipay.android.a.a.a;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Thread factory for HTTP worker threads: gives each thread a sequentially
 * numbered name and priority 4 (one below {@link Thread#NORM_PRIORITY}).
 *
 * <p>Decompiled source originally reassigned its {@code Runnable} parameter
 * to hold the new thread; a dedicated local is used instead for clarity.
 */
final class x
  implements ThreadFactory
{
  /** Monotonically increasing suffix for worker thread names, starting at 1. */
  private final AtomicInteger a = new AtomicInteger(1);

  public final Thread newThread(Runnable paramRunnable)
  {
    Thread worker = new Thread(paramRunnable,
        "com.alipay.mobile.common.transport.http.HttpManager.HttpWorker #" + this.a.getAndIncrement());
    worker.setPriority(4);
    return worker;
  }
}
/* Location: /Users/gaoht/Downloads/zirom/classes-dex2jar.jar!/com/alipay/android/a/a/a/x.class
* Java compiler version: 6 (50.0)
* JD-Core Version: 0.7.1
*/
|
Meowtimer/C4DT
|
main/src/net/arctics/clonk/parser/CStyleScanner.java
|
package net.arctics.clonk.parser;
import static net.arctics.clonk.util.Utilities.block;
import java.util.ArrayList;
import java.util.List;
import net.arctics.clonk.c4script.ast.Comment;
/**
 * Scanner that skips C-style comments (line {@code //} and block comments)
 * while eating whitespace, and can alternatively collect them as
 * {@link Comment} nodes. An extra {@code /} or {@code *} directly after the
 * comment opener marks the comment as javadoc-style.
 * @author madeen
 *
 */
public class CStyleScanner extends BufferedScanner {
	public CStyleScanner(final Object source) { super(source); }
	/**
	 * Attempts to parse a comment at the current position. On success the
	 * parsed {@link Comment} is returned; otherwise the read position is
	 * restored to where it was and {@code null} is returned.
	 */
	protected Comment parseComment() {
		final int start = this.offset;
		// Two-character lookahead decides between '//', '/*' and "not a comment".
		final int a = this.read();
		final int b = this.read();
		if (a == -1 || b == -1) {
			// Hit EOF inside the lookahead — rewind and report no comment.
			this.seek(start);
			return null;
		} else if (a == '/' && b == '/') {
			// Line comment; a third '/' flags it as javadoc-style ('///').
			final boolean javadoc = read() == '/' ? true : block(() -> { unread(); return false; });
			final String commentText = this.readStringUntil(BufferedScanner.NEWLINE_CHARS);
			return new Comment(commentText, false, javadoc);
		} else if (a == '/' && b == '*') {
			// Block comment; a second '*' flags it as javadoc-style ('/**').
			final boolean javadoc = read() == '*' ? true : block(() -> { unread(); return false; });
			final int startMultiline = this.offset;
			while (!this.reachedEOF()) {
				if (this.read() == '*') {
					if (this.read() == '/') {
						// Exclude the two closing characters from the comment text.
						final String commentText = this.readStringAt(startMultiline, this.offset-2);
						return new Comment(commentText, true, javadoc);
					} else {
						this.unread();
					}
				}
			}
			// Unterminated block comment: everything up to EOF is its text.
			final String commentText = this.readStringAt(startMultiline, this.offset);
			return new Comment(commentText, true, javadoc);
		} else {
			// Not a comment opener — rewind so the caller sees the original position.
			this.seek(start);
			return null;
		}
	}
	/**
	 * Consumes whitespace and any comments interleaved with it.
	 * @return the number of characters consumed
	 */
	@Override
	public int eatWhitespace() {
		final int pos = offset;
		while (super.eatWhitespace() > 0 || parseComment() != null) {
			;
		}
		return offset-pos;
	}
	/**
	 * Collects consecutive comments (skipping whitespace between them),
	 * marking each one as a prependix. Returns {@code null} when there is no
	 * comment at the current position.
	 */
	public List<Comment> collectComments() {
		List<Comment> result = null;
		while (true) {
			if (super.eatWhitespace() > 0) {
				continue;
			}
			final Comment c = parseComment();
			if (c != null) {
				c.setPrependix(true);
				if (result == null) {
					// Lazily allocated so the common no-comment case allocates nothing.
					result = new ArrayList<Comment>(3);
				}
				result.add(c);
				continue;
			}
			break;
		}
		return result;
	}
}
|
qfys521/XiaoMingBot
|
src/main/java/cn/chuanwise/xiaoming/recept/ReceptionistManagerImpl.java
|
package cn.chuanwise.xiaoming.recept;
import cn.chuanwise.common.sized.SizedResidentConcurrentHashMap;
import cn.chuanwise.common.util.Maps;
import cn.chuanwise.xiaoming.annotation.EventListener;
import cn.chuanwise.xiaoming.bot.XiaoMingBot;
import cn.chuanwise.xiaoming.contact.message.Message;
import cn.chuanwise.xiaoming.contact.message.MessageImpl;
import cn.chuanwise.xiaoming.event.Listeners;
import cn.chuanwise.xiaoming.event.MessageEvent;
import cn.chuanwise.xiaoming.object.ModuleObjectImpl;
import cn.chuanwise.xiaoming.user.*;
import cn.chuanwise.xiaoming.user.MemberXiaoMingUser;
import cn.chuanwise.xiaoming.user.PrivateXiaoMingUser;
import lombok.Getter;
import net.mamoe.mirai.contact.*;
import net.mamoe.mirai.event.events.FriendMessageEvent;
import net.mamoe.mirai.event.events.GroupMessageEvent;
import net.mamoe.mirai.event.events.GroupTempMessageEvent;
import net.mamoe.mirai.message.code.MiraiCode;
import net.mamoe.mirai.message.data.OnlineMessageSource;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
/**
 * Default {@link ReceptionistManager} implementation. Routes incoming Mirai
 * message events (group, friend and group-temp) to per-account
 * {@link Receptionist}s and re-dispatches them as XiaoMing
 * {@link MessageEvent}s; also launches a reception task per message.
 *
 * @author Chuanwise
 */
@Getter
public class ReceptionistManagerImpl
        extends ModuleObjectImpl
        implements ReceptionistManager, Listeners {
    public ReceptionistManagerImpl(XiaoMingBot xiaoMingBot) {
        super(xiaoMingBot);
        // Bounded, resident map so the receptionist cache cannot grow unboundedly.
        this.receptionists = new SizedResidentConcurrentHashMap<>(xiaoMingBot.getConfiguration().getMaxReceptionistQuantity());
    }
    /** Per-account receptionist registry, keyed by QQ account code. */
    final Map<Long, Receptionist> receptionists;
    /**
     * Returns the receptionist for the given account, creating and caching
     * one on first access.
     */
    @Override
    public Receptionist getReceptionist(long code) {
        return Maps.getOrPutSupply(receptionists, code, () -> new ReceptionistImpl(getXiaoMingBot(), code));
    }
    /** Converts a Mirai group message into a XiaoMing {@link MessageEvent}. */
    @Override
    @EventListener
    public void onGroupMessageEvent(GroupMessageEvent event) {
        final Group group = event.getGroup();
        final Member member = event.getSender();
        final long accountCode = member.getId();
        final Receptionist receptionist = getReceptionist(accountCode);
        final long groupCode = group.getId();
        final GroupXiaoMingUser user = receptionist.getGroupXiaoMingUser(groupCode).orElseThrow(NoSuchElementException::new);
        final OnlineMessageSource.Incoming.FromGroup source = event.getSource();
        // Mirai reports seconds; XiaoMing timestamps are milliseconds.
        final Message message = new MessageImpl(xiaoMingBot,
                event.getMessage(),
                source.getIds(),
                source.getInternalIds(),
                ((long) event.getTime()) * 1000);
        xiaoMingBot.getEventManager().callEventAsync(new MessageEvent(user, message));
        xiaoMingBot.getStatistician().increaseCallNumber();
    }
    /** Converts a Mirai friend (private) message into a XiaoMing {@link MessageEvent}. */
    @Override
    @EventListener
    public void onPrivateMessageEvent(FriendMessageEvent event) {
        final Friend friend = event.getFriend();
        final long accountCode = friend.getId();
        final Receptionist receptionist = getReceptionist(accountCode);
        final PrivateXiaoMingUser user = receptionist.getPrivateXiaoMingUser().orElseThrow(NoSuchElementException::new);
        final OnlineMessageSource.Incoming.FromFriend source = event.getSource();
        final Message message = new MessageImpl(xiaoMingBot,
                event.getMessage(),
                source.getIds(),
                source.getInternalIds(),
                ((long) event.getTime()) * 1000);
        xiaoMingBot.getEventManager().callEventAsync(new MessageEvent(user, message));
        xiaoMingBot.getStatistician().increaseCallNumber();
    }
    /** Converts a Mirai group-temp message into a XiaoMing {@link MessageEvent}. */
    @Override
    @EventListener
    public void onMemberMessageEvent(GroupTempMessageEvent event) {
        final Group group = event.getGroup();
        final NormalMember member = event.getSender();
        final long accountCode = member.getId();
        final Receptionist receptionist = getReceptionist(accountCode);
        final long groupCode = group.getId();
        final MemberXiaoMingUser user = receptionist.getMemberXiaoMingUser(groupCode).orElseThrow(NoSuchElementException::new);
        final OnlineMessageSource.Incoming.FromTemp source = event.getSource();
        final Message message = new MessageImpl(xiaoMingBot,
                event.getMessage(),
                source.getIds(),
                source.getInternalIds(),
                ((long) event.getTime()) * 1000);
        xiaoMingBot.getEventManager().callEventAsync(new MessageEvent(user, message));
        xiaoMingBot.getStatistician().increaseCallNumber();
    }
    /**
     * Handles a dispatched {@link MessageEvent}: optionally trims the message,
     * wakes threads awaiting it, filters disabled interactor kinds, and
     * starts a reception task unless the user is mid-interaction.
     */
    @EventListener
    public void onMessageEvent(MessageEvent messageEvent) {
        final Message message = messageEvent.getMessage();
        final XiaoMingUser user = messageEvent.getUser();
        if (xiaoMingBot.getConfiguration().isTrimMessage()) {
            // Re-deserialize only when trimming actually changed the content.
            final String beforeTrim = message.serialize();
            final String afterTrim = beforeTrim.trim();
            if (!Objects.equals(beforeTrim, afterTrim)) {
                message.setMessageChain(MiraiCode.deserializeMiraiCode(afterTrim));
            }
        }
        // Wake up any thread currently waiting for this very message.
        xiaoMingBot.getContactManager().onNextMessageEvent(messageEvent);
        if (user instanceof ConsoleXiaoMingUser) {
            return;
        }
        // Skip reception when the corresponding interactor kind is disabled in config.
        final boolean privateInteractorsDisabled = user instanceof PrivateXiaoMingUser && !xiaoMingBot.getConfiguration().isEnablePrivateInteractors();
        final boolean memberInteractorsDisabled = user instanceof MemberXiaoMingUser && !xiaoMingBot.getConfiguration().isEnableMemberInteractors();
        final boolean groupInteractorsDisabled = user instanceof GroupXiaoMingUser && !xiaoMingBot.getConfiguration().isEnableGroupInteractors();
        if (privateInteractorsDisabled || memberInteractorsDisabled || groupInteractorsDisabled) {
            return;
        }
        if (Objects.nonNull(user.getInteractorContext())) {
            xiaoMingBot.getStatistician().increaseEffectiveCallNumber();
            // User already has an interaction context; do not start a new reception task.
            getLogger().info(user.getCompleteName() + "已有交互上下文,不再启动新的接待任务");
            return;
        }
        xiaoMingBot.getScheduler().run(new ReceptionTaskImpl<>(user, message));
    }
}
|
Shtrikh17/Multidimensional-Blockchain-MVP
|
mbc/src/main/java/ru/mbc/ledger/core/error/TxFromAddressError.java
|
<filename>mbc/src/main/java/ru/mbc/ledger/core/error/TxFromAddressError.java
package ru.mbc.ledger.core.error;
/**
 * Presumably raised when a transaction's source ("from") address is invalid —
 * confirm against the throwing sites.
 *
 * <p>NOTE(review): this extends {@link Error}, which the JDK reserves for
 * unrecoverable conditions; an application failure would conventionally
 * extend {@code Exception} or {@code RuntimeException}. Changing the
 * superclass would affect existing catch sites, so it is only flagged here.
 */
public class TxFromAddressError extends Error{
    /**
     * @param errorMessage human-readable description of the failure
     */
    public TxFromAddressError(String errorMessage) {
        super(errorMessage);
    }
}
|
limasigor2/heimdall
|
heimdall-gateway/src/main/java/br/com/conductor/heimdall/gateway/router/EnvironmentInfoRepository.java
|
<reponame>limasigor2/heimdall<gh_stars>100-1000
/*-
* =========================LICENSE_START==================================
* heimdall-gateway
* ========================================================================
* Copyright (C) 2018 Conductor Tecnologia SA
* ========================================================================
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ==========================LICENSE_END===================================
*/
package br.com.conductor.heimdall.gateway.router;
import br.com.conductor.heimdall.core.util.ConstantsCache;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Read-only lookup of the active {@link EnvironmentInfo} (and its variables)
 * for a given API and inbound URL. Results are cached per (apiId, inboundURL).
 */
@Repository
public class EnvironmentInfoRepository {

    private final JdbcTemplate jdbcTemplate;

    public EnvironmentInfoRepository(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    /**
     * Finds the environment linked to the given API whose inbound URL contains
     * the supplied fragment, loading its variables map as well.
     *
     * @param apiId      id of the API
     * @param inboundURL inbound URL fragment to match (LIKE %fragment%)
     * @return the matching environment with variables, or {@code null} if none
     */
    @Cacheable(cacheNames = ConstantsCache.ENVIRONMENT_ACTIVE_CACHE, key = "#apiId.toString() + ':' + #inboundURL")
    public EnvironmentInfo findByApiIdAndEnvironmentInboundURL(Long apiId, String inboundURL) {
        // Security fix: bind values instead of concatenating them into the SQL.
        // inboundURL is caller-supplied, and string-built SQL is injectable.
        String sql = "SELECT EN.ID, EN.OUTBOUND_URL as OUTBOUNDURL FROM ENVIRONMENTS AS EN "
                + "INNER JOIN APIS_ENVIRONMENTS AE ON EN.ID = AE.ENVIRONMENT_ID "
                + "WHERE AE.API_ID = ? AND EN.INBOUND_URL LIKE ?";

        final List<EnvironmentInfo> environmentInfos = jdbcTemplate.query(sql, (resultSet, ignored) -> {
            EnvironmentInfo env = new EnvironmentInfo();
            env.setId(resultSet.getLong("id"));
            env.setOutboundURL(resultSet.getString("outboundURL"));
            return env;
        }, apiId, "%" + inboundURL + "%");

        if (environmentInfos.isEmpty()) {
            return null;
        }
        // Multiple rows may match; keep the original first-match behavior.
        EnvironmentInfo environment = environmentInfos.get(0);

        String getVariables = "select \"key\", value from variables where environment_id = ?";
        Map<String, String> variables = jdbcTemplate.query(getVariables, resultSetExtractor -> {
            Map<String, String> result = new HashMap<>();
            while (resultSetExtractor.next()) {
                result.put(
                        resultSetExtractor.getString("key"),
                        resultSetExtractor.getString("value")
                );
            }
            return result;
        }, environment.getId());
        environment.setVariables(variables);
        return environment;
    }
}
|
Masterlupo/EvilCraft
|
src/main/java/org/cyclops/evilcraft/item/ItemRedstoneGrenade.java
|
<reponame>Masterlupo/EvilCraft
package org.cyclops.evilcraft.item;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.projectile.ThrowableEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import org.cyclops.evilcraft.entity.item.EntityRedstoneGrenade;
/**
 * Grenade that will trigger a redstone signal.
 * @author immortaleeb
 *
 */
public class ItemRedstoneGrenade extends ItemAbstractGrenade {
    /**
     * @param properties item properties forwarded to the base grenade item
     */
    public ItemRedstoneGrenade(Properties properties) {
        super(properties);
    }
    /**
     * Creates the thrown entity spawned when this grenade is used.
     *
     * @param itemStack the stack being thrown (not used here)
     * @param world the world to spawn the entity in
     * @param player the throwing player
     * @return a new {@link EntityRedstoneGrenade} owned by the player
     */
    @Override
    protected ThrowableEntity getThrowableEntity(ItemStack itemStack, World world, PlayerEntity player) {
        return new EntityRedstoneGrenade(world, player);
    }
}
|
luzpaz/PyFlow
|
PyFlow/Packages/PyFlowBase/Nodes/forLoopBegin.py
|
## Copyright 2015-2019 <NAME>, <NAME>
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from PyFlow.Core import NodeBase
from PyFlow.Core.PathsRegistry import PathsRegistry
from PyFlow.Core.NodeBase import NodePinsSuggestionsHelper
from PyFlow.Core.Common import *
from PyFlow.Packages.PyFlowBase.Nodes import FLOW_CONTROL_ORANGE
import threading
class forLoopBegin(NodeBase):
    """Begin block of a paired for-loop construct (experimental).

    Works together with a separate "loop end" node referenced by path through
    the ``Paired block`` pin; iteration state (``currentIndex``) is shared
    between the two nodes rather than advanced locally.
    """

    def __init__(self, name):
        super(forLoopBegin, self).__init__(name)
        self._working = False   # True while the worker thread is iterating
        self.currentIndex = 0   # index of the iteration currently being run
        self.prevIndex = -1     # last index already dispatched to the loop body
        self.inExec = self.createInputPin('inExec', 'ExecPin', None, self.compute)
        # NOTE(review): the 'Start' pin is created but never read anywhere in
        # this class — iteration implicitly starts at 0. Confirm whether Start
        # should seed currentIndex.
        self.firstIndex = self.createInputPin('Start', 'IntPin')
        self.lastIndex = self.createInputPin('Stop', 'IntPin')
        self.loopEndNode = self.createInputPin('Paired block', 'StringPin')
        self.loopEndNode.setInputWidgetVariant("ObjectPathWIdget")
        self.loopBody = self.createOutputPin('LoopBody', 'ExecPin')
        self.index = self.createOutputPin('Index', 'IntPin')
        self.headerColor = FLOW_CONTROL_ORANGE
        self.setExperimental()

    @staticmethod
    def pinTypeHints():
        # Pin type metadata used by the editor's suggestion system.
        helper = NodePinsSuggestionsHelper()
        helper.addInputDataType('ExecPin')
        helper.addInputDataType('IntPin')
        helper.addOutputDataType('ExecPin')
        helper.addOutputDataType('IntPin')
        helper.addInputStruct(StructureType.Single)
        helper.addOutputStruct(StructureType.Single)
        return helper

    @staticmethod
    def category():
        return 'FlowControl'

    @staticmethod
    def keywords():
        return ['iter']

    @staticmethod
    def description():
        return 'For loop begin block'

    def reset(self):
        # Rewind iteration state; _working is deliberately left untouched here.
        self.currentIndex = 0
        self.prevIndex = -1
        #self._working = False

    def isDone(self):
        """Return True (and reset) once currentIndex reaches the Stop value."""
        indexTo = self.lastIndex.getData()
        if self.currentIndex >= indexTo:
            self.reset()
            #loopEndNode = PathsRegistry().getEntity(self.loopEndNode.getData())
            #loopEndNode.completed.call()
            self._working = False
            return True
        return False

    def onNext(self, *args, **kwargs):
        # Worker loop: fires LoopBody once per new index value.
        # NOTE(review): currentIndex is never advanced here — it is expected
        # to be incremented externally (presumably by the paired end node);
        # otherwise this loop would spin. Confirm.
        while not self.isDone():
            if self.currentIndex > self.prevIndex:
                self.index.setData(self.currentIndex)
                self.prevIndex = self.currentIndex
                self.loopBody.call()

    def compute(self, *args, **kwargs):
        """Validate the paired end node and start the iteration thread."""
        self.reset()
        endNodePath = self.loopEndNode.getData()
        loopEndNode = PathsRegistry().getEntity(endNodePath)
        if loopEndNode is not None:
            if loopEndNode.loopBeginNode.getData() != self.path():
                self.setError("Invalid pair")
                return
            if self.graph() is not loopEndNode.graph():
                err = "block ends in different graphs"
                self.setError(err)
                loopEndNode.setError(err)
                return
        else:
            # NOTE(review): on a missing end node this sets an error but still
            # falls through and starts the worker thread — confirm intended.
            self.setError("{} not found".format(endNodePath))
        if not self._working:
            # NOTE(review): onNext is a bound method, so passing self in args
            # delivers a duplicate self into *args (absorbed, but likely
            # unintended).
            self.thread = threading.Thread(target=self.onNext,args=(self, args, kwargs))
            self.thread.start()
            self._working = True
        #self.onNext(*args, **kwargs)
|
alexandra-bucur/egeria-connector-ibm-information-server
|
igc-clientlibrary/src/main/java/org/odpi/egeria/connectors/ibm/igc/clientlibrary/model/base/MappingProject.java
|
/* SPDX-License-Identifier: Apache-2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.egeria.connectors.ibm.igc.clientlibrary.model.base;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonTypeName;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.odpi.egeria.connectors.ibm.igc.clientlibrary.model.common.ItemList;
/**
 * POJO for the {@code mapping_project} asset type in IGC, displayed as
 * '{@literal Mapping Project}' in the IGC UI.
 * <br><br>
 * (this code has been created based on out-of-the-box IGC metadata types.
 * If modifications are needed, eg. to handle custom attributes,
 * extending from this class in your own custom class is the best approach.)
 */
@JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.EXISTING_PROPERTY, property="_type", visible=true, defaultImpl=MappingProject.class)
@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown=true)
@JsonTypeName("mapping_project")
public class MappingProject extends InformationAsset {

    @JsonProperty("generated_jobs")
    protected ItemList<Dsjob> generatedJobs;

    @JsonProperty("mapping_components")
    protected ItemList<MappingComponent> mappingComponents;

    @JsonProperty("mapping_specifications")
    protected ItemList<MappingSpecification> mappingSpecifications;

    /**
     * Retrieve the {@code generated_jobs} property (displayed as '{@literal Generated Jobs}') of the object.
     * @return {@code ItemList<Dsjob>}
     */
    @JsonProperty("generated_jobs")
    public ItemList<Dsjob> getGeneratedJobs() {
        return generatedJobs;
    }

    /**
     * Set the {@code generated_jobs} property (displayed as {@code Generated Jobs}) of the object.
     * @param generatedJobs the value to set
     */
    @JsonProperty("generated_jobs")
    public void setGeneratedJobs(ItemList<Dsjob> generatedJobs) {
        this.generatedJobs = generatedJobs;
    }

    /**
     * Retrieve the {@code mapping_components} property (displayed as '{@literal Mapping Components}') of the object.
     * @return {@code ItemList<MappingComponent>}
     */
    @JsonProperty("mapping_components")
    public ItemList<MappingComponent> getMappingComponents() {
        return mappingComponents;
    }

    /**
     * Set the {@code mapping_components} property (displayed as {@code Mapping Components}) of the object.
     * @param mappingComponents the value to set
     */
    @JsonProperty("mapping_components")
    public void setMappingComponents(ItemList<MappingComponent> mappingComponents) {
        this.mappingComponents = mappingComponents;
    }

    /**
     * Retrieve the {@code mapping_specifications} property (displayed as '{@literal Mapping Specifications}') of the object.
     * @return {@code ItemList<MappingSpecification>}
     */
    @JsonProperty("mapping_specifications")
    public ItemList<MappingSpecification> getMappingSpecifications() {
        return mappingSpecifications;
    }

    /**
     * Set the {@code mapping_specifications} property (displayed as {@code Mapping Specifications}) of the object.
     * @param mappingSpecifications the value to set
     */
    @JsonProperty("mapping_specifications")
    public void setMappingSpecifications(ItemList<MappingSpecification> mappingSpecifications) {
        this.mappingSpecifications = mappingSpecifications;
    }
}
|
dmfr/CSipSimple-mirror
|
CSipSimple/jni/webrtc/sources/modules/interface/module_common_types.h
|
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULE_COMMON_TYPES_H
#define MODULE_COMMON_TYPES_H
#include <cstring> // memcpy
#include <assert.h>
#include "typedefs.h"
#include "common_types.h"
#ifdef _WIN32
#pragma warning(disable:4351) // remove warning "new behavior: elements of array
// 'array' will be default initialized"
#endif
namespace webrtc
{
// Parsed fields of a fixed RTP packet header (layout per RFC 3550).
struct RTPHeader
{
    bool markerBit;
    WebRtc_UWord8 payloadType;
    WebRtc_UWord16 sequenceNumber;
    WebRtc_UWord32 timestamp;
    WebRtc_UWord32 ssrc;
    WebRtc_UWord8 numCSRCs;                   // number of valid entries in arrOfCSRCs
    WebRtc_UWord32 arrOfCSRCs[kRtpCsrcSize];
    WebRtc_UWord8 paddingLength;              // trailing padding bytes, if any
    WebRtc_UWord16 headerLength;              // total header length in bytes
};

// Values carried in RTP header extensions, when present.
struct RTPHeaderExtension
{
    WebRtc_Word32 transmissionTimeOffset;
};

// Audio-specific side information for a received RTP packet.
struct RTPAudioHeader
{
    WebRtc_UWord8 numEnergy;                  // number of valid entries in arrOfEnergy
    WebRtc_UWord8 arrOfEnergy[kRtpCsrcSize];  // one energy byte (0-9) per channel
    bool isCNG;                               // is this CNG
    WebRtc_UWord8 channel;                    // number of channels 2 = stereo
};

// Sentinel values meaning "field not present" in RTPVideoHeaderVP8.
enum {kNoPictureId = -1};
enum {kNoTl0PicIdx = -1};
enum {kNoTemporalIdx = -1};
enum {kNoKeyIdx = -1};
enum {kNoSimulcastIdx = 0};

// VP8 payload descriptor fields for one RTP packet.
struct RTPVideoHeaderVP8
{
    // Reset all fields to "not present"/defaults; call before first use.
    void InitRTPVideoHeaderVP8()
    {
        nonReference = false;
        pictureId = kNoPictureId;
        tl0PicIdx = kNoTl0PicIdx;
        temporalIdx = kNoTemporalIdx;
        layerSync = false;
        keyIdx = kNoKeyIdx;
        partitionId = 0;
        beginningOfPartition = false;
        frameWidth = 0;
        frameHeight = 0;
    }
    bool nonReference;          // Frame is discardable.
    WebRtc_Word16 pictureId;    // Picture ID index, 15 bits;
                                // kNoPictureId if PictureID does not exist.
    WebRtc_Word16 tl0PicIdx;    // TL0PIC_IDX, 8 bits;
                                // kNoTl0PicIdx means no value provided.
    WebRtc_Word8 temporalIdx;   // Temporal layer index, or kNoTemporalIdx.
    bool layerSync;             // This frame is a layer sync frame.
                                // Disabled if temporalIdx == kNoTemporalIdx.
    int keyIdx;                 // 5 bits; kNoKeyIdx means not used.
    int partitionId;            // VP8 partition ID
    bool beginningOfPartition;  // True if this packet is the first
                                // in a VP8 partition. Otherwise false
    int frameWidth;             // Exists for key frames.
    int frameHeight;            // Exists for key frames.
};

// Codec-specific video header; discriminated by RTPVideoHeader::codec.
union RTPVideoTypeHeader
{
    RTPVideoHeaderVP8 VP8;
};

enum RTPVideoCodecTypes
{
    kRTPVideoGeneric = 0,
    kRTPVideoVP8 = 8,
    kRTPVideoNoVideo = 10,
    kRTPVideoFEC = 11,
    kRTPVideoI420 = 12
};

// Video side information for a received RTP packet.
struct RTPVideoHeader
{
    WebRtc_UWord16 width;       // size
    WebRtc_UWord16 height;
    bool isFirstPacket;         // first packet in frame
    WebRtc_UWord8 simulcastIdx; // Index if the simulcast encoder creating
                                // this frame, 0 if not using simulcast.
    RTPVideoCodecTypes codec;
    RTPVideoTypeHeader codecHeader;
};

// Media-type side information; discriminated by WebRtcRTPHeader::frameType.
union RTPTypeHeader
{
    RTPAudioHeader Audio;
    RTPVideoHeader Video;
};

// Complete per-packet header information handed between RTP modules.
struct WebRtcRTPHeader
{
    RTPHeader header;
    FrameType frameType;
    RTPTypeHeader type;
    RTPHeaderExtension extension;
};
// Describes how an encoded frame is split into fragments (e.g. codec
// partitions). Owns four parallel heap arrays, each indexed
// 0..fragmentationVectorSize-1.
class RTPFragmentationHeader
{
public:
    RTPFragmentationHeader() :
        fragmentationVectorSize(0),
        fragmentationOffset(NULL),
        fragmentationLength(NULL),
        fragmentationTimeDiff(NULL),
        fragmentationPlType(NULL)
    {};

    // BUG FIX: this class manages raw heap arrays but previously relied on
    // the compiler-generated copy constructor, which copies the pointers and
    // leads to double deletion (and use-after-free) whenever an instance is
    // copy-constructed. Provide a deep copy by delegating to operator=.
    RTPFragmentationHeader(const RTPFragmentationHeader& header) :
        fragmentationVectorSize(0),
        fragmentationOffset(NULL),
        fragmentationLength(NULL),
        fragmentationTimeDiff(NULL),
        fragmentationPlType(NULL)
    {
        *this = header;
    }

    ~RTPFragmentationHeader()
    {
        delete [] fragmentationOffset;
        delete [] fragmentationLength;
        delete [] fragmentationTimeDiff;
        delete [] fragmentationPlType;
    }

    // Deep-copying assignment. Reallocates the arrays when the vector sizes
    // differ (allocating only the arrays the source actually has), then
    // copies the element values.
    RTPFragmentationHeader& operator=(const RTPFragmentationHeader& header)
    {
        if(this == &header)
        {
            return *this;
        }
        if(header.fragmentationVectorSize != fragmentationVectorSize)
        {
            // new size of vectors: drop ours and allocate to match the source
            delete [] fragmentationOffset;
            fragmentationOffset = NULL;
            delete [] fragmentationLength;
            fragmentationLength = NULL;
            delete [] fragmentationTimeDiff;
            fragmentationTimeDiff = NULL;
            delete [] fragmentationPlType;
            fragmentationPlType = NULL;
            if(header.fragmentationVectorSize > 0)
            {
                if(header.fragmentationOffset)
                {
                    fragmentationOffset =
                        new WebRtc_UWord32[header.fragmentationVectorSize];
                }
                if(header.fragmentationLength)
                {
                    fragmentationLength =
                        new WebRtc_UWord32[header.fragmentationVectorSize];
                }
                if(header.fragmentationTimeDiff)
                {
                    fragmentationTimeDiff =
                        new WebRtc_UWord16[header.fragmentationVectorSize];
                }
                if(header.fragmentationPlType)
                {
                    fragmentationPlType =
                        new WebRtc_UWord8[header.fragmentationVectorSize];
                }
            }
            fragmentationVectorSize = header.fragmentationVectorSize;
        }
        if(header.fragmentationVectorSize > 0)
        {
            // Copy values. The destination-pointer guards fix a latent bug:
            // when the sizes already matched but this object had never
            // allocated the corresponding array, the old code memcpy'd into
            // a NULL destination.
            if(header.fragmentationOffset && fragmentationOffset)
            {
                memcpy(fragmentationOffset, header.fragmentationOffset,
                       header.fragmentationVectorSize * sizeof(WebRtc_UWord32));
            }
            if(header.fragmentationLength && fragmentationLength)
            {
                memcpy(fragmentationLength, header.fragmentationLength,
                       header.fragmentationVectorSize * sizeof(WebRtc_UWord32));
            }
            if(header.fragmentationTimeDiff && fragmentationTimeDiff)
            {
                memcpy(fragmentationTimeDiff, header.fragmentationTimeDiff,
                       header.fragmentationVectorSize * sizeof(WebRtc_UWord16));
            }
            if(header.fragmentationPlType && fragmentationPlType)
            {
                memcpy(fragmentationPlType, header.fragmentationPlType,
                       header.fragmentationVectorSize * sizeof(WebRtc_UWord8));
            }
        }
        return *this;
    }

    // Grows (never shrinks) all four arrays so they hold at least |size|
    // entries, zero-filling the new tail while preserving old contents.
    void VerifyAndAllocateFragmentationHeader(const WebRtc_UWord16 size)
    {
        if(fragmentationVectorSize < size)
        {
            WebRtc_UWord16 oldVectorSize = fragmentationVectorSize;
            {
                // offset
                WebRtc_UWord32* oldOffsets = fragmentationOffset;
                fragmentationOffset = new WebRtc_UWord32[size];
                memset(fragmentationOffset+oldVectorSize, 0,
                       sizeof(WebRtc_UWord32)*(size-oldVectorSize));
                // copy old values (guard: old array may be NULL when empty)
                if(oldOffsets)
                {
                    memcpy(fragmentationOffset, oldOffsets,
                           sizeof(WebRtc_UWord32) * oldVectorSize);
                }
                delete[] oldOffsets;
            }
            // length
            {
                WebRtc_UWord32* oldLengths = fragmentationLength;
                fragmentationLength = new WebRtc_UWord32[size];
                memset(fragmentationLength+oldVectorSize, 0,
                       sizeof(WebRtc_UWord32) * (size- oldVectorSize));
                if(oldLengths)
                {
                    memcpy(fragmentationLength, oldLengths,
                           sizeof(WebRtc_UWord32) * oldVectorSize);
                }
                delete[] oldLengths;
            }
            // time diff
            {
                WebRtc_UWord16* oldTimeDiffs = fragmentationTimeDiff;
                fragmentationTimeDiff = new WebRtc_UWord16[size];
                memset(fragmentationTimeDiff+oldVectorSize, 0,
                       sizeof(WebRtc_UWord16) * (size- oldVectorSize));
                if(oldTimeDiffs)
                {
                    memcpy(fragmentationTimeDiff, oldTimeDiffs,
                           sizeof(WebRtc_UWord16) * oldVectorSize);
                }
                delete[] oldTimeDiffs;
            }
            // payload type
            {
                WebRtc_UWord8* oldTimePlTypes = fragmentationPlType;
                fragmentationPlType = new WebRtc_UWord8[size];
                memset(fragmentationPlType+oldVectorSize, 0,
                       sizeof(WebRtc_UWord8) * (size- oldVectorSize));
                if(oldTimePlTypes)
                {
                    memcpy(fragmentationPlType, oldTimePlTypes,
                           sizeof(WebRtc_UWord8) * oldVectorSize);
                }
                delete[] oldTimePlTypes;
            }
            fragmentationVectorSize = size;
        }
    }

    WebRtc_UWord16 fragmentationVectorSize; // Number of fragmentations
    WebRtc_UWord32* fragmentationOffset;    // Offset of pointer to data for each fragm.
    WebRtc_UWord32* fragmentationLength;    // Data size for each fragmentation
    WebRtc_UWord16* fragmentationTimeDiff;  // Timestamp difference relative "now" for
                                            // each fragmentation
    WebRtc_UWord8* fragmentationPlType;     // Payload type of each fragmentation
};
// VoIP metrics reported in an RTCP XR block (field set per RFC 3611 4.7).
struct RTCPVoIPMetric
{
    // RFC 3611 4.7
    WebRtc_UWord8 lossRate;
    WebRtc_UWord8 discardRate;
    WebRtc_UWord8 burstDensity;
    WebRtc_UWord8 gapDensity;
    WebRtc_UWord16 burstDuration;
    WebRtc_UWord16 gapDuration;
    WebRtc_UWord16 roundTripDelay;
    WebRtc_UWord16 endSystemDelay;
    WebRtc_UWord8 signalLevel;
    WebRtc_UWord8 noiseLevel;
    WebRtc_UWord8 RERL;
    WebRtc_UWord8 Gmin;
    WebRtc_UWord8 Rfactor;
    WebRtc_UWord8 extRfactor;
    WebRtc_UWord8 MOSLQ;
    WebRtc_UWord8 MOSCQ;
    WebRtc_UWord8 RXconfig;
    WebRtc_UWord16 JBnominal;
    WebRtc_UWord16 JBmax;
    WebRtc_UWord16 JBabsMax;
};

// Types for the FEC packet masks. The type |kFecMaskRandom| is based on a
// random loss model. The type |kFecMaskBursty| is based on a bursty/consecutive
// loss model. The packet masks are defined in
// modules/rtp_rtcp/fec_private_tables_random(bursty).h
enum FecMaskType {
    kFecMaskRandom,
    kFecMaskBursty,
};

// Struct containing forward error correction settings.
struct FecProtectionParams {
    int fec_rate;              // FEC rate parameter
    bool use_uep_protection;   // unequal error protection on/off
    int max_fec_frames;        // cap on frames covered by one FEC group
    FecMaskType fec_mask_type; // which mask table to use (see FecMaskType)
};
// class describing a complete, or parts of an encoded frame.
class EncodedVideoData
{
public:
EncodedVideoData() :
payloadType(0),
timeStamp(0),
renderTimeMs(0),
encodedWidth(0),
encodedHeight(0),
completeFrame(false),
missingFrame(false),
payloadData(NULL),
payloadSize(0),
bufferSize(0),
fragmentationHeader(),
frameType(kVideoFrameDelta),
codec(kVideoCodecUnknown)
{};
EncodedVideoData(const EncodedVideoData& data)
{
payloadType = data.payloadType;
timeStamp = data.timeStamp;
renderTimeMs = data.renderTimeMs;
encodedWidth = data.encodedWidth;
encodedHeight = data.encodedHeight;
completeFrame = data.completeFrame;
missingFrame = data.missingFrame;
payloadSize = data.payloadSize;
fragmentationHeader = data.fragmentationHeader;
frameType = data.frameType;
codec = data.codec;
if (data.payloadSize > 0)
{
payloadData = new WebRtc_UWord8[data.payloadSize];
memcpy(payloadData, data.payloadData, data.payloadSize);
}
else
{
payloadData = NULL;
}
}
~EncodedVideoData()
{
delete [] payloadData;
};
EncodedVideoData& operator=(const EncodedVideoData& data)
{
if (this == &data)
{
return *this;
}
payloadType = data.payloadType;
timeStamp = data.timeStamp;
renderTimeMs = data.renderTimeMs;
encodedWidth = data.encodedWidth;
encodedHeight = data.encodedHeight;
completeFrame = data.completeFrame;
missingFrame = data.missingFrame;
payloadSize = data.payloadSize;
fragmentationHeader = data.fragmentationHeader;
frameType = data.frameType;
codec = data.codec;
if (data.payloadSize > 0)
{
delete [] payloadData;
payloadData = new WebRtc_UWord8[data.payloadSize];
memcpy(payloadData, data.payloadData, data.payloadSize);
bufferSize = data.payloadSize;
}
return *this;
};
void VerifyAndAllocate( const WebRtc_UWord32 size)
{
if (bufferSize < size)
{
WebRtc_UWord8* oldPayload = payloadData;
payloadData = new WebRtc_UWord8[size];
memcpy(payloadData, oldPayload, sizeof(WebRtc_UWord8) * payloadSize);
bufferSize = size;
delete[] oldPayload;
}
}
WebRtc_UWord8 payloadType;
WebRtc_UWord32 timeStamp;
WebRtc_Word64 renderTimeMs;
WebRtc_UWord32 encodedWidth;
WebRtc_UWord32 encodedHeight;
bool completeFrame;
bool missingFrame;
WebRtc_UWord8* payloadData;
WebRtc_UWord32 payloadSize;
WebRtc_UWord32 bufferSize;
RTPFragmentationHeader fragmentationHeader;
FrameType frameType;
VideoCodecType codec;
};
// Per-frame content statistics (motion magnitude plus spatial prediction
// errors); all values start at, and reset to, zero.
struct VideoContentMetrics {
    VideoContentMetrics() {
        Reset();
    }

    // Return every metric to its initial zero state.
    void Reset() {
        motion_magnitude = 0.0f;
        spatial_pred_err = 0.0f;
        spatial_pred_err_h = 0.0f;
        spatial_pred_err_v = 0.0f;
    }

    float motion_magnitude;
    float spatial_pred_err;
    float spatial_pred_err_h;
    float spatial_pred_err_v;
};
/*************************************************
*
* VideoFrame class
*
* The VideoFrame class allows storing and
* handling of video frames.
*
*
*************************************************/
// Container for one video frame: a heap-allocated byte buffer (_buffer;
// _bufferSize bytes allocated, _bufferLength bytes valid) plus dimensions
// and timing metadata. The buffer is owned and freed by the destructor.
class VideoFrame
{
public:
    VideoFrame();
    ~VideoFrame();
    /**
     * Verifies that current allocated buffer size is larger than or equal to the input size.
     * If the current buffer size is smaller, a new allocation is made and the old buffer data
     * is copied to the new buffer.
     * Buffer size is updated to minimumSize.
     */
    WebRtc_Word32 VerifyAndAllocate(const WebRtc_UWord32 minimumSize);
    /**
     * Update length of data buffer in frame. Function verifies that new length is less or
     * equal to allocated size.
     */
    WebRtc_Word32 SetLength(const WebRtc_UWord32 newLength);
    /*
     * Swap buffer and size data with caller-provided values (no copying).
     */
    WebRtc_Word32 Swap(WebRtc_UWord8*& newMemory,
                       WebRtc_UWord32& newLength,
                       WebRtc_UWord32& newSize);
    /*
     * Swap buffer, size data and metadata with another frame (no copying).
     */
    WebRtc_Word32 SwapFrame(VideoFrame& videoFrame);
    /**
     * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length
     * is allocated. Also copies timestamp, dimensions and render time.
     */
    WebRtc_Word32 CopyFrame(const VideoFrame& videoFrame);
    /**
     * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length
     * is allocated.
     */
    WebRtc_Word32 CopyFrame(WebRtc_UWord32 length, const WebRtc_UWord8* sourceBuffer);
    /**
     * Delete VideoFrame buffer and resets members to zero
     */
    void Free();
    /**
     * Set frame timestamp (90kHz)
     */
    void SetTimeStamp(const WebRtc_UWord32 timeStamp) {_timeStamp = timeStamp;}
    /**
     * Get pointer to frame buffer (const and mutable accessors)
     */
    WebRtc_UWord8* Buffer() const {return _buffer;}
    WebRtc_UWord8*& Buffer() {return _buffer;}
    /**
     * Get allocated buffer size
     */
    WebRtc_UWord32 Size() const {return _bufferSize;}
    /**
     * Get frame length (valid bytes, <= Size())
     */
    WebRtc_UWord32 Length() const {return _bufferLength;}
    /**
     * Get frame timestamp (90kHz)
     */
    WebRtc_UWord32 TimeStamp() const {return _timeStamp;}
    /**
     * Get frame width
     */
    WebRtc_UWord32 Width() const {return _width;}
    /**
     * Get frame height
     */
    WebRtc_UWord32 Height() const {return _height;}
    /**
     * Set frame width
     */
    void SetWidth(const WebRtc_UWord32 width) {_width = width;}
    /**
     * Set frame height
     */
    void SetHeight(const WebRtc_UWord32 height) {_height = height;}
    /**
     * Set render time in miliseconds
     */
    void SetRenderTime(const WebRtc_Word64 renderTimeMs) {_renderTimeMs = renderTimeMs;}
    /**
     * Get render time in miliseconds
     */
    WebRtc_Word64 RenderTimeMs() const {return _renderTimeMs;}

private:
    // Declared but not defined in this header (not used by the inline code).
    void Set(WebRtc_UWord8* buffer,
             WebRtc_UWord32 size,
             WebRtc_UWord32 length,
             WebRtc_UWord32 timeStamp);

    WebRtc_UWord8* _buffer;       // Pointer to frame buffer
    WebRtc_UWord32 _bufferSize;   // Allocated buffer size
    WebRtc_UWord32 _bufferLength; // Length (in bytes) of buffer
    WebRtc_UWord32 _timeStamp;    // Timestamp of frame (90kHz)
    WebRtc_UWord32 _width;
    WebRtc_UWord32 _height;
    WebRtc_Word64 _renderTimeMs;  // wall-clock render time in milliseconds
}; // end of VideoFrame class declaration
// inline implementation of VideoFrame class:
// ---- inline implementation of the VideoFrame class ----

inline VideoFrame::VideoFrame()
    : _buffer(0),
      _bufferSize(0),
      _bufferLength(0),
      _timeStamp(0),
      _width(0),
      _height(0),
      _renderTimeMs(0)
{
}

inline VideoFrame::~VideoFrame()
{
    if (_buffer)
    {
        delete [] _buffer;
        _buffer = NULL;
    }
}

// Grow the backing buffer to at least |minimumSize| bytes. Existing bytes
// are preserved; a brand-new buffer is zero-filled. Returns 0 on success,
// -1 for an invalid (zero) size.
inline WebRtc_Word32 VideoFrame::VerifyAndAllocate(const WebRtc_UWord32 minimumSize)
{
    if (minimumSize < 1)
    {
        return -1;
    }
    if (minimumSize <= _bufferSize)
    {
        return 0; // already large enough
    }
    WebRtc_UWord8* grown = new WebRtc_UWord8[minimumSize];
    if (_buffer)
    {
        // carry over the old contents
        memcpy(grown, _buffer, _bufferSize);
        delete [] _buffer;
    }
    else
    {
        memset(grown, 0, minimumSize * sizeof(WebRtc_UWord8));
    }
    _buffer = grown;
    _bufferSize = minimumSize;
    return 0;
}

// Record the number of valid bytes; fails (-1) if it exceeds the allocation.
inline WebRtc_Word32 VideoFrame::SetLength(const WebRtc_UWord32 newLength)
{
    if (newLength > _bufferSize)
    {
        return -1; // cannot accommodate the requested length
    }
    _bufferLength = newLength;
    return 0;
}

// Exchange the buffer pointer, valid length and allocation size with the
// caller-provided values. No copying is performed.
inline WebRtc_Word32 VideoFrame::Swap(WebRtc_UWord8*& newMemory,
                                      WebRtc_UWord32& newLength,
                                      WebRtc_UWord32& newSize)
{
    WebRtc_UWord8* prevBuffer = _buffer;
    WebRtc_UWord32 prevLength = _bufferLength;
    WebRtc_UWord32 prevSize = _bufferSize;
    _buffer = newMemory;
    _bufferLength = newLength;
    _bufferSize = newSize;
    newMemory = prevBuffer;
    newLength = prevLength;
    newSize = prevSize;
    return 0;
}

// Exchange all state (metadata and pixel buffer) with |videoFrame|.
inline WebRtc_Word32 VideoFrame::SwapFrame(VideoFrame& videoFrame)
{
    WebRtc_UWord32 myTimeStamp = _timeStamp;
    WebRtc_UWord32 myWidth = _width;
    WebRtc_UWord32 myHeight = _height;
    WebRtc_Word64 myRenderTime = _renderTimeMs;

    _timeStamp = videoFrame._timeStamp;
    _width = videoFrame._width;
    _height = videoFrame._height;
    _renderTimeMs = videoFrame._renderTimeMs;

    videoFrame._timeStamp = myTimeStamp;
    videoFrame._width = myWidth;
    videoFrame._height = myHeight;
    videoFrame._renderTimeMs = myRenderTime;

    return Swap(videoFrame._buffer, videoFrame._bufferLength, videoFrame._bufferSize);
}

// Copy |length| bytes from |sourceBuffer|, growing the allocation if needed.
inline WebRtc_Word32 VideoFrame::CopyFrame(WebRtc_UWord32 length, const WebRtc_UWord8* sourceBuffer)
{
    if (length > _bufferSize && VerifyAndAllocate(length) < 0)
    {
        return -1;
    }
    memcpy(_buffer, sourceBuffer, length);
    _bufferLength = length;
    return 0;
}

// Copy another frame's buffer and metadata into this one.
inline WebRtc_Word32 VideoFrame::CopyFrame(const VideoFrame& videoFrame)
{
    if (CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0)
    {
        return -1;
    }
    _timeStamp = videoFrame._timeStamp;
    _width = videoFrame._width;
    _height = videoFrame._height;
    _renderTimeMs = videoFrame._renderTimeMs;
    return 0;
}

// Release the buffer and reset every member to its default state.
inline void VideoFrame::Free()
{
    _timeStamp = 0;
    _bufferLength = 0;
    _bufferSize = 0;
    _height = 0;
    _width = 0;
    _renderTimeMs = 0;
    delete [] _buffer; // deleting NULL is a no-op
    _buffer = NULL;
}
/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
* allows for adding and subtracting frames while keeping track of the resulting
* states.
*
* Notes
* - The total number of samples in |data_| is
* samples_per_channel_ * num_channels_
*
* - Stereo data is interleaved starting with the left channel.
*
* - The +operator assume that you would never add exactly opposite frames when
* deciding the resulting state. To do this use the -operator.
*/
// One chunk of (possibly stereo) PCM audio plus VAD/speech metadata.
// See the file-level comment above for the data layout invariants.
class AudioFrame
{
public:
    // Capacity of data_: stereo, 32 kHz, 60ms (2*32*60 samples).
    enum { kMaxDataSizeSamples = 3840 }; // stereo, 32 kHz, 60ms (2*32*60)

    // Voice-activity state carried with the frame.
    enum VADActivity
    {
        kVadActive = 0,
        kVadPassive = 1,
        kVadUnknown = 2
    };

    // Origin/kind of the audio in this frame.
    enum SpeechType
    {
        kNormalSpeech = 0,
        kPLC = 1,       // packet-loss concealment output
        kCNG = 2,       // comfort noise
        kPLCCNG = 3,
        kUndefined = 4
    };

    AudioFrame();
    virtual ~AudioFrame();

    // Replace all contents in one call; returns 0 on success, -1 on invalid
    // geometry. Note: the default energy of -1 wraps to 0xffffffff, which is
    // used throughout as the "energy unknown" marker.
    int UpdateFrame(
        int id,
        uint32_t timestamp,
        const int16_t* data,
        int samples_per_channel,
        int sample_rate_hz,
        SpeechType speech_type,
        VADActivity vad_activity,
        int num_channels = 1,
        uint32_t energy = -1);

    AudioFrame& Append(const AudioFrame& rhs);    // concatenate samples
    void Mute();                                  // zero all samples
    AudioFrame& operator=(const AudioFrame& rhs); // deep copy (validated)
    AudioFrame& operator>>=(const int rhs);       // arithmetic shift of samples
    AudioFrame& operator+=(const AudioFrame& rhs); // saturating mix (see note above)
    AudioFrame& operator-=(const AudioFrame& rhs); // saturating subtract

    int id_;                          // owner/module id
    uint32_t timestamp_;              // RTP timestamp of first sample
    int16_t data_[kMaxDataSizeSamples]; // interleaved samples, left first
    int samples_per_channel_;
    int sample_rate_hz_;
    int num_channels_;                // 1 (mono) or 2 (stereo)
    SpeechType speech_type_;
    VADActivity vad_activity_;
    uint32_t energy_;                 // 0xffffffff == unknown
};
// Default-construct an empty mono frame with unknown VAD/speech state.
// Note data_() in the initializer list: it value-initializes (zeroes) the
// entire sample array. energy_ starts at 0xffffffff ("unknown").
inline
AudioFrame::AudioFrame()
    :
    id_(-1),
    timestamp_(0),
    data_(),
    samples_per_channel_(0),
    sample_rate_hz_(0),
    num_channels_(1),
    speech_type_(kUndefined),
    vad_activity_(kVadUnknown),
    energy_(0xffffffff)
{
}

inline
AudioFrame::~AudioFrame()
{
}
// Replace the frame's contents in one call.
// Returns 0 on success. Returns -1 (with samples_per_channel_ forced to 0,
// marking the frame empty) when |samples_per_channel| exceeds the buffer
// capacity or |num_channels| is outside [1, 2].
// BUG FIX: validation now happens before any metadata is overwritten; the
// old code assigned id_, timestamp_, num_channels_ etc. first, so a rejected
// update left the frame with a half-applied mix of old and new metadata
// (including an out-of-range num_channels_).
inline int AudioFrame::UpdateFrame(
    int id,
    uint32_t timestamp,
    const int16_t* data,
    int samples_per_channel,
    int sample_rate_hz,
    SpeechType speech_type,
    VADActivity vad_activity,
    int num_channels,
    uint32_t energy)
{
    if((samples_per_channel > kMaxDataSizeSamples) ||
       (num_channels > 2) || (num_channels < 1))
    {
        samples_per_channel_ = 0; // preserve the "empty frame" failure marker
        return -1;
    }
    id_ = id;
    timestamp_ = timestamp;
    sample_rate_hz_ = sample_rate_hz;
    speech_type_ = speech_type;
    vad_activity_ = vad_activity;
    num_channels_ = num_channels;
    energy_ = energy;
    samples_per_channel_ = samples_per_channel;
    if(data != NULL)
    {
        memcpy(data_, data, sizeof(int16_t) *
               samples_per_channel * num_channels_);
    }
    else
    {
        // No source samples supplied: fill with silence.
        memset(data_, 0, sizeof(int16_t) *
               samples_per_channel * num_channels_);
    }
    return 0;
}
// Silence the frame: zero every currently-held sample.
inline void AudioFrame::Mute()
{
    const int totalSamples = samples_per_channel_ * num_channels_;
    for (int i = 0; i < totalSamples; i++)
    {
        data_[i] = 0;
    }
}
// Deep copy. Silently leaves this frame untouched when the source has an
// invalid geometry (matching the original defensive behavior).
inline AudioFrame& AudioFrame::operator=(const AudioFrame& rhs)
{
    const bool rhsValid =
        (rhs.samples_per_channel_ <= kMaxDataSizeSamples) &&
        (rhs.num_channels_ >= 1) && (rhs.num_channels_ <= 2);
    if (!rhsValid || this == &rhs)
    {
        return *this;
    }
    id_ = rhs.id_;
    timestamp_ = rhs.timestamp_;
    sample_rate_hz_ = rhs.sample_rate_hz_;
    speech_type_ = rhs.speech_type_;
    vad_activity_ = rhs.vad_activity_;
    num_channels_ = rhs.num_channels_;
    energy_ = rhs.energy_;
    samples_per_channel_ = rhs.samples_per_channel_;
    memcpy(data_, rhs.data_,
           sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
    return *this;
}

// Arithmetic right-shift applied to every sample (cheap attenuation).
inline AudioFrame& AudioFrame::operator>>=(const int rhs)
{
    assert((num_channels_ > 0) && (num_channels_ < 3));
    if ((num_channels_ < 1) || (num_channels_ > 2))
    {
        return *this;
    }
    const int total = samples_per_channel_ * num_channels_;
    for (int i = 0; i < total; i++)
    {
        data_[i] = static_cast<int16_t>(data_[i] >> rhs);
    }
    return *this;
}
// Concatenate |rhs|'s samples after this frame's, merging the VAD and
// speech-type state. Channel counts must match; on any validation failure
// the frame is returned unchanged.
// BUG FIX: the old code never checked that the combined sample count fits
// in data_, so appending to a sufficiently full frame wrote past the end of
// the fixed-size buffer. The append is now refused when it would overflow.
inline
AudioFrame&
AudioFrame::Append(const AudioFrame& rhs)
{
    // Sanity check
    assert((num_channels_ > 0) && (num_channels_ < 3));
    if((num_channels_ > 2) ||
       (num_channels_ < 1))
    {
        return *this;
    }
    if(num_channels_ != rhs.num_channels_)
    {
        return *this;
    }
    const int offset = samples_per_channel_ * num_channels_;
    const int incoming = rhs.samples_per_channel_ * rhs.num_channels_;
    if(offset + incoming > kMaxDataSizeSamples)
    {
        return *this; // would overflow data_
    }
    if((vad_activity_ == kVadActive) ||
       rhs.vad_activity_ == kVadActive)
    {
        vad_activity_ = kVadActive;
    }
    else if((vad_activity_ == kVadUnknown) ||
            rhs.vad_activity_ == kVadUnknown)
    {
        vad_activity_ = kVadUnknown;
    }
    if(speech_type_ != rhs.speech_type_)
    {
        speech_type_ = kUndefined;
    }
    for(int i = 0; i < incoming; i++)
    {
        data_[offset+i] = rhs.data_[i];
    }
    samples_per_channel_ += rhs.samples_per_channel_;
    return *this;
}
// Mix |rhs| into this frame sample-by-sample with saturation to the int16_t
// range. If this frame is empty it simply adopts rhs's samples; mismatched
// non-zero lengths or channel counts leave the frame unchanged.
inline AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs)
{
    // Sanity check
    assert((num_channels_ > 0) && (num_channels_ < 3));
    if ((num_channels_ > 2) || (num_channels_ < 1))
    {
        return *this;
    }
    if (num_channels_ != rhs.num_channels_)
    {
        return *this;
    }
    bool assignInsteadOfAdd = false;
    if (samples_per_channel_ != rhs.samples_per_channel_)
    {
        if (samples_per_channel_ != 0)
        {
            return *this; // incompatible lengths
        }
        // Special case: we are empty, so adopt rhs's length and copy outright.
        samples_per_channel_ = rhs.samples_per_channel_;
        assignInsteadOfAdd = true;
    }
    if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive)
    {
        vad_activity_ = kVadActive;
    }
    else if ((vad_activity_ == kVadUnknown) || rhs.vad_activity_ == kVadUnknown)
    {
        vad_activity_ = kVadUnknown;
    }
    if (speech_type_ != rhs.speech_type_)
    {
        speech_type_ = kUndefined;
    }
    if (assignInsteadOfAdd)
    {
        memcpy(data_, rhs.data_,
               sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
    }
    else
    {
        const int total = samples_per_channel_ * num_channels_;
        for (int i = 0; i < total; i++)
        {
            // Widen to 32 bits, then clamp the sum back into int16_t range.
            const int32_t sum = static_cast<int32_t>(data_[i]) +
                                static_cast<int32_t>(rhs.data_[i]);
            data_[i] = static_cast<int16_t>(
                sum < -32768 ? -32768 : (sum > 32767 ? 32767 : sum));
        }
    }
    energy_ = 0xffffffff; // mixing invalidates the cached energy
    return *this;
}

// Subtract |rhs| from this frame sample-by-sample with saturation. Lengths
// and channel counts must match exactly.
inline AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs)
{
    // Sanity check
    assert((num_channels_ > 0) && (num_channels_ < 3));
    if ((num_channels_ > 2) || (num_channels_ < 1))
    {
        return *this;
    }
    if ((samples_per_channel_ != rhs.samples_per_channel_) ||
        (num_channels_ != rhs.num_channels_))
    {
        return *this;
    }
    if ((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive)
    {
        vad_activity_ = kVadUnknown;
    }
    speech_type_ = kUndefined;
    const int total = samples_per_channel_ * num_channels_;
    for (int i = 0; i < total; i++)
    {
        const int32_t diff = static_cast<int32_t>(data_[i]) -
                             static_cast<int32_t>(rhs.data_[i]);
        data_[i] = static_cast<int16_t>(
            diff < -32768 ? -32768 : (diff > 32767 ? 32767 : diff));
    }
    energy_ = 0xffffffff;
    return *this;
}
} // namespace webrtc
#endif // MODULE_COMMON_TYPES_H
|
vmware-serengeti/vhm
|
elastic-runtime/src/main/java/com/vmware/vhadoop/vhm/vmcalgorithm/VMCA_DumbVMChooser.java
|
<reponame>vmware-serengeti/vhm
/***************************************************************************
* Copyright (c) 2013 VMware, Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
/***************************************************************************
* Copyright (c) 2012 VMware, Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package com.vmware.vhadoop.vhm.vmcalgorithm;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import com.vmware.vhadoop.external.VCActionDTOTypes.VMDTO;
import com.vmware.vhadoop.util.CompoundStatus;
import com.vmware.vhadoop.util.ProgressLogger;
import com.vmware.vhadoop.vhm.TTStatesForHost;
/**
 * Trivial VM chooser: walks the hosts in order and greedily takes VMs from
 * each until {@code delta} VMs have been collected. No placement heuristics.
 */
public class VMCA_DumbVMChooser implements VMChooserAlgorithm {
    private static final String _className = VMCA_DumbVMChooser.class.getName();
    private static final ProgressLogger _pLog = ProgressLogger.getProgressLogger(_className);
    private static final Logger _log = _pLog.getLogger();

    @Override
    public VMCAResult chooseVMsToEnable(TTStatesForHost[] hostAndVMs, int totalTTVMs, int delta) {
        return chooseVMs(hostAndVMs, delta, true);
    }

    @Override
    public VMCAResult chooseVMsToDisable(TTStatesForHost[] hostAndVMs, int totalTTVMs, int delta) {
        return chooseVMs(hostAndVMs, delta, false);
    }

    /**
     * Shared implementation of both public methods (they previously
     * duplicated the same greedy loop verbatim).
     *
     * @param hostAndVMs per-host VM state
     * @param delta      number of VMs requested
     * @param enable     true to pick disabled VMs to enable; false to pick
     *                   enabled VMs to disable
     * @return the chosen VMs plus a task status (log messages unchanged)
     */
    private VMCAResult chooseVMs(TTStatesForHost[] hostAndVMs, int delta, boolean enable) {
        String action = enable ? "enable" : "disable";
        CompoundStatus taskStatus = new CompoundStatus(_className + " " + action); /* TODO: Set the status somewhere */
        List<VMDTO> chosen = new ArrayList<VMDTO>();
        /* Just cycle through the hosts taking as many VMs as each will give */
        for (TTStatesForHost hostAndVM : hostAndVMs) {
            int remaining = delta - chosen.size();
            if (remaining <= 0) {
                break;
            }
            for (VMDTO vm : (enable ? hostAndVM.getDisabled() : hostAndVM.getEnabled())) {
                chosen.add(vm);
                remaining--;
                if (remaining <= 0) {
                    break;
                }
            }
        }
        if (delta > chosen.size()) {
            _log.severe("Request to " + action + " more VMs than are available!");
        }
        return new VMCAResult(chosen.toArray(new VMDTO[0]), taskStatus);
    }
}
|
Antoine-Regembal/portfolio
|
src/Components/Presentation/Presentation.js
|
import React from "react";
import { Container, Row, Col } from "react-grid-system";
import "./Presentation.css";
import PresentationText from "./PresentationText";
const Presentation = ({ presentationContent, name }) => (
<div id={presentationContent.sectionId} className="presentation">
<Container className="presentation__container">
<Row>
<Col lg={6} md={12}>
<PresentationText
presentationContent={presentationContent}
name={name}
/>
</Col>
<Col lg={6} md={12}>
<img
className="presentation__image"
alt={name}
src={require("../../Medias/me.jpg")}
/>
</Col>
</Row>
</Container>
</div>
);
export default Presentation;
|
imagitama/vrphotobuddy
|
desktopapp/src/config.js
|
<gh_stars>0
const path = require('path')
const { promises: fs, constants } = require('fs')
const os = require('os')
const pathToConfigFile = path.resolve(os.homedir(), 'vrphotobuddy.json')
let config = {
PATH_TO_VRCHAT_PHOTOS: 'NO_PATH',
OAUTH_AUTHORIZE_URL: 'NO_URL',
OAUTH_REDIRECT_WEBSERVER_PORT: 1234,
}
// Return the most recently loaded configuration (see loadConfig below).
function getConfig() {
  return config
}
module.exports.getConfig = getConfig
// True if the user has created a custom config file at pathToConfigFile.
async function doesConfigFileExist() {
  try {
    await fs.access(pathToConfigFile, constants.F_OK)
    return true
  } catch (err) {
    return false
  }
}

// Parse and return the JSON contents of the custom config file.
async function readConfigFile() {
  const rawJson = await fs.readFile(pathToConfigFile)
  return JSON.parse(rawJson)
}
// Populate the module-level config, preferring the user's custom config
// file and falling back to environment defaults.
const loadConfig = async () => {
  console.info('loading config...')
  console.info(`path to custom config file: ${pathToConfigFile}`)
  let PATH_TO_VRCHAT_PHOTOS
  const OAUTH_AUTHORIZE_URL = process.env.VRPHOTOBUDDY_OAUTH_AUTHORIZE_URL
  const OAUTH_REDIRECT_WEBSERVER_PORT = 3001
  if (await doesConfigFileExist()) {
    console.info('custom config file found, using...')
    const configFile = await readConfigFile()
    console.info('custom config file', configFile)
    if (configFile.PATH_TO_VRCHAT_PHOTOS) {
      PATH_TO_VRCHAT_PHOTOS = configFile.PATH_TO_VRCHAT_PHOTOS
    }
  } else {
    console.info('no custom config found, skipping...')
  }
  // BUG FIX: previously the env-var default was only applied when no config
  // file existed, so a config file *without* PATH_TO_VRCHAT_PHOTOS left the
  // value undefined and the .replace() call below crashed with a TypeError.
  if (!PATH_TO_VRCHAT_PHOTOS) {
    PATH_TO_VRCHAT_PHOTOS = process.env.VRPHOTOBUDDY_DEFAULT_PHOTO_PATH
  }
  // Guard the expansion too: the env var itself may be unset.
  if (PATH_TO_VRCHAT_PHOTOS) {
    PATH_TO_VRCHAT_PHOTOS = PATH_TO_VRCHAT_PHOTOS.replace('~', os.homedir())
  }
  console.info(`config has been loaded`)
  console.info(`path to VRChat photos: ${PATH_TO_VRCHAT_PHOTOS}`)
  console.info(`oauth authorize URL: ${OAUTH_AUTHORIZE_URL}`)
  console.info(`webserver port: ${OAUTH_REDIRECT_WEBSERVER_PORT}`)
  config = {
    PATH_TO_VRCHAT_PHOTOS,
    OAUTH_AUTHORIZE_URL,
    OAUTH_REDIRECT_WEBSERVER_PORT,
  }
}
module.exports.loadConfig = loadConfig
|
jalowiczor/beeon-gateway-with-nemea-sources
|
src/exporters/MqttExporter.cpp
|
#include <cstring>
#include <Poco/Exception.h>
#include "di/Injectable.h"
#include "exporters/MqttExporter.h"
#include "util/NullSensorDataFormatter.h"
#include "util/SensorDataFormatter.h"
// Dependency-injection registration: exposes MqttExporter to the BeeeOn DI
// container as an Exporter with the configurable properties listed below.
BEEEON_OBJECT_BEGIN(BeeeOn, MqttExporter)
BEEEON_OBJECT_CASTABLE(Exporter)
BEEEON_OBJECT_PROPERTY("topic", &MqttExporter::setTopic)
BEEEON_OBJECT_PROPERTY("qos", &MqttExporter::setQos)
BEEEON_OBJECT_PROPERTY("formatter", &MqttExporter::setFormatter)
BEEEON_OBJECT_PROPERTY("mqttClient", &MqttExporter::setMqttClient)
BEEEON_OBJECT_END(BeeeOn, MqttExporter)

using namespace BeeeOn;
using namespace Poco;
using namespace std;

// Defaults used when no configuration is injected.
const static string DEFAULT_TOPIC = "BeeeOnOut";
const static string DEFAULT_CLIENT_ID = "GatewayExporterClient";
MqttExporter::MqttExporter():
m_topic(DEFAULT_TOPIC),
m_qos(MqttMessage::EXACTLY_ONCE),
m_clientID(DEFAULT_CLIENT_ID)
{
}
MqttExporter::~MqttExporter()
{
}
void MqttExporter::setTopic(const string &topic)
{
m_topic = topic;
}
void MqttExporter::setMqttClient(MqttClient::Ptr client)
{
m_mqtt = client;
}
void MqttExporter::setFormatter(const SharedPtr<SensorDataFormatter> formatter)
{
m_formatter = formatter;
}
bool MqttExporter::ship(const SensorData &data)
{
MqttMessage msg = {
m_topic,
m_formatter->format(data),
m_qos
};
try {
m_mqtt->publish(msg);
}
catch (const Exception &ex) {
logger().log(ex, __FILE__, __LINE__);
return false;
}
return true;
}
/**
 * Sets the MQTT quality-of-service level used when publishing.
 * Only the values defined by MqttMessage::QoS are accepted; any other
 * value leaves m_qos untouched and raises InvalidArgumentException.
 */
void MqttExporter::setQos(const int qos)
{
	const bool valid =
		qos == MqttMessage::MOST_ONCE
		|| qos == MqttMessage::LEAST_ONCE
		|| qos == MqttMessage::EXACTLY_ONCE;

	if (!valid)
		throw InvalidArgumentException("QOS is out of range");

	m_qos = static_cast<MqttMessage::QoS>(qos);
}
|
PacktPublishing/Mastering-Postgis
|
Chapter07/resources/ext-6.2.0-gpl/ext-6.2.0/packages/core/src/dom/Underlay.js
|
<filename>Chapter07/resources/ext-6.2.0-gpl/ext-6.2.0/packages/core/src/dom/Underlay.js
/**
 * A class that provides an underlay element which displays behind an absolutely positioned
 * target element and tracks its size and position. Abstract base class for
 * {@link Ext.dom.Shadow} and {@link Ext.dom.Shim}
 *
 * Underlay elements are shared: they are checked out of a per-class
 * {@link Ext.dom.UnderlayPool} on show and checked back in on hide.
 *
 * @private
 * @abstract
 */
Ext.define('Ext.dom.Underlay', {
    requires: [ 'Ext.dom.UnderlayPool' ],

    /**
     * @cfg {Ext.dom.Element} target
     * The target element
     */

    /**
     * @cfg {Number} zIndex
     * The CSS z-index to use for this underlay. Defaults to the z-index of {@link #target}.
     */

    constructor: function(config) {
        // Configs are applied straight onto the instance (target, zIndex, offsets...).
        Ext.apply(this, config);
    },

    /**
     * @method
     * @protected
     * Called before the underlay is shown, immediately after its element is retrieved
     * from the pool
     */
    beforeShow: Ext.emptyFn,

    /**
     * @protected
     * Returns the dom element that this underlay should be inserted before.
     * Defaults to the target element
     * @return {Ext.dom.Element}
     */
    getInsertionTarget: function() {
        return this.target;
    },

    /**
     * @protected
     * Returns the shared pool of underlay elements. Lazily created once and
     * cached on the class prototype, so all instances of the same subclass
     * reuse a single pool.
     * @return {Ext.dom.UnderlayPool}
     */
    getPool: function() {
        return this.pool ||
            (this.self.prototype.pool = new Ext.dom.UnderlayPool(this.elementConfig));
    },

    /**
     * Hides the underlay and returns its element to the shared pool.
     */
    hide: function() {
        var me = this,
            el = me.el;

        if (el) {
            el.hide();
            me.getPool().checkIn(el);
            me.el = null;
            me.hidden = true;
        }
    },

    /**
     * Aligns the underlay to its target element
     * @param {Number} [x] The x position of the target element. If not provided, the
     * x position will be read from the DOM.
     * @param {Number} [y] The y position of the target element. If not provided, the
     * y position will be read from the DOM.
     * @param {Number} [width] The width of the target element. If not provided, the
     * width will be read from the DOM.
     * @param {Number} [height] The height of the target element. If not provided, the
     * height will be read from the DOM.
     */
    realign: function(x, y, width, height) {
        var me = this,
            el = me.el,
            target = me.target,
            offsets = me.offsets,
            max = Math.max;

        if (el) {
            if (x == null) {
                x = target.getX();
            }
            if (y == null) {
                y = target.getY();
            }
            if (width == null) {
                width = target.getWidth();
            }
            if (height == null) {
                height = target.getHeight();
            }
            if (offsets) {
                x = x + offsets.x;
                y = y + offsets.y;
                // Offsets may shrink the box; never allow a negative size.
                width = max(width + offsets.w, 0);
                height = max(height + offsets.h, 0);
            }
            el.setXY([x, y]);
            el.setSize(width, height);
        }
    },

    /**
     * Adjust the z-index of this underlay
     * @param {Number} zIndex The new z-index
     */
    setZIndex: function(zIndex) {
        this.zIndex = zIndex;

        if (this.el) {
            this.el.setStyle("z-index", zIndex);
        }
    },

    /**
     * Shows the underlay: checks an element out of the pool if needed,
     * matches the target's z-index, inserts the element just before the
     * insertion target in the DOM, and aligns it to the target.
     */
    show: function() {
        var me = this,
            target = me.target,
            zIndex = me.zIndex,
            el = me.el,
            insertionTarget = me.getInsertionTarget().dom,
            dom;

        if (!el) {
            el = me.el = me.getPool().checkOut();
        }

        me.beforeShow();

        if (zIndex == null) {
            // For best results, we need the underlay to be as close as possible to its
            // target element in the z-index stacking order without overlaying the target
            // element. Since the UnderlayPool inserted the underlay as high as possible
            // in the dom tree when we checked the underlay out of the pool, we can assume
            // that it comes before the target element in the dom tree, and therefore can
            // give it the exact same index as the target element.
            zIndex = (parseInt(target.getStyle("z-index"), 10));
        }

        if (zIndex) {
            el.setStyle("z-index", zIndex);
        }

        // Overlay elements are shared, so fix position to match current owner
        el.setStyle('position', me.fixed ? 'fixed' : '');

        dom = el.dom;

        if (dom.nextSibling !== insertionTarget) {
            // inserting the underlay as the previous sibling of the target ensures that
            // it will show behind the target, as long as its z-index is less than or equal
            // to the z-index of the target element.
            target.dom.parentNode.insertBefore(dom, insertionTarget);
        }

        el.show();
        me.realign();
        me.hidden = false;
    }
});
|
featherfly/conversion
|
src/main/java/cn/featherfly/conversion/core/basic/LocalDateTimeConvertor.java
|
<gh_stars>0
package cn.featherfly.conversion.core.basic;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import cn.featherfly.common.lang.GenericType;
import cn.featherfly.common.lang.StringUtils;
import cn.featherfly.conversion.core.ConversionException;
/**
 * Basic convertor between {@link LocalDateTime} and its string form using
 * the fixed pattern {@code yyyy-MM-dd HH:mm:ss}.
 *
 * @author 钟冀
 */
public class LocalDateTimeConvertor extends AbstractBasicConvertor<LocalDateTime, GenericType<LocalDateTime>> {

    private static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss";

    // DateTimeFormatter is immutable and thread-safe, so a single shared
    // instance is fine.
    private static final DateTimeFormatter DATE_TIME_PATTERN = DateTimeFormatter.ofPattern(DATE_TIME_FORMAT);

    /**
     * Creates a convertor using the default pattern.
     */
    public LocalDateTimeConvertor() {
    }

    /**
     * {@inheritDoc}
     *
     * Returns the empty string for a {@code null} value.
     */
    @Override
    protected String doToString(LocalDateTime value, GenericType<LocalDateTime> genericType) {
        return value == null ? "" : DATE_TIME_PATTERN.format(value);
    }

    /**
     * {@inheritDoc}
     *
     * Returns {@code null} for a blank input; throws
     * {@link ConversionException} when the trimmed text does not match the
     * pattern.
     */
    @Override
    protected LocalDateTime doToObject(String value, GenericType<LocalDateTime> genericType) {
        if (!StringUtils.isNotBlank(value)) {
            return null;
        }
        String trimmed = value.trim();
        try {
            return LocalDateTime.parse(trimmed, DATE_TIME_PATTERN);
        } catch (Exception e) {
            throw new ConversionException("#convert_failed_with_type",
                new Object[] { trimmed, DATE_TIME_FORMAT, getType().getName() });
        }
    }
}
|
sivazozo/Xrob
|
server/src/com/theah64/xrob/api/database/tables/Files.java
|
package com.theah64.xrob.api.database.tables;
import com.sun.istack.internal.Nullable;
import com.theah64.xrob.api.database.Connection;
import com.theah64.xrob.api.models.File;
import com.theah64.xrob.api.models.FileBundle;
import com.theah64.xrob.api.utils.DarKnight;
import com.theah64.xrob.api.utils.RandomString;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
 * Table gateway for the {@code files} table: stores a victim's reported
 * file tree (exploded from the JSON structure sent by the client) and
 * reads single directory levels back for browsing.
 *
 * Created by theapache64 on 16/9/16,9:25 PM.
 */
public class Files extends BaseTable<File> {

    private static final String COLUMN_FILE_NAME = "file_name";
    private static final String COLUMN_IS_DIRECTORY = "is_directory";
    private static final String COLUMN_FILE_SIZE_IN_KB = "file_size_in_kb";
    private static final String COLUMN_ABSOLUTE_PARENT_PATH = "absolute_parent_path";
    public static final String COLUMN_FILE_HASH = "file_hash";
    private static final String ABSOLUTE_ROOT = "/";
    private static final String COLUMN_AS_HAS_DIRECTORY = "has_directory";

    private Files() {
        super("files");
    }

    private static final Files instance = new Files();

    public static Files getInstance() {
        return instance;
    }

    /**
     * Creates a new file bundle for the victim and recursively inserts the
     * whole JSON file tree under it.
     *
     * @param victimId owner of the file tree, may be null
     * @param jaFiles  JSON array of file nodes; directories carry a nested
     *                 "files" array
     */
    @Override
    public void addv2(@Nullable String victimId, JSONArray jaFiles) throws RuntimeException, JSONException {
        System.out.println("jaFiles: " + jaFiles);

        // Create a bundle row that groups this snapshot of the file tree.
        final String bundleId = FileBundles.getInstance().addv3(new FileBundle(null, victimId, 0,
                DarKnight.getEncrypted(victimId + RandomString.getRandomString(10)).replaceAll("[^A-Za-z0-9]", "")));

        final String query = "INSERT INTO files (file_bundle_id,absolute_parent_path,file_name,parent_id,is_directory,file_size_in_kb,file_hash) VALUES (?,?,?,?,?,?,?);";

        // try-with-resources guarantees the statement and connection close
        // even when an insert fails part-way through the recursion
        // (the original leaked them on the SQLException path).
        try (java.sql.Connection con = Connection.getConnection();
             PreparedStatement ps = con.prepareStatement(query, PreparedStatement.RETURN_GENERATED_KEYS)) {
            // The bundle id is the same for every row of this tree.
            ps.setString(1, bundleId);
            insert(ps, ABSOLUTE_ROOT, null, jaFiles, victimId, bundleId);
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Recursively inserts a JSON file listing. Parameter 1 of {@code ps}
     * (the bundle id) is set once by the caller; this method fills the
     * remaining parameters per row and recurses into sub-directories using
     * the generated key of the parent row.
     */
    private void insert(PreparedStatement ps, String absoluteParentPath, final String parentId, JSONArray jaFiles, String victimId, String bundleId) throws JSONException, SQLException {
        for (int i = 0; i < jaFiles.length(); i++) {
            final JSONObject joFile = jaFiles.getJSONObject(i);
            final String name = joFile.getString("name");
            final long size = joFile.getLong("size");
            // A node is a directory exactly when it carries a "files" array.
            final boolean isDirectory = joFile.has("files");

            ps.setString(2, absoluteParentPath);
            ps.setString(3, name);
            ps.setString(4, parentId);
            ps.setBoolean(5, isDirectory);
            ps.setLong(6, size);
            ps.setString(7, DarKnight.getEncrypted(victimId + bundleId + parentId + RandomString.getRandomString(10)).replaceAll("[^A-Za-z0-9]", ""));

            final String parentId2;
            if (ps.executeUpdate() == 1) {
                try (ResultSet rs = ps.getGeneratedKeys()) {
                    if (rs.first()) {
                        parentId2 = rs.getString(1);
                    } else {
                        throw new SQLException("Failed to get row id");
                    }
                }
            } else {
                throw new SQLException("Failed to add file");
            }

            if (isDirectory) {
                final JSONArray jaFiles2 = joFile.getJSONArray("files");
                // BUG FIX: recurse only when the *child* listing (jaFiles2)
                // is non-empty. The original tested jaFiles, which is always
                // non-empty inside this loop.
                if (jaFiles2.length() > 0) {
                    String parentPath;
                    if (absoluteParentPath.equals(ABSOLUTE_ROOT)) {
                        parentPath = ABSOLUTE_ROOT + name;
                    } else {
                        parentPath = absoluteParentPath + "/" + name;
                    }
                    insert(ps, parentPath, parentId2, jaFiles2, victimId, bundleId);
                }
            }
        }
    }

    /**
     * Lists the active files directly under the given parent within a
     * bundle. {@code fileParentId == null} selects the root level.
     *
     * @return the matching files, or null when nothing matched or a SQL
     *         error occurred (original contract preserved)
     */
    public List<File> getAll(final String victimId, final String bundleId, @Nullable final String fileParentId) {
        List<File> files = null;
        // The parent predicate differs for the root level, hence the format.
        final String query = String.format("SELECT f.id, f.file_name, f.absolute_parent_path, f.file_size_in_kb, f.is_directory, ISNULL(f2.id) AS has_directory, f.file_hash FROM files f LEFT JOIN file_bundles fb ON fb.id = f.file_bundle_id LEFT JOIN files f2 ON f2.parent_id = f.id AND f2.is_active = 1 WHERE fb.victim_id = ? AND f.file_bundle_id = ? AND f.parent_id %s AND f.is_active = 1 AND fb.is_active = 1 GROUP BY f.id ORDER BY f.file_name;", fileParentId == null ? "IS NULL" : "= ?");

        try (java.sql.Connection con = Connection.getConnection();
             PreparedStatement ps = con.prepareStatement(query)) {
            ps.setString(1, victimId);
            ps.setString(2, bundleId);
            if (fileParentId != null) {
                ps.setString(3, fileParentId);
            }

            try (ResultSet rs = ps.executeQuery()) {
                if (rs.first()) {
                    files = new ArrayList<>();
                    do {
                        final String fileId = rs.getString(COLUMN_ID);
                        final String fileName = rs.getString(COLUMN_FILE_NAME);
                        final String absoluteParentPath = rs.getString(COLUMN_ABSOLUTE_PARENT_PATH);
                        final String fileSizeInKB = rs.getString(COLUMN_FILE_SIZE_IN_KB);
                        final boolean isDirectory = rs.getBoolean(COLUMN_IS_DIRECTORY);
                        // ISNULL(f2.id) is 1 when the row has no active child,
                        // so the flag is inverted here.
                        final boolean hasDirectory = !rs.getBoolean(COLUMN_AS_HAS_DIRECTORY);
                        final String fileHash = rs.getString(COLUMN_FILE_HASH);

                        files.add(new File(fileId, null, fileName, absoluteParentPath, fileSizeInKB, fileHash, isDirectory, hasDirectory));
                    } while (rs.next());
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }

        return files;
    }
}
|
DankeGott/ServiceRedundancy
|
src/main/java/zju/edu/cn/platform/redundancy/jsoninfo/generator/SolutionConfigGenerator.java
|
<reponame>DankeGott/ServiceRedundancy
package zju.edu.cn.platform.redundancy.jsoninfo.generator;
import zju.edu.cn.platform.redundancy.jsoninfo.config.AppConfig;
import zju.edu.cn.platform.redundancy.jsoninfo.config.EdgeConfig;
import zju.edu.cn.platform.redundancy.jsoninfo.config.ResourceAllocationConfig;
import zju.edu.cn.platform.redundancy.jsoninfo.config.SchedulingConfig;
import java.util.ArrayList;
import java.util.List;
/**
 * Generates the default resource-allocation and traffic-scheduling
 * solutions for a set of applications and edge servers.
 *
 * @author jfqiao
 * @since 2020/01/03
 */
public class SolutionConfigGenerator {

    /**
     * Builds one allocation entry per (app, edge) pair, splitting each
     * edge's computing power evenly across all applications.
     */
    public static List<ResourceAllocationConfig> generateResourceAllocConfig(List<AppConfig> appConfigs, List<EdgeConfig> edgeConfigs) {
        List<ResourceAllocationConfig> allocations = new ArrayList<>();
        for (AppConfig app : appConfigs) {
            for (EdgeConfig edge : edgeConfigs) {
                ResourceAllocationConfig alloc = new ResourceAllocationConfig();
                alloc.setEdgeName(edge.getName());
                alloc.setAppName(app.getAppName());
                alloc.setAppNodeName(app.getNodes().get(0).getNodeName());
                // Even split: each app receives an equal share of the edge.
                alloc.setAllocComputingPower(edge.getComputingPower() / (double) appConfigs.size());
                allocations.add(alloc);
            }
        }
        return allocations;
    }

    /**
     * Builds one scheduling entry per (app, access edge, target edge)
     * triple with uniform (round-robin) probabilities. The last edge entry
     * is the cloud server and never appears as an access edge.
     */
    public static List<SchedulingConfig> generateShedulingConfig(List<AppConfig> appConfigs, List<EdgeConfig> edgeConfigs) {
        List<SchedulingConfig> schedules = new ArrayList<>();
        int edgeCount = edgeConfigs.size();
        for (AppConfig app : appConfigs) {
            // The cloud server (last entry) does not receive incoming traffic.
            for (int access = 0; access < edgeCount - 1; access++) {
                for (int target = 0; target < edgeCount; target++) {
                    SchedulingConfig sched = new SchedulingConfig();
                    sched.setAccessEdgeName(edgeConfigs.get(access).getName());
                    sched.setAppName(app.getAppName());
                    sched.setNodeName(app.getNodes().get(0).getNodeName());
                    sched.setTargetEdgeName(edgeConfigs.get(target).getName());
                    // Round-robin default: uniform probability over targets.
                    sched.setProbability(1.0 / edgeCount);
                    schedules.add(sched);
                }
            }
        }
        return schedules;
    }
}
|
rajrajhans/bundle-stats-action
|
node_modules/reakit/lib/VisuallyHidden/index.js
|
<filename>node_modules/reakit/lib/VisuallyHidden/index.js<gh_stars>10-100
// Auto-generated CommonJS entry point for the VisuallyHidden module.
// Re-exports the component and its hook from ./VisuallyHidden.js.
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

// Loaded for their side effects (helper/module registration) only.
require('../_rollupPluginBabelHelpers-8f9a8751.js');
require('reakit-system/createComponent');
require('reakit-system/createHook');
require('reakit-utils/shallowEqual');
require('../Role/Role.js');

var VisuallyHidden_VisuallyHidden = require('./VisuallyHidden.js');

// Public re-exports.
exports.VisuallyHidden = VisuallyHidden_VisuallyHidden.VisuallyHidden;
exports.useVisuallyHidden = VisuallyHidden_VisuallyHidden.useVisuallyHidden;
|
Hopp-Stu/ares
|
ares-system-new/src/main/java/com/ares/system/common/security/UserDetailsServiceImpl.java
|
<reponame>Hopp-Stu/ares<gh_stars>0
/*******************************************************************************
* Copyright (c) 2021 - 9999, ARES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.ares.system.common.security;
import com.ares.core.persistence.model.system.SysRole;
import com.ares.core.persistence.model.system.SysUser;
import com.ares.core.persistence.service.SysRoleService;
import com.ares.core.persistence.service.SysUserService;
import com.ares.system.common.jwt.JwtUserDetails;
import com.ares.system.common.log.Log;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Spring Security {@link UserDetailsService} backed by the system user and
 * role services. Resolves a user's granted authorities as the union of the
 * permissions of all of the user's roles.
 *
 * @author: Young
 * @date: 2020/10/19
 * @see: com.ares.system.persistence.service UserDetailsService.java
 **/
@Service
public class UserDetailsServiceImpl implements UserDetailsService {

    private final SysUserService userService;
    private final SysRoleService roleService;

    @Autowired
    public UserDetailsServiceImpl(SysUserService userService,
                                  SysRoleService roleService) {
        this.userService = userService;
        this.roleService = roleService;
    }

    /**
     * Loads a user by name and collects permissions from every role.
     *
     * @throws UsernameNotFoundException when no user with that name exists
     */
    @Log
    @Override
    public UserDetails loadUserByUsername(String userName) throws UsernameNotFoundException {
        SysUser user = userService.getUserByName(userName);
        if (null == user) {
            throw new UsernameNotFoundException("该用户不存在");
        }

        List<SysRole> roleList = roleService.getRoleByUserId(user.getId());
        List<String> perms = new ArrayList<>();
        for (SysRole role : roleList) {
            if ("gly".equalsIgnoreCase(role.getRoleName())) {
                // Administrator role: a null role id fetches the full
                // permission set, so no other roles need to be consulted.
                perms = roleService.getPermsByRoleId(null);
                break;
            }
            // BUG FIX: accumulate permissions from every role. The original
            // assigned perms inside the loop, so only the last role's
            // permissions survived.
            perms.addAll(roleService.getPermsByRoleId(role.getId()));
        }

        List<GrantedAuthority> grantedAuthorities = perms.stream()
                .distinct()
                .map(GrantedAuthorityImpl::new)
                .collect(Collectors.toList());
        return new JwtUserDetails(userName, user.getPassword(), grantedAuthorities);
    }
}
|
tranphuc269/vue-yoast-analyze
|
node_modules/yoastseo/src/researches/sentences.js
|
<reponame>tranphuc269/vue-yoast-analyze<gh_stars>1-10
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

/**
 * Research: splits the paper's text into sentences.
 *
 * @param {Object} paper The paper to analyze (must expose getText()).
 * @returns {Array} The sentences extracted from the paper's text.
 */
exports.default = function (paper) {
  return (0, _getSentences2.default)(paper.getText());
};

var _getSentences = require("../stringProcessing/getSentences");

var _getSentences2 = _interopRequireDefault(_getSentences);

// Babel interop helper: ensures a `.default` property exists on the import.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
//# sourceMappingURL=sentences.js.map
|
mfkiwl/libparanumal
|
libs/linearSolver/initialGuess.cpp
|
/*
The MIT License (MIT)
Copyright (c) 2017 <NAME>, <NAME>, <NAME>, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#include "initialGuess.hpp"
#include "mesh.hpp"
/*
 * Factory: builds an initialGuessSolver_t wrapping the configured linear
 * solver together with the initial-guess strategy selected by the
 * "INITIAL GUESS STRATEGY" setting. Calls LIBP_ABORT for an unknown
 * strategy name.
 */
initialGuessSolver_t* initialGuessSolver_t::Setup(dlong N, dlong Nhalo, platform_t& platform, settings_t& settings, MPI_Comm comm)
{
  initialGuessSolver_t* ig = new initialGuessSolver_t(N, Nhalo, platform, settings, comm);
  ig->linearSolver = linearSolver_t::Setup(N, Nhalo, platform, settings, comm);
  ig->igStrategy = nullptr;

  // Map the setting value onto a concrete strategy implementation.
  initialGuessStrategy_t* strategy = nullptr;
  if (settings.compareSetting("INITIAL GUESS STRATEGY", "NONE"))
    strategy = new igDefaultStrategy(N, platform, settings, comm);
  else if (settings.compareSetting("INITIAL GUESS STRATEGY", "ZERO"))
    strategy = new igZeroStrategy(N, platform, settings, comm);
  else if (settings.compareSetting("INITIAL GUESS STRATEGY", "CLASSIC"))
    strategy = new igClassicProjectionStrategy(N, platform, settings, comm);
  else if (settings.compareSetting("INITIAL GUESS STRATEGY", "QR"))
    strategy = new igRollingQRProjectionStrategy(N, platform, settings, comm);
  else if (settings.compareSetting("INITIAL GUESS STRATEGY", "EXTRAP"))
    strategy = new igExtrapStrategy(N, platform, settings, comm);
  else
    LIBP_ABORT("Requested INITIAL GUESS STRATEGY not found.");

  ig->igStrategy = strategy;
  return ig;
}
// Members igStrategy/linearSolver start null; Setup() fills them in.
initialGuessSolver_t::initialGuessSolver_t(dlong _N, dlong _Nhalo, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  linearSolver_t(_N, _Nhalo, _platform, _settings, _comm),
  igStrategy(nullptr),
  linearSolver(nullptr)
{
  return;
}

initialGuessSolver_t::~initialGuessSolver_t()
{
  // Owns both the strategy and the wrapped linear solver created in Setup().
  delete igStrategy;
  delete linearSolver;
}
/*
 * Seeds o_x with the strategy's prediction, runs the wrapped linear
 * solver, then lets the strategy absorb the converged solution for use in
 * future predictions. Returns the wrapped solver's iteration count.
 */
int initialGuessSolver_t::Solve(solver_t& solver, precon_t& precon, occa::memory& o_x, occa::memory& o_rhs, const dfloat tol, const int MAXIT, const int verbose)
{
  igStrategy->FormInitialGuess(o_x, o_rhs);
  const int iterations = linearSolver->Solve(solver, precon, o_x, o_rhs, tol, MAXIT, verbose);
  igStrategy->Update(solver, o_x, o_rhs);
  return iterations;
}
/*****************************************************************************/
// Registers the initial-guess settings (defaults and allowed values)
// under the given prefix.
void initialGuessAddSettings(settings_t& settings, const string prefix)
{
  settings.newSetting(prefix + "INITIAL GUESS STRATEGY",
                      "NONE",
                      "Strategy for selecting initial guess for linear solver",
                      {"NONE", "ZERO", "CLASSIC", "QR", "EXTRAP"});

  settings.newSetting(prefix + "INITIAL GUESS HISTORY SPACE DIMENSION",
                      "-1",
                      "Dimension of the initial guess space");

  settings.newSetting(prefix + "INITIAL GUESS EXTRAP DEGREE",
                      "-1",
                      "Degree used for EXTRAP initial guess schemes.");

  settings.newSetting(prefix + "INITIAL GUESS EXTRAP COEFFS METHOD",
                      "MINNORM",
                      "Method for selecting coefficients with EXTRAP initial guess schemes.",
                      {"MINNORM", "CPQR"});
}
/*****************************************************************************/
// Base class: stores the platform, settings, communicator and system size
// shared by all initial-guess strategies.
initialGuessStrategy_t::initialGuessStrategy_t(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  platform(_platform),
  settings(_settings),
  comm(_comm),
  Ntotal(_N)
{
  return;
}

initialGuessStrategy_t::~initialGuessStrategy_t()
{
  return;
}

/*****************************************************************************/

// "NONE" strategy: leaves o_x untouched, so the solve starts from whatever
// the caller passed in; learns nothing from completed solves.
igDefaultStrategy::igDefaultStrategy(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  initialGuessStrategy_t(_N, _platform, _settings, _comm)
{
  return;
}

void igDefaultStrategy::FormInitialGuess(occa::memory& o_x, occa::memory& o_rhs)
{
  return;
}

void igDefaultStrategy::Update(solver_t &solver, occa::memory& o_x, occa::memory& o_rhs)
{
  return;
}

/*****************************************************************************/

// "ZERO" strategy: always starts the solve from x = 0.
igZeroStrategy::igZeroStrategy(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  initialGuessStrategy_t(_N, _platform, _settings, _comm)
{
  // Needs the linAlg "set" kernel to zero-fill o_x.
  platform.linAlg.InitKernels({"set"});
  return;
}

void igZeroStrategy::FormInitialGuess(occa::memory& o_x, occa::memory& o_rhs)
{
  platform.linAlg.set(Ntotal, 0.0, o_x);
  return;
}

void igZeroStrategy::Update(solver_t &solver, occa::memory& o_x, occa::memory& o_rhs)
{
  return;
}
/*****************************************************************************/
// Shared state for projection-based strategies: device buffers for the
// RHS/solution history spaces (o_Btilde/o_Xtilde), scratch vectors, the
// projection coefficients, and the kernels implementing the basis ops.
igProjectionStrategy::igProjectionStrategy(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  initialGuessStrategy_t(_N, _platform, _settings, _comm)
{
  curDim = 0;
  settings.getSetting("INITIAL GUESS HISTORY SPACE DIMENSION", maxDim);

  o_btilde = platform.malloc(Ntotal*sizeof(dfloat));
  o_xtilde = platform.malloc(Ntotal*sizeof(dfloat));
  o_Btilde = platform.malloc(Ntotal*maxDim*sizeof(dfloat));
  o_Xtilde = platform.malloc(Ntotal*maxDim*sizeof(dfloat));

  alphas = new dfloat[maxDim]();
  alphasThisRank = new dfloat[maxDim]();
  o_alphas = platform.malloc(maxDim*sizeof(dfloat));

  // Host scratch for the per-block partial reductions done on the device.
  ctmpNblocks = (Ntotal + BLOCKSIZE - 1)/BLOCKSIZE;
  // BUG FIX: this buffer is released with delete[] in the destructor, so it
  // must come from new[] — the original calloc()/delete[] pairing is
  // undefined behavior. new dfloat[...]() zero-initializes like calloc did.
  ctmp = new dfloat[ctmpNblocks*maxDim]();
  o_ctmp = platform.malloc(ctmpNblocks*maxDim*sizeof(dfloat), ctmp);

  // Build kernels.
  platform.linAlg.InitKernels({"set"});

  occa::properties kernelInfo = platform.props;
  kernelInfo["defines/" "p_igNhist"] = maxDim;
  igBasisInnerProductsKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igBasisInnerProducts.okl", "igBasisInnerProducts", kernelInfo);
  igReconstructKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igReconstruct.okl", "igReconstruct", kernelInfo);
  igScaleKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igScale.okl", "igScale", kernelInfo);
  igUpdateKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igUpdate.okl", "igUpdate", kernelInfo);

  return;
}
igProjectionStrategy::~igProjectionStrategy()
{
  // Release host-side scratch buffers.
  if (ctmp)
    delete[] ctmp;

  if (alphas)
    delete[] alphas;

  if (alphasThisRank)
    delete[] alphasThisRank;

  return;
}
/*
 * Forms the initial guess by projecting the RHS onto the stored basis:
 * alphas = <rhs, Btilde_i>, then x0 = sum_i alphas_i * Xtilde_i.
 */
void igProjectionStrategy::FormInitialGuess(occa::memory& o_x, occa::memory& o_rhs)
{
  // Nothing to project onto until at least one basis vector exists.
  if (curDim <= 0)
    return;

  igBasisInnerProducts(o_rhs, o_Btilde, o_alphas, alphas, alphasThisRank);
  platform.linAlg.set(Ntotal, 0.0, o_x);
  igReconstruct(o_x, 1.0, o_alphas, o_Xtilde, o_x);
}
/*
 * Computes c[m] = <x, Q_m> for the first curDim basis vectors: a per-block
 * partial reduction on the device, finished on the host, summed across MPI
 * ranks, and the final coefficients pushed back to the device in o_c.
 */
void igProjectionStrategy::igBasisInnerProducts(occa::memory& o_x, occa::memory& o_Q, occa::memory& o_c, dfloat *c, dfloat *cThisRank)
{
  igBasisInnerProductsKernel(Ntotal, ctmpNblocks, curDim, o_x, o_Q, o_ctmp);
  o_ctmp.copyTo(ctmp, ctmpNblocks*curDim*sizeof(dfloat));

  // ctmp holds the ctmpNblocks partial sums for basis vector m contiguously.
  for (int m = 0; m < curDim; ++m) {
    dfloat sum = 0;
    for (int b = 0; b < ctmpNblocks; ++b)
      sum += ctmp[m*ctmpNblocks + b];
    cThisRank[m] = sum;
  }

  // Combine the local sums across ranks, then mirror to the device.
  MPI_Allreduce(cThisRank, c, curDim, MPI_DFLOAT, MPI_SUM, comm);
  o_c.copyFrom(c, curDim*sizeof(dfloat));
}
// Thin wrapper over the igReconstruct kernel; callers use it to add
// (a = 1.0) or subtract (a = -1.0) the c-weighted combination of the first
// curDim columns of Q relative to o_u, writing into o_unew.
// NOTE(review): exact update formula lives in okl/igReconstruct.okl —
// confirm there before relying on it.
void igProjectionStrategy::igReconstruct(occa::memory& o_u, dfloat a, occa::memory& o_c, occa::memory& o_Q, occa::memory& o_unew)
{
  igReconstructKernel(Ntotal, curDim, o_u, a, o_c, o_Q, o_unew);
  return;
}
/*****************************************************************************/
// Classic projection: grows the history space one (rhs, x) pair per solve
// and restarts it once full.
igClassicProjectionStrategy::igClassicProjectionStrategy(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  igProjectionStrategy(_N, _platform, _settings, _comm)
{
  return;
}

void igClassicProjectionStrategy::Update(solver_t &solver, occa::memory& o_x, occa::memory& o_rhs)
{
  // Compute RHS corresponding to the approximate solution obtained.
  solver.Operator(o_x, o_btilde);

  // Insert new solution into the initial guess space.
  if ((curDim >= maxDim) || (curDim == 0)) {
    // History empty or full: restart the space from this pair alone.
    dfloat normbtilde = 0.0;

    normbtilde = platform.linAlg.norm2(Ntotal, o_btilde, comm);

    if (normbtilde > 0) {
      igScaleKernel(Ntotal, 1.0/normbtilde, o_btilde, o_Btilde);
      igScaleKernel(Ntotal, 1.0/normbtilde, o_x, o_Xtilde);

      curDim = 1;
    }
  } else {
    dfloat invnormbtilde = 0.0;
    const int Nreorth = 2;

    o_x.copyTo(o_xtilde, Ntotal*sizeof(dfloat));

    // Orthogonalize new RHS against previous ones.
    // (two passes of classical Gram-Schmidt for numerical robustness)
    for (int n = 0; n < Nreorth; n++) {
      igBasisInnerProducts(o_btilde, o_Btilde, o_alphas, alphas, alphasThisRank);
      igReconstruct(o_btilde, (dfloat)(-1.0), o_alphas, o_Btilde, o_btilde);
      igReconstruct(o_xtilde, (dfloat)(-1.0), o_alphas, o_Xtilde, o_xtilde);
    }

    // Normalize.
    // NOTE(review): no guard against a zero norm here — if o_btilde lies
    // entirely inside the current space this divides by zero; confirm
    // that cannot happen upstream.
    invnormbtilde = platform.linAlg.norm2(Ntotal, o_btilde, comm);
    invnormbtilde = 1.0/invnormbtilde;

#if 0
    igScaleKernel(Ntotal, invnormbtilde, o_btilde, o_btilde);
    igScaleKernel(Ntotal, invnormbtilde, o_xtilde, o_xtilde);

    // Store.
    o_btilde.copyTo(o_Btilde + curDim*Ntotal*sizeof(dfloat));
    o_xtilde.copyTo(o_Xtilde + curDim*Ntotal*sizeof(dfloat));
#else
    // Fused scale-and-append of the new column into both history spaces.
    igUpdateKernel(Ntotal, curDim, invnormbtilde, o_btilde, 1, o_Btilde, o_xtilde, 1, o_Xtilde);
#endif

    curDim++;
  }

  return;
}
/*****************************************************************************/
// Rolling-QR projection: like the classic strategy, but when the history
// is full the oldest column is dropped via a QR update instead of
// restarting the whole space.
igRollingQRProjectionStrategy::igRollingQRProjectionStrategy(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  igProjectionStrategy(_N, _platform, _settings, _comm)
{
  // Host copy of the (maxDim x maxDim) triangular factor R, mirrored on
  // the device in o_R.
  R = new dfloat[maxDim*maxDim]();
  o_R = platform.malloc(maxDim*maxDim*sizeof(dfloat));

  occa::properties kernelInfo = platform.props;
  kernelInfo["defines/" "p_igNhist"] = maxDim;
  igDropQRFirstColumnKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igDropQRFirstColumn.okl", "igDropQRFirstColumn", kernelInfo);

  return;
}

igRollingQRProjectionStrategy::~igRollingQRProjectionStrategy()
{
  // R is allocated with new[] in the constructor.
  if (R)
    delete[] R;

  return;
}
void igRollingQRProjectionStrategy::Update(solver_t &solver, occa::memory& o_x, occa::memory& o_rhs)
{
  // Compute RHS corresponding to the approximate solution obtained.
  solver.Operator(o_x, o_btilde);

  // Rotate the history space (QR update).
  if (curDim == maxDim) {
    // Drop the first column in the QR factorization: R = R(:, 2:end).
    for (int j = 0; j < maxDim; j++) {
      for (int i = 0; i < maxDim - 1; i++)
        R[j*maxDim + i] = R[j*maxDim + (i + 1)];
      R[j*maxDim + (maxDim - 1)] = 0.0;
    }
    o_R.copyFrom(R);

    // Update the RHS and solution spaces.
    igDropQRFirstColumnKernel(Ntotal, o_Btilde, o_Xtilde, o_R);

    // Restore R to triangular form (overlapped with Q update):
    // chase the subdiagonal introduced by the column drop with Givens
    // rotations applied to consecutive row pairs.
    for (int j = 0; j < maxDim - 1 ; j++) {
      dfloat c = 0.0, s = 0.0;
      dfloat Rjj = R[j*maxDim + j];
      dfloat Rjp1j = R[(j + 1)*maxDim + j];

      givensRotation(Rjj, Rjp1j, &c, &s);

      for (int i = j; i < maxDim; i++) {
        dfloat Rji = R[j*maxDim + i];
        dfloat Rjp1i = R[(j + 1)*maxDim + i];

        R[j*maxDim + i] = c*Rji + s*Rjp1i;
        R[(j + 1)*maxDim + i] = -s*Rji + c*Rjp1i;
      }
    }

    // Copy the updated R back to the device.
    platform.device.finish();
    o_R.copyFrom(R);

    curDim--;
  }

  // Orthogonalize and tack on the new column.
  if (curDim == 0) {
    // Empty history: start the factorization from this pair alone.
    dfloat normbtilde = 0.0;

    normbtilde = platform.linAlg.norm2(Ntotal, o_btilde, comm);

    if (normbtilde > 0) {
#if 0
      igScaleKernel(Ntotal, 1.0/normbtilde, o_btilde, o_Btilde);
      igScaleKernel(Ntotal, 1.0/normbtilde, o_x, o_Xtilde);
#else
      dfloat invnormbtilde = 1.0/normbtilde;
      igUpdateKernel(Ntotal, 0, invnormbtilde, o_btilde, 0, o_Btilde, o_x, 0, o_Xtilde);
#endif
      R[0] = normbtilde;

      curDim = 1;
    }
  } else {
    dfloat normbtilde = 0.0, normbtildeproj = 0.0;;
    const int Nreorth = 2;

    o_x.copyTo(o_xtilde, Ntotal*sizeof(dfloat));

    // Compute the initial norm of the new vector.
    normbtilde = platform.linAlg.norm2(Ntotal, o_btilde, comm);

    // Zero the entries above/on the diagonal of the column of R into which we want to write.
    for (int i = 0; i < curDim; i++)
      R[i*maxDim + curDim] = 0.0;

    // Orthogonalize new RHS against previous ones; the accumulated
    // projection coefficients become the new column of R.
    for (int n = 0; n < Nreorth; n++) {
      igBasisInnerProducts(o_btilde, o_Btilde, o_alphas, alphas, alphasThisRank);
      igReconstruct(o_btilde, (dfloat)(-1.0), o_alphas, o_Btilde, o_btilde);
      igReconstruct(o_xtilde, (dfloat)(-1.0), o_alphas, o_Xtilde, o_xtilde);

      for (int i = 0; i < curDim; i++)
        R[i*maxDim + curDim] += alphas[i];
    }

    // Normalize.
    normbtildeproj = platform.linAlg.norm2(Ntotal, o_btilde, comm);

    // Only add if the remainder after projection is large enough.
    //
    // TODO: What is the appropriate criterion here?
    if (normbtildeproj/normbtilde > 1.0e-10) {
#if 0
      igScaleKernel(Ntotal, 1.0/normbtildeproj, o_btilde, o_btilde);
      igScaleKernel(Ntotal, 1.0/normbtildeproj, o_xtilde, o_xtilde);

      // Store.
      o_btilde.copyTo(o_Btilde + curDim*Ntotal*sizeof(dfloat));
      o_xtilde.copyTo(o_Xtilde + curDim*Ntotal*sizeof(dfloat));
#else
      dfloat invnormbtildeproj = 1.0/normbtildeproj;
      igUpdateKernel(Ntotal, curDim, invnormbtildeproj, o_btilde, 1, o_Btilde, o_xtilde, 1, o_Xtilde);
#endif
      R[curDim*maxDim + curDim] = normbtildeproj;

      curDim++;
    }
  }

  o_R.copyFrom(R);
}
/*
 * Computes a Givens rotation (c, s) that zeroes the second component of
 * [a; b], i.e. [c s; -s c] * [a; b] = [r; 0].
 */
void igRollingQRProjectionStrategy::givensRotation(dfloat a, dfloat b, dfloat *c, dfloat *s)
{
  if (b == 0) {
    // Nothing to rotate: use the identity.
    *c = 1.0;
    *s = 0.0;
    return;
  }

  const dfloat scale = 1.0/hypot(a, b);
  *c = fabs(a)*scale;
  *s = b*copysign(scale, a);
}
/*****************************************************************************/
// "EXTRAP" strategy: predicts the next solution by polynomial
// extrapolation from the last Nhistory converged solutions, stored in the
// circular buffer o_xh.
igExtrapStrategy::igExtrapStrategy(dlong _N, platform_t& _platform, settings_t& _settings, MPI_Comm _comm):
  initialGuessStrategy_t(_N, _platform, _settings, _comm)
{
  int M, m;
  settings.getSetting("INITIAL GUESS HISTORY SPACE DIMENSION", M);
  settings.getSetting("INITIAL GUESS EXTRAP DEGREE", m);

  dfloat *c = new dfloat[M]();
  extrapCoeffs(m, M, c);

  Nhistory = M;

  entry = 0;
  // NOTE(review): o_coeffs is reallocated on every FormInitialGuess call
  // while the history fills; confirm platform.malloc releases/pools the
  // previous device buffer.
  o_coeffs = platform.malloc(Nhistory*sizeof(dfloat), c);
  shift = 0;

  o_xh = platform.malloc(Nhistory*Ntotal*sizeof(dfloat));

  platform.linAlg.InitKernels({"set"});

  occa::properties kernelInfo = platform.props;
  kernelInfo["defines/" "p_igNhist"] = Nhistory;
  igExtrapKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igExtrap.okl", "igExtrap", kernelInfo);
  igExtrapSparseKernel = platform.buildKernel(LINEARSOLVER_DIR "/okl/igExtrap.okl", "igExtrapSparse", kernelInfo);

  // Start from an all-zero history.
  platform.linAlg.set(Nhistory*Ntotal, 0.0, o_xh);

  delete[] c;

  return;
}
/*
 * Forms the initial guess by extrapolating the solution history. While
 * the circular history buffer is still filling (entry < Nhistory), the
 * extrapolation coefficients are rebuilt each call using only the entries
 * available so far, then the dense or sparse extrapolation kernel is
 * applied depending on the configured coefficient method.
 */
void igExtrapStrategy::FormInitialGuess(occa::memory& o_x, occa::memory& o_rhs)
{
  if (entry < Nhistory) {
    int M, m;
    if (entry == Nhistory - 1) {
      // History about to be full: use the configured dimension/degree.
      settings.getSetting("INITIAL GUESS HISTORY SPACE DIMENSION", M);
      settings.getSetting("INITIAL GUESS EXTRAP DEGREE", m);
    } else {
      // Partial history: use what we have with a conservative degree.
      M = mymax(1, entry + 1);
      m = sqrt((double)M);
    }

    // Construct the extrapolation coefficients.
    // new dfloat[...]() zero-initializes, so no explicit clearing needed.
    dfloat *c = new dfloat[Nhistory]();
    dfloat *d = new dfloat[Nhistory]();
    dfloat *sparseCoeffs = new dfloat[Nhistory]();

    if (M == 1) {
      d[Nhistory - 1] = 1.0;
    } else {
      extrapCoeffs(m, M, c);

      // Right-align: d = {0, ..., 0, c[0], c[1], ..., c[M-1]}.
      for (int i = 0; i < M; i++)
        d[Nhistory - M + i] = c[i];
    }

    // Compress d into (index, coefficient) pairs, dropping (near-)zeros.
    int *sparseIds = new int[Nhistory]();
    Nsparse = 0;
    for (int n = 0; n < Nhistory; ++n) {
      if (fabs(d[n]) > 1e-14) { // hmm
        sparseIds[Nsparse] = n;
        sparseCoeffs[Nsparse] = d[n];
        ++Nsparse;
      }
    }

    // platform.malloc with a host pointer copies the data (see the
    // constructor, which deletes its host buffer right after), so the
    // host arrays can be released immediately.
    o_coeffs = platform.malloc(Nhistory*sizeof(dfloat), d);
    o_sparseIds = platform.malloc(Nhistory*sizeof(int), sparseIds);
    o_sparseCoeffs = platform.malloc(Nhistory*sizeof(dfloat), sparseCoeffs);

    ++entry;

    // BUG FIX: c, d and sparseCoeffs were previously leaked on every call
    // (only sparseIds was freed).
    delete[] sparseIds;
    delete[] sparseCoeffs;
    delete[] d;
    delete[] c;
  }

  if (settings.compareSetting("INITIAL GUESS EXTRAP COEFFS METHOD", "MINNORM"))
    igExtrapKernel(Ntotal, Nhistory, shift, o_coeffs, o_xh, o_x);
  else {
    igExtrapSparseKernel(Ntotal, Nhistory, shift, Nsparse, o_sparseIds, o_sparseCoeffs, o_xh, o_x);
  }

  return;
}
// Record the just-computed solution o_x into the circular history buffer
// and advance the write index.
void igExtrapStrategy::Update(solver_t &solver, occa::memory& o_x, occa::memory& o_rhs)
{
  // Slot 'shift' of the device-side history receives the new solution.
  occa::memory o_tmp = o_xh + Ntotal*shift*sizeof(dfloat);
  o_x.copyTo(o_tmp, Ntotal*sizeof(dfloat));
  shift = (shift + 1) % Nhistory;
  return;
}
// Compute coefficients c[0..M-1] that extrapolate a degree-m polynomial
// fit through M equispaced samples on [-1, 1] to the next point (1 + h).
// Requires M >= m + 1 so the least-squares/underdetermined solve is
// well posed; aborts otherwise.
void igExtrapStrategy::extrapCoeffs(int m, int M, dfloat *c)
{
  dfloat h, ro, *r, *V, *b;

  if (M < m + 1) {
    std::stringstream ss;
    ss << "Extrapolation space dimension (" << M << ") too low for degree (" << m << ").";
    LIBP_ABORT(ss.str());
  }

  // Equispaced nodes on [-1, 1] with spacing h.
  h = 2.0/(M - 1);
  r = new dfloat[M]();
  for (int i = 0; i < M; i++)
    r[i] = -1.0 + i*h;
  ro = 1.0 + h; // Evaluation point.

  // Vandermonde system:  V (M x (m+1)) at the nodes, b at the target point.
  V = new dfloat[(m + 1)*M]();
  mesh_t::Vandermonde1D(m, M, r, V);
  b = new dfloat[m + 1]();
  mesh_t::Vandermonde1D(m, 1, &ro, b);

  // Solve the underdetermined system c*V = b by the configured method.
  // NOTE(review): if the setting matches neither MINNORM nor CPQR, c is
  // left all-zero — confirm that is intended.
  if (settings.compareSetting("INITIAL GUESS EXTRAP COEFFS METHOD", "MINNORM")) {
    matrixUnderdeterminedRightSolveMinNorm(M, m + 1, V, b, c);
  } else if (settings.compareSetting("INITIAL GUESS EXTRAP COEFFS METHOD", "CPQR")) {
    matrixUnderdeterminedRightSolveCPQR(M, m + 1, V, b, c);
  }

  delete[] r;
  delete[] V;
  delete[] b;
  return;
}
|
Repast/repast.simphony
|
repast.simphony.statecharts.diagram/src-gen/org/eclipse/gmf/tooling/runtime/update/DiagramUpdater.java
|
/*
* Copyright (c) 2012, Montages AG
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* <NAME> (Montages) - initial API (#372479)
*/
package org.eclipse.gmf.tooling.runtime.update;
import java.util.List;
import org.eclipse.gmf.runtime.notation.View;
/**
* @since 3.0
*/
public interface DiagramUpdater {

    /** Returns descriptors of the semantic child nodes for the given view. */
    public List<? extends UpdaterNodeDescriptor> getSemanticChildren(View view);

    /** Returns descriptors of the links contained within the given view. */
    public List<? extends UpdaterLinkDescriptor> getContainedLinks(View view);

    /** Returns descriptors of the links targeting the given view. */
    public List<? extends UpdaterLinkDescriptor> getIncomingLinks(View view);

    /** Returns descriptors of the links originating from the given view. */
    public List<? extends UpdaterLinkDescriptor> getOutgoingLinks(View view);
}
|
tonyblundell/kafka4m
|
src/main/scala/kafka4m/partitions/HasTimestamp.scala
|
package kafka4m.partitions
import java.time.ZonedDateTime
import org.apache.kafka.clients.consumer.ConsumerRecord
/** Type class witnessing that values of type A carry an event timestamp. */
trait HasTimestamp[A] {
  // Extract the timestamp from a value of type A.
  def timestamp(value: A): ZonedDateTime
}
object HasTimestamp {

  /** Summons the implicit HasTimestamp instance for A. */
  def apply[A](implicit ht: HasTimestamp[A]): HasTimestamp[A] = ht

  /**
   * Kafka ConsumerRecords expose an epoch-millis timestamp; convert it to UTC.
   *
   * Fix: the implicit def now declares an explicit result type — implicits
   * with inferred result types are fragile (and illegal in Scala 3).
   */
  implicit def consumerRecordHasTimestamp[K, V]: HasTimestamp[ConsumerRecord[K, V]] =
    new HasTimestamp[ConsumerRecord[K, V]] {
      override def timestamp(value: ConsumerRecord[K, V]): ZonedDateTime = {
        // utcForEpochMillis is provided by the enclosing package — TODO confirm.
        utcForEpochMillis(value.timestamp)
      }
    }

  /** A ZonedDateTime is trivially its own timestamp. */
  implicit object identity extends HasTimestamp[ZonedDateTime] {
    override def timestamp(value: ZonedDateTime): ZonedDateTime = value
  }
}
|
hafeild/dp-lambda
|
config/initializers/version.rb
|
<gh_stars>0
## Version information. Sets two global constants:
##
## VERSION -- an array of Year, Month, Number, and Hotfix or build number.
## VERSION_STRING -- a string formatted as Year.Month.Number.Hotfix (for
## release) or Year.Month.Build (for development).
## Note that in a release, 00 is ignored for Hotfix, and
## for Number if both Number and Hotfix are 00.
##
## Some of the info below needs to be updated when going to production releases
## -- namely, change the is_release variable to true and update the Year,
## Month, Number, and Hotfix versions. Editing this file requires a server
## restart.

## Update these fields ##
## (locals renamed to snake_case per Ruby convention; behavior unchanged)
is_release = false
year_version = "19"   ## Year of release.
month_version = "03"  ## Month of release.
number_version = "00" ## Number of release within Year-Month
hot_fix_no = "01"     ## Hot fix no. for release.

VERSION = [
  year_version,
  month_version,
  number_version,
  ## Releases use the hotfix number; dev builds embed the current git hash.
  is_release ? hot_fix_no : "dev-#{`git rev-parse --short HEAD`}".strip
]

## Trailing ".00" components are dropped from the display string.
VERSION_STRING = VERSION.join(".").sub(/(\.00)*$/, '')
|
emacslisp/tomcat
|
java/org/apache/coyote/http2/Http2UpgradeHandler.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.http2;
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import javax.servlet.http.WebConnection;
import org.apache.coyote.Adapter;
import org.apache.coyote.CloseNowException;
import org.apache.coyote.ProtocolException;
import org.apache.coyote.Request;
import org.apache.coyote.http11.upgrade.InternalHttpUpgradeHandler;
import org.apache.coyote.http2.HpackDecoder.HeaderEmitter;
import org.apache.coyote.http2.HpackEncoder.State;
import org.apache.coyote.http2.Http2Parser.Input;
import org.apache.coyote.http2.Http2Parser.Output;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.codec.binary.Base64;
import org.apache.tomcat.util.http.MimeHeaders;
import org.apache.tomcat.util.net.AbstractEndpoint.Handler.SocketState;
import org.apache.tomcat.util.net.SSLSupport;
import org.apache.tomcat.util.net.SocketEvent;
import org.apache.tomcat.util.net.SocketWrapperBase;
import org.apache.tomcat.util.res.StringManager;
/**
* This represents an HTTP/2 connection from a client to Tomcat. It is designed
* on the basis that there will never be more than one thread performing I/O at
* a time. <br>
* For reading, this implementation is blocking within frames and non-blocking
* between frames. <br>
* Note:
* <ul>
* <li>You will need to nest an <UpgradeProtocol
* className="org.apache.coyote.http2.Http2Protocol" /> element inside a TLS
* enabled Connector element in server.xml to enable HTTP/2 support.</li>
* </ul>
*/
class Http2UpgradeHandler extends AbstractStream implements InternalHttpUpgradeHandler, Input, Output {
protected static final Log log = LogFactory.getLog(Http2UpgradeHandler.class);
protected static final StringManager sm = StringManager.getManager(Http2UpgradeHandler.class);
private static final AtomicInteger connectionIdGenerator = new AtomicInteger(0);
private static final Integer STREAM_ID_ZERO = Integer.valueOf(0);
protected static final int FLAG_END_OF_STREAM = 1;
protected static final int FLAG_END_OF_HEADERS = 4;
protected static final byte[] PING = { 0x00, 0x00, 0x08, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00 };
protected static final byte[] PING_ACK = { 0x00, 0x00, 0x08, 0x06, 0x01, 0x00, 0x00, 0x00, 0x00 };
protected static final byte[] SETTINGS_ACK = { 0x00, 0x00, 0x00, 0x04, 0x01, 0x00, 0x00, 0x00, 0x00 };
protected static final byte[] GOAWAY = { 0x07, 0x00, 0x00, 0x00, 0x00, 0x00 };
private static final String HTTP2_SETTINGS_HEADER = "HTTP2-Settings";
private static final HeaderSink HEADER_SINK = new HeaderSink();
protected final String connectionId;
private final Adapter adapter;
protected volatile SocketWrapperBase<?> socketWrapper;
private volatile SSLSupport sslSupport;
private volatile Http2Parser parser;
// Simple state machine (sequence of states)
private AtomicReference<ConnectionState> connectionState = new AtomicReference<>(ConnectionState.NEW);
private volatile long pausedNanoTime = Long.MAX_VALUE;
/**
* Remote settings are settings defined by the client and sent to Tomcat
* that Tomcat must use when communicating with the client.
*/
private final ConnectionSettingsRemote remoteSettings;
/**
* Local settings are settings defined by Tomcat and sent to the client that
* the client must use when communicating with Tomcat.
*/
protected final ConnectionSettingsLocal localSettings;
private HpackDecoder hpackDecoder;
private HpackEncoder hpackEncoder;
// All timeouts in milliseconds
private long readTimeout = Http2Protocol.DEFAULT_READ_TIMEOUT;
private long keepAliveTimeout = Http2Protocol.DEFAULT_KEEP_ALIVE_TIMEOUT;
private long writeTimeout = Http2Protocol.DEFAULT_WRITE_TIMEOUT;
private final Map<Integer, Stream> streams = new HashMap<>();
protected final AtomicInteger activeRemoteStreamCount = new AtomicInteger(0);
// Start at -1 so the 'add 2' logic in closeIdleStreams() works
private volatile int maxActiveRemoteStreamId = -1;
private volatile int maxProcessedStreamId;
private final AtomicInteger nextLocalStreamId = new AtomicInteger(2);
private final PingManager pingManager = getPingManager();
private volatile int newStreamsSinceLastPrune = 0;
// Tracking for when the connection is blocked (windowSize < 1)
private final Map<AbstractStream, int[]> backLogStreams = new ConcurrentHashMap<>();
private long backLogSize = 0;
// Stream concurrency control
private int maxConcurrentStreamExecution = Http2Protocol.DEFAULT_MAX_CONCURRENT_STREAM_EXECUTION;
private AtomicInteger streamConcurrency = null;
private Queue<StreamRunnable> queuedRunnable = null;
// Limits
private Set<String> allowedTrailerHeaders = Collections.emptySet();
private int maxHeaderCount = Constants.DEFAULT_MAX_HEADER_COUNT;
private int maxHeaderSize = Constants.DEFAULT_MAX_HEADER_SIZE;
private int maxTrailerCount = Constants.DEFAULT_MAX_TRAILER_COUNT;
private int maxTrailerSize = Constants.DEFAULT_MAX_TRAILER_SIZE;
/**
 * Creates the handler for a new HTTP/2 connection. If {@code coyoteRequest}
 * is non-null the connection originated via HTTP/1.1 upgrade and that
 * initial request is registered as stream 1.
 */
Http2UpgradeHandler(Adapter adapter, Request coyoteRequest) {
    super(STREAM_ID_ZERO);
    this.adapter = adapter;
    this.connectionId = Integer.toString(connectionIdGenerator.getAndIncrement());

    remoteSettings = new ConnectionSettingsRemote(connectionId);
    localSettings = new ConnectionSettingsLocal(connectionId);

    // Initial HTTP request becomes stream 1.
    if (coyoteRequest != null) {
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("upgradeHandler.upgrade", connectionId));
        }
        Integer key = Integer.valueOf(1);
        Stream stream = new Stream(key, this, coyoteRequest);
        streams.put(key, stream);
        maxActiveRemoteStreamId = 1;
        activeRemoteStreamCount.set(1);
        maxProcessedStreamId = 1;
    }
}

/** Factory hook so subclasses can supply their own PingManager. */
protected PingManager getPingManager()
{
    return new PingManager();
}
/**
 * One-off connection initialisation: applies the client's HTTP2-Settings
 * header from an HTTP/1.1 upgrade (if any), sends the server's initial
 * SETTINGS frame, validates the client connection preface and sends an
 * initial PING to estimate round-trip time. Safe to call repeatedly; only
 * the caller that transitions the state from NEW to CONNECTED does work.
 */
@Override
public void init(WebConnection webConnection)
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.init", connectionId, connectionState.get()));
    }

    if (!connectionState.compareAndSet(ConnectionState.NEW, ConnectionState.CONNECTED)) {
        return;
    }

    // Init concurrency control if needed
    if (maxConcurrentStreamExecution < localSettings.getMaxConcurrentStreams()) {
        streamConcurrency = new AtomicInteger(0);
        queuedRunnable = new ConcurrentLinkedQueue<>();
    }

    parser = new Http2Parser(connectionId, this, this);

    Stream stream = null;

    socketWrapper.setReadTimeout(getReadTimeout());
    socketWrapper.setWriteTimeout(getWriteTimeout());

    if (webConnection != null) {
        // HTTP/2 started via HTTP upgrade.
        // The initial HTTP/1.1 request is available as Stream 1.
        try {
            // Process the initial settings frame
            stream = getStream(1, true);
            String base64Settings = stream.getCoyoteRequest().getHeader(HTTP2_SETTINGS_HEADER);
            byte[] settings = Base64.decodeBase64(base64Settings);

            // Settings are only valid on stream 0
            FrameType.SETTINGS.check(0, settings.length);

            // Fix: iterate over every 6-byte setting entry. The previous
            // bound (settings.length % 6) is always 0 for a valid payload
            // (the check above enforces length % 6 == 0), so the client's
            // settings were silently ignored.
            for (int i = 0; i < settings.length / 6; i++) {
                int id = ByteUtil.getTwoBytes(settings, i * 6);
                long value = ByteUtil.getFourBytes(settings, (i * 6) + 2);
                remoteSettings.set(Setting.valueOf(id), value);
            }
        } catch (Http2Exception e) {
            throw new ProtocolException(sm.getString("upgradeHandler.upgrade.fail", connectionId));
        }
    }

    // Send the initial settings frame
    writeSettings();

    // Make sure the client has sent a valid connection preface before we
    // send the response to the original request over HTTP/2.
    try {
        parser.readConnectionPreface();
    } catch (Http2Exception e) {
        String msg = sm.getString("upgradeHandler.invalidPreface", connectionId);
        if (log.isDebugEnabled()) {
            log.debug(msg);
        }
        throw new ProtocolException(msg);
    }
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.prefaceReceived", connectionId));
    }

    // Send a ping to get an idea of round trip time as early as possible
    try {
        pingManager.sendPing(true);
    } catch (IOException ioe) {
        throw new ProtocolException(sm.getString("upgradeHandler.pingFailed"), ioe);
    }

    if (webConnection != null) {
        processStreamOnContainerThread(stream);
    }
}
/** Wraps the stream in a processor and dispatches it for an OPEN_READ event. */
private void processStreamOnContainerThread(Stream stream)
{
    StreamProcessor streamProcessor = new StreamProcessor(this, stream, adapter, socketWrapper);
    streamProcessor.setSslSupport(sslSupport);
    processStreamOnContainerThread(streamProcessor, SocketEvent.OPEN_READ);
}

/**
 * Dispatches a stream processor to a container thread, honouring the
 * optional per-connection stream-execution limit: when the limit is
 * reached, the runnable is queued until a running stream completes.
 */
void processStreamOnContainerThread(StreamProcessor streamProcessor, SocketEvent event)
{
    StreamRunnable streamRunnable = new StreamRunnable(streamProcessor, event);
    if (streamConcurrency == null) {
        // No concurrency limit configured: execute directly.
        socketWrapper.execute(streamRunnable);
    } else {
        if (getStreamConcurrency() < maxConcurrentStreamExecution) {
            increaseStreamConcurrency();
            socketWrapper.execute(streamRunnable);
        } else {
            queuedRunnable.offer(streamRunnable);
        }
    }
}

@Override
public void setSocketWrapper(SocketWrapperBase<?> wrapper)
{
    this.socketWrapper = wrapper;
}

@Override
public void setSslSupport(SSLSupport sslSupport)
{
    this.sslSupport = sslSupport;
}
/**
 * Main I/O entry point for the upgraded connection. Reads frames while
 * data is available (stream errors are handled per-stream; connection
 * errors close the whole connection), flushes pending writes on
 * OPEN_WRITE, and closes on terminal socket events.
 *
 * @return UPGRADED while the connection should stay open, CLOSED otherwise
 */
@Override
public SocketState upgradeDispatch(SocketEvent status)
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.upgradeDispatch.entry", connectionId, status));
    }

    // WebConnection is not used so passing null here is fine
    // Might not be necessary. init() will handle that.
    init(null);

    SocketState result = SocketState.CLOSED;

    try {
        pingManager.sendPing(false);

        checkPauseState();

        switch (status) {
        case OPEN_READ:
            try {
                // There is data to read so use the read timeout while
                // reading frames.
                socketWrapper.setReadTimeout(getReadTimeout());
                while (true) {
                    try {
                        if (!parser.readFrame(false)) {
                            break;
                        }
                    } catch (StreamException se) {
                        // Stream errors are not fatal to the connection so
                        // continue reading frames
                        Stream stream = getStream(se.getStreamId(), false);
                        if (stream == null) {
                            sendStreamReset(se);
                        } else {
                            stream.close(se);
                        }
                    }
                }
                // No more frames to read so switch to the keep-alive
                // timeout.
                socketWrapper.setReadTimeout(getKeepAliveTimeout());
            } catch (Http2Exception ce) {
                // Really ConnectionException
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("upgradeHandler.connectionError"), ce);
                }
                closeConnection(ce);
                break;
            }

            if (connectionState.get() != ConnectionState.CLOSED) {
                result = SocketState.UPGRADED;
            }
            break;

        case OPEN_WRITE:
            processWrites();

            result = SocketState.UPGRADED;
            break;

        case DISCONNECT:
        case ERROR:
        case TIMEOUT:
        case STOP:
            close();
            break;
        }
    } catch (IOException ioe) {
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("upgradeHandler.ioerror", connectionId), ioe);
        }
        close();
    }

    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.upgradeDispatch.exit", connectionId, result));
    }
    return result;
}

/** Settings the client sent; Tomcat must honour these when writing. */
ConnectionSettingsRemote getRemoteSettings()
{
    return remoteSettings;
}

/** Settings Tomcat sent; the client must honour these when writing. */
ConnectionSettingsLocal getLocalSettings()
{
    return localSettings;
}

/**
 * Starts a graceful shutdown: announces (via GOAWAY with the maximum
 * stream id) that new streams may still briefly be processed, then
 * checkPauseState() completes the pause once a round trip has elapsed.
 */
@Override
public void pause()
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.pause.entry", connectionId));
    }

    if (connectionState.compareAndSet(ConnectionState.CONNECTED, ConnectionState.PAUSING)) {
        pausedNanoTime = System.nanoTime();

        try {
            writeGoAwayFrame((1 << 31) - 1, Http2Error.NO_ERROR.getCode(), null);
        } catch (IOException ioe) {
            // This is fatal for the connection. Ignore it here. There will
            // be
            // further attempts at I/O in upgradeDispatch() and it can
            // better
            // handle the IO errors.
        }
    }
}

@Override
public void destroy()
{
    // NO-OP
}

/**
 * Completes a pause started by pause(): once more than one estimated
 * round-trip time has passed, sends the final GOAWAY with the highest
 * processed stream id.
 */
private void checkPauseState() throws IOException
{
    if (connectionState.get() == ConnectionState.PAUSING) {
        if (pausedNanoTime + pingManager.getRoundTripTimeNano() < System.nanoTime()) {
            connectionState.compareAndSet(ConnectionState.PAUSING, ConnectionState.PAUSED);
            writeGoAwayFrame(maxProcessedStreamId, Http2Error.NO_ERROR.getCode(), null);
        }
    }
}
// The three helpers below are only called when streamConcurrency is
// non-null (i.e. a stream-execution limit is configured).
private int increaseStreamConcurrency()
{
    return streamConcurrency.incrementAndGet();
}

private int decreaseStreamConcurrency()
{
    return streamConcurrency.decrementAndGet();
}

private int getStreamConcurrency()
{
    return streamConcurrency.get();
}

/**
 * Called when a stream finishes executing: releases its concurrency slot
 * and, if capacity remains, starts the next queued stream runnable.
 */
void executeQueuedStream()
{
    if (streamConcurrency == null) {
        return;
    }
    decreaseStreamConcurrency();
    if (getStreamConcurrency() < maxConcurrentStreamExecution) {
        StreamRunnable streamRunnable = queuedRunnable.poll();
        if (streamRunnable != null) {
            increaseStreamConcurrency();
            socketWrapper.execute(streamRunnable);
        }
    }
}

/**
 * Writes an RST_STREAM frame (9-byte header + 4-byte error code) for the
 * stream identified in the given exception.
 */
void sendStreamReset(StreamException se) throws IOException
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.rst.debug", connectionId, Integer.toString(se.getStreamId()),
                se.getError(), se.getMessage()));
    }

    // Write a RST frame
    byte[] rstFrame = new byte[13];
    // Length
    ByteUtil.setThreeBytes(rstFrame, 0, 4);
    // Type
    rstFrame[3] = FrameType.RST.getIdByte();
    // No flags
    // Stream ID
    ByteUtil.set31Bits(rstFrame, 5, se.getStreamId());
    // Payload
    ByteUtil.setFourBytes(rstFrame, 9, se.getError().getCode());

    // Serialise frame writes on the socket.
    synchronized (socketWrapper) {
        socketWrapper.write(true, rstFrame, 0, rstFrame.length);
        socketWrapper.flush(true);
    }
}

/** Sends a best-effort GOAWAY describing the fatal error, then closes. */
void closeConnection(Http2Exception ce)
{
    try {
        writeGoAwayFrame(maxProcessedStreamId, ce.getError().getCode(),
                ce.getMessage().getBytes(StandardCharsets.UTF_8));
    } catch (IOException ioe) {
        // Ignore. GOAWAY is sent on a best efforts basis and the original
        // error has already been logged.
    }
    close();
}

/** Sends Tomcat's pending local settings as the initial SETTINGS frame. */
protected void writeSettings()
{
    // Send the initial settings frame
    try {
        byte[] settings = localSettings.getSettingsFrameForPending();
        socketWrapper.write(true, settings, 0, settings.length);
        socketWrapper.flush(true);
    } catch (IOException ioe) {
        String msg = sm.getString("upgradeHandler.sendPrefaceFail", connectionId);
        if (log.isDebugEnabled()) {
            log.debug(msg);
        }
        throw new ProtocolException(msg, ioe);
    }
}

/**
 * Writes a GOAWAY frame: 3-byte length, the shared GOAWAY header bytes,
 * the 8-byte fixed payload (last stream id + error code) and an optional
 * debug message.
 */
protected void writeGoAwayFrame(int maxStreamId, long errorCode, byte[] debugMsg) throws IOException
{
    byte[] fixedPayload = new byte[8];
    ByteUtil.set31Bits(fixedPayload, 0, maxStreamId);
    ByteUtil.setFourBytes(fixedPayload, 4, errorCode);
    int len = 8;
    if (debugMsg != null) {
        len += debugMsg.length;
    }
    byte[] payloadLength = new byte[3];
    ByteUtil.setThreeBytes(payloadLength, 0, len);

    synchronized (socketWrapper) {
        socketWrapper.write(true, payloadLength, 0, payloadLength.length);
        socketWrapper.write(true, GOAWAY, 0, GOAWAY.length);
        socketWrapper.write(true, fixedPayload, 0, 8);
        if (debugMsg != null) {
            socketWrapper.write(true, debugMsg, 0, debugMsg.length);
        }
        socketWrapper.flush(true);
    }
}
/**
 * Writes the given headers for the stream (as HEADERS or, when
 * pushedStreamId > 0, PUSH_PROMISE plus CONTINUATION frames as needed)
 * while holding the socket lock, then records end-of-stream if requested.
 */
void writeHeaders(Stream stream, int pushedStreamId, MimeHeaders mimeHeaders, boolean endOfStream, int payloadSize)
        throws IOException
{
    // This ensures the Stream processing thread has control of the socket.
    synchronized (socketWrapper) {
        doWriteHeaders(stream, pushedStreamId, mimeHeaders, endOfStream, payloadSize);
    }
    if (endOfStream) {
        stream.sentEndOfStream();
    }
}

/*
 * Separate method to allow Http2AsyncUpgradeHandler to call this code
 * without synchronizing on socketWrapper since it doesn't need to.
 */
void doWriteHeaders(Stream stream, int pushedStreamId, MimeHeaders mimeHeaders, boolean endOfStream,
        int payloadSize) throws IOException
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.writeHeaders", connectionId, stream.getIdentifier(),
                Integer.valueOf(pushedStreamId), Boolean.valueOf(endOfStream)));
    }

    if (!stream.canWrite()) {
        return;
    }

    HeaderFrameBuffers headerFrameBuffers = getHeaderFrameBuffers(payloadSize);

    // A PUSH_PROMISE frame carries the promised stream id before the
    // header block fragment.
    byte[] pushedStreamIdBytes = null;
    if (pushedStreamId > 0) {
        pushedStreamIdBytes = new byte[4];
        ByteUtil.set31Bits(pushedStreamIdBytes, 0, pushedStreamId);
    }

    boolean first = true;
    State state = null;

    // Encode the headers with HPACK; UNDERFLOW means the payload buffer
    // was too small and must be expanded before continuing.
    while (state != State.COMPLETE) {
        headerFrameBuffers.startFrame();
        if (first && pushedStreamIdBytes != null) {
            headerFrameBuffers.getPayload().put(pushedStreamIdBytes);
        }
        state = getHpackEncoder().encode(mimeHeaders, headerFrameBuffers.getPayload());
        headerFrameBuffers.getPayload().flip();
        if (state == State.COMPLETE || headerFrameBuffers.getPayload().limit() > 0) {
            ByteUtil.setThreeBytes(headerFrameBuffers.getHeader(), 0, headerFrameBuffers.getPayload().limit());
            if (first) {
                // First frame: HEADERS or PUSH_PROMISE; later frames are
                // CONTINUATION.
                first = false;
                if (pushedStreamIdBytes == null) {
                    headerFrameBuffers.getHeader()[3] = FrameType.HEADERS.getIdByte();
                } else {
                    headerFrameBuffers.getHeader()[3] = FrameType.PUSH_PROMISE.getIdByte();
                }
                if (endOfStream) {
                    headerFrameBuffers.getHeader()[4] = FLAG_END_OF_STREAM;
                }
            } else {
                headerFrameBuffers.getHeader()[3] = FrameType.CONTINUATION.getIdByte();
            }
            if (state == State.COMPLETE) {
                headerFrameBuffers.getHeader()[4] += FLAG_END_OF_HEADERS;
            }
            if (log.isDebugEnabled()) {
                log.debug(headerFrameBuffers.getPayload().limit() + " bytes");
            }
            ByteUtil.set31Bits(headerFrameBuffers.getHeader(), 5, stream.getIdentifier().intValue());
            headerFrameBuffers.endFrame();
        } else if (state == State.UNDERFLOW) {
            headerFrameBuffers.expandPayload();
        }
    }

    headerFrameBuffers.endHeaders();
}

/** Factory hook so the async handler can use different frame buffers. */
protected HeaderFrameBuffers getHeaderFrameBuffers(int initialPayloadSize)
{
    return new DefaultHeaderFrameBuffers(initialPayloadSize);
}

/** Lazily creates the HPACK encoder, refreshing the agreed table size. */
protected HpackEncoder getHpackEncoder()
{
    if (hpackEncoder == null) {
        hpackEncoder = new HpackEncoder();
    }
    // Ensure latest agreed table size is used
    hpackEncoder.setMaxTableSize(remoteSettings.getHeaderTableSize());
    return hpackEncoder;
}

/**
 * Writes a DATA frame of {@code len} bytes for the stream, flagging
 * end-of-stream when {@code finished} and updating the active stream
 * count accordingly.
 */
void writeBody(Stream stream, ByteBuffer data, int len, boolean finished) throws IOException
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.writeBody", connectionId, stream.getIdentifier(),
                Integer.toString(len)));
    }

    // Need to check this now since sending end of stream will change this.
    boolean writeable = stream.canWrite();
    byte[] header = new byte[9];
    ByteUtil.setThreeBytes(header, 0, len);
    header[3] = FrameType.DATA.getIdByte();
    if (finished) {
        header[4] = FLAG_END_OF_STREAM;
        stream.sentEndOfStream();
        if (!stream.isActive()) {
            activeRemoteStreamCount.decrementAndGet();
        }
    }
    if (writeable) {
        ByteUtil.set31Bits(header, 5, stream.getIdentifier().intValue());
        synchronized (socketWrapper) {
            try {
                socketWrapper.write(true, header, 0, header.length);
                // Temporarily cap the buffer to exactly len bytes, then
                // restore the caller's limit.
                int orgLimit = data.limit();
                data.limit(data.position() + len);
                socketWrapper.write(true, data);
                data.limit(orgLimit);
                socketWrapper.flush(true);
            } catch (IOException ioe) {
                handleAppInitiatedIOException(ioe);
            }
        }
    }
}
/*
 * Handles an I/O error on the socket underlying the HTTP/2 connection when
 * it is triggered by application code (usually reading the request or
 * writing the response). Such I/O errors are fatal so the connection is
 * closed. The exception is re-thrown to make the client code aware of the
 * problem.
 *
 * Note: We can not rely on this exception reaching the socket processor
 * since the application code may swallow it.
 */
protected void handleAppInitiatedIOException(IOException ioe) throws IOException
{
    close();
    throw ioe;
}

/*
 * Needs to know if this was application initiated since that affects the
 * error handling.
 */
void writeWindowUpdate(Stream stream, int increment, boolean applicationInitiated) throws IOException
{
    if (!stream.canWrite()) {
        return;
    }
    synchronized (socketWrapper) {
        // Build window update frame for stream 0
        byte[] frame = new byte[13];
        ByteUtil.setThreeBytes(frame, 0, 4);
        frame[3] = FrameType.WINDOW_UPDATE.getIdByte();
        ByteUtil.set31Bits(frame, 9, increment);
        // Connection-level update first (stream id bytes left at zero).
        socketWrapper.write(true, frame, 0, frame.length);
        // Change stream Id and re-use
        ByteUtil.set31Bits(frame, 5, stream.getIdentifier().intValue());
        try {
            socketWrapper.write(true, frame, 0, frame.length);
            socketWrapper.flush(true);
        } catch (IOException ioe) {
            if (applicationInitiated) {
                handleAppInitiatedIOException(ioe);
            } else {
                throw ioe;
            }
        }
    }
}

/** Flushes pending output; registers write interest if data remains. */
private void processWrites() throws IOException
{
    synchronized (socketWrapper) {
        if (socketWrapper.flush(false)) {
            socketWrapper.registerWriteInterest();
        }
    }
}

/**
 * Reserves up to {@code reservation} bytes of the connection-level flow
 * control window for the given stream, blocking (on the stream's monitor)
 * until at least one byte can be allocated. Streams that cannot be
 * satisfied immediately are placed on the backlog, together with their
 * ancestors, and woken by incrementWindowSize() when window is returned.
 *
 * @return the number of bytes actually allocated (always >= 1)
 */
int reserveWindowSize(Stream stream, int reservation) throws IOException
{
    // Need to be holding the stream lock so releaseBacklog() can't notify
    // this thread until after this thread enters wait()
    int allocation = 0;
    synchronized (stream) {
        do {
            synchronized (this) {
                if (!stream.canWrite()) {
                    throw new CloseNowException(sm.getString("upgradeHandler.stream.notWritable",
                            stream.getConnectionId(), stream.getIdentifier()));
                }
                long windowSize = getWindowSize();
                if (windowSize < 1 || backLogSize > 0) {
                    // Has this stream been granted an allocation
                    int[] value = backLogStreams.get(stream);
                    if (value == null) {
                        // value[0] = bytes still wanted, value[1] = bytes granted.
                        value = new int[] { reservation, 0 };
                        backLogStreams.put(stream, value);
                        backLogSize += reservation;
                        // Add the parents as well
                        AbstractStream parent = stream.getParentStream();
                        while (parent != null && backLogStreams.putIfAbsent(parent, new int[2]) == null) {
                            parent = parent.getParentStream();
                        }
                    } else {
                        if (value[1] > 0) {
                            allocation = value[1];
                            decrementWindowSize(allocation);
                            if (value[0] == 0) {
                                // The reservation has been fully allocated
                                // so this stream can be removed from the
                                // backlog.
                                backLogStreams.remove(stream);
                            } else {
                                // This allocation has been used. Reset the
                                // allocation to zero. Leave the stream on
                                // the backlog as it still has more bytes to
                                // write.
                                value[1] = 0;
                            }
                        }
                    }
                } else if (windowSize < reservation) {
                    // Partial allocation: take whatever window remains.
                    allocation = (int) windowSize;
                    decrementWindowSize(allocation);
                } else {
                    allocation = reservation;
                    decrementWindowSize(allocation);
                }
            }
            if (allocation == 0) {
                try {
                    stream.wait();
                } catch (InterruptedException e) {
                    throw new IOException(sm.getString("upgradeHandler.windowSizeReservationInterrupted",
                            connectionId, stream.getIdentifier(), Integer.toString(reservation)), e);
                }
            }
        } while (allocation == 0);
    }
    return allocation;
}
@SuppressWarnings("sync-override") // notifyAll() needs to be outside sync
                                   // to avoid deadlock
@Override
protected void incrementWindowSize(int increment) throws Http2Exception
{
    // If the window was exhausted and this increment reopens it,
    // distribute the newly available bytes to backlogged streams and
    // wake the threads blocked in reserveWindowSize().
    Set<AbstractStream> streamsToNotify = null;

    synchronized (this) {
        long windowSize = getWindowSize();
        if (windowSize < 1 && windowSize + increment > 0) {
            streamsToNotify = releaseBackLog((int) (windowSize + increment));
        }
        super.incrementWindowSize(increment);
    }

    if (streamsToNotify != null) {
        for (AbstractStream stream : streamsToNotify) {
            synchronized (stream) {
                stream.notifyAll();
            }
        }
    }
}

/**
 * Distributes {@code increment} bytes of reopened window across the
 * backlog. If the whole backlog fits, it is cleared; otherwise bytes are
 * shared down the priority tree via allocate().
 *
 * @return the streams that received an allocation and should be notified
 */
private synchronized Set<AbstractStream> releaseBackLog(int increment)
{
    Set<AbstractStream> result = new HashSet<>();
    if (backLogSize < increment) {
        // Can clear the whole backlog
        result.addAll(backLogStreams.keySet());
        backLogStreams.clear();
        backLogSize = 0;
    } else {
        int leftToAllocate = increment;
        while (leftToAllocate > 0) {
            leftToAllocate = allocate(this, leftToAllocate);
        }
        for (Entry<AbstractStream, int[]> entry : backLogStreams.entrySet()) {
            int allocation = entry.getValue()[1];
            if (allocation > 0) {
                backLogSize -= allocation;
                result.add(entry.getKey());
            }
        }
    }
    return result;
}

/**
 * Recursively allocates {@code allocation} bytes to the given stream and,
 * once its own backlog entry is satisfied, to its backlogged children in
 * proportion to their weights.
 *
 * NOTE(review): assumes the stream has an entry in backLogStreams —
 * callers must guarantee this or value[0] below would NPE.
 *
 * @return the number of bytes that could not be allocated in this subtree
 */
private int allocate(AbstractStream stream, int allocation)
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.allocate.debug", getConnectionId(), stream.getIdentifier(),
                Integer.toString(allocation)));
    }
    // Allocate to the specified stream
    int[] value = backLogStreams.get(stream);
    if (value[0] >= allocation) {
        value[0] -= allocation;
        value[1] += allocation;
        return 0;
    }

    // There was some left over so allocate that to the children of the
    // stream.
    int leftToAllocate = allocation;
    value[1] = value[0];
    value[0] = 0;
    leftToAllocate -= value[1];
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.allocate.left", getConnectionId(), stream.getIdentifier(),
                Integer.toString(leftToAllocate)));
    }

    // Recipients are children of the current stream that are in the
    // backlog.
    Set<AbstractStream> recipients = new HashSet<>();
    recipients.addAll(stream.getChildStreams());
    recipients.retainAll(backLogStreams.keySet());

    // Loop until we run out of allocation or recipients
    while (leftToAllocate > 0) {
        if (recipients.size() == 0) {
            backLogStreams.remove(stream);
            return leftToAllocate;
        }

        int totalWeight = 0;
        for (AbstractStream recipient : recipients) {
            if (log.isDebugEnabled()) {
                log.debug(
                        sm.getString("upgradeHandler.allocate.recipient", getConnectionId(), stream.getIdentifier(),
                                recipient.getIdentifier(), Integer.toString(recipient.getWeight())));
            }
            totalWeight += recipient.getWeight();
        }

        // Use an Iterator so fully allocated children/recipients can be
        // removed.
        Iterator<AbstractStream> iter = recipients.iterator();
        int allocated = 0;
        while (iter.hasNext()) {
            AbstractStream recipient = iter.next();
            int share = leftToAllocate * recipient.getWeight() / totalWeight;
            if (share == 0) {
                // This is to avoid rounding issues triggering an infinite
                // loop. It will cause a very slight over allocation but
                // HTTP/2 should cope with that.
                share = 1;
            }
            int remainder = allocate(recipient, share);
            // Remove recipients that receive their full allocation so that
            // they are excluded from the next allocation round.
            if (remainder > 0) {
                iter.remove();
            }
            allocated += (share - remainder);
        }
        leftToAllocate -= allocated;
    }

    return 0;
}
/**
 * Looks up a stream by id. When {@code unknownIsError} is true a missing
 * stream (already closed and pruned) is reported as a connection-level
 * protocol error.
 */
private Stream getStream(int streamId, boolean unknownIsError) throws ConnectionException
{
    Integer key = Integer.valueOf(streamId);
    Stream result = streams.get(key);
    if (result == null && unknownIsError) {
        // Stream has been closed and removed from the map
        throw new ConnectionException(sm.getString("upgradeHandler.stream.closed", key), Http2Error.PROTOCOL_ERROR);
    }
    return result;
}

/**
 * Registers a new client-initiated stream. Client stream ids must be odd
 * (RFC 7540, 5.1.1); even ids are a connection error.
 */
private Stream createRemoteStream(int streamId) throws ConnectionException
{
    Integer key = Integer.valueOf(streamId);

    if (streamId % 2 != 1) {
        throw new ConnectionException(sm.getString("upgradeHandler.stream.even", key), Http2Error.PROTOCOL_ERROR);
    }

    pruneClosedStreams();

    Stream result = new Stream(key, this);
    streams.put(key, result);
    return result;
}

/** Registers a new server-initiated (push) stream using the next even id. */
private Stream createLocalStream(Request request)
{
    int streamId = nextLocalStreamId.getAndAdd(2);

    Integer key = Integer.valueOf(streamId);

    Stream result = new Stream(key, this, request);
    streams.put(key, result);
    return result;
}

/** Marks the connection CLOSED, cancels all streams and closes the socket. */
private void close()
{
    connectionState.set(ConnectionState.CLOSED);

    for (Stream stream : streams.values()) {
        // The connection is closing. Close the associated streams as no
        // longer required.
        stream.receiveReset(Http2Error.CANCEL.getCode());
    }
    try {
        socketWrapper.close();
    } catch (IOException ioe) {
        log.debug(sm.getString("upgradeHandler.socketCloseFailed"), ioe);
    }
}
/**
 * Bounds the size of the stream map by removing closed streams once the
 * number of known streams exceeds the local max-concurrent-streams setting
 * plus 10% head-room retained for the priority tree. Runs (at most) once
 * every 10 newly created streams.
 */
private void pruneClosedStreams()
{
    // Only prune every 10 new streams
    if (newStreamsSinceLastPrune < 9) {
        // Not atomic. Increments may be lost. Not a problem.
        newStreamsSinceLastPrune++;
        return;
    }
    // Reset counter
    newStreamsSinceLastPrune = 0;
    // RFC 7540, 5.3.4 endpoints should maintain state for at least the
    // maximum number of concurrent streams
    long max = localSettings.getMaxConcurrentStreams();
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.pruneStart", connectionId, Long.toString(max),
                Integer.toString(streams.size())));
    }
    // Allow an additional 10% for closed streams that are used in the
    // priority tree
    max = max + max / 10;
    if (max > Integer.MAX_VALUE) {
        max = Integer.MAX_VALUE;
    }
    int toClose = streams.size() - (int) max;
    if (toClose < 1) {
        return;
    }
    // Need to try and close some streams.
    // Try to close streams in this order
    // 1. Completed streams used for a request with no children
    // 2. Completed streams used for a request with children
    // 3. Closed final streams
    //
    // Steps 1 and 2 will always be completed.
    // Step 3 will be completed to the minimum extent necessary to bring the
    // total number of streams under the limit.
    // Use these sets to track the different classes of streams
    TreeSet<Integer> candidatesStepOne = new TreeSet<>();
    TreeSet<Integer> candidatesStepTwo = new TreeSet<>();
    TreeSet<Integer> candidatesStepThree = new TreeSet<>();
    for (Entry<Integer, Stream> entry : streams.entrySet()) {
        Stream stream = entry.getValue();
        // Never remove active streams
        if (stream.isActive()) {
            continue;
        }
        if (stream.isClosedFinal()) {
            // This stream went from IDLE to CLOSED and is likely to have
            // been created by the client as part of the priority tree.
            candidatesStepThree.add(entry.getKey());
        } else if (stream.getChildStreams().size() == 0) {
            // Closed, no children
            candidatesStepOne.add(entry.getKey());
        } else {
            // Closed, with children
            candidatesStepTwo.add(entry.getKey());
        }
    }
    // Process the step one list
    for (Integer streamIdToRemove : candidatesStepOne) {
        // Remove this childless stream
        Stream removedStream = streams.remove(streamIdToRemove);
        removedStream.detachFromParent();
        toClose--;
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("upgradeHandler.pruned", connectionId, streamIdToRemove));
        }
        // Did this make the parent childless?
        AbstractStream parent = removedStream.getParentStream();
        while (parent instanceof Stream && !((Stream) parent).isActive() && !((Stream) parent).isClosedFinal()
                && parent.getChildStreams().size() == 0) {
            streams.remove(parent.getIdentifier());
            parent.detachFromParent();
            toClose--;
            if (log.isDebugEnabled()) {
                // Fix: log the stream actually being pruned (the cascaded
                // parent), not the child that triggered the cascade.
                log.debug(sm.getString("upgradeHandler.pruned", connectionId, parent.getIdentifier()));
            }
            // Also need to remove this stream from the p2 list
            candidatesStepTwo.remove(parent.getIdentifier());
            parent = parent.getParentStream();
        }
    }
    // Process the P2 list
    for (Integer streamIdToRemove : candidatesStepTwo) {
        removeStreamFromPriorityTree(streamIdToRemove);
        toClose--;
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("upgradeHandler.pruned", connectionId, streamIdToRemove));
        }
    }
    while (toClose > 0 && candidatesStepThree.size() > 0) {
        Integer streamIdToRemove = candidatesStepThree.pollLast();
        removeStreamFromPriorityTree(streamIdToRemove);
        toClose--;
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("upgradeHandler.prunedPriority", connectionId, streamIdToRemove));
        }
    }
    if (toClose > 0) {
        log.warn(sm.getString("upgradeHandler.pruneIncomplete", connectionId, Integer.toString(toClose)));
    }
}
/**
 * Removes a stream from the priority tree, re-parenting its children onto
 * the removed stream's parent. With multiple children, the removed stream's
 * weight is distributed between them in proportion to their own weights
 * (RFC 7540, 5.3.4).
 */
private void removeStreamFromPriorityTree(Integer streamIdToRemove)
{
    Stream streamToRemove = streams.remove(streamIdToRemove);
    // Move the removed Stream's children to the removed Stream's parent.
    Set<Stream> children = streamToRemove.getChildStreams();
    if (children.size() == 1) {
        // Shortcut: the only child simply inherits the removed stream's
        // weight.
        children.iterator().next().rePrioritise(streamToRemove.getParentStream(), streamToRemove.getWeight());
    } else {
        int totalWeight = 0;
        for (Stream child : children) {
            totalWeight += child.getWeight();
        }
        // Fix: the original re-prioritised children.iterator().next() on
        // every pass (always the current first child) instead of each child,
        // and iterated the live child set while rePrioritise mutates it.
        // Snapshot the set and re-parent each child individually.
        for (Stream child : children.toArray(new Stream[0])) {
            child.rePrioritise(streamToRemove.getParentStream(),
                    streamToRemove.getWeight() * child.getWeight() / totalWeight);
        }
    }
    streamToRemove.detachFromParent();
}
/**
 * Initiates a server push: creates a local stream for the pushed request,
 * writes the promise headers on the associated stream and dispatches the
 * pushed request for processing.
 */
void push(Request request, Stream associatedStream) throws IOException
{
    Stream pushStream = createLocalStream(request);
    // TODO: Is 1k the optimal value?
    writeHeaders(associatedStream, pushStream.getIdentifier().intValue(), request.getMimeHeaders(), false, 1024);
    pushStream.sentPushPromise();
    processStreamOnContainerThread(pushStream);
}
@Override
protected final String getConnectionId()
{
    return connectionId;
}
// The connection itself is the root of the priority tree and carries no
// weight of its own.
@Override
protected final int getWeight()
{
    return 0;
}
// Only trailer header names present in the configured white-list are
// accepted.
boolean isTrailerHeaderAllowed(String headerName)
{
    return allowedTrailerHeaders.contains(headerName);
}
// ------------------------------------------- Configuration getters/setters
// Timeouts (milliseconds) applied to socket operations.
public long getReadTimeout()
{
    return readTimeout;
}
public void setReadTimeout(long readTimeout)
{
    this.readTimeout = readTimeout;
}
public long getKeepAliveTimeout()
{
    return keepAliveTimeout;
}
public void setKeepAliveTimeout(long keepAliveTimeout)
{
    this.keepAliveTimeout = keepAliveTimeout;
}
public long getWriteTimeout()
{
    return writeTimeout;
}
public void setWriteTimeout(long writeTimeout)
{
    this.writeTimeout = writeTimeout;
}
// The two setters below store into localSettings, this endpoint's own
// HTTP/2 SETTINGS values.
public void setMaxConcurrentStreams(long maxConcurrentStreams)
{
    localSettings.set(Setting.MAX_CONCURRENT_STREAMS, maxConcurrentStreams);
}
public void setMaxConcurrentStreamExecution(int maxConcurrentStreamExecution)
{
    this.maxConcurrentStreamExecution = maxConcurrentStreamExecution;
}
public void setInitialWindowSize(int initialWindowSize)
{
    localSettings.set(Setting.INITIAL_WINDOW_SIZE, initialWindowSize);
}
// White-list consulted by isTrailerHeaderAllowed().
public void setAllowedTrailerHeaders(Set<String> allowedTrailerHeaders)
{
    this.allowedTrailerHeaders = allowedTrailerHeaders;
}
// Limits on header and trailer count/size.
public void setMaxHeaderCount(int maxHeaderCount)
{
    this.maxHeaderCount = maxHeaderCount;
}
public int getMaxHeaderCount()
{
    return maxHeaderCount;
}
public void setMaxHeaderSize(int maxHeaderSize)
{
    this.maxHeaderSize = maxHeaderSize;
}
public int getMaxHeaderSize()
{
    return maxHeaderSize;
}
public void setMaxTrailerCount(int maxTrailerCount)
{
    this.maxTrailerCount = maxTrailerCount;
}
public int getMaxTrailerCount()
{
    return maxTrailerCount;
}
public void setMaxTrailerSize(int maxTrailerSize)
{
    this.maxTrailerSize = maxTrailerSize;
}
public int getMaxTrailerSize()
{
    return maxTrailerSize;
}
// Suppresses locally initiated pings (see PingManager.sendPing()).
public void setInitiatePingDisabled(boolean initiatePingDisabled)
{
    pingManager.initiateDisabled = initiatePingDisabled;
}
// ----------------------------------------------- Http2Parser.Input methods
/**
 * Fills {@code data[offset..offset+length)} with exactly {@code length}
 * bytes from the socket.
 *
 * @return {@code true} if the requested bytes were read; {@code false} if a
 *         non-blocking read returned no data, or EOF was reached while the
 *         connection is shutting down
 */
@Override
public boolean fill(boolean block, byte[] data, int offset, int length) throws IOException
{
    int len = length;
    int pos = offset;
    boolean nextReadBlock = block;
    int thisRead = 0;
    while (len > 0) {
        thisRead = socketWrapper.read(nextReadBlock, data, pos, len);
        if (thisRead == 0) {
            if (nextReadBlock) {
                // Should never happen
                throw new IllegalStateException();
            } else {
                return false;
            }
        } else if (thisRead == -1) {
            if (connectionState.get().isNewStreamAllowed()) {
                throw new EOFException();
            } else {
                // Connection is draining; EOF is not an error here.
                return false;
            }
        } else {
            pos += thisRead;
            len -= thisRead;
            // After a partial read, block for the remainder so a frame is
            // never left half-read.
            nextReadBlock = true;
        }
    }
    return true;
}
@Override
public int getMaxFrameSize()
{
    return localSettings.getMaxFrameSize();
}
// ---------------------------------------------- Http2Parser.Output methods
// Lazily creates the HPACK decoder using the locally configured header
// table size.
@Override
public HpackDecoder getHpackDecoder()
{
    if (hpackDecoder == null) {
        hpackDecoder = new HpackDecoder(localSettings.getHeaderTableSize());
    }
    return hpackDecoder;
}
// Called when a DATA frame header has been read: validates stream state,
// accounts for the payload and returns the buffer to receive it.
@Override
public ByteBuffer startRequestBodyFrame(int streamId, int payloadSize) throws Http2Exception
{
    Stream stream = getStream(streamId, true);
    stream.checkState(FrameType.DATA);
    stream.receivedData(payloadSize);
    return stream.getInputByteBuffer();
}
// Called once a DATA frame payload is fully read; notifies the stream's
// input buffer that data is available.
@Override
public void endRequestBodyFrame(int streamId) throws Http2Exception
{
    Stream stream = getStream(streamId, true);
    stream.getInputBuffer().onDataAvailable();
}
// Handles END_OF_STREAM. The stream may legitimately be unknown while the
// connection is shutting down, hence the conditional lookup.
@Override
public void receivedEndOfStream(int streamId) throws ConnectionException
{
    Stream stream = getStream(streamId, connectionState.get().isNewStreamAllowed());
    if (stream != null) {
        stream.receivedEndOfStream();
        if (!stream.isActive()) {
            activeRemoteStreamCount.decrementAndGet();
        }
    }
}
// Swallowed padding still counts against flow control, so the window is
// returned to the peer via a WINDOW_UPDATE.
@Override
public void swallowedPadding(int streamId, int paddingLength) throws ConnectionException, IOException
{
    Stream stream = getStream(streamId, true);
    // +1 is for the payload byte used to define the padding length
    writeWindowUpdate(stream, paddingLength + 1, false);
}
/**
 * Called at the start of a HEADERS frame. Returns the sink that will
 * receive the decoded headers: the target stream when new streams are
 * allowed, or a shared no-op sink while the connection is draining.
 */
@Override
public HeaderEmitter headersStart(int streamId, boolean headersEndStream) throws Http2Exception
{
    if (connectionState.get().isNewStreamAllowed()) {
        Stream stream = getStream(streamId, false);
        if (stream == null) {
            stream = createRemoteStream(streamId);
        }
        if (streamId < maxActiveRemoteStreamId) {
            // Stream ids must be strictly increasing.
            throw new ConnectionException(sm.getString("upgradeHandler.stream.old", Integer.valueOf(streamId),
                    Integer.valueOf(maxActiveRemoteStreamId)), Http2Error.PROTOCOL_ERROR);
        }
        stream.checkState(FrameType.HEADERS);
        stream.receivedStartOfHeaders(headersEndStream);
        closeIdleStreams(streamId);
        if (localSettings.getMaxConcurrentStreams() < activeRemoteStreamCount.incrementAndGet()) {
            // Over the concurrency limit: undo the increment and refuse the
            // stream (stream-level error, connection survives).
            activeRemoteStreamCount.decrementAndGet();
            throw new StreamException(
                    sm.getString("upgradeHandler.tooManyRemoteStreams",
                            Long.toString(localSettings.getMaxConcurrentStreams())),
                    Http2Error.REFUSED_STREAM, streamId);
        }
        return stream;
    } else {
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("upgradeHandler.noNewStreams", connectionId, Integer.toString(streamId)));
        }
        // Stateless so a static can be used to save on GC
        return HEADER_SINK;
    }
}
// Seeing a higher stream id implicitly closes any intermediate idle streams
// with lower (even-offset) ids.
private void closeIdleStreams(int newMaxActiveRemoteStreamId) throws Http2Exception
{
    for (int i = maxActiveRemoteStreamId + 2; i < newMaxActiveRemoteStreamId; i += 2) {
        Stream stream = getStream(i, false);
        if (stream != null) {
            stream.closeIfIdle();
        }
    }
    maxActiveRemoteStreamId = newMaxActiveRemoteStreamId;
}
/**
 * Handles a PRIORITY frame: re-parents the stream within the priority tree.
 * A stream depending on itself is a protocol error.
 */
@Override
public void reprioritise(int streamId, int parentStreamId, boolean exclusive, int weight) throws Http2Exception
{
    if (streamId == parentStreamId) {
        throw new ConnectionException(
                sm.getString("upgradeHandler.dependency.invalid", getConnectionId(), Integer.valueOf(streamId)),
                Http2Error.PROTOCOL_ERROR);
    }
    Stream stream = getStream(streamId, false);
    if (stream == null) {
        // PRIORITY may reference a stream that does not exist yet.
        stream = createRemoteStream(streamId);
    }
    stream.checkState(FrameType.PRIORITY);
    AbstractStream parentStream = getStream(parentStreamId, false);
    if (parentStream == null) {
        // Unknown parent: depend directly on the connection (tree root).
        parentStream = this;
    }
    stream.rePrioritise(parentStream, exclusive, weight);
}
// Called when all header fragments have been received; dispatches the
// request to the container if the headers were accepted.
@Override
public void headersEnd(int streamId) throws ConnectionException
{
    setMaxProcessedStream(streamId);
    Stream stream = getStream(streamId, connectionState.get().isNewStreamAllowed());
    if (stream != null && stream.isActive()) {
        if (stream.receivedEndOfHeaders()) {
            processStreamOnContainerThread(stream);
        }
    }
}
// Tracks the highest stream id for which processing has started.
private void setMaxProcessedStream(int streamId)
{
    if (maxProcessedStreamId < streamId) {
        maxProcessedStreamId = streamId;
    }
}
// Handles an RST_STREAM frame from the peer.
@Override
public void reset(int streamId, long errorCode) throws Http2Exception
{
    Stream stream = getStream(streamId, true);
    stream.checkState(FrameType.RST);
    stream.receiveReset(errorCode);
}
/**
 * Applies a single setting received from the peer. A changed
 * INITIAL_WINDOW_SIZE adjusts every existing stream's window by the delta.
 */
@Override
public void setting(Setting setting, long value) throws ConnectionException
{
    // Special handling required
    if (setting == Setting.INITIAL_WINDOW_SIZE) {
        long oldValue = remoteSettings.getInitialWindowSize();
        // Do this first in case new value is invalid
        remoteSettings.set(setting, value);
        int diff = (int) (value - oldValue);
        for (Stream stream : streams.values()) {
            try {
                stream.incrementWindowSize(diff);
            } catch (Http2Exception h2e) {
                // A stream whose window overflows is closed individually.
                stream.close(new StreamException(
                        sm.getString("upgradeHandler.windowSizeTooBig", connectionId, stream.getIdentifier()),
                        h2e.getError(), stream.getIdentifier().intValue()));
            }
        }
    } else {
        remoteSettings.set(setting, value);
    }
}
// End of a SETTINGS frame: an ACK confirms our own settings; a non-ACK
// frame must itself be acknowledged on the wire.
@Override
public void settingsEnd(boolean ack) throws IOException
{
    if (ack) {
        if (!localSettings.ack()) {
            // Ack was unexpected
            log.warn(sm.getString("upgradeHandler.unexpectedAck", connectionId, getIdentifier()));
        }
    } else {
        synchronized (socketWrapper) {
            socketWrapper.write(true, SETTINGS_ACK, 0, SETTINGS_ACK.length);
            socketWrapper.flush(true);
        }
    }
}
// Delegates PING frames (requests and ACKs) to the ping manager.
@Override
public void pingReceive(byte[] payload, boolean ack) throws IOException
{
    pingManager.receivePing(payload, ack);
}
// Peer initiated shutdown; close the connection.
@Override
public void goaway(int lastStreamId, long errorCode, String debugData)
{
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("upgradeHandler.goaway.debug", connectionId, Integer.toString(lastStreamId),
                Long.toHexString(errorCode), debugData));
    }
    close();
}
// WINDOW_UPDATE: stream 0 targets the connection window, otherwise the
// identified stream's window.
@Override
public void incrementWindowSize(int streamId, int increment) throws Http2Exception
{
    if (streamId == 0) {
        incrementWindowSize(increment);
    } else {
        Stream stream = getStream(streamId, true);
        stream.checkState(FrameType.WINDOW_UPDATE);
        stream.incrementWindowSize(increment);
    }
}
// Notification that a frame was read and discarded; nothing to do.
@Override
public void swallowed(int streamId, FrameType frameType, int flags, int size) throws IOException
{
    // NO-OP.
}
/**
 * Tracks outbound PINGs and their ACKs so the connection can estimate its
 * round-trip time, and echoes client-originated PINGs back with the ACK
 * flag set.
 */
protected class PingManager {
    // Set via setInitiatePingDisabled(); suppresses locally initiated pings.
    protected boolean initiateDisabled = false;
    // 10 seconds
    protected final long pingIntervalNano = 10000000000L;
    // Sequence number stamped into each outbound ping payload.
    protected int sequence = 0;
    protected long lastPingNanoTime = Long.MIN_VALUE;
    // Pings sent but not yet acknowledged, oldest first.
    protected Queue<PingRecord> inflightPings = new ConcurrentLinkedQueue<>();
    // Most recent round-trip times (capped at 3) used for the average.
    protected Queue<Long> roundTripTimes = new ConcurrentLinkedQueue<>();
    /**
     * Check to see if a ping was sent recently and, if not, send one.
     *
     * @param force
     * Send a ping, even if one was sent recently
     *
     * @throws IOException
     * If an I/O issue prevents the ping from being sent
     */
    public void sendPing(boolean force) throws IOException
    {
        if (initiateDisabled) {
            return;
        }
        long now = System.nanoTime();
        if (force || now - lastPingNanoTime > pingIntervalNano) {
            lastPingNanoTime = now;
            byte[] payload = new byte[8];
            synchronized (socketWrapper) {
                int sentSequence = ++sequence;
                PingRecord pingRecord = new PingRecord(sentSequence, now);
                inflightPings.add(pingRecord);
                ByteUtil.set31Bits(payload, 4, sentSequence);
                socketWrapper.write(true, PING, 0, PING.length);
                socketWrapper.write(true, payload, 0, payload.length);
                socketWrapper.flush(true);
            }
        }
    }
    /**
     * Handles an inbound PING frame: an ACK feeds the round-trip-time
     * estimate, a request is echoed straight back.
     */
    public void receivePing(byte[] payload, boolean ack) throws IOException
    {
        if (ack) {
            // Extract the sequence from the payload
            int receivedSequence = ByteUtil.get31Bits(payload, 4);
            // Discard older in-flight pings whose ACKs were skipped.
            PingRecord pingRecord = inflightPings.poll();
            while (pingRecord != null && pingRecord.getSequence() < receivedSequence) {
                pingRecord = inflightPings.poll();
            }
            if (pingRecord == null) {
                // Unexpected ACK. Log it.
                // NOTE(review): despite the comment above, nothing is
                // actually logged here -- the ACK is silently dropped.
            } else {
                long roundTripTime = System.nanoTime() - pingRecord.getSentNanoTime();
                roundTripTimes.add(Long.valueOf(roundTripTime));
                while (roundTripTimes.size() > 3) {
                    // Keep only the three most recent samples.
                    roundTripTimes.poll();
                }
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("pingManager.roundTripTime", connectionId, Long.valueOf(roundTripTime)));
                }
            }
        } else {
            // Client originated ping. Echo it back.
            synchronized (socketWrapper) {
                socketWrapper.write(true, PING_ACK, 0, PING_ACK.length);
                socketWrapper.write(true, payload, 0, payload.length);
                socketWrapper.flush(true);
            }
        }
    }
    /**
     * @return the average of the retained round-trip times in nanoseconds,
     *         or 0 when none have been measured yet
     */
    public long getRoundTripTimeNano()
    {
        return (long) roundTripTimes.stream().mapToLong(x -> x.longValue()).average().orElse(0);
    }
}
/**
 * Immutable record of a single in-flight ping: the sequence number that was
 * sent and the {@code System.nanoTime()} at which it left, so round-trip
 * time can be computed when the matching ACK arrives.
 */
protected static class PingRecord {
    private final int seq;
    private final long sentAtNanos;
    public PingRecord(int sequence, long sentNanoTime) {
        seq = sequence;
        sentAtNanos = sentNanoTime;
    }
    public int getSequence()
    {
        return seq;
    }
    public long getSentNanoTime()
    {
        return sentAtNanos;
    }
}
// Connection lifecycle states. The flag records whether the peer may still
// open new streams while in that state.
private enum ConnectionState {
    NEW(true), CONNECTED(true), PAUSING(true), PAUSED(false), CLOSED(false);
    private final boolean newStreamsAllowed;
    private ConnectionState(boolean newStreamsAllowed) {
        this.newStreamsAllowed = newStreamsAllowed;
    }
    public boolean isNewStreamAllowed()
    {
        return newStreamsAllowed;
    }
}
// Abstraction over the buffers used while writing header frames, allowing
// different buffering strategies per connection.
protected static interface HeaderFrameBuffers {
    // Called before each frame is populated.
    public void startFrame();
    // Called once a frame's header and payload are complete.
    public void endFrame() throws IOException;
    // Called after the final frame of a header block.
    public void endHeaders() throws IOException;
    public byte[] getHeader();
    public ByteBuffer getPayload();
    // Called when the payload buffer is too small; implementations must
    // make a larger buffer available via getPayload().
    public void expandPayload();
}
// Default strategy: a reusable 9-byte frame header plus a heap payload
// buffer; each frame is written to the socket as soon as it completes.
private class DefaultHeaderFrameBuffers implements HeaderFrameBuffers {
    private final byte[] header;
    private ByteBuffer payload;
    public DefaultHeaderFrameBuffers(int initialPayloadSize) {
        header = new byte[9];
        payload = ByteBuffer.allocate(initialPayloadSize);
    }
    @Override
    public void startFrame()
    {
        // NO-OP
    }
    @Override
    public void endFrame() throws IOException
    {
        try {
            socketWrapper.write(true, header, 0, header.length);
            socketWrapper.write(true, payload);
            socketWrapper.flush(true);
        } catch (IOException ioe) {
            handleAppInitiatedIOException(ioe);
        }
        // Reset the payload buffer for the next frame.
        payload.clear();
    }
    @Override
    public void endHeaders()
    {
        // NO-OP
    }
    @Override
    public byte[] getHeader()
    {
        return header;
    }
    @Override
    public ByteBuffer getPayload()
    {
        return payload;
    }
    @Override
    public void expandPayload()
    {
        // NOTE(review): data already in the old buffer is discarded by this
        // reallocation -- presumably callers restart the write afterwards;
        // confirm against the calling code.
        payload = ByteBuffer.allocate(payload.capacity() * 2);
    }
}
}
|
wniter/springboot-demo
|
spring-jedis/src/main/java/com/example/spring/jedis/sharedPool/ShardedPoolSourceTemplate.java
|
<gh_stars>0
package com.example.spring.jedis.sharedPool;
import java.util.function.Function;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import redis.clients.jedis.ShardedJedis;
/**
 * Thin data-access helper around a sharded Jedis pool. Every public method
 * borrows a {@link ShardedJedis} from the pool, runs a single command and
 * returns the client to the pool, marking it broken if the command threw.
 * <p>
 * create by 尼恩 @ 疯狂创客圈
 **/
@Repository("shardedPoolSourceTemplate")
@Slf4j
public class ShardedPoolSourceTemplate {
    @Autowired
    private ShardedPoolSource<ShardedJedis> redisDataSource;

    /**
     * Borrow / execute / return template shared by every command below,
     * replacing twelve copies of identical boilerplate.
     *
     * @param defaultValue returned when no client is available or the
     *                     command throws (mirrors the per-method defaults of
     *                     the original code: {@code null}, {@code false} or
     *                     {@code 0})
     * @param action       the single Redis command to run
     * @return the command result, or {@code defaultValue} on failure
     */
    private <T> T execute(T defaultValue, Function<ShardedJedis, T> action) {
        ShardedJedis shardedJedis = redisDataSource.getRedisClient();
        if (shardedJedis == null) {
            return defaultValue;
        }
        boolean broken = false;
        try {
            return action.apply(shardedJedis);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            // Tell the pool this connection may no longer be usable.
            broken = true;
            return defaultValue;
        } finally {
            redisDataSource.returnResource(shardedJedis, broken);
        }
    }

    /**
     * Sets a single value with a TTL.
     *
     * @param key     key to set
     * @param value   value to store
     * @param seconds TTL in seconds
     * @return the Redis reply, or {@code null} on failure
     */
    public String save(String key, String value, long seconds) {
        return execute(null, jedis -> setBySeconds(jedis, key, value, seconds));
    }

    // NX sets only when the key is absent, XX only when present; EX gives
    // the TTL in seconds (PX would be milliseconds).
    private String setBySeconds(
            ShardedJedis pool,
            String key, String value,
            long seconds
    ) {
        boolean keyExist = pool.exists(key);
        if (keyExist) {
            return pool.set(key, value, "XX", "EX", seconds);
        } else {
            return pool.set(key, value, "NX", "EX", seconds);
        }
    }

    /**
     * Gets a single value.
     *
     * @param key key to read
     * @return the value, or {@code null} when missing or on failure
     */
    public String get(String key) {
        return execute(null, jedis -> jedis.get(key));
    }

    public Boolean exists(String key) {
        return execute(Boolean.FALSE, jedis -> jedis.exists(key));
    }

    public String type(String key) {
        return execute(null, jedis -> jedis.type(key));
    }

    /**
     * Expires the key after a relative delay.
     *
     * @param key     key to expire
     * @param seconds delay in seconds
     * @return the Redis reply, or {@code null} on failure
     */
    public Long expire(String key, int seconds) {
        return execute(null, jedis -> jedis.expire(key, seconds));
    }

    /**
     * Expires the key at an absolute point in time.
     *
     * @param key      key to expire
     * @param unixTime absolute unix timestamp
     * @return the Redis reply, or {@code null} on failure
     */
    public Long expireAt(String key, long unixTime) {
        return execute(null, jedis -> jedis.expireAt(key, unixTime));
    }

    public Long ttl(String key) {
        return execute(null, jedis -> jedis.ttl(key));
    }

    public boolean setbit(String key, long offset, boolean value) {
        return execute(Boolean.FALSE, jedis -> jedis.setbit(key, offset, value));
    }

    public boolean getbit(String key, long offset) {
        return execute(Boolean.FALSE, jedis -> jedis.getbit(key, offset));
    }

    public long setRange(String key, long offset, String value) {
        return execute(0L, jedis -> jedis.setrange(key, offset, value));
    }

    public String getRange(String key, long startOffset, long endOffset) {
        return execute(null, jedis -> jedis.getrange(key, startOffset, endOffset));
    }
}
|
ziyekudeng/panda-rules
|
src/main/java/org/wangfeng/panda/app/calculation/function/GetElementFunction.java
|
<reponame>ziyekudeng/panda-rules
package org.wangfeng.panda.app.calculation.function;
import org.springframework.stereotype.Component;
import org.wangfeng.panda.app.common.exception.RuleRuntimeException;
import org.wangfeng.panda.app.util.NumberUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * Rule function that fetches the element at a given index from a list.
 * <p>
 * Requirements:
 * 1. exactly two arguments
 * 2. the first argument is a {@link List}, the second a non-negative
 *    integral index (integral doubles such as {@code 2.0} are accepted,
 *    matching the validation performed below)
 * <p>
 * Result contract (kept for backward compatibility): the element itself on
 * success, {@code ""} for an out-of-range index, {@code null} for any other
 * retrieval failure.
 */
@Component
public class GetElementFunction extends BaseFunction {

    private static final String GET_ELEMENT_ERROR_MESSAGE = "获取数组中特定下标的元素的函数异常!";

    @Override
    public Object invoke(Object... objs) {
        // 1. Validate the argument count (throws on mismatch).
        checkArgsCount(2, GET_ELEMENT_ERROR_MESSAGE, objs);
        // 2. First argument must be a List, second must parse to a
        //    non-negative integral number.
        double rawIndex;
        try {
            rawIndex = Double.parseDouble(objs[1].toString());
            if (!(objs[0] instanceof List) || !NumberUtils.NonNegativeInteger(rawIndex)) {
                throw new RuleRuntimeException(GET_ELEMENT_ERROR_MESSAGE + IMPORT_UNITE_ERROR_MESSAGE);
            }
        } catch (Exception e) {
            throw new RuleRuntimeException(GET_ELEMENT_ERROR_MESSAGE + IMPORT_UNITE_ERROR_MESSAGE);
        }
        // 3. Fetch the element. Fix: the index is derived from the already
        //    validated double, so inputs such as "2.0" (which pass the check
        //    above) no longer trip Integer.valueOf's NumberFormatException
        //    and silently return null.
        try {
            List<?> list = (List<?>) objs[0];
            return list.get((int) rawIndex);
        } catch (IndexOutOfBoundsException e) {
            return "";
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Manual smoke test: index 111 is out of range, so this prints "".
     *
     * @param args unused
     */
    public static void main(String[] args) {
        List<Object> o1 = new ArrayList<>();
        o1.add(1);
        o1.add("这是第二个");
        o1.add(true);
        o1.add('f');
        Object o2 = 111;
        GetElementFunction getElementFunction = new GetElementFunction();
        System.out.println(getElementFunction.invoke(o1, o2));
    }
}
|
ty6412001/MyTestDemo
|
FreeSeeds/src/main/java/com/fsd/mvvmlight/freeseeds/topics/TopicItemViewModel.java
|
package com.fsd.mvvmlight.freeseeds.topics;
import android.content.Context;
import android.content.Intent;
import android.databinding.ObservableField;
import android.databinding.ObservableInt;
import com.fsd.mvvmlight.freeseeds.R;
import com.fsd.mvvmlight.freeseeds.entity.Topic;
import com.kelin.mvvmlight.base.ViewModel;
import com.kelin.mvvmlight.command.ReplyCommand;
import java.util.ArrayList;
/**
 * MVVM-Light item view model backing a single topic row in the topics list.
 * Created by zhangyabei on 4/5/17.
 */
public class TopicItemViewModel implements ViewModel {
    //context
    private Context context;
    //model
    public Topic topicBean;
    //field to presenter
    // Observable fields the item layout binds to.
    public final ObservableField<String> title = new ObservableField<>();
    public final ObservableField<String> user = new ObservableField<>();
    public final ObservableField<String> distance = new ObservableField<>();
    public final ObservableField<String> imageUrl = new ObservableField<>();
    public final ObservableField<String> date = new ObservableField<>();
    public TopicItemViewModel.ViewStyle viewStyle = new TopicItemViewModel.ViewStyle();
    //Use class viewStyle to wrap field which is binding to style of view
    public static class ViewStyle {
        public final ObservableInt titleTextColor = new ObservableInt();
    }
    //command
    // Click handler bound from the layout; currently a no-op (the detail
    // navigation below is commented out and never enabled here).
    public ReplyCommand itemClickCommand = new ReplyCommand(() -> {
        // this.viewStyle.titleTextColor.set(context.getResources().getColor(android.R.color.darker_gray));
        // Intent intent = new Intent(context, NewsDetailActivity.class);
        // intent.putExtra(NewsDetailActivity.EXTRA_KEY_NEWS_ID, storiesBean.getId());
        // context.startActivity(intent);
    });
    /**
     * @param context   used to resolve the default title colour
     * @param topicBean source model; title, creator name and the first
     *                  attached image URL are pushed into the observable
     *                  fields (distance and date are not populated here)
     */
    public TopicItemViewModel(Context context, Topic topicBean) {
        this.context = context;
        this.topicBean = topicBean;
        this.viewStyle.titleTextColor.set(context.getResources().getColor(android.R.color.black));
        title.set(topicBean.title);
        user.set(topicBean.userCreator.name);
        if (!topicBean.mediasHas.isEmpty())
            imageUrl.set( new ArrayList<String>(topicBean.mediasHas.get(0).urls.values()).get(0) );
    }
}
|
merico-dev/common-backend
|
dist/ae-helper/import-report/tasks/writeReportDuplicateFunctionsTask.js
|
"use strict";
// TypeScript-compiler-emitted interop helpers (this file appears to be tsc
// output -- prefer editing the .ts source). They re-export CommonJS modules
// with ES-module semantics for `import * as` / `import default`.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulates `import * as ns from "mod"` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Emulates `import mod from "mod"` for CommonJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const models_1 = require("../../../models");
const utils_1 = require("./utils");
const writeReportFunctionsTask_1 = __importDefault(require("./writeReportFunctionsTask"));
const Uuid = __importStar(require("uuid"));
const writeReportTask_1 = __importDefault(require("./writeReportTask"));
const writeEmails_1 = __importDefault(require("./writeEmails"));
// Task: persist the duplicate-function ("dryness") analysis of a report.
// Declares dependencies on the functions task (which supplies the
// CAG-node-id -> ReportFunction-id table), the report task and the email
// task.
exports.default = utils_1.newTask('WRITE_REPORT_DUPLICATE_FUNCTIONS', [
    writeReportFunctionsTask_1.default,
    writeReportTask_1.default,
    writeEmails_1.default,
], async (context, [{ cagNodeIdToFuncIdTbl }]) => {
    // Nothing to write when no dryness analysis was produced for this run.
    if (context.reportOfAll.dryness == null)
        return;
    const duplicateFuncIds = new Set();
    const groupEntities = [];
    const entities = [];
    // One ReportDuplicateGroup row per dryness group, one
    // ReportDuplicateFunction row per member function of that group.
    for (const group of context.reportOfAll.dryness.drynessGroups) {
        const groupRecord = new models_1.ReportDuplicateGroup();
        groupEntities.push(groupRecord);
        groupRecord.id = Uuid.v4();
        groupRecord.duplicate_function_number = group.drynessFuncs.length;
        groupRecord.report_id = context.reportId;
        for (const func of group.drynessFuncs) {
            const record = new models_1.ReportDuplicateFunction();
            entities.push(record);
            record.report_id = context.reportId;
            record.report_duplicate_group_id = groupRecord.id;
            record.format_email = func.email;
            record.report_function_id = cagNodeIdToFuncIdTbl.get(func.funcId);
            duplicateFuncIds.add(func.funcId);
        }
    }
    // Groups first so the per-function foreign keys resolve.
    await utils_1.batchInsertWithProgress(context, models_1.ReportDuplicateGroup, () => groupEntities);
    await utils_1.batchInsertWithProgress(context, models_1.ReportDuplicateFunction, () => entities);
    return {
        numDuplicateFunctions: duplicateFuncIds.size,
    };
});
|
go-clang/clang-v5
|
clang/declqualifierkind_gen.go
|
package clang
// #include "./clang-c/Index.h"
// #include "go-clang.h"
import "C"
import "fmt"
// 'Qualifiers' written next to the return and parameter types in Objective-C method declarations.
type DeclQualifierKind uint32
// Mirrors libclang's CXObjCDeclQualifier_* constants (generated binding).
const (
	DeclQualifier_None DeclQualifierKind = C.CXObjCDeclQualifier_None
	DeclQualifier_In = C.CXObjCDeclQualifier_In
	DeclQualifier_Inout = C.CXObjCDeclQualifier_Inout
	DeclQualifier_Out = C.CXObjCDeclQualifier_Out
	DeclQualifier_Bycopy = C.CXObjCDeclQualifier_Bycopy
	DeclQualifier_Byref = C.CXObjCDeclQualifier_Byref
	DeclQualifier_Oneway = C.CXObjCDeclQualifier_Oneway
)
// Spelling returns a human-readable "DeclQualifier=Name" string for the
// kind, or a fallback containing the raw value for unknown kinds.
func (dqk DeclQualifierKind) Spelling() string {
	switch dqk {
	case DeclQualifier_None:
		return "DeclQualifier=None"
	case DeclQualifier_In:
		return "DeclQualifier=In"
	case DeclQualifier_Inout:
		return "DeclQualifier=Inout"
	case DeclQualifier_Out:
		return "DeclQualifier=Out"
	case DeclQualifier_Bycopy:
		return "DeclQualifier=Bycopy"
	case DeclQualifier_Byref:
		return "DeclQualifier=Byref"
	case DeclQualifier_Oneway:
		return "DeclQualifier=Oneway"
	}
	return fmt.Sprintf("DeclQualifierKind unknown %d", int(dqk))
}
// String implements fmt.Stringer via Spelling.
func (dqk DeclQualifierKind) String() string {
	return dqk.Spelling()
}
|
aamin005/Firdowsspace
|
vendor/bundle/ruby/2.3.0/gems/mini_magick-3.8.0/lib/mini_magick/utilities.rb
|
require 'rbconfig'
module MiniMagick
module Utilities
class << self
# Cross-platform way of finding an executable in the $PATH.
#
# which('ruby') #=> /usr/bin/ruby
def which(cmd)
exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
exts.each do |ext|
exe = File.join(path, "#{cmd}#{ext}")
return exe if File.executable? exe
end
end
nil
end
# Finds out if the host OS is windows
def windows?
RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
end
def windows_escape(value)
# For Windows, ^ is the escape char, equivalent to \ in Unix.
escaped = value.gsub(/\^/, '^^').gsub(/>/, '^>')
if escaped !~ /^".+"$/ && escaped.include?("'")
escaped.inspect
else
escaped
end
end
end
end
end
|
NEWBEE108/java_algos
|
cracking-the-coding-interview/src/chapter_3/stack_queue/Shelter.java
|
package chapter_3.stack_queue;
import java.util.LinkedList;
/**
* class Shelter
* <p>
* Book solution implemented
*
* @param <T>
* @author <NAME>
*/
/**
 * Animal shelter queue (CtCI 3.6): animals are adopted strictly in arrival
 * order, with the option of requesting specifically the oldest dog or the
 * oldest cat. One FIFO list per species plus a monotonically increasing
 * arrival counter allows age comparison across the two lists.
 *
 * @param <T> payload type carried by each animal
 * @author <NAME>
 */
public class Shelter<T> {
    LinkedList<Dog<T>> dogsList;
    LinkedList<Cat<T>> catsList;
    int order = 0, size = 0;
    /**
     * Have two LinkedLists one for Dog and one for Cat
     */
    public Shelter() {
        dogsList = new LinkedList<Dog<T>>();
        catsList = new LinkedList<Cat<T>>();
    }
    /**
     * Adds an animal, stamping it with the next arrival order.
     *
     * @param animal dog or cat to admit; {@code null} is rejected
     * @return whether the animal was added
     */
    @SuppressWarnings("unchecked")
    public boolean enqueue(Animal<T> animal) {
        if (animal == null) {
            return false;
        }
        animal.setOrder(this.order);
        boolean added = false;
        if (animal instanceof Dog) {
            dogsList.add((Dog<T>) animal);
            added = true;
        } else if (animal instanceof Cat) {
            catsList.add((Cat<T>) animal);
            added = true;
        }
        // Fix: only advance the counters when something was actually stored;
        // previously size/order grew even for unsupported species.
        if (added) {
            this.order++;
            this.size++;
        }
        return added;
    }
    /**
     * Removes and returns the longest-waiting animal of either species, or
     * {@code null} when the shelter is empty.
     */
    public Animal<T> dequeueAny() {
        if (dogsList.isEmpty() && catsList.isEmpty()) {
            // Fix: previously size was decremented here even when both lists
            // were empty, driving it negative.
            return null;
        }
        if (dogsList.isEmpty()) {
            return dequeueCat();
        }
        if (catsList.isEmpty()) {
            return dequeueDog();
        }
        return dogsList.peek().isOlderThan(catsList.peek()) ? dequeueDog() : dequeueCat();
    }
    /**
     * Removes and returns the oldest dog, or {@code null} if there is none.
     */
    public Dog<T> dequeueDog() {
        Dog<T> dog = dogsList.poll();
        if (dog != null) {
            this.size--;
        }
        return dog;
    }
    /**
     * Removes and returns the oldest cat, or {@code null} if there is none.
     */
    public Cat<T> dequeueCat() {
        Cat<T> cat = catsList.poll();
        if (cat != null) {
            this.size--;
        }
        return cat;
    }
    /**
     * Renders the shelter as {@code [name:order, ...]} in adoption order.
     * NOTE: like the original implementation, this DRAINS the shelter as a
     * side effect of formatting it.
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("[");
        Animal<T> animal;
        while ((animal = dequeueAny()) != null) {
            builder.append(animal.getName()).append(":").append(animal.getOrder()).append(", ");
        }
        if (builder.length() > 1) {
            // Replace the trailing ", " with the closing bracket.
            builder.replace(builder.length() - 2, builder.length(), "]");
        } else {
            // Fix: an empty shelter used to throw StringIndexOutOfBounds.
            builder.append(']');
        }
        return builder.toString();
    }
}
|
qinchende/gofas
|
skill/collection/test/rw_per_test.go
|
<reponame>qinchende/gofas<gh_stars>1-10
package test
import (
"github.com/qinchende/gofast/skill/collection"
"runtime"
"testing"
"time"
)
// Cap the benchmarks at four OS threads so the go-zero and gofast
// rolling-window results are comparable across machines.
func init() {
	runtime.GOMAXPROCS(4)
}
//PS gofast\skill\collection\test> go test -bench=Go* -benchmem -benchtime=10s
//goos: windows
//goarch: amd64
//pkg: github.com/qinchende/gofast/skill/collection/test
//cpu: Intel(R) Core(TM) i7-10700 CPU @ 2.90GHz
//BenchmarkGozeroRW-4 8464778 1282 ns/op 2 B/op 0 allocs/op
//BenchmarkGofastRW-4 59552853 201.4 ns/op 0 B/op 0 allocs/op
//PASS
//ok github.com/qinchende/gofast/skill/collection/test 34.894s
// Note the performance gap above: the more buckets there are, the worse
// go-zero performs, while the per-bucket time interval makes little difference.
// Benchmark parameters: total window span, bucket count, simulated client
// concurrency, and read/write iterations per benchmark op.
const duration = time.Millisecond * 50
const winSize = 100
const concurrencyNum = 100000
const loopTimes = 1
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Performance of the go-zero sliding window
// BenchmarkGozeroRW measures read+write throughput of go-zero's
// RollingWindow under heavy parallelism.
func BenchmarkGozeroRW(b *testing.B) {
	window := collection.NewRollingWindow(winSize, duration)
	b.ReportAllocs()
	b.ResetTimer()
	// Run with many goroutines to simulate concurrent clients.
	b.SetParallelism(concurrencyNum)
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			goZeroRollingWindow(window)
		}
	})
}
// goZeroRollingWindow performs one read (Reduce over all buckets)
// followed by one write (Add) against the go-zero window.
func goZeroRollingWindow(rw *collection.RollingWindow) {
	for n := 0; n < loopTimes; n++ {
		var accepted, observed int64
		rw.Reduce(func(bucket *collection.Bucket) {
			accepted += int64(bucket.Sum)
			observed += bucket.Count
		})
		rw.Add(1)
	}
}
// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Performance of the gofast sliding window
// BenchmarkGofastRW measures read+write throughput of gofast's
// RollingWindowSdx under heavy parallelism.
func BenchmarkGofastRW(b *testing.B) {
	window := collection.NewRollingWindowSdx(winSize, duration)
	b.ReportAllocs()
	b.ResetTimer()
	// Run with many goroutines to simulate concurrent clients.
	b.SetParallelism(concurrencyNum)
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			gofastRollingWindow(window)
		}
	})
}
// gofastRollingWindow performs one read (CurrWinValue) and one write
// (Add) against the gofast window.
func gofastRollingWindow(rw *collection.RollingWindowSdx) {
	for n := 0; n < loopTimes; n++ {
		rw.CurrWinValue()
		rw.Add(1)
	}
}
|
deepakkumar96/biovalidator
|
src/main/java/org/intermine/biovalidator/validator/csv/CsvHeaderDetector.java
|
package org.intermine.biovalidator.validator.csv;
/*
* Copyright (C) 2002-2019 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import org.apache.commons.lang3.math.NumberUtils;
import org.intermine.biovalidator.parser.CsvParser;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* This header detector is a port of python standard library csv module (csv.Sniffer.has_header)'.
* <pre>
* See original python's implementation <a href="http://bit.ly/2H4ndNN" target="_blank">
* here </a>
* </pre>
* @author <NAME>
*/
public class CsvHeaderDetector
{
    /** Number of data rows sampled when voting on the presence of a header. */
    private static final int SAMPLE_ROW = 15;

    private InputStreamReader inputStreamReader;
    private boolean allowComments;
    private String delimiter;

    /**
     * Creates a detector over the given csv/tsv stream.
     *
     * @param inputStreamReader reader over the csv/tsv content
     * @param allowComments allow comments preceded with '#' or not
     * @param delimiter delimiter for the csv/tsv file
     */
    public CsvHeaderDetector(InputStreamReader inputStreamReader,
                             boolean allowComments,
                             String delimiter) {
        this.inputStreamReader = inputStreamReader;
        this.allowComments = allowComments;
        this.delimiter = delimiter;
    }

    /**
     * Tests whether the csv data has a header row. Java port of python
     * csv module's 'csv.Sniffer.has_header()' (see link at top of file).
     *
     * Approach:
     * 1. Find a column whose values are all numbers except (possibly) the
     *    first row — if the first row differs, it votes "header".
     * 2. Otherwise compare the length of each column's values; if all rows
     *    agree except the first, that also votes "header".
     *
     * @return true when the sampled rows vote that a header is present
     * @throws IOException if parsing fails
     */
    public boolean hasHeader() throws IOException {
        /*
        Tell the parser the file has no header so it won't skip the first
        line; this method needs to inspect the first line itself. Hence
        hasHeader=false in the CsvParser() constructor.
        */
        try (CsvParser parser = new CsvParser(inputStreamReader, false, allowComments, delimiter)) {
            String[] header = parser.parseNext();
            if (header == null) {
                return false; // empty input cannot have a header
            }
            int totalColumns = header.length;
            /*
            Per-column state, keyed by column index. The value is either:
            1. Boolean.TRUE — every sampled row so far is numeric, or
            2. an Integer  — the common length of the column's string values.
            A column with inconsistent state is removed from consideration.
            */
            Map<Integer, Object> columnTypes = new HashMap<>();
            for (int i = 0; i < totalColumns; i++) {
                columnTypes.put(i, null);
            }
            int checked = 0;
            while (parser.hasNext() && checked < SAMPLE_ROW) {
                String[] row = parser.parseNext();
                if (row.length != totalColumns) {
                    continue; // skip ragged rows
                }
                // Copy keys to avoid ConcurrentModificationException
                Set<Integer> columnTypesKeys = new HashSet<>(columnTypes.keySet());
                columnTypesKeys.forEach(i -> {
                    Object thisType;
                    if (NumberUtils.isCreatable(row[i])) { // column-value is a number
                        thisType = Boolean.TRUE;
                    } else {
                        // non-numeric value: record its length instead
                        thisType = row[i] == null ? 0 : row[i].length();
                    }
                    // Bug fix: compare boxed values with equals(), not '!='.
                    // 'thisType' is a freshly boxed Integer for lengths, and
                    // reference equality only holds inside the JVM's small
                    // integer cache (-128..127); with '!=' any column whose
                    // values are longer than 127 chars was wrongly removed.
                    if (!thisType.equals(columnTypes.get(i))) {
                        if (columnTypes.get(i) == null) {
                            columnTypes.put(i, thisType); // first observation
                        } else {
                            // type is inconsistent, drop the column
                            columnTypes.remove(i);
                        }
                    }
                });
                checked++;
            }
            /*
            Compare the surviving per-column evidence against the first row
            and "vote" on whether it is a header.
            */
            int hasHeader = 0;
            for (Map.Entry<Integer, Object> entry : columnTypes.entrySet()) {
                int colKey = entry.getKey();
                Object colVal = entry.getValue();
                if (colVal instanceof Integer) { // comparing by value length
                    int colValInt = (Integer) colVal;
                    if (header[colKey] != null && header[colKey].length() == colValInt) {
                        // first row looks like the data rows: vote against
                        hasHeader--;
                    } else {
                        hasHeader++; // first row differs: vote for a header
                    }
                } else if (colVal instanceof Boolean) {
                    // all sampled rows for this column are numeric
                    if (NumberUtils.isCreatable(header[colKey])) {
                        hasHeader--; // first row numeric too: vote against
                    } else {
                        hasHeader++; // first row non-numeric: vote for
                    }
                }
            }
            return hasHeader > 0;
        }
    }
}
/*
var sample = [
['Usern',2323,'First_name', 'Last_Name'],
['booker12',9012,'Rachel', 'Booker'],
['grey07',2070,'Laura', 'Grey'],
['johnson81',4081,'Craig', 'Johnson'],
['jenkins46',9346,'Mary', 'Jenkins'],
['smith79',5079,'Jamie', 'Smith']
];
*/
|
cwolsen7905/UbixOS
|
doc/html/db/d66/stat_8c.js
|
// Doxygen-generated navigation data for stat.c: each entry maps a
// documented symbol name to its anchor in the generated HTML page.
var stat_8c =
[
    [ "_sys_stat", "db/d66/stat_8c.html#a450c63b2bbf7f46c54846f2b8a565587", null ],
    [ "sys_fstat", "db/d66/stat_8c.html#a20c64cd904d02f5580b4de7c194b9357", null ],
    [ "sys_fstatat", "db/d66/stat_8c.html#aadc04b2a7dff6c7f77610e7231d66dd9", null ],
    [ "sys_fstatfs", "db/d66/stat_8c.html#a30aafea2c6eb09289d163ff81fa8f26b", null ],
    [ "sys_lstat", "db/d66/stat_8c.html#a4b775cc1ea799b4c54da7904c03756b5", null ],
    [ "sys_stat", "db/d66/stat_8c.html#a1c419cd11d05a0bd998b4cc0de4ee142", null ],
    [ "sys_statfs", "db/d66/stat_8c.html#a6194ac39a3ce5a21050d2a9a3117b433", null ]
];
|
jkdubr/Proj4
|
Pod/Classes/Projection/MOBProjectionEPSG23838.h
|
#import "MOBProjection.h"
// Map projection for EPSG:23838 — concrete MOBProjection subclass with no
// additional interface of its own; behavior presumably lives in the
// implementation file (TODO confirm).
@interface MOBProjectionEPSG23838 : MOBProjection

@end
|
ufoscout/jpattern
|
core/src/main/java/com/jpattern/service/log/reader/MessageFilter.java
|
package com.jpattern.service.log.reader;
/**
*
* @author <NAME> 10/ago/2009
*
*/
public class MessageFilter implements IFilter {

    private static final long serialVersionUID = 1L;

    /** Substring a message must contain in order to pass this filter. */
    private String _filterString;

    /**
     * Creates a filter that passes only messages containing the given
     * substring.
     *
     * @param aFilterString substring to look for in each message
     */
    public MessageFilter(String aFilterString){
        _filterString = aFilterString;
    }

    /**
     * Returns the message unchanged when it contains the filter string,
     * otherwise an empty placeholder message.
     */
    public IQueueMessage what(IQueueMessage aMessage) {
        if (aMessage.getMessage().contains(_filterString)) {
            return aMessage;
        }
        return new QueueMessage(0, "", false);
    }
}
|
haole420/dex-api
|
src/main/java/org/coinex/dex/client/api/TransactionsApi.java
|
/*
* CET-Lite for CoinEx Chain
* A REST interface for state queries, transaction generation and broadcasting.
*
* OpenAPI spec version: 3.0
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package org.coinex.dex.client.api;
import org.coinex.dex.client.ApiCallback;
import org.coinex.dex.client.ApiClient;
import org.coinex.dex.client.ApiException;
import org.coinex.dex.client.ApiResponse;
import org.coinex.dex.client.Configuration;
import org.coinex.dex.client.Pair;
import org.coinex.dex.client.ProgressRequestBody;
import org.coinex.dex.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import org.coinex.dex.client.model.BroadcastTxCommitResult;
import org.coinex.dex.client.model.InlineResponse2003;
import org.coinex.dex.client.model.PaginatedQueryTxs;
import org.coinex.dex.client.model.Tx;
import org.coinex.dex.client.model.TxBroadcast;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class TransactionsApi {
    // Client used for HTTP transport, serialization and header negotiation.
    private ApiClient apiClient;

    /** Creates the API bound to the globally configured default client. */
    public TransactionsApi() {
        this(Configuration.getDefaultApiClient());
    }

    /** Creates the API bound to the given client. */
    public TransactionsApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /** Returns the underlying HTTP/serialization client. */
    public ApiClient getApiClient() {
        return apiClient;
    }

    /** Replaces the underlying HTTP/serialization client. */
    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
/**
* Build call for broadcastTx
* @param txBroadcast The tx must be a signed StdTx. The supported broadcast modes include `\"block\"`(return after tx commit), `\"sync\"`(return after CheckTx) and `\"async\"`(return right away). (required)
* @param progressListener Progress listener
* @param progressRequestListener Progress request listener
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*/
    public com.squareup.okhttp.Call broadcastTxCall(TxBroadcast txBroadcast, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // The signed tx is serialized as the POST body.
        Object localVarPostBody = txBroadcast;

        // create path and map variables
        String localVarPath = "/txs";

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        // Negotiate JSON for both the response (Accept) and request body.
        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): this adds a new interceptor to the shared OkHttp
            // client on every call with a listener and never removes it, so
            // interceptors accumulate over the client's lifetime — standard
            // swagger-codegen behavior, but confirm before changing.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    // Wrap the response body so reads report download progress.
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] {  };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call broadcastTxValidateBeforeCall(TxBroadcast txBroadcast, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// verify the required parameter 'txBroadcast' is set
if (txBroadcast == null) {
throw new ApiException("Missing the required parameter 'txBroadcast' when calling broadcastTx(Async)");
}
com.squareup.okhttp.Call call = broadcastTxCall(txBroadcast, progressListener, progressRequestListener);
return call;
}
/**
* Broadcast a signed tx
* Broadcast a signed tx to a full node
* @param txBroadcast The tx must be a signed StdTx. The supported broadcast modes include `\"block\"`(return after tx commit), `\"sync\"`(return after CheckTx) and `\"async\"`(return right away). (required)
* @return BroadcastTxCommitResult
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public BroadcastTxCommitResult broadcastTx(TxBroadcast txBroadcast) throws ApiException {
ApiResponse<BroadcastTxCommitResult> resp = broadcastTxWithHttpInfo(txBroadcast);
return resp.getData();
}
/**
* Broadcast a signed tx
* Broadcast a signed tx to a full node
* @param txBroadcast The tx must be a signed StdTx. The supported broadcast modes include `\"block\"`(return after tx commit), `\"sync\"`(return after CheckTx) and `\"async\"`(return right away). (required)
* @return ApiResponse<BroadcastTxCommitResult>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApiResponse<BroadcastTxCommitResult> broadcastTxWithHttpInfo(TxBroadcast txBroadcast) throws ApiException {
com.squareup.okhttp.Call call = broadcastTxValidateBeforeCall(txBroadcast, null, null);
Type localVarReturnType = new TypeToken<BroadcastTxCommitResult>(){}.getType();
return apiClient.execute(call, localVarReturnType);
}
/**
* Broadcast a signed tx (asynchronously)
* Broadcast a signed tx to a full node
* @param txBroadcast The tx must be a signed StdTx. The supported broadcast modes include `\"block\"`(return after tx commit), `\"sync\"`(return after CheckTx) and `\"async\"`(return right away). (required)
* @param callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*/
    public com.squareup.okhttp.Call broadcastTxAsync(TxBroadcast txBroadcast, final ApiCallback<BroadcastTxCommitResult> callback) throws ApiException {
        // Adapt the high-level ApiCallback to the low-level progress listeners
        // consumed by the call builder (only when a callback was supplied).
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        // Validate, enqueue asynchronously, and hand the live call back so the
        // caller can cancel it.
        com.squareup.okhttp.Call call = broadcastTxValidateBeforeCall(txBroadcast, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<BroadcastTxCommitResult>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
/**
* Build call for encodeTx
* @param tx The tx to encode (required)
* @param progressListener Progress listener
* @param progressRequestListener Progress request listener
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*/
    public com.squareup.okhttp.Call encodeTxCall(Tx tx, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // The tx (signed or not) is serialized as the POST body.
        Object localVarPostBody = tx;

        // create path and map variables
        String localVarPath = "/txs/encode";

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        // Negotiate JSON for both the response (Accept) and request body.
        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): interceptors accumulate on the shared client; see
            // the matching note in broadcastTxCall.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    // Wrap the response body so reads report download progress.
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] {  };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call encodeTxValidateBeforeCall(Tx tx, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// verify the required parameter 'tx' is set
if (tx == null) {
throw new ApiException("Missing the required parameter 'tx' when calling encodeTx(Async)");
}
com.squareup.okhttp.Call call = encodeTxCall(tx, progressListener, progressRequestListener);
return call;
}
/**
* Encode a transaction to the Amino wire format
* Encode a transaction (signed or not) from JSON to base64-encoded Amino serialized bytes
* @param tx The tx to encode (required)
* @return InlineResponse2003
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public InlineResponse2003 encodeTx(Tx tx) throws ApiException {
ApiResponse<InlineResponse2003> resp = encodeTxWithHttpInfo(tx);
return resp.getData();
}
/**
* Encode a transaction to the Amino wire format
* Encode a transaction (signed or not) from JSON to base64-encoded Amino serialized bytes
* @param tx The tx to encode (required)
* @return ApiResponse<InlineResponse2003>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApiResponse<InlineResponse2003> encodeTxWithHttpInfo(Tx tx) throws ApiException {
com.squareup.okhttp.Call call = encodeTxValidateBeforeCall(tx, null, null);
Type localVarReturnType = new TypeToken<InlineResponse2003>(){}.getType();
return apiClient.execute(call, localVarReturnType);
}
/**
* Encode a transaction to the Amino wire format (asynchronously)
* Encode a transaction (signed or not) from JSON to base64-encoded Amino serialized bytes
* @param tx The tx to encode (required)
* @param callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*/
    public com.squareup.okhttp.Call encodeTxAsync(Tx tx, final ApiCallback<InlineResponse2003> callback) throws ApiException {
        // Adapt the high-level ApiCallback to the low-level progress listeners
        // consumed by the call builder (only when a callback was supplied).
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        // Validate, enqueue asynchronously, and hand the live call back so the
        // caller can cancel it.
        com.squareup.okhttp.Call call = encodeTxValidateBeforeCall(tx, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<InlineResponse2003>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
/**
* Build call for getTxByHash
* @param hash Tx hash (required)
* @param progressListener Progress listener
* @param progressRequestListener Progress request listener
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*/
    public com.squareup.okhttp.Call getTxByHashCall(String hash, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // GET request: no body.
        Object localVarPostBody = null;

        // create path and map variables; the hash is URL-escaped into the path.
        String localVarPath = "/txs/{hash}"
            .replaceAll("\\{" + "hash" + "\\}", apiClient.escapeString(hash.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        // No request body, so no content types are offered.
        final String[] localVarContentTypes = {

        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): interceptors accumulate on the shared client; see
            // the matching note in broadcastTxCall.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    // Wrap the response body so reads report download progress.
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] {  };
        return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call getTxByHashValidateBeforeCall(String hash, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// verify the required parameter 'hash' is set
if (hash == null) {
throw new ApiException("Missing the required parameter 'hash' when calling getTxByHash(Async)");
}
com.squareup.okhttp.Call call = getTxByHashCall(hash, progressListener, progressRequestListener);
return call;
}
/**
* Get a Tx by hash
*
* @param hash Tx hash (required)
* @return BroadcastTxCommitResult
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public BroadcastTxCommitResult getTxByHash(String hash) throws ApiException {
ApiResponse<BroadcastTxCommitResult> resp = getTxByHashWithHttpInfo(hash);
return resp.getData();
}
/**
* Get a Tx by hash
*
* @param hash Tx hash (required)
* @return ApiResponse<BroadcastTxCommitResult>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApiResponse<BroadcastTxCommitResult> getTxByHashWithHttpInfo(String hash) throws ApiException {
com.squareup.okhttp.Call call = getTxByHashValidateBeforeCall(hash, null, null);
Type localVarReturnType = new TypeToken<BroadcastTxCommitResult>(){}.getType();
return apiClient.execute(call, localVarReturnType);
}
/**
* Get a Tx by hash (asynchronously)
*
* @param hash Tx hash (required)
* @param callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*/
    public com.squareup.okhttp.Call getTxByHashAsync(String hash, final ApiCallback<BroadcastTxCommitResult> callback) throws ApiException {
        // Adapt the high-level ApiCallback to the low-level progress listeners
        // consumed by the call builder (only when a callback was supplied).
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        // Validate, enqueue asynchronously, and hand the live call back so the
        // caller can cancel it.
        com.squareup.okhttp.Call call = getTxByHashValidateBeforeCall(hash, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<BroadcastTxCommitResult>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
/**
* Build call for searchTx
* @param messageAction transaction events such as 'message.action=send' which results in the following endpoint: 'GET /txs?message.action=send' (optional)
* @param messageSender transaction tags with sender: 'GET /txs?message.action=send&message.sender=cosmos16xyempempp92x9hyzz9wrgf94r6j9h5f06pxxv' (optional)
* @param page Page number (optional)
* @param limit Maximum number of items per page (optional)
* @param progressListener Progress listener
* @param progressRequestListener Progress request listener
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*/
    public com.squareup.okhttp.Call searchTxCall(String messageAction, String messageSender, Integer page, Integer limit, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // GET request: no body; filters are passed as query parameters.
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/txs";

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
        // Every filter is optional; only non-null values become query params.
        if (messageAction != null)
        localVarQueryParams.addAll(apiClient.parameterToPair("message.action", messageAction));
        if (messageSender != null)
        localVarQueryParams.addAll(apiClient.parameterToPair("message.sender", messageSender));
        if (page != null)
        localVarQueryParams.addAll(apiClient.parameterToPair("page", page));
        if (limit != null)
        localVarQueryParams.addAll(apiClient.parameterToPair("limit", limit));

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        // No request body, so no content types are offered.
        final String[] localVarContentTypes = {

        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): interceptors accumulate on the shared client; see
            // the matching note in broadcastTxCall.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    // Wrap the response body so reads report download progress.
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] {  };
        return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call searchTxValidateBeforeCall(String messageAction, String messageSender, Integer page, Integer limit, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
com.squareup.okhttp.Call call = searchTxCall(messageAction, messageSender, page, limit, progressListener, progressRequestListener);
return call;
}
/**
* Search transactions
* Search transactions by events.
* @param messageAction transaction events such as 'message.action=send' which results in the following endpoint: 'GET /txs?message.action=send' (optional)
* @param messageSender transaction tags with sender: 'GET /txs?message.action=send&message.sender=cosmos16xyempempp92x9hyzz9wrgf94r6j9h5f06pxxv' (optional)
* @param page Page number (optional)
* @param limit Maximum number of items per page (optional)
* @return PaginatedQueryTxs
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public PaginatedQueryTxs searchTx(String messageAction, String messageSender, Integer page, Integer limit) throws ApiException {
ApiResponse<PaginatedQueryTxs> resp = searchTxWithHttpInfo(messageAction, messageSender, page, limit);
return resp.getData();
}
/**
 * Search transactions
 * Search transactions by events, returning the full HTTP response info.
 * @param messageAction transaction events such as &#39;message.action=send&#39; which results in the following endpoint: &#39;GET /txs?message.action=send&#39; (optional)
 * @param messageSender transaction tags with sender: &#39;GET /txs?message.action=send&amp;message.sender=cosmos16xyempempp92x9hyzz9wrgf94r6j9h5f06pxxv&#39; (optional)
 * @param page Page number (optional)
 * @param limit Maximum number of items per page (optional)
 * @return ApiResponse&lt;PaginatedQueryTxs&gt;
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<PaginatedQueryTxs> searchTxWithHttpInfo(String messageAction, String messageSender, Integer page, Integer limit) throws ApiException {
    // Deserialization target for the response body.
    Type returnType = new TypeToken<PaginatedQueryTxs>(){}.getType();
    com.squareup.okhttp.Call httpCall = searchTxValidateBeforeCall(messageAction, messageSender, page, limit, null, null);
    return apiClient.execute(httpCall, returnType);
}
/**
 * Search transactions (asynchronously)
 * Search transactions by events; the result is delivered via the callback.
 * @param messageAction transaction events such as &#39;message.action=send&#39; which results in the following endpoint: &#39;GET /txs?message.action=send&#39; (optional)
 * @param messageSender transaction tags with sender: &#39;GET /txs?message.action=send&amp;message.sender=cosmos16xyempempp92x9hyzz9wrgf94r6j9h5f06pxxv&#39; (optional)
 * @param page Page number (optional)
 * @param limit Maximum number of items per page (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call searchTxAsync(String messageAction, String messageSender, Integer page, Integer limit, final ApiCallback<PaginatedQueryTxs> callback) throws ApiException {
    ProgressResponseBody.ProgressListener downloadListener = null;
    ProgressRequestBody.ProgressRequestListener uploadListener = null;
    if (callback != null) {
        // Bridge the transport-level progress hooks to the caller-supplied callback.
        downloadListener = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        uploadListener = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    com.squareup.okhttp.Call httpCall = searchTxValidateBeforeCall(messageAction, messageSender, page, limit, downloadListener, uploadListener);
    Type returnType = new TypeToken<PaginatedQueryTxs>(){}.getType();
    apiClient.executeAsync(httpCall, returnType, callback);
    return httpCall;
}
}
|
xcesiv/pqvid
|
src/main/java/io/kamax/matrix/json/event/MatrixJsonDirectEvent.java
|
/*
* matrix-java-sdk - Matrix Client SDK for Java
* Copyright (C) 2018 <NAME>
*
* https://www.kamax.io/
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.kamax.matrix.json.event;
import com.google.gson.JsonObject;
import io.kamax.matrix.MatrixID;
import io.kamax.matrix._MatrixID;
import io.kamax.matrix.event._DirectEvent;
import io.kamax.matrix.json.GsonUtil;
import io.kamax.matrix.json.InvalidJsonException;
import org.apache.commons.lang3.StringUtils;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JSON-backed implementation of a Matrix direct-chat event: the content maps
 * each Matrix ID to a list of strings (presumably room IDs, per the
 * {@code m.direct} event spec — the code itself only treats them as strings).
 */
public class MatrixJsonDirectEvent extends MatrixJsonEphemeralEvent implements _DirectEvent {

    // Populated once in the constructor and never mutated afterwards, so the
    // reference can (and should) be final.
    private final Map<_MatrixID, List<String>> mappings = new HashMap<>();

    /**
     * Parse a direct event from its raw JSON form.
     *
     * @param obj the raw event JSON
     * @throws InvalidJsonException if the event type does not match
     *         {@code Type}, or any content value is not a JSON array
     */
    public MatrixJsonDirectEvent(JsonObject obj) {
        super(obj);
        if (!StringUtils.equals(Type, getType())) { // FIXME check should be done in the abstract class
            throw new InvalidJsonException("Type is not " + Type);
        }
        // Each content entry is "<matrix id>": [<string>, ...].
        getObj("content").entrySet().forEach(entry -> {
            if (!entry.getValue().isJsonArray()) {
                throw new InvalidJsonException("Content key " + entry.getKey() + " is not an array");
            }
            _MatrixID id = MatrixID.asAcceptable(entry.getKey());
            mappings.put(id, GsonUtil.asList(entry.getValue().getAsJsonArray(), String.class));
        });
    }

    @Override
    public Map<_MatrixID, List<String>> getMappings() {
        // Read-only view: callers must not be able to alter the parsed state.
        return Collections.unmodifiableMap(mappings);
    }
}
|
boxingbeetle/softfab
|
tests/unit/test_resultlib.py
|
# SPDX-License-Identifier: BSD-3-Clause
"""Test result storage and processing functionality."""
from pytest import fixture, raises
from softfab.resultlib import ResultStorage
@fixture
def resultStorage(tmp_path):
    """Provide a ResultStorage backed by a per-test temporary directory."""
    storage = ResultStorage(tmp_path)
    return storage
# Test data that can be used by various test cases:
TASK_NAME = 'testtask'  # task name used as the storage namespace in putData/getCustomData
RUN_ID = 'faster'  # run ID for tests that store data for a single run
KEY = 'dawn'  # custom data key used by single-key tests
NR_RUNS = 50  # number of runs/keys generated by the multi-run tests
def testResultsPutGet(resultStorage):
    """Test whether data can be stored and retrieved."""

    def valueFunc(index):
        return f'value{index:02d}'

    # Store one value per run.
    runIds = [f'run{index:02d}' for index in range(NR_RUNS)]
    for index, runId in enumerate(runIds):
        resultStorage.putData(TASK_NAME, runId, {KEY: valueFunc(index)})

    # Retrieve everything back and verify each (runId, value) pair.
    foundIds = []
    for runId, value in resultStorage.getCustomData(TASK_NAME, runIds, KEY):
        assert runId.startswith('run')
        index = int(runId[3:])
        assert 0 <= index < NR_RUNS
        assert value == valueFunc(index)
        foundIds.append(runId)
    # Every stored run must be reported exactly once.
    assert sorted(foundIds) == sorted(runIds)
def testResultsInvalidKey(resultStorage):
    """Test treatment of invalid keys."""
    # TODO: Maybe we need more thought about what should be valid keys.
    invalidKeys = ('../abc', '')
    for key in invalidKeys:
        # Storing under an invalid key must be rejected...
        with raises(KeyError):
            resultStorage.putData(TASK_NAME, RUN_ID, {key: 'dummy'})
        # ...and querying it must yield nothing.
        assert not list(resultStorage.getCustomData(TASK_NAME, [RUN_ID], key))
def testResultsReplace(resultStorage):
    """Check that new data replaces old data."""
    # Write twice under the same key; the second write must win.
    for value in ('old', 'new'):
        resultStorage.putData(TASK_NAME, RUN_ID, {KEY: value})
    results = resultStorage.getCustomData(TASK_NAME, [RUN_ID], KEY)
    assert list(results) == [(RUN_ID, 'new')]
def testResultsAdd(resultStorage):
    """Check that new data with different keys is added to old data."""
    resultStorage.putData(TASK_NAME, RUN_ID, {'oldkey': 'old'})
    resultStorage.putData(TASK_NAME, RUN_ID, {'newkey': 'new'})
    # Both keys must be retrievable after the second put.
    for key, expected in (('oldkey', 'old'), ('newkey', 'new')):
        results = resultStorage.getCustomData(TASK_NAME, [RUN_ID], key)
        assert list(results) == [(RUN_ID, expected)]
def testResultsListKeys(resultStorage):
    """Tests listing the keys that exist for a task name."""
    # For each run index, store the keys whose number is a multiple of it.
    for index in range(2, NR_RUNS):
        data = {
            f'key{key:02d}': 'dummy'
            for key in range(2, NR_RUNS)
            if key % index == 0
        }
        resultStorage.putData(TASK_NAME, f'run{index:02d}', data)
    # Since N % N == 0 for every N, each key in [2, NR_RUNS) is stored at
    # least once (by the run whose index equals the key number).
    expected = {f'key{key:02d}' for key in range(2, NR_RUNS)}
    assert resultStorage.getCustomKeys(TASK_NAME) == expected
def testResultsListKeysNone(resultStorage):
    """Tests listing the keys if no data is stored for a task name."""
    keys = resultStorage.getCustomKeys(TASK_NAME)
    assert keys == set()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.