repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
skangg/opal
|
lib/opal/nodes/case.rb
|
<reponame>skangg/opal
require 'opal/nodes/base'
module Opal
module Nodes
# Compiles a Ruby +case+ expression into a JavaScript if/else chain.
#
# With a condition (`case x ... end`) the condition is evaluated once into
# a `$case` local and each `when` clause tests against it; a bare `case`
# (no condition) tests each clause's own truthiness instead.
class CaseNode < Base
  handle :case
  children :condition

  # Emits the full if/else chain. When the case is used as an expression
  # (not a bare statement) the chain is wrapped in an IIFE so it yields a
  # usable value, and every branch is made to return.
  def compile
    handled_else = false
    compiler.in_case do
      if condition
        case_stmt[:cond] = true
        add_local '$case'
        push "$case = ", expr(condition), ";"
      end
      case_parts.each_with_index do |wen, idx|
        if wen and wen.type == :when
          compiler.returns(wen) if needs_closure?
          # Chain subsequent clauses onto the previous `if`.
          push "else " unless idx == 0
          push stmt(wen)
        elsif wen # s(:else)
          handled_else = true
          wen = compiler.returns(wen) if needs_closure?
          push "else {", stmt(wen), "}"
        end
      end
      # if we are having a closure, we must return a usable value
      if needs_closure? and !handled_else
        push "else { return nil }"
      end
      wrap '(function() {', '})()' if needs_closure?
    end
  end

  # A closure (IIFE) is needed whenever the case is an expression rather
  # than a statement.
  def needs_closure?
    !stmt?
  end

  # Everything after the condition child: the `when` clauses plus an
  # optional trailing else body.
  def case_parts
    children[1..-1]
  end

  # Per-compilation shared case state (e.g. whether a condition exists).
  def case_stmt
    compiler.case_stmt
  end
end
# Compiles a single `when` clause of a case expression into a JavaScript
# `if (...) { ... }` fragment (CaseNode supplies the chaining `else`).
class WhenNode < Base
  handle :when
  children :whens, :body

  # Emits the clause's checks (joined with ||) followed by its body.
  def compile
    push "if ("
    when_checks.each_with_index do |check, idx|
      push ' || ' unless idx == 0
      if check.type == :splat
        # Splat check: test $case against every element of the splatted
        # array inside an IIFE that returns true on the first match.
        push "(function($splt) { for (var i = 0; i < $splt.length; i++) {"
        push "if ($splt[i]['$===']($case)) { return true; }"
        push "} return false; })(", expr(check[1]), ")"
      else
        if case_stmt[:cond]
          # `case x; when check` compiles to `check === $case`.
          call = s(:call, check, :===, s(:arglist, s(:js_tmp, '$case')))
          push expr(call)
        else
          # Bare `case`: each check stands alone as a truthiness test.
          push js_truthy(check)
        end
      end
    end
    push ") {", process(body_code, @level), "}"
  end

  # The expressions tested by this clause.
  def when_checks
    whens.children
  end

  # Shared case-compilation state from the compiler.
  def case_stmt
    compiler.case_stmt
  end

  # Clause body, defaulting to nil for an empty `when`.
  def body_code
    body || s(:nil)
  end
end
end
end
|
audaciouscode/Shion.app
|
RemoteBuddyManager.h
|
//
// RemoteBuddyManager.h
// Shion
//
// Created by <NAME> on 5/11/09.
// Copyright 2009 Audacious Software. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "BehaviourBridgeReceptor.h"
// Manager object holding the bridge receptor used to integrate Shion with
// Remote Buddy. This header only declares storage; behavior lives in the
// implementation file — presumably receptor wiring, TODO confirm there.
@interface RemoteBuddyManager : NSObject
{
	BehaviourBridgeReceptor * behaviourBridgeReceptor;	// bridge event receiver (ownership semantics not visible here)
}
@end
|
infiniteblock/CalemiUtils-1.12.2
|
src/main/java/calemiutils/tileentity/TileEntityBank.java
|
package calemiutils.tileentity;
import calemiutils.config.CUConfig;
import calemiutils.gui.GuiBank;
import calemiutils.inventory.ContainerBank;
import calemiutils.item.ItemCurrency;
import calemiutils.security.ISecurity;
import calemiutils.security.SecurityProfile;
import calemiutils.tileentity.base.*;
import calemiutils.util.Location;
import calemiutils.util.VeinScan;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.util.EnumFacing;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import java.util.ArrayList;
import java.util.List;
/**
 * Bank tile entity: periodically scans its attached network for currency
 * units, claims unowned ones, and converts currency items placed in its
 * input slot into stored currency (capped by the configured maximum).
 */
public class TileEntityBank extends TileEntityInventoryBase implements ITileEntityGuiHandler, ICurrencyNetworkBank, ISecurity {

    /** Locations of every ICurrencyNetworkUnit found on the last network scan. */
    public List<Location> connectedUnits = new ArrayList<>();
    /** Currency currently held; never exceeds getMaxCurrency() (see setCurrency). */
    public int storedCurrency = 0;
    private final SecurityProfile profile = new SecurityProfile();
    private VeinScan scan;

    public TileEntityBank() {
        // Slots 0 and 1 accept input (also via sides) and allow extraction.
        setInputSlots(0, 1);
        setSideInputSlots(0, 1);
        setExtractSlots(0, 1);
    }

    @Override
    public void update() {
        // Lazily create the scanner once this tile knows its own location.
        if (getLocation() != null && scan == null) {
            scan = new VeinScan(getLocation());
        }
        if (scan != null) {
            // Re-scan the network every 40 ticks (2 seconds at 20 TPS).
            if (world.getWorldTime() % 40 == 0) {
                connectedUnits.clear();
                boolean foundAnotherBank = false;
                scan.reset();
                scan.startNetworkScan(getConnectedDirections());
                for (Location location : scan.buffer) {
                    // A second bank on the same network disables this one (below).
                    if (!location.equals(getLocation()) && location.getTileEntity() instanceof TileEntityBank) {
                        foundAnotherBank = true;
                    }
                    if (location.getTileEntity() instanceof ICurrencyNetworkUnit) {
                        ICurrencyNetworkUnit unit = (ICurrencyNetworkUnit) location.getTileEntity();
                        connectedUnits.add(location);
                        // Claim units that are not yet attached to any bank.
                        if (unit.getBankLocation() == null) {
                            unit.setBankLocation(getLocation());
                        }
                    }
                }
                enable = !foundAnotherBank;
            }
        }
        // Server side only: convert currency items in slot 0 into stored currency.
        if (!world.isRemote) {
            if (getStackInSlot(0) != null && getStackInSlot(0).getItem() instanceof ItemCurrency) {
                int amountToAdd = ((ItemCurrency) getStackInSlot(0).getItem()).value;
                int stackSize = 0;
                // Count how many items of the stack fit under the cap; amountToAdd
                // accumulates so each check covers the running total, not one item.
                for (int i = 0; i < getStackInSlot(0).getCount(); i++) {
                    if (canAddAmount(amountToAdd)) {
                        stackSize++;
                        amountToAdd += ((ItemCurrency) getStackInSlot(0).getItem()).value;
                    }
                }
                if (stackSize != 0) {
                    addCurrency(stackSize * ((ItemCurrency) getStackInSlot(0).getItem()).value);
                    decrStackSize(0, stackSize);
                }
            }
        }
    }

    /** True if adding the given amount would stay within the configured cap. */
    private boolean canAddAmount(int amount) {
        int storedAmount = storedCurrency;
        return storedAmount + amount <= getMaxCurrency();
    }

    /** Adds currency (clamped by setCurrency) and marks the tile for sync. */
    public void addCurrency(int amount) {
        markForUpdate();
        setCurrency(storedCurrency + amount);
    }

    @Override
    public Container getTileContainer(EntityPlayer player) {
        return new ContainerBank(player, this);
    }

    @Override
    @SideOnly(Side.CLIENT)
    public GuiContainer getTileGuiContainer(EntityPlayer player) {
        return new GuiBank(player, this);
    }

    // Two slots: see constructor for their roles.
    @Override
    public int getSizeInventory() {
        return 2;
    }

    @Override
    public int getStoredCurrency() {
        return storedCurrency;
    }

    /** Sets stored currency, clamping to getMaxCurrency(). */
    @Override
    public void setCurrency(int amount) {
        int setAmount = amount;
        if (amount > getMaxCurrency()) {
            setAmount = getMaxCurrency();
        }
        storedCurrency = setAmount;
    }

    @Override
    public SecurityProfile getSecurityProfile() {
        return profile;
    }

    // Banks connect to the network on all six faces.
    @Override
    public EnumFacing[] getConnectedDirections() {
        return EnumFacing.VALUES;
    }

    @Override
    public int getMaxCurrency() {
        return CUConfig.misc.bankCurrencyCapacity;
    }
}
|
jjchiw/hms-flutter-plugin
|
flutter-hms-dtm/android/src/main/java/com/huawei/hms/flutter/dtm/interfaces/CustomTag.java
|
/*
Copyright 2020. Huawei Technologies Co., Ltd. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.huawei.hms.flutter.dtm.interfaces;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import com.huawei.hms.dtm.ICustomTag;
import com.huawei.hms.flutter.dtm.DTMPlugin;
import com.huawei.hms.flutter.dtm.logger.HMSLogger;
import java.util.Map;
import io.flutter.plugin.common.MethodChannel;
/**
 * ICustomTag implementation that forwards DTM custom-tag payloads to the
 * Flutter side over the plugin's MethodChannel, wrapping the call in
 * HMSLogger method-execution timing when a Context is available.
 */
public class CustomTag implements ICustomTag {
    private static final String TAG = "CustomTag";
    // First registered channel; assumes DTMPlugin.CHANNELS is populated
    // before this class is instantiated — TODO confirm against plugin init.
    final MethodChannel channel = DTMPlugin.CHANNELS.get(0);

    @Override
    public void call(final Map<String, Object> map) {
        Context context = DTMPlugin.getContext();
        // Logger calls are guarded: the context may be null before attach.
        if (context != null) {
            HMSLogger.getInstance(context).startMethodExecutionTimer(TAG);
        }
        invokeListenMethod(map);
        if (context != null) {
            HMSLogger.getInstance(context).sendSingleEvent(TAG);
        }
    }

    // Posted to the main looper so the channel invocation runs on the UI thread.
    private void invokeListenMethod(final Map<String, Object> tags) {
        new Handler(Looper.getMainLooper()).post(() -> channel.invokeMethod("listenToTags", tags));
    }
}
|
hopshackle/wordAlignment
|
src/edu/cmu/lti/nlp/amr/Entity.scala
|
package edu.cmu.lti.nlp.amr
import scala.collection.mutable.Map
import scala.collection.mutable.Set
import scala.collection.mutable.ArrayBuffer
// Case class for BIO tagged entities
// Case class for BIO tagged entities: token span [start, end) plus label.
case class Entity(start: Int, end: Int, label: String)

object Entity {
  /**
   * Extracts the entities from a CoNLL BIO tagging string (tokens separated
   * by "\n", fields by "\t"), reading tags from the given column.
   *
   * WARNING: conllStr must not end with '\n' — a trailing empty line would
   * make the column lookup go out of bounds.
   */
  def entitiesFromConll(conllStr: String, column: Int = 1): Array[Entity] = {
    val rows = conllStr.split("\n").map(line => line.split("\t"))
    val found: ArrayBuffer[Entity] = ArrayBuffer()
    var idx = 0
    while (idx < rows.size) {
      val tag = rows(idx)(column)
      if (tag == "O") {
        idx += 1
      } else {
        assert(tag.matches("(B|I|L|U)-.*"), "Data is not in conll BIO tagging format") // UIUC format
        val label = tag.drop(2)
        val begin = idx
        idx += 1
        // Consume continuation (I-) and last (L-) tags of the same label.
        while (idx < rows.size && (rows(idx)(column) == "I-" + label || rows(idx)(column) == "L-" + label)) {
          idx += 1
        }
        found += Entity(begin, idx, label)
      }
    }
    found.toArray
  }
}
|
atlasapi/atlas
|
src/main/java/org/atlasapi/remotesite/channel4/pmlsd/epg/C4SynthesizedItemMerger.java
|
package org.atlasapi.remotesite.channel4.pmlsd.epg;
import java.util.Set;
import org.atlasapi.media.entity.Broadcast;
import org.atlasapi.media.entity.Encoding;
import org.atlasapi.media.entity.Episode;
import org.atlasapi.media.entity.Location;
import org.atlasapi.media.entity.Version;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
/**
 * Merges data synthesized from the C4 EPG into the canonical episode.
 * Broadcasts and locations present on the synthesized episode but absent
 * from the canonical one are copied across; existing entries are kept.
 */
public class C4SynthesizedItemMerger {

    /**
     * Copies broadcasts and locations from {@code synthesized} into
     * {@code canonical}. A no-op when either episode is null.
     */
    public void merge(Episode synthesized, Episode canonical) {
        if (synthesized == null || canonical == null) {
            return; // lucky us.
        }
        Version canonVersion = Iterables.getOnlyElement(canonical.getVersions());
        Version synthesizedVersion = Iterables.getOnlyElement(synthesized.getVersions());
        mergeBroadcasts(canonVersion, synthesizedVersion);
        mergeLocations(canonVersion, synthesizedVersion);
    }

    // Copies locations the canonical encoding does not already have, keyed by URI.
    private void mergeLocations(Version version, Version synthVersion) {
        Encoding canonEncoding = Iterables.getOnlyElement(version.getManifestedAs());
        Encoding synthesizedEncoding = Iterables.getOnlyElement(synthVersion.getManifestedAs());
        ImmutableSet<String> knownUris = currentLocationUris(canonEncoding.getAvailableAt());
        for (Location candidate : synthesizedEncoding.getAvailableAt()) {
            if (knownUris.contains(candidate.getUri())) {
                continue;
            }
            canonEncoding.addAvailableAt(candidate);
        }
    }

    // URIs of the locations already attached to the given encoding.
    private ImmutableSet<String> currentLocationUris(Set<Location> locations) {
        return ImmutableSet.copyOf(Iterables.transform(locations, new Function<Location, String>() {
            @Override
            public String apply(Location location) {
                return location.getUri();
            }
        }));
    }

    // Copies broadcasts the canonical version does not already have, keyed by source id.
    private void mergeBroadcasts(Version version, Version synthVersion) {
        Set<String> knownIds = currentBroadcastIds(version.getBroadcasts());
        for (Broadcast candidate : synthVersion.getBroadcasts()) {
            if (knownIds.contains(candidate.getSourceId())) {
                continue;
            }
            version.addBroadcast(candidate);
        }
    }

    // Source ids of the broadcasts already attached to the given version.
    private ImmutableSet<String> currentBroadcastIds(Set<Broadcast> broadcasts) {
        return ImmutableSet.copyOf(Iterables.transform(broadcasts, new Function<Broadcast, String>() {
            @Override
            public String apply(Broadcast broadcast) {
                return broadcast.getSourceId();
            }
        }));
    }
}
|
lotharschulz/cpp
|
exercises/practice/rna-transcription/rna_transcription.cpp
|
<filename>exercises/practice/rna-transcription/rna_transcription.cpp<gh_stars>1-10
#include "rna_transcription.h"

// Stub translation unit for the rna-transcription exercise: the solution
// is presumably implemented in the header — TODO confirm / fill in.
namespace rna_transcription {
}  // namespace rna_transcription
|
npocmaka/Windows-Server-2003
|
windows/advcore/gdiplus/engine/fondrv/tt/ttfd/service.c
|
/******************************Module*Header*******************************\
* Module Name: service.c
*
* set of service routines for converting between ascii and unicode strings
*
* Created: 15-Nov-1990 11:38:31
* Author: <NAME> [BodinD]
*
* Copyright (c) 1990 Microsoft Corporation
*
\**************************************************************************/
#include "fd.h"
/******************************Public*Routine******************************\
*
* vCpyBeToLeUnicodeString,
*
* convert (c - 1) WCHAR's in big endian format to little endian and
* put a terminating zero at the end of the dest string
*
* History:
* 11-Dec-1991 -by- <NAME> [BodinD]
* Wrote it.
\**************************************************************************/
/*
 * Copies (c - 1) big-endian WCHARs from pwcBeSrc to pwcLeDst, converting
 * each to little-endian via BE_UINT16, then NUL-terminates the destination.
 * The destination buffer must have room for c WCHARs.
 */
VOID vCpyBeToLeUnicodeString(LPWSTR pwcLeDst, LPWSTR pwcBeSrc, ULONG c)
{
    LPWSTR pwcBeSrcEnd;

    ASSERTDD(c > 0, "vCpyBeToLeUnicodeString: c == 0\n");

    // Walk the first (c - 1) source characters, byte-swapping each one.
    for
    (
        pwcBeSrcEnd = pwcBeSrc + (c - 1);
        pwcBeSrc < pwcBeSrcEnd;
        pwcBeSrc++, pwcLeDst++
    )
    {
        *pwcLeDst = BE_UINT16(pwcBeSrc);
    }

    // Terminating zero goes into the final destination slot.
    *pwcLeDst = (WCHAR)(UCHAR)'\0';
}
/******************************Public*Routine******************************\
*
* VOID vCvtMacToUnicode
*
* Effects:
*
* Warnings:
*
* History:
* 07-Mar-1992 -by- <NAME> [BodinD]
* Wrote it.
\**************************************************************************/
/*
 * Converts c bytes of Mac-encoded text at pjSrcMac to WCHARs at pwcLeDst
 * by zero-extending each byte. No NUL terminator is appended here (see
 * vCpyMacToLeUnicodeString for the terminated variant).
 */
VOID vCvtMacToUnicode
(
    ULONG ulLangId,
    LPWSTR pwcLeDst,
    PBYTE pjSrcMac,
    ULONG c
)
{
    PBYTE pjSrcEnd;

    //!!! I believe that LangId should be used to select the proper conversion
    //!!! routine, this is a stub [bodind]

    // Referenced only to silence the unused-parameter warning.
    ulLangId;

    for
    (
        pjSrcEnd = pjSrcMac + c;
        pjSrcMac < pjSrcEnd;
        pjSrcMac++, pwcLeDst++
    )
    {
        *pwcLeDst = (WCHAR)(*pjSrcMac);
    }
}
/******************************Public*Routine******************************\
*
* VOID vCpyMacToLeUnicodeString
*
*
* Ensures that string is zero terminated so that other cool things can be
* done to it such as wcscpy, wcslen e.t.c.
*
* History:
* 13-Jan-1992 -by- <NAME> [BodinD]
* Wrote it.
\**************************************************************************/
/*
 * Converts (c - 1) Mac-encoded bytes to WCHARs via vCvtMacToUnicode and
 * NUL-terminates the result so wcscpy/wcslen-style routines can be used.
 * The destination buffer must have room for c WCHARs.
 */
VOID vCpyMacToLeUnicodeString
(
    ULONG ulLangId,
    LPWSTR pwcLeDst,
    PBYTE pjSrcMac,
    ULONG c
)
{
    ASSERTDD(c > 0, "vCpyMacToLeUnicodeString: c == 0\n");

    // Convert everything but the final slot, which receives the terminator.
    c -= 1;
    vCvtMacToUnicode (ulLangId, pwcLeDst, pjSrcMac, c);
    pwcLeDst[c] = (WCHAR)(UCHAR)'\0';
}
/**************************************************************************\
* The rest of the file is stolen from JeanP's win31 code in fd_mac.c
*
* Conversion routines from Mac character code and Mac langageID to
* Unicode character code and OS2 langage ID
*
* Public routines:
* Unicode2Mac
* Mac2Lang
*
\**************************************************************************/
/*
** Converts the OS2 langageID to the to the Mac langage ID
*/
// Mac language id -> OS/2 language id conversion table (32 entries, indexed
// by the low 5 bits of the Mac id; see ui16Mac2Lang). The #ifdef'd variant
// below is the original table kept for reference only — never compiled.
#ifdef JEANP_IS_WRONG
// JEANp messed up danish and german, else my conversion table is the
// same as mine [bodind]
uint16 aCvLang [32] =
{
    0, 12,  0,  0,  0,  0,  0,  7,
   14,  0,  6, 13,  1, 10,  0, 15,
    3, 11, 21,  4,  9,  0,  8,  0,
    0,  0, 18,  0,  0,  5, 22, 17
};
#endif //  JEANP_IS_WRONG

uint16 aCvLang [32] =
{
    0,    // 0  -> 0  (0 -> english == default)
    12,   // 1  -> 12 (arabic -> arabic)
    0,    // 2  -> 0  (bulgarian -> english == default)
    0,    // 3  -> 0  (catalon -> english == default)
    0,    // 4  -> 0  (Chinese  -> english == default)
    0,    // 5  -> 0  (Czeh -> english == default)
    7,    // 6  -> 7  (Danish -> Danish)
    2,    // 7  -> 2  (German -> German)
    14,   // 8  -> 14 (Greek -> Greek)
    0,    // 9  -> 0  (English -> english)
    6,    // a  -> 6  (spanish -> spanish)
    13,   // b  -> 13 (finnish -> finnish)
    1,    // c  -> 1  (french -> french)
    10,   // d  -> 10 (hebrew -> hebrew)
    0,    // e  -> 0  (hungarian -> english == default)
    15,   // f  -> 15 (icelandic -> icelandic)
    3,    // 10 -> 3  (Italian -> italian)
    11,   // 11 -> 11 (japanese -> japanese)
    21,   // 12 -> 21 (korean -> hindi, this seems to be a bug?????????)
    4,    // 13 -> 4  (dutch -> dutch)
    9,    // 14 -> 9  (norweign -> norweign)
    0,    // 15 -> 0  (Polish -> english == default)
    8,    // 16 -> 8  (portugese -> portugese)
    0,    // 17 -> 0  (rhaeto-romanic -> english == default)
    0,    // 18 -> 0  (romanian -> english == default)
    0,    // 19 -> 0  (russian -> english == default)
    18,   // 1a -> 18 (Yugoslavian -> Yugoslavian), lat or cyr ????
    0,    // 1b -> 0  (slovakian -> english == default)
    0,    // 1c -> 0  (albanian -> english == default)
    5,    // 1d -> 5  (swedish -> swedish)
    22,   // 1e -> 22 (thai -> thai)
    17    // 1f -> 17 (turkish -> turkish)
};
/************************** Public Routine *****************************\
* Mac2Lang
*
* Converts the OS2 langageID to the to the Mac langage ID
*
* History:
* Fri Dec 08 11:28:35 1990 -by- <NAME> [jeanp]
* Wrote it.
\***********************************************************************/
// Maps a Mac language id to the corresponding OS/2 language id via the
// aCvLang lookup table; unknown ids fall back to 0 (english/default).
uint16 ui16Mac2Lang (uint16 Id)
{
    // this is just a way to bail out if an incorrect lang id is passed to
    // this routine [bodind]
    // Note that Id & 1f < 32 == sizeof(aCvLang)/sizeof(aCvLang[0]), no gp-fault
    return aCvLang[Id & 0x1f];
}
|
BritClousing/EOSAI
|
EOSAI/EOSAIIncludes.h
|
#include "EOSAIInterface.h"
#include "EOSAIBuildOption.h"
#include "EOSAIBuildOrder.h"
#include "EOSAIBuildingDescription.h"
#include "EOSAILocation.h"
//#include "EOSAIPlayerPoiObject2.h"
//#include "EOSAIPlayerPoiMobile2.h"
#include "EOSAIAirfield.h"
#include "City.h"
#include "EOSAIResource.h"
#include "EOSAIUnit.h"
#include "AIPlayer.h"
#include "EOSAIGameRules.h"
#include "EOSAITechnologyDesc.h"
#include "EOSAITechTreeNode.h"
#include "EOSAIUnitsubsetPermission.h"
#include "EOSAIGamePlayer.h"
#include "AIPlayerDesc.h"
#include "EOSAIPlayerManager.h"
//#include "EOSAIPlayerManager.h"
#include "EOSAIGlobalForeignRelations.h"
#include "EOSAINationalSummary3.h"
//#include "EOSAIUnit2PathwayResult.h"
#include "EOSAIUnitPathwayFinder.h"
#include "EOSAIUnitPathwayResult.h"
#include "EOSAIUnitTemplate.h"
#include "EOSAIUnitTemplateSet.h"
#include "EOSAIRoad2.h"
#include "EOSAIWorldDistanceTool.h"
#include "EOSAIRegion2.h"
#include "EOSAIGenericRegionManager.h"
#include "EOSAITacticalProject2.h"
#include "EOSAIUnitActionIdea.h"
#include "EOSAIUnitPathwayResultStep.h"
#include "EOSAIUnitPathwayPredefinedStep.h"
|
MarioMatschgi/Minecraft-Plugins
|
_Eclipse/_SpigotDecompiled/work/decompile-cf6b1333/net/minecraft/server/BiomeForest.java
|
package net.minecraft.server;
import java.util.Random;
/**
 * Decompiled vanilla forest biome (NORMAL/FLOWER/BIRCH/ROOFED variants).
 * NOTE(review): field names are obfuscated (this.s appears to be the biome
 * decorator, this.u a spawn list) — inferred from usage, confirm against
 * the mapped sources before relying on them.
 */
public class BiomeForest extends BiomeBase {

    protected static final WorldGenForest x = new WorldGenForest(false, true);
    protected static final WorldGenForest y = new WorldGenForest(false, false);
    protected static final WorldGenForestTree z = new WorldGenForestTree(false);
    // Which forest variant this instance represents.
    private final BiomeForest.Type A;

    public BiomeForest(BiomeForest.Type biomeforest_type, BiomeBase.a biomebase_a) {
        super(biomebase_a);
        this.A = biomeforest_type;
        this.s.z = 10;
        this.s.C = 2;
        if (this.A == BiomeForest.Type.FLOWER) {
            this.s.z = 6;
            this.s.B = 100;
            this.s.C = 1;
            this.u.add(new BiomeBase.BiomeMeta(EntityRabbit.class, 4, 2, 3));
        }

        if (this.A == BiomeForest.Type.NORMAL) {
            this.u.add(new BiomeBase.BiomeMeta(EntityWolf.class, 5, 4, 4));
        }

        if (this.A == BiomeForest.Type.ROOFED) {
            // Sentinel value — presumably disables the default tree pass
            // because ROOFED generates its own (see b(...)); confirm.
            this.s.z = -999;
        }
    }

    // Picks the tree generator for this variant (roofed tree, birch, big oak or oak).
    public WorldGenTreeAbstract a(Random random) {
        return (WorldGenTreeAbstract) (this.A == BiomeForest.Type.ROOFED && random.nextInt(3) > 0 ? BiomeForest.z : (this.A != BiomeForest.Type.BIRCH && random.nextInt(5) != 0 ? (random.nextInt(10) == 0 ? BiomeForest.n : BiomeForest.m) : BiomeForest.y));
    }

    // Picks a flower variant; FLOWER forests use noise-based selection and
    // remap BLUE_ORCHID (swamp-only) to POPPY.
    public BlockFlowers.EnumFlowerVarient a(Random random, BlockPosition blockposition) {
        if (this.A == BiomeForest.Type.FLOWER) {
            double d0 = MathHelper.a((1.0D + BiomeForest.k.a((double) blockposition.getX() / 48.0D, (double) blockposition.getZ() / 48.0D)) / 2.0D, 0.0D, 0.9999D);
            BlockFlowers.EnumFlowerVarient blockflowers_enumflowervarient = BlockFlowers.EnumFlowerVarient.values()[(int) (d0 * (double) BlockFlowers.EnumFlowerVarient.values().length)];

            return blockflowers_enumflowervarient == BlockFlowers.EnumFlowerVarient.BLUE_ORCHID ? BlockFlowers.EnumFlowerVarient.POPPY : blockflowers_enumflowervarient;
        } else {
            return super.a(random, blockposition);
        }
    }

    // Chunk decoration entry point: roofed canopy, then tall flowers, then default.
    public void a(World world, Random random, BlockPosition blockposition) {
        if (this.A == BiomeForest.Type.ROOFED) {
            this.b(world, random, blockposition);
        }

        int i = random.nextInt(5) - 3;

        if (this.A == BiomeForest.Type.FLOWER) {
            i += 2;
        }

        this.a(world, random, blockposition, i);
        super.a(world, random, blockposition);
    }

    // ROOFED only: dense 4x4 grid of trees / occasional huge mushrooms.
    protected void b(World world, Random random, BlockPosition blockposition) {
        for (int i = 0; i < 4; ++i) {
            for (int j = 0; j < 4; ++j) {
                int k = i * 4 + 1 + 8 + random.nextInt(3);
                int l = j * 4 + 1 + 8 + random.nextInt(3);
                BlockPosition blockposition1 = world.getHighestBlockYAt(blockposition.a(k, 0, l));

                if (random.nextInt(20) == 0) {
                    WorldGenHugeMushroom worldgenhugemushroom = new WorldGenHugeMushroom();

                    worldgenhugemushroom.generate(world, random, blockposition1);
                } else {
                    WorldGenTreeAbstract worldgentreeabstract = this.a(random);

                    worldgentreeabstract.e();
                    if (worldgentreeabstract.generate(world, random, blockposition1)) {
                        worldgentreeabstract.a(world, random, blockposition1);
                    }
                }
            }
        }
    }

    // Places i clusters of double-tall flowers (lilac/rose bush/peony),
    // trying up to 5 random positions per cluster.
    protected void a(World world, Random random, BlockPosition blockposition, int i) {
        int j = 0;

        while (j < i) {
            int k = random.nextInt(3);

            if (k == 0) {
                BiomeForest.l.a(BlockTallPlant.EnumTallFlowerVariants.SYRINGA);
            } else if (k == 1) {
                BiomeForest.l.a(BlockTallPlant.EnumTallFlowerVariants.ROSE);
            } else if (k == 2) {
                BiomeForest.l.a(BlockTallPlant.EnumTallFlowerVariants.PAEONIA);
            }

            int l = 0;

            while (true) {
                if (l < 5) {
                    int i1 = random.nextInt(16) + 8;
                    int j1 = random.nextInt(16) + 8;
                    int k1 = random.nextInt(world.getHighestBlockYAt(blockposition.a(i1, 0, j1)).getY() + 32);

                    if (!BiomeForest.l.generate(world, random, new BlockPosition(blockposition.getX() + i1, k1, blockposition.getZ() + j1))) {
                        ++l;
                        continue;
                    }
                }

                ++j;
                break;
            }
        }
    }

    public Class<? extends BiomeBase> g() {
        return BiomeForest.class;
    }

    public static enum Type {

        NORMAL, FLOWER, BIRCH, ROOFED;

        private Type() {}
    }
}
|
Shashi-rk/azure-sdk-for-java
|
sdk/mysqlflexibleserver/azure-resourcemanager-mysqlflexibleserver/src/main/java/com/azure/resourcemanager/mysqlflexibleserver/models/Configuration.java
|
<gh_stars>1000+
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.mysqlflexibleserver.models;
import com.azure.core.management.SystemData;
import com.azure.resourcemanager.mysqlflexibleserver.fluent.models.ConfigurationInner;
/**
 * An immutable client-side representation of a MySQL flexible-server
 * Configuration resource. Auto-generated by AutoRest; accessors mirror the
 * fields of {@link ConfigurationInner}.
 */
public interface Configuration {
    /**
     * Gets the id property: Fully qualified resource Id for the resource.
     *
     * @return the id value.
     */
    String id();

    /**
     * Gets the name property: The name of the resource.
     *
     * @return the name value.
     */
    String name();

    /**
     * Gets the type property: The type of the resource.
     *
     * @return the type value.
     */
    String type();

    /**
     * Gets the systemData property: The system metadata relating to this resource.
     *
     * @return the systemData value.
     */
    SystemData systemData();

    /**
     * Gets the value property: Value of the configuration.
     *
     * @return the value value.
     */
    String value();

    /**
     * Gets the description property: Description of the configuration.
     *
     * @return the description value.
     */
    String description();

    /**
     * Gets the defaultValue property: Default value of the configuration.
     *
     * @return the defaultValue value.
     */
    String defaultValue();

    /**
     * Gets the dataType property: Data type of the configuration.
     *
     * @return the dataType value.
     */
    String dataType();

    /**
     * Gets the allowedValues property: Allowed values of the configuration.
     *
     * @return the allowedValues value.
     */
    String allowedValues();

    /**
     * Gets the source property: Source of the configuration.
     *
     * @return the source value.
     */
    ConfigurationSource source();

    /**
     * Gets the isReadOnly property: If is the configuration read only.
     *
     * @return the isReadOnly value.
     */
    IsReadOnly isReadOnly();

    /**
     * Gets the isConfigPendingRestart property: If is the configuration pending restart or not.
     *
     * @return the isConfigPendingRestart value.
     */
    IsConfigPendingRestart isConfigPendingRestart();

    /**
     * Gets the isDynamicConfig property: If is the configuration dynamic.
     *
     * @return the isDynamicConfig value.
     */
    IsDynamicConfig isDynamicConfig();

    /**
     * Gets the inner com.azure.resourcemanager.mysqlflexibleserver.fluent.models.ConfigurationInner object.
     *
     * @return the inner object.
     */
    ConfigurationInner innerModel();
}
|
Marcura/marcura-ui
|
app/grid-page/grid-page.js
|
<filename>app/grid-page/grid-page.js
// Register the grid demo page controller on the app's controllers module.
angular.module('app.controllers').controller('gridPageController', gridPageController);
/**
 * Controller backing the grid demo page: exposes two sort-field models
 * (both defaulting to descending ETA) and a static list of disbursement
 * account rows for the grid examples.
 */
function gridPageController($scope) {
    var descendingEtaSort = function () {
        return {
            name: 'eta',
            isAsc: false
        };
    };

    $scope.sortField1 = descendingEtaSort();
    $scope.sortField2 = descendingEtaSort();

    // Builds one grid row; keys kept in the order the grid columns expect.
    var row = function (vessel, operation, commodity, eta, status, total) {
        return {
            vessel: vessel,
            operation: operation,
            commodity: commodity,
            eta: eta,
            status: status,
            total: total
        };
    };

    $scope.das = [
        row('Densa Felcon', 'Discharging', 'Grains', '2016-12-25', 'Appointment accepted', 36483.27),
        row('Bertina', 'Loading', 'Wheat', '2016-07-22', 'Appointment accepted, PDA approval completed', 25990.20),
        row('Alpine Alaska', 'Discharging', 'Coal', '2016-02-12', 'Appointment accepted, PDA approval completed', 40000),
        row('Alpine Alaska', 'Discharging', 'Coal', '2016-02-12', 'Appointment accepted, PDA approval completed', 50000),
        row('Alpine Alaska', 'Discharging', 'Coal', '2016-02-12', 'Appointment accepted, PDA approval completed', 30000)
    ];
}
|
RustamSultanov/camachine
|
js/src/main/scala/camachineapi/js/components/LambdaComponent.scala
|
<filename>js/src/main/scala/camachineapi/js/components/LambdaComponent.scala
package camachineapi.js.components
import camachineapi.js.http.Client
import camachineapi.js.forms.Forms._
import camachineapi.models.LambdaCode
import cats.effect.{ContextShift, IO}
import monix.reactive.Observable
import outwatch.Handler
import outwatch.dom.VDomModifier
import outwatch.dom.dsl._
/**
 * Outwatch UI component: a textarea for lambda-calculus code, a submit
 * button that sends the code to the backend, and a table showing the
 * resulting CCL term streamed back through `resultLambda`.
 */
class LambdaComponent(
    textAreaLambda: Handler[String],
    resultLambda: Handler[LambdaCode],
    client: Client,
)(
    implicit
    contextShift: ContextShift[IO],
) {

  // Current textarea contents wrapped as LambdaCode.
  def dataStream: Observable[LambdaCode] = textAreaLambda.map(LambdaCode.apply)

  // One backend round-trip for the given code.
  def sendRequestIO(data: LambdaCode): IO[LambdaCode] =
    client.requestLambda(data)

  // Submit is disabled while the input is shorter than 4 characters.
  def disableStream: Observable[Boolean] = dataStream.map(_.code.length < 4)

  // The component's DOM tree (labels are user-facing and intentionally Russian).
  def node = div(
    textAreaGroupRow(
      textAreaLambda,
      "lambdaTextArea",
      "Лямбда код",
      "Введите последовательность лямбда-термов",
      isRequired = true,
    ),
    buttonSubmitGroupRow(
      "Отправить",
      resultLambda,
      disableStream,
      dataStream,
      sendRequestIO,
    ),
    h3("Терм ККЛ полученый из лямбда-термов"),
    table(
      tr(
        td(
          styleAttr := "bgcolor=#b9e4c9",
          VDomModifier(resultLambda.map(_.code)),
        ),
      ),
    ),
  )
}

object LambdaComponent {
  // Creates the two handlers the component needs and wires them in.
  def init(
      client: Client,
  )(
      implicit
      contextShift: ContextShift[IO],
  ): IO[LambdaComponent] =
    for {
      textAreaLambdaHandler <- Handler.create[String]
      resultLambdaHandler <- Handler.create[LambdaCode]
    } yield
      new LambdaComponent(
        textAreaLambdaHandler,
        resultLambdaHandler,
        client,
      )
}
|
Guild-Hall/MHFC
|
src/main/java/mhfc/net/common/entity/creature/Tigrex.java
|
package mhfc.net.common.entity.creature;
import java.util.ArrayList;
import java.util.List;
import org.lwjgl.opengl.GL11;
import com.github.worldsender.mcanm.client.model.util.RenderPassInformation;
import mhfc.net.common.ai.IActionManager;
import mhfc.net.common.ai.IExecutableAction;
import mhfc.net.common.ai.entity.AIAngleWhip;
import mhfc.net.common.ai.entity.AIBite;
import mhfc.net.common.ai.entity.AIBreathe;
import mhfc.net.common.ai.entity.AIDeath;
import mhfc.net.common.ai.entity.AIIdle;
import mhfc.net.common.ai.entity.AIWander;
import mhfc.net.common.ai.entity.monsters.tigrex.BackOff;
import mhfc.net.common.ai.entity.monsters.tigrex.Charge;
import mhfc.net.common.ai.entity.monsters.tigrex.GroundHurl;
import mhfc.net.common.ai.entity.monsters.tigrex.Jump;
import mhfc.net.common.ai.entity.monsters.tigrex.Roar;
import mhfc.net.common.ai.manager.builder.ActionManagerBuilder;
import mhfc.net.common.core.registry.MHFCSoundRegistry;
import mhfc.net.common.entity.CollisionParts;
import mhfc.net.common.entity.CreatureAttributes;
import mhfc.net.common.item.materials.ItemMaterial.MaterialSubType;
import mhfc.net.common.util.SubTypedItem;
import net.minecraft.block.Block;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.EntityAIHurtByTarget;
import net.minecraft.entity.ai.EntityAINearestAttackableTarget;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.SoundEvent;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
/**
 * The Tigrex monster: stats, AI action registration, targeting, drops,
 * sounds and render scaling for the MHFC mod.
 */
public class Tigrex extends CreatureAttributes<Tigrex> {

    // Rage mechanic counter; not modified in this class — TODO confirm writers.
    public int rageLevel;

    public Tigrex(World par1World) {
        super(par1World);
        setSize(4.6f, 3.6f);
        stepHeight = 1.5f;
    }

    @Override
    protected void applyEntityAttributes() {
        super.applyEntityAttributes();
        this.getEntityAttribute(SharedMonsterAttributes.MAX_HEALTH).setBaseValue(5000D);
        this.getEntityAttribute(SharedMonsterAttributes.ARMOR).setBaseValue(45D);
        this.getEntityAttribute(SharedMonsterAttributes.ARMOR_TOUGHNESS).setBaseValue(30D);
        this.getEntityAttribute(SharedMonsterAttributes.FOLLOW_RANGE).setBaseValue(60D);
    }

    @Override
    protected void initEntityAI() {
        super.initEntityAI();
        // Retaliation plus proactive targeting of villagers and players.
        targetTasks.addTask(6, new EntityAIHurtByTarget(this, true));
        targetTasks.addTask(1, new EntityAINearestAttackableTarget<>(this, EntityVillager.class, true));
        targetTasks.addTask(3, new EntityAINearestAttackableTarget<>(this, EntityPlayer.class, true));
    }

    /**
     * Registers every idle/attack action with the action manager.
     * Numeric arguments mirror the AI constructors (durations, damage,
     * weights, sounds) — see the respective AI classes for their meaning.
     */
    @Override
    protected IActionManager<Tigrex> constructActionManager() {
        ActionManagerBuilder<Tigrex> manager = new ActionManagerBuilder<>();
        /** Living AIs **/
        manager.registerAction(
                setDeathAction(
                        new AIDeath(
                                this,
                                "mhfc:models/Tigrex/dying.mcanm",
                                MHFCSoundRegistry.getRegistry().tigrexDeath)));
        manager.registerAction(new AIBreathe(this, "mhfc:models/Tigrex/breathe.mcanm", 60, 2F));
        manager.registerAction(new AIIdle(this, "mhfc:models/Tigrex/idle.mcanm", 160, 2F));
        manager.registerAction(new AIIdle(this, "mhfc:models/Tigrex/idle3.mcanm", 260, 1F));
        manager.registerAction(
                new AIWander<Tigrex>(this, "mhfc:models/Tigrex/walk.mcanm", 122, 1F, 0.08F, 0.4F, 21, 85, 1, 30));
        /** Attack AIs **/
        manager.registerAction(
                new AIBite(
                        this,
                        "mhfc:models/Tigrex/bite.mcanm",
                        85,
                        12,
                        70,
                        10F,
                        MHFCSoundRegistry.getRegistry().tigrexBite,
                        6F,
                        false,
                        0,
                        0));
        manager.registerAction(
                new AIAngleWhip<>(
                        "mhfc:models/Tigrex/clawswipe.mcanm",
                        41,
                        5,
                        80,
                        10,
                        MHFCSoundRegistry.getRegistry().tigrexTailWhip,
                        7,
                        6,
                        2F,
                        120,
                        10));
        manager.registerAction(
                new AIAngleWhip<>(
                        "mhfc:models/Tigrex/tailswipe.mcanm",
                        60,
                        12,
                        82,
                        10F,
                        MHFCSoundRegistry.getRegistry().tigrexTailWhip,
                        9,
                        5,
                        1,
                        180,
                        10));
        manager.registerAction(new Jump());
        manager.registerAction(new Roar());
        manager.registerAction(new BackOff());
        manager.registerAction(new Charge());
        manager.registerAction(new GroundHurl());
        // NOTE(review): this list is built but never passed to the builder —
        // looks like it was meant to restrict first-sight actions; confirm.
        List<IExecutableAction<? super Tigrex>> allowedFirstSight = new ArrayList<>();
        allowedFirstSight.add(new Roar());
        return manager.build(this);
    }

    @Override
    protected void dropFewItems(boolean par1, int par2) {
        dropItemRand(SubTypedItem.fromSubItem(MaterialSubType.TIGREXSKULLSHELL, 1));
        dropItemRand(SubTypedItem.fromSubItem(MaterialSubType.TIGREXTAIL, 1));
    }

    // Render-time uniform model scale.
    @Override
    public RenderPassInformation preRenderCallback(float subFrame, RenderPassInformation sub) {
        GL11.glScaled(2.3, 2.3, 2.3);
        return super.preRenderCallback(subFrame, sub);
    }

    @Override
    protected SoundEvent getAmbientSound() {
        return MHFCSoundRegistry.getRegistry().tigrexIdle;
    }

    // Reuses the idle sound for hurt — presumably no dedicated hurt sound exists.
    @Override
    protected SoundEvent getHurtSound() {
        return MHFCSoundRegistry.getRegistry().tigrexIdle;
    }

    @Override
    protected SoundEvent getDeathSound() {
        return MHFCSoundRegistry.getRegistry().tigrexDeath;
    }

    @Override
    public CollisionParts[] getParts() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    protected void playStepSound(BlockPos pos, Block blockIn) {
        this.playSound(MHFCSoundRegistry.getRegistry().tigrexStep, 0.7F, 1.0F);
    }
}
|
Felon03/CppPrimer
|
Ch 10/10.28.cpp
|
<gh_stars>0
/*一个vector中保存1-9,将其拷贝到三个其他容器中。
分别使用inserter、back_inserter和front_inserter将元素添加到三个容器中。
*/
#include<iostream>
#include<vector>
#include<list>
#include<iterator>
using namespace std;
// Prints the elements of `l` separated (and followed) by a single space,
// then a newline.
void print(const list<int> &l)
{
	for (auto it = l.cbegin(); it != l.cend(); ++it)
		cout << *it << " ";
	cout << endl;
}
int main()
{
vector<int> v{ 1,2,3,4,5,6,7,8,9 };
list<int> l1, l2, l3;
// 使用front_inserter
copy(v.cbegin(), v.cend(), front_inserter(l1));
// 使用back_inserter
copy(v.cbegin(), v.cend(), back_inserter(l2));
// 使用inserter
copy(v.cbegin(), v.cend(), inserter(l3, l3.begin()));
print(l1);
print(l2);
print(l3);
return 0;
}
|
amithbm/stravamerge
|
ui/src/routes/activities/selectors/getWeekChartData.spec.js
|
<gh_stars>0
import React from 'react'
import getWeekChartData from '../selectors/getWeekChartData.js'
// Spec for the getWeekChartData selector: reads the per-weekday distance
// summary from state and produces chart-ready {distance, label} entries.
describe('selectors', () => {
  describe('getWeekChartData', () => {
    it('returns rounded week data as array', () => {
      // Raw weekly distances keyed by weekday. From the expectations below the
      // selector appears to divide by 1000 and round to one decimal
      // (metres -> km) — confirm against the selector implementation.
      const state = {
        activities: {
          weeklySummary: {
            monday: 1500,
            tuesday: 1750,
            wednesday: 6000,
            thursday: 600,
            friday: 12000,
            saturday: 10000,
            sunday: 15000
          }
        }
      }
      const result = getWeekChartData(state)
      // NOTE(review): toMatchArray is not a built-in Jest matcher — presumably
      // a custom matcher registered in the test setup; confirm, or use the
      // standard toEqual.
      expect(result.map(x => x.distance)).toMatchArray([1.5,1.8,6,0.6,12,10,15])
      // Labels must come back in Monday-first order.
      expect(result.map(x => x.label)).toMatchArray([
        'Monday',
        'Tuesday',
        'Wednesday',
        'Thursday',
        'Friday',
        'Saturday',
        'Sunday'
      ])
    })
  })
})
|
ZhiruiFeng/CarsMemory
|
src/producer/video_producer.py
|
<reponame>ZhiruiFeng/CarsMemory<gh_stars>1-10
#!/usr/bin/env python3
# video producers
"""
Simulate real-time video streaming:
1. The source could be a video file.
2. The source could also be a folder contains a sequence of keyframes.
"""
import re
import json
import sys
import time
from multiprocessing import Process
import cv2
import imutils
import numpy as np
from imutils.video import VideoStream
from kafka import KafkaProducer, TopicPartition
from kafka.partitioner import RoundRobinPartitioner, Murmur2Partitioner
import src.params as params
from src.kafka.utils import np_to_json
from src.utils import get_curtimestamp_millis
import src.processing.cvutils as cvutils
import src.kafka.settings as settings
from src.processing.sampling_with_location import VideoSampler
class StreamVideo(Process):
    """Video streaming producer process.

    Reads frames from a video source (a file/URL via imutils, or a folder of
    keyframes via VideoSampler) and publishes them, JSON-encoded and
    timestamped, to a Kafka topic — one message per frame.
    """

    def __init__(self, video_path,
                 topic,
                 topic_partitions=8,
                 use_cv2=False,
                 pub_obj_key=settings.ORIGINAL_PREFIX,
                 sample_speed=10,
                 location='Unknown',
                 group=None,
                 target=None,
                 name=None,
                 verbose=False,
                 rr_distribute=False):
        """Video Streaming Producer Process Class. Publishes frames from a video source to a topic.

        :param video_path: video path or url
        :param topic: kafka topic to publish stamped encoded frames.
        :param topic_partitions: number of partitions this topic has, for distributing messages among partitions
        :param use_cv2: send every frame, using cv2 library, else will use imutils to speedup training
        :param pub_obj_key: associate tag with every frame encoded, can be used later to separate raw frames
        :param sample_speed: to decrease the fps of incoming video
        :param location: initial location tag attached to frames (may be updated by the sampler)
        :param group: group should always be None; it exists solely for compatibility with threading.
        :param target: Process Target
        :param name: Process name; must match the pattern "StreamVideo-<camera_num>"
        :param verbose: print logs on stdout
        :param rr_distribute: use round robin partitioner, should be set same as consumers.
        """
        super().__init__(group=group, target=target, name=name)
        # This is the folder for videos
        self.video_path = video_path
        # TOPIC TO PUBLISH
        self.frame_topic = topic
        self.topic_partitions = topic_partitions
        # Derive the camera number from the process name, e.g. "StreamVideo-3" -> 3
        self.camera_num = int(re.findall(r"StreamVideo-([0-9]*)", self.name)[0])
        self.use_cv2 = use_cv2
        # FIX: honor the caller-supplied key. Previously this was hard-coded to
        # settings.ORIGINAL_PREFIX and the pub_obj_key argument was silently
        # ignored. The parameter's default keeps old behavior for existing callers.
        self.object_key = pub_obj_key
        self.verbose = verbose
        self.rr_distribute = rr_distribute
        self.sampler = VideoSampler(sample_speed)
        # For first version, we just use the car's registration location
        # In future, we could use GPS information
        self.timer = time.time()      # start of the current throughput-report window
        self.zerotime = time.time()   # process start time (used to index report windows)
        self.sizecnt = 0              # frames sent in the current window
        self.location = location
        self.report_range = 60        # report window length, in seconds

    def run(self):
        """Publish video frames as json objects, timestamped, marked with camera number.

        Source:
            self.video_path: URL for streaming video
            self.kwargs["use_cv2"]: use raw cv2 streaming, set to false to use smart fast streaming --> not every frame is sent.

        Publishes:
            A dict {"frame": string(base64encodedarray), "dtype": obj.dtype.str, "shape": obj.shape,
                    "timestamp": time.time(), "camera": camera, "frame_num": frame_num}
        """
        # Choose a partitioner consistent with what the consumers expect.
        if self.rr_distribute:
            partitioner = RoundRobinPartitioner(partitions=
                                                [TopicPartition(topic=self.frame_topic, partition=i)
                                                 for i in range(self.topic_partitions)])
        else:
            partitioner = Murmur2Partitioner(partitions=
                                             [TopicPartition(topic=self.frame_topic, partition=i)
                                              for i in range(self.topic_partitions)])

        # Producer object, set desired partitioner
        frame_producer = KafkaProducer(bootstrap_servers=[params.KAFKA_BROKER],
                                       key_serializer=lambda key: str(key).encode(),
                                       value_serializer=lambda value: json.dumps(value).encode(),
                                       partitioner=partitioner,
                                       max_request_size=134217728)

        print("[CAM {}] URL: {}, SET PARTITIONS FOR FRAME TOPIC: {}".format(self.camera_num,
                                                                            self.video_path,
                                                                            frame_producer.partitions_for(
                                                                                self.frame_topic)))
        # Use either option
        if self.use_cv2:
            # video = cv2.VideoCapture(self.video_path)
            # Here we use sampler to read all videos from a folder
            self.sampler.add_video(self.video_path)
        else:
            video = VideoStream(self.video_path).start()

        # Track frame number
        frame_num = 0
        start_time = time.time()
        print("[CAM {}] START TIME {}: ".format(self.camera_num, start_time))

        while True:
            if self.use_cv2:
                # The sampler also yields an updated location tag per frame.
                success, image, self.location = self.sampler.read()
                if not success:
                    if self.verbose:
                        print("[CAM {}] URL: {}, END FRAME: {}".format(self.name,
                                                                       self.video_path,
                                                                       frame_num))
                    break
            else:
                image = video.read()
                if image is None:
                    if self.verbose:
                        print("[CAM {}] URL: {}, END FRAME: {}".format(self.name,
                                                                       self.video_path,
                                                                       frame_num))
                    break

            # Attach metadata to frame, transform into JSON
            message = self.transform(frame=image,
                                     frame_num=frame_num,
                                     location=self.location,
                                     object_key=self.object_key,
                                     camera=self.camera_num,
                                     verbose=self.verbose)

            # Throughput accounting: report frames sent per report_range window.
            self.sizecnt += 1
            if time.time() - self.timer > self.report_range:
                acc = self.sizecnt
                # if self.verbose:
                print("[Cam {}]Minute {} send out size {}".format(self.camera_num,
                                                                  int(self.timer - self.zerotime)//self.report_range,
                                                                  acc))
                self.sizecnt = 0
                self.timer = time.time()

            # Callback function
            def on_send_success(record_metadata):
                print(record_metadata.topic)
                print(record_metadata.partition)
                print(record_metadata.offset)

            def on_send_error(excp):
                print(excp)
                # log.error('I am an errback', exc_info=excp)

            # Partition to be sent to
            part = frame_num % self.topic_partitions
            # Logging
            # Publish to specific partition
            if self.verbose:
                print("\r[PRODUCER][Cam {}] FRAME: {} TO PARTITION: {}".format(message["camera"], frame_num, part))
                frame_producer.send(self.frame_topic, key="{}_{}".format(self.camera_num, frame_num), value=message).add_callback(on_send_success).add_errback(on_send_error)
            else:
                frame_producer.send(self.frame_topic, key="{}_{}".format(self.camera_num, frame_num), value=message)

            # if frame_num % 1000 == 0:
            # NOTE: flushing every frame trades producer batching/throughput for
            # low delivery latency (the commented-out guard suggests throttling
            # was considered).
            frame_producer.flush()
            frame_num += 1

        if self.use_cv2:
            self.sampler.release()
        else:
            video.stop()

        if self.verbose:
            print("[CAM {}] FINISHED. STREAM TIME {}: ".format(self.camera_num, time.time() - start_time))

        # Informational only: Process.run() return values are not propagated
        # to the parent process.
        return True if frame_num > 0 else False

    @staticmethod
    def transform(frame, frame_num, location, object_key, camera=0, verbose=False):
        """Serialize frame, create json message with serialized frame, camera number and timestamp.

        :param frame: numpy.ndarray, raw frame
        :param frame_num: frame number in the particular video/camera
        :param location: the location of the cars
        :param object_key: identifier for these objects
        :param camera: Camera Number the frame is from
        :param verbose: print out logs
        :return: A dict {"frame": string(base64encodedarray), "dtype": obj.dtype.str, "shape": obj.shape,
                         "timestamp": time.time(), "camera": camera, "frame_num": frame_num}
        """
        # Downscale (and rotate) before serializing to keep message sizes small.
        frame = imutils.resize(frame, width=400)
        frame = cvutils.rotate(frame)

        if verbose:
            # print raw frame size
            print("\nRAW ARRAY SIZE: ", sys.getsizeof(frame))

        # serialize frame
        frame_dict = np_to_json(frame.astype(np.uint8), prefix_name=object_key)
        # Metadata for frame
        message = {"timestamp": get_curtimestamp_millis(),
                   "location": location,
                   "camera": camera,
                   "frame_num": frame_num}
        # add frame and metadata related to frame
        message.update(frame_dict)

        if verbose:
            # print message size
            print("\nMESSAGE SIZE: ", sys.getsizeof(message))
        return message
|
actarian/giorgetti
|
src/js/common/storage/local-storage.service.js
|
<filename>src/js/common/storage/local-storage.service.js<gh_stars>0
/**
 * Thin wrapper around window.localStorage with JSON (de)serialization and
 * feature detection. All methods degrade to safe no-ops / falsy results
 * when localStorage is unavailable (e.g. outside a browser).
 */
export class LocalStorageService {

	/** Removes the named entry; no-op when localStorage is unsupported. */
	static delete(name) {
		if (this.isLocalStorageSupported()) {
			window.localStorage.removeItem(name);
		}
	}

	/**
	 * Returns true when the named entry exists.
	 * FIX: previously returned undefined (implicitly) when localStorage was
	 * unsupported; now returns false explicitly. Backward compatible — both
	 * values are falsy.
	 */
	static exist(name) {
		if (this.isLocalStorageSupported()) {
			return window.localStorage[name] !== undefined;
		}
		return false;
	}

	/**
	 * Reads and JSON-parses the named entry.
	 * Returns null when the entry is missing, localStorage is unsupported,
	 * or the stored value is not valid JSON (logged, not thrown).
	 */
	static get(name) {
		let value = null;
		if (this.isLocalStorageSupported() && window.localStorage[name] !== undefined) {
			try {
				value = JSON.parse(window.localStorage[name]);
			} catch (e) {
				console.log('LocalStorageService.get.error parsing', name, e);
			}
		}
		return value;
	}

	/**
	 * JSON-serializes value and stores it under name.
	 * Circular references are discarded (the offending keys are dropped)
	 * instead of making JSON.stringify throw.
	 */
	static set(name, value) {
		if (this.isLocalStorageSupported()) {
			try {
				// Track visited objects so circular references can be detected.
				const cache = [];
				const json = JSON.stringify(value, function(key, value) {
					if (typeof value === 'object' && value !== null) {
						if (cache.indexOf(value) !== -1) {
							// Circular reference found, discard key
							return;
						}
						cache.push(value);
					}
					return value;
				});
				window.localStorage.setItem(name, json);
			} catch (e) {
				console.log('LocalStorageService.set.error serializing', name, value, e);
			}
		}
	}

	/**
	 * Feature-detects localStorage; the positive result is cached on the
	 * class. A write/remove round-trip is attempted because some
	 * environments expose localStorage but throw on write (e.g. certain
	 * private-browsing modes — presumably; verify per target browser).
	 */
	static isLocalStorageSupported() {
		if (this.supported) {
			return true;
		}
		let supported = false;
		try {
			supported = 'localStorage' in window && window.localStorage !== null;
			if (supported) {
				window.localStorage.setItem('test', '1');
				window.localStorage.removeItem('test');
			} else {
				supported = false;
			}
		} catch (e) {
			supported = false;
		}
		this.supported = supported;
		return supported;
	}

}
|
Varulv1997/juneau
|
juneau-core/juneau-core-utest/src/test/java/org/apache/juneau/http/MediaRanges_Test.java
|
<gh_stars>1-10
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.http;
import static org.apache.juneau.assertions.Assertions.*;
import static org.junit.Assert.*;
import static org.junit.runners.MethodSorters.*;
import org.apache.juneau.collections.*;
import static org.apache.juneau.http.MediaRanges.*;
import org.junit.*;
@FixMethodOrder(NAME_ASCENDING)
public class MediaRanges_Test {

	//-----------------------------------------------------------------------------------------------------------------
	// Verifies that media type parameters are distinguished from media range extensions.
	// (Parameters appear before the q-value in a media range; anything after the
	// q-value is an extension — per RFC 7231 Accept-header syntax.)
	//-----------------------------------------------------------------------------------------------------------------

	@Test
	public void a01_extensions() throws Exception {
		MediaRanges x1;
		MediaRange x2;

		// Plain type: no parameters, default q-value 1.0, no extensions;
		// out-of-range indexes yield null.
		x1 = of("text/json");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json");
		assertObject(x2.getParameters()).json().is("[]");
		assertString(x2.getQValue()).is("1.0");
		assertObject(x2.getExtensions()).json().is("[]");
		assertNull(x1.getRange(-1));
		assertNull(x1.getRange(1));

		// Multiple ranges: getRange(0) is the first entry.
		x1 = of("foo,bar");
		x2 = x1.getRange(0);
		assertString(x2).is("foo");
		assertObject(x2.getParameters()).json().is("[]");
		assertString(x2.getQValue()).is("1.0");
		assertObject(x2.getExtensions()).json().is("[]");

		// Surrounding whitespace is trimmed.
		x1 = of(" foo , bar ");
		x2 = x1.getRange(0);
		assertString(x2).is("foo");
		assertObject(x2.getParameters()).json().is("[]");
		assertString(x2.getQValue()).is("1.0");
		assertObject(x2.getExtensions()).json().is("[]");

		// a=1 precedes q, so it's a parameter; b=2 follows q, so it's an extension.
		x1 = of("text/json;a=1;q=0.9;b=2");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;a=1;q=0.9;b=2");
		assertObject(x2.getParameters()).json().is("['a=1']");
		assertString(x2.getQValue()).is("0.9");
		assertObject(x2.getExtensions()).json().is("['b=2']");

		// Repeated parameter/extension names are all preserved, in order.
		x1 = of("text/json;a=1;a=2;q=0.9;b=3;b=4");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;a=1;a=2;q=0.9;b=3;b=4");
		assertObject(x2.getParameters()).json().is("['a=1','a=2']");
		assertString(x2.getQValue()).is("0.9");
		assertObject(x2.getExtensions()).json().is("['b=3','b=4']");

		// An explicit q=1.0 still splits parameters from extensions.
		x1 = of("text/json;a=1;a=2;q=1.0;b=3;b=4");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;a=1;a=2;q=1.0;b=3;b=4");
		assertObject(x2.getParameters()).json().is("['a=1','a=2']");
		assertString(x2.getQValue()).is("1.0");
		assertObject(x2.getExtensions()).json().is("['b=3','b=4']");

		// Parameter with no q-value: default q, no extensions.
		x1 = of("text/json;a=1");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;a=1");
		assertObject(x2.getParameters()).json().is("['a=1']");
		assertString(x2.getQValue()).is("1.0");
		assertObject(x2.getExtensions()).json().is("[]");

		// Trailing semicolon is dropped from the canonical form.
		x1 = of("text/json;a=1;");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;a=1")
;
		assertObject(x2.getParameters()).json().is("['a=1']");
		assertString(x2.getQValue()).is("1.0");
		assertObject(x2.getExtensions()).json().is("[]");

		// q-value only: no parameters, no extensions.
		x1 = of("text/json;q=0.9");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;q=0.9");
		assertObject(x2.getParameters()).json().is("[]");
		assertString(x2.getQValue()).is("0.9");
		assertObject(x2.getExtensions()).json().is("[]");

		// q-value with trailing semicolon.
		x1 = of("text/json;q=0.9;");
		x2 = x1.getRange(0);
		assertString(x2).is("text/json;q=0.9");
		assertObject(x2.getParameters()).json().is("[]");
		assertString(x2.getQValue()).is("0.9");
		assertObject(x2.getExtensions()).json().is("[]");
	}

	// Subtype parts include the suffix components ("json" and "x" of "json+x"),
	// but ranges with q=0.0 are excluded from the match.
	@Test
	public void a02_hasSubtypePart() {
		MediaRanges mr = of("text/json+x,text/foo+y;q=0.0");
		assertTrue(mr.hasSubtypePart("json"));
		assertTrue(mr.hasSubtypePart("x"));
		assertFalse(mr.hasSubtypePart("foo"));
		assertFalse(mr.hasSubtypePart("y"));
	}

	// Ranges are sorted by precedence: higher q first, more-specific types
	// before wildcards; null/empty input collapses to the empty string.
	@Test
	public void a03_ordering() {
		assertString(of("text/json")).is("text/json");
		assertString(of("text/json,text/*")).is("text/json,text/*");
		assertString(of("text/*,text/json")).is("text/json,text/*");
		assertString(of("text/*,text/*")).is("text/*,text/*");
		assertString(of("*/text,text/*")).is("text/*,*/text");
		assertString(of("text/*,*/text")).is("text/*,*/text");
		assertString(of("a;q=0.9,b;q=0.1")).is("a;q=0.9,b;q=0.1");
		assertString(of("b;q=0.9,a;q=0.1")).is("b;q=0.9,a;q=0.1");
		assertString(of("a,b;q=0.9,c;q=0.1,d;q=0")).is("a,b;q=0.9,c;q=0.1,d;q=0.0");
		assertString(of("d;q=0,c;q=0.1,b;q=0.9,a")).is("a,b;q=0.9,c;q=0.1,d;q=0.0");
		assertString(of("a;q=1,b;q=0.9,c;q=0.1,d;q=0")).is("a,b;q=0.9,c;q=0.1,d;q=0.0");
		assertString(of("d;q=0,c;q=0.1,b;q=0.9,a;q=1")).is("a,b;q=0.9,c;q=0.1,d;q=0.0");
		assertString(of("a;q=0,b;q=0.1,c;q=0.9,d;q=1")).is("d,c;q=0.9,b;q=0.1,a;q=0.0");
		assertString(of("*")).is("*");
		assertString(of("")).is("");
		assertString(of(null)).is("");
		assertString(of("foo/bar/baz")).is("foo/bar/baz");
	}

	// match() returns the index of the best-matching media type, or -1 when
	// nothing matches (including null/empty inputs).
	@Test
	public void a04_match() {
		MediaRanges x1 = of("text/json");
		assertInteger(x1.match(AList.of(MediaType.of("text/json")))).is(0);
		assertInteger(x1.match(AList.of(MediaType.of("text/foo")))).is(-1);
		assertInteger(x1.match(AList.of((MediaType)null))).is(-1);
		assertInteger(x1.match(null)).is(-1);

		MediaRanges x2 = of("");
		assertInteger(x2.match(AList.of(MediaType.of("text/json")))).is(-1);
	}

	// getRanges() exposes the parsed ranges as a list.
	@Test
	public void a05_getRanges() {
		MediaRanges x1 = of("text/json");
		assertObject(x1.getRanges()).json().is("['text/json']");
	}
}
|
impastasyndrome/Lambda-Resource-Static-Assets
|
2-resources/BLOG/ciriculumn/week-2/week2/scope_project/test/23-smoothie-machine-spec.js
|
<filename>2-resources/BLOG/ciriculumn/week-2/week2/scope_project/test/23-smoothie-machine-spec.js
const assert = require("assert");
const chai = require("chai");
const expect = chai.expect;
const smoothieMachine = require("../problems/23-smoothie-machine.js");
describe("smoothieMachine()", function () {
  it("should return an object with an 'ingredients' property set as an array of smoothieMachine's passed in arguments", function () {
    const emptySmoothie = smoothieMachine();
    const fruitSmoothie = smoothieMachine("apples", "bananas", "berries");

    expect(emptySmoothie.ingredients).to.eql([]);
    expect(fruitSmoothie.ingredients).to.eql(["apples", "bananas", "berries"]);
  });

  it("should have an `addIngredients` method that accepts multiple params to add to the ingredients property", function () {
    const emptySmoothie = smoothieMachine();
    const fruitSmoothie = smoothieMachine("apples", "bananas", "berries");

    // Perform every addIngredients call up front, in the same order as the
    // original assertions exercise them.
    const milkResult = emptySmoothie.addIngredients("milk");
    const greensResult = emptySmoothie.addIngredients("kale", "spinach");
    const fullResult = emptySmoothie.addIngredients("honey", "pears", "berries");
    const pineappleResult = fruitSmoothie.addIngredients("pineapple");

    assert.equal(milkResult, "I'm having a smoothie with milk");
    assert.equal(greensResult, "I'm having a smoothie with milk and kale and spinach");
    assert.equal(
      fullResult,
      "I'm having a smoothie with milk and kale and spinach and honey and pears and berries"
    );
    expect(emptySmoothie.ingredients).to.eql([
      "milk",
      "kale",
      "spinach",
      "honey",
      "pears",
      "berries",
    ]);
    assert.equal(
      pineappleResult,
      "I'm having a smoothie with apples and bananas and berries and pineapple"
    );
    expect(fruitSmoothie.ingredients).to.eql([
      "apples",
      "bananas",
      "berries",
      "pineapple",
    ]);
  });
});
|
naga-project/webfx
|
webfx-kit/webfx-kit-javafxgraphics-emul/src/main/java/javafx/scene/shape/ClosePath.java
|
<gh_stars>100-1000
/*
* Copyright (c) 2010, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javafx.scene.shape;
/**
* A path element which closes the current path.
*
* <p>For more information on path elements see the {@link Path} and
* {@link PathElement} classes.
* @since JavaFX 2.0
*/
public class ClosePath extends PathElement {
    /*static {
        ClosePathHelper.setClosePathAccessor(new ClosePathHelper.ClosePathAccessor() {
            @Override
            public void doAddTo(PathElement pathElement, Path2D path) {
                ((ClosePath) pathElement).doAddTo(path);
            }
        });
    }*/

    /**
     * Creates an empty instance of ClosePath.
     */
    public ClosePath() {
        //ClosePathHelper.initHelper(this);
    }

    /**
     * {@inheritDoc}
     */
    /*@Override
    void addTo(NGPath pgPath) {
        pgPath.addClosePath();
    }*/

    /*
     * Note: This method MUST only be called via its accessor method.
     */
    /*private void doAddTo(Path2D path) {
        path.closePath();
    }*/

    /**
     * Returns a string representation of this {@code ClosePath} object.
     * @return a string representation of this {@code ClosePath} object.
     */
    @Override
    public String toString() {
        return "ClosePath";
    }
}
|
rightinyourwheelhouse/wheelhouse-website
|
src/cms/config/pages/blogPage.js
|
/* eslint-disable sort-keys */
import seo from '../seo';
export default {
file: 'src/data/pages/blogPage.json',
label: 'Blog',
name: 'blogPage',
identifier_field: 'page',
fields: [
{
label: 'Page',
name: 'page',
widget: 'hidden',
default: 'blog',
},
{
label: 'Title',
name: 'title',
widget: 'string',
},
seo,
],
};
|
tdiprima/code
|
recipes/Python/426543_oneliner_Multichop_data/recipe-426543.py
|
<reponame>tdiprima/code
>>> a,bite = "supercalifragalisticexpialidocious",3
>>> [(a[d:d+bite]) for d in range(len(a)-bite) if d%bite==0]
[('s', 'u', 'p'), ('e', 'r', 'c'), ('a', 'l', 'i'), ('f', 'r', 'a'), ('g', 'a', 'l'), ('i', 's', 't'), ('i', 'c', 'e'), ('x', 'p', 'i'), ('a', 'l', 'i'), ('d', 'o', 'c'), ('i', 'o', 'u')]
>>> # or on a list
>>> b =['sup', 'erc', 'ali', 'fra', 'gal', 'ist', 'ice', 'xpi', 'ali', 'doc', 'iou']
>>>
>>> [(b[d:d+bite]) for d in range(len(b)-bite) if d%bite==0]
[['sup', 'erc', 'ali'], ['fra', 'gal', 'ist'], ['ice', 'xpi', 'ali']]
|
bnanes/imagej
|
plugins/scripting/jruby/src/main/java/imagej/plugins/scripting/jruby/JRubyScriptEngine.java
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2014 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package imagej.plugins.scripting.jruby;
import imagej.script.AbstractScriptEngine;
import java.io.PrintStream;
import java.io.Reader;
import java.io.Writer;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import org.jruby.Ruby;
import org.jruby.RubyInstanceConfig;
import org.jruby.embed.io.ReaderInputStream;
import org.jruby.embed.io.WriterOutputStream;
/**
* A Ruby interpreter based on JRuby.
*
* @author <NAME>
*/
public class JRubyScriptEngine extends AbstractScriptEngine
{

	// The embedded JRuby runtime; one runtime instance per engine.
	private Ruby interpreter;

	public JRubyScriptEngine() {
		interpreter = Ruby.newInstance();
		engineScopeBindings = new JRubyBindings(interpreter);
	}

	/**
	 * Evaluates a Ruby snippet and returns the value of its last expression.
	 * Any interpreter error is wrapped in a {@link ScriptException}.
	 */
	@Override
	public Object eval(final String script) throws ScriptException {
		setup();
		try {
			return interpreter.evalScriptlet(script);
		}
		catch (final Exception e) {
			throw new ScriptException(e);
		}
	}

	/**
	 * Runs a Ruby script read from the given reader as a "main" script.
	 * NOTE(review): returns this engine instance rather than the script's
	 * result (runFromMain yields no value) — confirm callers do not rely on
	 * the return value.
	 */
	@Override
	public Object eval(final Reader reader) throws ScriptException {
		setup();
		try {
			final String filename = getString(ScriptEngine.FILENAME);
			interpreter.runFromMain(new ReaderInputStream(reader), filename == null ? "*none*" : filename);
			return this;
		}
		catch (final Exception e) {
			throw new ScriptException(e);
		}
	}

	/**
	 * Wires the script context's reader and writers (stdin/stdout/stderr)
	 * into the JRuby runtime configuration. Nulls are left at their defaults.
	 */
	protected void setup() {
		final RubyInstanceConfig config = interpreter.getInstanceConfig();
		final ScriptContext context = getContext();
		final Reader reader = context.getReader();
		if (reader != null) {
			config.setInput(new ReaderInputStream(reader));
		}
		final Writer writer = context.getWriter();
		if (writer != null) {
			config.setOutput(new PrintStream(new WriterOutputStream(writer)));
		}
		final Writer errorWriter = context.getErrorWriter();
		if (errorWriter != null) {
			config.setError(new PrintStream(new WriterOutputStream(errorWriter)));
		}
	}

	// Returns the string form of the named binding, or null when absent.
	private String getString(final String key) {
		Object result = get(key);
		return result == null ? null : result.toString();
	}
}
|
antiagainst/llvm-zorg
|
zorg/buildbot/commands/MakeCommand.py
|
<gh_stars>1-10
import re
from buildbot.process.properties import WithProperties
from buildbot.steps.shell import WarningCountingShellCommand
class MakeCommand(WarningCountingShellCommand):
    # Buildbot build step that assembles and runs a `make` command line from an
    # optional prefix command, extra options, targets, and a -j parallelism
    # level taken (in priority order) from the options themselves, the `jobs`
    # kwarg, or the `jobs` build property.
    # NOTE(review): uses Python 2 features (`basestring`, and `filter`
    # returning a list in start()) — consistent with legacy buildbot masters;
    # do not run under Python 3 without porting.

    @staticmethod
    def sanitize_kwargs(kwargs):
        # kwargs we could get and must not pass through
        # to the buildstep.RemoteShellCommand constructor.
        # Note: This is a workaround of the buildbot design issue,
        # thus should be removed once the original issue gets fixed.
        consume_kwargs = [
                         "jobs",
                         ]

        sanitized_kwargs = kwargs.copy()
        for k in consume_kwargs:
            if k in sanitized_kwargs.keys():
                del sanitized_kwargs[k]

        return sanitized_kwargs


    def __init__(self, prefixCommand=None, options=None, targets=None, **kwargs):
        # :param prefixCommand: list prepended before "make" (e.g. a wrapper tool)
        # :param options: extra make options; may already contain -j/-jN
        # :param targets: make targets appended at the end
        self.prefixCommand = prefixCommand
        self.targets = targets

        command = []
        if prefixCommand:
            command += prefixCommand
        command += ["make"]

        if options is None:
            self.options = list()
        else:
            self.options = list(options)

        # Matches a bare "-j" or "-j<N>" option.
        j_opt = re.compile(r'^-j$|^-j\d+$')

        # We can get jobs in the options. If so, we would use that.
        if not any(j_opt.search(opt) for opt in self.options if isinstance(opt, basestring)):
            # Otherwise let's see if we got it in the kwargs.
            if kwargs.get('jobs', None):
                self.options += ["-j", kwargs['jobs']]
            else:
                # Use the property if option was not explicitly
                # specified.
                command += [
                    WithProperties("%(jobs:+-j)s"),
                    WithProperties("%(jobs:-)s"),
                    ]

        if self.options:
            command += self.options

        if targets:
            command += targets

        # Remove here all the kwargs any of our LLVM buildbot command could consume.
        # Note: We will remove all the empty items from the command at start, as we
        # still didn't get yet WithProperties rendered.
        sanitized_kwargs = self.sanitize_kwargs(kwargs)

        sanitized_kwargs["command"] = command

        # And upcall to let the base class do its work
        WarningCountingShellCommand.__init__(self, **sanitized_kwargs)

        self.addFactoryArguments(prefixCommand=prefixCommand,
                                 options=self.options,
                                 targets=targets)

    def start(self):
        # Don't forget to remove all the empty items from the command,
        # which we could get because of WithProperties rendered as empty strings.
        self.command = filter(bool, self.command)

        # Then upcall.
        WarningCountingShellCommand.start(self)
|
AtrixTV/backoffice-administration
|
public/admin/js/vodSubtitles/config.js
|
import edit_button from '../edit_button.html';
/**
 * ng-admin entity configuration for VOD subtitles: list, deletion, creation
 * and edition views.
 * FIX: the deletion-view title previously read "Channel Streams" — a
 * copy-paste from another entity's config — and now reads "Vod Subtitles".
 */
export default function (nga, admin) {
    var vodsubtitles = admin.getEntity('vodsubtitles');

    // List view: parent VOD, subtitle title, and a truncated subtitle URL.
    vodsubtitles.listView()
        .title('<h4>Vod Subtitles <i class="fa fa-angle-right" aria-hidden="true"></i> List</h4>')
        .batchActions([])
        .fields([
            nga.field('vod_id', 'reference')
                .targetEntity(admin.getEntity('Vods'))
                .targetField(nga.field('title'))
                .isDetailLink(false)
                .label('Vod'),
            nga.field('title', 'string')
                .label('Title'),
            nga.field('subtitle_url', 'string')
                // Keep the list readable: show at most 25 characters of the URL.
                .map(function truncate(value) {
                    if (!value) {
                        return '';
                    }
                    return value.length > 25 ? value.substr(0, 25) + '...' : value;
                })
                .label('Subtitle Url'),
        ])
        .filters([
            nga.field('q')
                .label('')
                .template('<div class="input-group"><input type="text" ng-model="value" placeholder="Search" class="form-control"></input><span class="input-group-addon"><i class="glyphicon glyphicon-search"></i></span></div>')
                .pinned(true)])
        .listActions(['edit'])
        .exportFields([
            vodsubtitles.listView().fields(),
        ]);

    vodsubtitles.deletionView()
        .title('<h4>Vod Subtitles <i class="fa fa-angle-right" aria-hidden="true"></i> Remove <span style ="color:red;"> {{entry.values.title}}')
        .actions(['<ma-back-button entry="entry" entity="entity"></ma-back-button>'])

    // Creation view: after a successful save, jump back to the parent VOD's
    // edit page instead of this entity's own views.
    vodsubtitles.creationView()
        .title('<h4>Vod Subtitles <i class="fa fa-angle-right" aria-hidden="true"></i> Create: Vod Subtitles</h4>')
        .onSubmitSuccess(['progression', 'notification', '$state', 'entry', 'entity', function(progression, notification, $state, entry, entity) {
            progression.done();
            $state.go($state.get('edit'), { entity: 'Vods', id: entry.values.vod_id });
            return false;
        }])
        .fields([
            nga.field('vod_id', 'reference')
                .targetEntity(admin.getEntity('Vods'))
                .targetField(nga.field('title'))
                .attributes({ placeholder: 'Select Vod from dropdown list' })
                .validation({ required: true })
                .perPage(-1)
                .label('Vod'),
            nga.field('title')
                .attributes({ placeholder: 'Specify the subtitles title' })
                .validation({ required: true })
                .label('Title'),
            nga.field('subtitle_url' ,'file')
                .uploadInformation({ 'url': '/file-upload/single-file/subtitles/subtitle_url','apifilename': 'result'})
                .template('<div class="row">'+
                    '<div class="col-xs-12 col-sm-8"><ma-file-field field="field" value="entry.values.subtitle_url"></ma-file-field></div>'+
                    '<div class="col-xs-12 col-sm-1" style="display: none;"><img src="{{ entry.values.subtitle_url }}"/></div>'+
                    '</div>'+
                    '<div class="row"><small id="emailHelp" class="form-text text-muted">Please, make sure the subtitle file is correctly encoded</small></div>')
                // NOTE(review): label() is called twice; the last call ('URL')
                // wins — confirm 'File input *' was meant to be removed.
                .label('File input *')
                .validation({ required: true })
                .label('URL'),
            nga.field('template')
                .label('')
                .template(edit_button),
        ]);

    // Edition view reuses the creation-view fields.
    vodsubtitles.editionView()
        .title('<h4>Vod Subtitles <i class="fa fa-angle-right" aria-hidden="true"></i> Edit: {{ entry.values.vod_id }}</h4>')
        .actions(['list', 'delete'])
        .fields([
            vodsubtitles.creationView().fields(),
        ]);

    return vodsubtitles;
}
|
graemeaj/VRMiniGun
|
Plugins/VRExpansionPlugin/VRExpansionPlugin/Intermediate/Build/Win64/UE4/Inc/VRExpansionPlugin/GS_Physics.generated.h
|
<gh_stars>0
// Copyright Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "UObject/ObjectMacros.h"
#include "UObject/ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
#ifdef VREXPANSIONPLUGIN_GS_Physics_generated_h
#error "GS_Physics.generated.h already included, missing '#pragma once' in GS_Physics.h"
#endif
#define VREXPANSIONPLUGIN_GS_Physics_generated_h
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_SPARSE_DATA
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_RPC_WRAPPERS
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_RPC_WRAPPERS_NO_PURE_DECLS
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_INCLASS_NO_PURE_DECLS \
private: \
static void StaticRegisterNativesUGS_Physics(); \
friend struct Z_Construct_UClass_UGS_Physics_Statics; \
public: \
DECLARE_CLASS(UGS_Physics, UGS_Default, COMPILED_IN_FLAGS(0), CASTCLASS_None, TEXT("/Script/VRExpansionPlugin"), NO_API) \
DECLARE_SERIALIZER(UGS_Physics)
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_INCLASS \
private: \
static void StaticRegisterNativesUGS_Physics(); \
friend struct Z_Construct_UClass_UGS_Physics_Statics; \
public: \
DECLARE_CLASS(UGS_Physics, UGS_Default, COMPILED_IN_FLAGS(0), CASTCLASS_None, TEXT("/Script/VRExpansionPlugin"), NO_API) \
DECLARE_SERIALIZER(UGS_Physics)
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_STANDARD_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API UGS_Physics(const FObjectInitializer& ObjectInitializer = FObjectInitializer::Get()); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UGS_Physics) \
DECLARE_VTABLE_PTR_HELPER_CTOR(NO_API, UGS_Physics); \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UGS_Physics); \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UGS_Physics(UGS_Physics&&); \
NO_API UGS_Physics(const UGS_Physics&); \
public:
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_ENHANCED_CONSTRUCTORS \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UGS_Physics(UGS_Physics&&); \
NO_API UGS_Physics(const UGS_Physics&); \
public: \
DECLARE_VTABLE_PTR_HELPER_CTOR(NO_API, UGS_Physics); \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UGS_Physics); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UGS_Physics)
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_PRIVATE_PROPERTY_OFFSET
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_18_PROLOG
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_GENERATED_BODY_LEGACY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_PRIVATE_PROPERTY_OFFSET \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_SPARSE_DATA \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_RPC_WRAPPERS \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_INCLASS \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_STANDARD_CONSTRUCTORS \
public: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#define HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_GENERATED_BODY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_PRIVATE_PROPERTY_OFFSET \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_SPARSE_DATA \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_RPC_WRAPPERS_NO_PURE_DECLS \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_INCLASS_NO_PURE_DECLS \
HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h_21_ENHANCED_CONSTRUCTORS \
private: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
template<> VREXPANSIONPLUGIN_API UClass* StaticClass<class UGS_Physics>();
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID HostProject_Plugins_VRExpansionPlugin_Source_VRExpansionPlugin_Public_GripScripts_GS_Physics_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
|
steveohara/enocean4j
|
src/main/java/uk/co/_4ng/enocean/eep/EEPAttributeChangeListener.java
|
<filename>src/main/java/uk/co/_4ng/enocean/eep/EEPAttributeChangeListener.java
/*
* Copyright 2017 enocean4j development teams
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co._4ng.enocean.eep;
import uk.co._4ng.enocean.devices.EnOceanDevice;
import uk.co._4ng.enocean.eep.eep26.telegram.EEP26Telegram;
/**
* Define the set of methods by which a software entity (class) can be notified
* about the change of a "monitored" EEP26 attribute.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public interface EEPAttributeChangeListener {
    /**
     * Notifies a listener about the change of the given attribute, for the given channel.
     *
     * @param channelId The id of the channel involved by the attribute change.
     * @param telegram  The originating telegram
     * @param attribute The changed attribute (to ease handling).
     * @param device    The device on which the attribute change occurred.
     */
    void handleAttributeChange(int channelId, EEP26Telegram telegram, EEPAttribute<?> attribute, EnOceanDevice device);
}
|
scorpio975/d-repr
|
pydrepr/drepr/models/parse_v1/path_parser.py
|
<reponame>scorpio975/d-repr
import re
from abc import ABC, abstractmethod
from typing import List, Union
from drepr.utils.validator import InputError
from ..path import Path, IndexExpr, RangeExpr, WildcardExpr, Expr
from ..resource import Resource
class PathParser(ABC):
    """Abstract base class for parsing a raw path specification into a ``Path``."""

    @abstractmethod
    def parse(self, resource: Resource, path: Union[str, list], parse_trace: str) -> Path:
        """Parse ``path`` (string or list form) belonging to ``resource`` into a ``Path``.

        ``parse_trace`` is a human-readable breadcrumb prepended to error messages.
        """
        pass

    # noinspection PyMethodMayBeStatic
    def get_resource(self, resources: List[Resource], resource_id: str, trace: str) -> Resource:
        """Return the resource whose id equals ``resource_id``; raise ``InputError`` if absent."""
        for res in resources:
            if res.id == resource_id:
                return res
        raise InputError(f"{trace}\nERROR: Refer to path of an nonexistent resource: {resource_id}")
class PathParserV1(PathParser):
    """
    A path can either be a JSONPath or our list path

    1. If the path is a JSONPath, then it is a string startswith `$`. We only support the
       following types of step: range, index, list of index, and wildcard. However, wildcard
       is only used for selecting all values (or names) of an object.
    2. If the path is a list, each step is an index (int or string), a literal range such as
       ``0..10:2``, a range whose start/end/step may be ``${expr}`` expressions, or a
       ``${expr}`` index.
    """
    # literal range step: "<start>..<end>[:<step>]"
    REG_SRANGE = re.compile(r"^(\d+)?\.\.(-?\d+)?(?::(\d+))?$")
    # NOTE(review): unused inside this class; kept for backward compatibility.
    REG_SINDEX = re.compile(r"^(?:\$\{([^}]+)})|(\d+)|(.*)$")
    # Range step where start/end/step may each be a number or a ${expr}. Groups come in
    # (number, expression-content) pairs: 1/2 = start, 3/4 = end, 5/6 = step.
    # FIX: the end alternative previously accepted only negative numbers (-\d+), so a step
    # such as "${i}..5" matched neither range regex and silently became a string index.
    REG_SRANGE_EXPR = re.compile(
        r"^(?:(\d+)|(?:\$\{([^}]+)}))?\.\.(?:(-?\d+)|(?:\$\{([^}]+)}))?(?::(\d+)|(?:\$\{([^}]+)}))?$"
    )
    REG_JPATH_BRACKET = re.compile(r"(?:\[(-?\d+)?\:(?:(-?\d+)(?:\:(-?\d+))?)?\])|(?:\[(-?\d+)\])|(?:\['([^']+)'\])")
    REG_JPATH_DOT = re.compile(r"\.((?:(?!\.|\[).)+)")

    def parse(self, _resource: Resource, path: Union[str, list], parse_trace: str) -> Path:
        """Dispatch to the JSONPath or the list-path parser depending on ``path``'s type."""
        if isinstance(path, str):
            return self.parse_jsonpath(path, parse_trace)
        if isinstance(path, list):
            return self.parse_custom_path(path, parse_trace)
        raise InputError(f"{parse_trace}\nERROR: the path must either be a "
                         f"string (JSONPath) or a list of steps. Get {type(path)} instead")

    def parse_jsonpath(self, jpath: str, parse_trace: str) -> Path:
        """Parse a JSONPath string (starting with ``$``) into a ``Path``."""
        if not jpath.startswith("$"):
            raise InputError(f"{parse_trace}\nERROR: invalid json path. The path must start with `$`. "
                             f"Get: {jpath}")

        jpath = jpath[1:]
        steps = []
        parsing_pos = 1  # 1-based position in the original string (after the leading `$`)
        while len(jpath) > 0:
            if jpath.startswith("["):
                m = self.REG_JPATH_BRACKET.match(jpath)
                if m is None:
                    raise InputError(
                        f"{parse_trace}\nERROR: invalid json path, error while parsing bracket at position {parsing_pos}")

                jpath = jpath[m.span()[-1]:]
                parsing_pos += m.span()[-1]  # m.span()[0] is always 0

                if m.group(5) is not None:
                    # ['property'] — string index
                    steps.append(IndexExpr(m.group(5)))
                elif m.group(4) is not None:
                    # [n] — single numeric index
                    steps.append(IndexExpr(int(m.group(4))))
                else:
                    # [start:end:step] slice; missing start defaults to 0, missing step to 1
                    steps.append(RangeExpr(
                        int(m.group(1) or "0"),
                        int(m.group(2)) if m.group(2) is not None else None,
                        int(m.group(3) or "1")))
            elif jpath.startswith(".*~"):
                # *~ selects property names
                steps.append(WildcardExpr.Names)
                jpath = jpath[3:]
                parsing_pos += 3
            elif jpath.startswith(".*"):
                # * selects property values
                steps.append(WildcardExpr.Values)
                jpath = jpath[2:]
                parsing_pos += 2
            else:
                m = self.REG_JPATH_DOT.match(jpath)
                if m is None:
                    raise InputError(
                        f"{parse_trace}\nERROR: invalid json path, error while parsing step at position {parsing_pos}")

                jpath = jpath[m.span()[-1]:]
                parsing_pos += m.span()[-1]  # m.span()[0] is always 0

                # after a dot, it can either be a number or a string
                if m.group(1).isdigit():
                    steps.append(IndexExpr(int(m.group(1))))
                else:
                    steps.append(IndexExpr(m.group(1)))
        return Path(steps)

    def parse_custom_path(self, path: List[str], parse_trace: str) -> Path:
        """Parse the list form of a path into a ``Path`` of Index/Range expressions."""
        steps = []
        for i, step in enumerate(path):
            trace = f"Parsing step {i} ({step})"
            if isinstance(step, str):
                m = self.REG_SRANGE.match(step)
                if m is not None:
                    steps.append(RangeExpr(
                        int(m.group(1) or '0'),
                        int(m.group(2)) if m.group(2) is not None else None,
                        int(m.group(3) or '1')))
                    continue

                m = self.REG_SRANGE_EXPR.match(step)
                if m is not None:
                    # BUG FIX: the previous code read m.group(2) for the end AND step slots
                    # and tested the captured text for a "${" prefix. The regex captures the
                    # expression *content* (braces excluded) in groups 2/4/6, so expression
                    # ranges were mis-parsed (end/step taken from the start capture, and
                    # `[2:-1]` stripped two valid characters from the expression).
                    start_num, start_expr, end_num, end_expr, step_num, step_expr = m.groups()
                    steps.append(RangeExpr(
                        Expr(start_expr) if start_expr is not None
                        else (int(start_num) if start_num is not None else 0),
                        Expr(end_expr) if end_expr is not None
                        else (int(end_num) if end_num is not None else None),
                        Expr(step_expr) if step_expr is not None
                        else (int(step_num) if step_num is not None else 1)))
                    continue

                if step.startswith("${"):
                    # ${expr} index step; strip the "${" prefix and "}" suffix
                    steps.append(IndexExpr(Expr(step[2:-1])))
                else:
                    steps.append(IndexExpr(step))
            elif isinstance(step, int):
                steps.append(IndexExpr(step))
            else:
                raise InputError(
                    f"{parse_trace}\n{trace}\nERROR: step must either be string or number. Get {type(step)} instead")
        return Path(steps)
|
mwoehlke-kitware/kwiver
|
vital/algo/compute_depth.cxx
|
// This file is part of KWIVER, and is distributed under the
// OSI-approved BSD 3-Clause License. See top-level LICENSE file or
// https://github.com/Kitware/kwiver/blob/master/LICENSE for details.
#include <vital/algo/compute_depth.h>
#include <vital/algo/algorithm.txx>
namespace kwiver {
namespace vital {
namespace algo {
// Constructor: attach the implementation-specific logger channel.
compute_depth
::compute_depth()
{
  attach_logger("algo.compute_depth");
}

/// Set a callback function to report intermediate progress
void
compute_depth
::set_callback(callback_t cb)
{
  // Store the callback; concrete implementations may invoke it to report progress.
  this->m_callback = cb;
}

/// Helper for computing without depth uncertainty pointer
kwiver::vital::image_container_sptr
compute_depth
::compute(std::vector<kwiver::vital::image_container_sptr> const& frames,
          std::vector<kwiver::vital::camera_perspective_sptr> const& cameras,
          double depth_min, double depth_max,
          unsigned int reference_frame,
          vital::bounding_box<int> const& roi,
          std::vector<kwiver::vital::image_container_sptr> const& masks) const
{
  // Delegate to the full overload, passing a null uncertainty output image.
  kwiver::vital::image_container_sptr depth_uncertainty = nullptr;
  return compute(frames, cameras, depth_min, depth_max,
                 reference_frame, roi, depth_uncertainty, masks);
}
} // end namespace algo
} // end namespace vital
} // end namespace kwiver
/// \cond DoxygenSuppress
INSTANTIATE_ALGORITHM_DEF(kwiver::vital::algo::compute_depth);
/// \endcond
|
Vagrancy/something.apk
|
Something/src/main/java/net/fastfourier/something/data/SomeDatabase.java
|
<reponame>Vagrancy/something.apk
package net.fastfourier.something.data;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import com.salvadordalvik.fastlibrary.data.FastDatabase;
/**
* Created by matthewshepard on 1/21/14.
*/
public class SomeDatabase extends FastDatabase {
    // Schema version; bump on any table/view change and migrate in onUpgrade().
    public static final int DB_VERSION = 4;
    // Table names.
    public static final String TABLE_SAVED_DRAFT = "saved_reply";
    public static final String TABLE_FORUM = "forum";
    public static final String TABLE_STARRED_FORUM = "starred_forum";
    // View names (created by createViews()).
    public static final String VIEW_FORUMS = "all_forums";
    public static final String VIEW_STARRED_FORUMS = "starred_forums";
    // Process-wide singleton, created once by init() and served by getDatabase().
    private static SomeDatabase db;

    private SomeDatabase(Context context) {
        super(context, "something.db", DB_VERSION);
    }

    /** Creates the singleton using the application context; call once before getDatabase(). */
    public synchronized static void init(Context context){
        db = new SomeDatabase(context.getApplicationContext());
    }

    /** Returns the singleton created by init(); null if init() has not been called. */
    public static synchronized SomeDatabase getDatabase(){
        return db;
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        // Tables first, then the views that read from them.
        createForumTable(db);
        createStarredForumTable(db);
        createSavedDraftTable(db);
        createViews(db);
    }

    /** Creates read-only views joining forums with their starred flag. */
    private void createViews(SQLiteDatabase db){
        db.execSQL("create view all_forums as " +
                "select forum.forum_id as forum_id, forum_name, parent_forum_id, category, forum_starred " +
                "from forum left join starred_forum using (forum_id) " +
                "order by forum_index");
        // NOTE(review): the comma join with USING below is SQLite-specific syntax that
        // behaves as an inner join here — confirm before porting to another engine.
        db.execSQL("create view starred_forums as " +
                "select forum.forum_id as forum_id, forum_name, parent_forum_id, category, forum_starred " +
                "from forum, starred_forum using (forum_id) " +
                "order by forum_index");
    }

    private void dropViews(SQLiteDatabase db){
        db.execSQL("drop view if exists all_forums");
        db.execSQL("drop view if exists starred_forums");
    }

    private void dropTables(SQLiteDatabase db){
        db.execSQL("drop table if exists forum");
        db.execSQL("drop table if exists starred_forum");
        db.execSQL("drop table if exists saved_reply");
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        //Place update statements here, defaults to wipe/recreate tables
        //Allow case statements to fall through so older DB versions will get upgraded in one call
        switch (oldVersion){
            //DB_VERSION notes:
            //3: first public release
            //4: Added saved_reply table
            case 3:
                //4: Added saved_reply table
                createSavedDraftTable(db);
            case 4:
                //For future use
                //intentionally fall through
            case 5:
                //For future use
                //intentionally fall through
            case 6:
                //For future use
                //intentionally fall through
            case 7:
                //For future use
                //intentionally fall through
            case 8:
                //For future use
                //intentionally fall through
            case 9:
                //For future use
                //intentionally fall through
                break;
            default:
                // Unknown (pre-3) version: wipe and recreate everything.
                dropViews(db);
                dropTables(db);
                onCreate(db);
                break;
        }
    }

    /** Forum directory; forum_index preserves the site's display ordering. */
    private void createForumTable(SQLiteDatabase db){
        db.execSQL("create table forum (" +
                "forum_id INTEGER PRIMARY KEY," +
                "forum_name TEXT NOT NULL," +
                "parent_forum_id INTEGER DEFAULT 0," +
                "category TEXT," +
                "forum_index INTEGER NOT NULL" +
                ")");
    }

    /** Presence of a row marks a forum as starred (flag defaults to 1). */
    private void createStarredForumTable(SQLiteDatabase db){
        db.execSQL("create table starred_forum (" +
                "forum_id INTEGER PRIMARY KEY," +
                "forum_starred INTEGER DEFAULT 1" +
                ")");
    }

    /** Draft replies saved locally, including the form tokens needed to resubmit. */
    private void createSavedDraftTable(SQLiteDatabase db){
        db.execSQL("create table saved_reply (" +
                "reply_id INTEGER PRIMARY KEY," +
                "reply_thread_id INTEGER," +
                "reply_post_id INTEGER," +
                "reply_type INTEGER NOT NULL," +
                "reply_original_content TEXT," +
                "reply_user_content TEXT," +
                "reply_formcookie TEXT," +
                "reply_formkey TEXT," +
                "reply_title TEXT," +
                "reply_username TEXT," +
                "reply_signature INTEGER," +
                "reply_bookmark INTEGER," +
                "reply_emotes INTEGER," +
                "reply_saved_timestamp DATETIME DEFAULT CURRENT_TIMESTAMP" +
                ")");
    }
}
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
openprocurement/auctions/core/tests/base.py
|
<reponame>EBRD-ProzorroSale/openprocurement.auctions.core
# -*- coding: utf-8 -*-
import webtest
from types import FunctionType
from uuid import uuid4
from copy import deepcopy
from datetime import datetime, timedelta
from openprocurement.api.constants import VERSION
from openprocurement.api.tests.base import (
JSON_RENDERER_ERROR, # noqa forwarded import
)
from openprocurement.api.tests.blanks.json_data import (
test_document_data, # noqa forwarded import
test_item_data # noqa forwarded import
)
from openprocurement.auctions.core.utils import (
apply_data_patch,
connection_mock_config,
SESSION
)
from openprocurement.api.tests.base import MOCK_CONFIG as BASE_MOCK_CONFIG
from openprocurement.api.tests.fixtures.mocks import MigrationResourcesDTO_mock # noqa import fowrard
from openprocurement.auctions.core.tests.fixtures.config import PARTIAL_MOCK_CONFIG
from openprocurement.api.tests.base import BaseResourceWebTest, BaseWebTest as CoreWebTest
now = datetime.now()
test_organization = {
"name": u"Державне управління справами",
"identifier": {
"scheme": u"UA-EDR",
"id": u"00037256",
"uri": u"http://www.dus.gov.ua/"
},
"address": {
"countryName": u"Україна",
"postalCode": u"01220",
"region": u"м. Київ",
"locality": u"м. Київ",
"streetAddress": u"вул. Банкова, 11, корпус 1"
},
"contactPoint": {
"name": u"Державне управління справами",
"telephone": u"0440000000"
}
}
test_procuringEntity = test_organization.copy()
test_auction_data = {
"title": u"футляри до державних нагород",
"dgfID": u"219560",
"dgfDecisionDate": u"2016-11-17",
"dgfDecisionID": u"219560",
"tenderAttempts": 1,
"procuringEntity": test_procuringEntity,
"value": {
"amount": 100,
"currency": u"UAH"
},
"minimalStep": {
"amount": 35,
"currency": u"UAH"
},
"items": [
{
"description": u"Земля для військовослужбовців",
"classification": {
"scheme": u"CAV",
"id": u"06000000-2",
"description": u"Земельні ділянки"
},
"unit": {
"name": u"item",
"code": u"44617100-9"
},
"quantity": 5,
"address": {
"countryName": u"Україна",
"postalCode": "79000",
"region": u"м. Київ",
"locality": u"м. Київ",
"streetAddress": u"вул. Банкова 1"
}
}
],
"auctionPeriod": {
"startDate": (now.date() + timedelta(days=14)).isoformat()
}
}
base_test_bids = [
{
"tenderers": [
test_organization
],
"value": {
"amount": 469,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
},
{
"tenderers": [
test_organization
],
"value": {
"amount": 479,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
def snitch(func):
    """
    This method is used to add test function to TestCase classes.
    snitch method gets test function and returns a copy of this function
    with 'test_' prefix at the beginning (to identify this function as
    an executable test).
    It provides a way to implement a storage (python module that
    contains non-executable test functions) for tests and to include
    different set of functions into different test cases.
    """
    # NOTE: func_code / func_globals / func_name / func_closure are the Python 2
    # attribute names (renamed to __code__ etc. in Python 3) — this helper is py2-only.
    return FunctionType(func.func_code, func.func_globals,
                        'test_' + func.func_name, closure=func.func_closure)
class PrefixedRequestClass(webtest.app.TestRequest):
    """TestRequest that transparently prefixes every path with the API version."""

    @classmethod
    def blank(cls, path, *args, **kwargs):
        """Build a request for ``path`` rooted under ``/api/<VERSION>``."""
        prefixed_path = '/api/{0}{1}'.format(VERSION, path)
        return webtest.app.TestRequest.blank(prefixed_path, *args, **kwargs)
MOCK_CONFIG = connection_mock_config(PARTIAL_MOCK_CONFIG, ('plugins','api', 'plugins'),
BASE_MOCK_CONFIG)
class BaseWebTest(CoreWebTest):
    """Base web test wired to the auctions mock configuration.

    Also sets up the document-service stub when ``docservice`` is enabled.
    """
    mock_config = MOCK_CONFIG

    def setUp(self):
        super(BaseWebTest, self).setUp()
        if self.docservice:
            self.setUpDS()
class BaseAuctionWebTest(BaseResourceWebTest):
    """Base test case that creates an auction in ``setUp`` from class-level fixtures."""
    # Subclasses override these fixtures to control the auction created in setUp().
    initial_data = None      # auction payload posted to /auctions
    initial_status = None    # status the auction is moved to after creation
    initial_bids = None      # bids to register during setUp()
    initial_lots = None      # lots to attach to the auction
    docservice = False       # whether to stub out the document service
    mock_config = MOCK_CONFIG

    def set_status(self, status, extra=None):
        """Force the auction into ``status`` by patching its periods directly in the db.

        Period dates are shifted relative to ``now`` so that the chronograph-visible
        state is consistent with the requested status. ``extra`` is merged into the
        patch last, so it can override anything set here. Returns the GET response
        for the auction after the patch.
        """
        data = {'status': status}
        if status == 'active.enquiries':
            data.update({
                "enquiryPeriod": {
                    "startDate": (now).isoformat(),
                    "endDate": (now + timedelta(days=7)).isoformat()
                },
                "tenderPeriod": {
                    "startDate": (now + timedelta(days=7)).isoformat(),
                    "endDate": (now + timedelta(days=14)).isoformat()
                }
            })
        elif status == 'active.tendering':
            data.update({
                "enquiryPeriod": {
                    "startDate": (now - timedelta(days=10)).isoformat(),
                    "endDate": (now).isoformat()
                },
                "tenderPeriod": {
                    "startDate": (now).isoformat(),
                    "endDate": (now + timedelta(days=7)).isoformat()
                }
            })
        elif status == 'active.auction':
            data.update({
                "enquiryPeriod": {
                    "startDate": (now - timedelta(days=14)).isoformat(),
                    "endDate": (now - timedelta(days=7)).isoformat()
                },
                "tenderPeriod": {
                    "startDate": (now - timedelta(days=7)).isoformat(),
                    "endDate": (now).isoformat()
                },
                "auctionPeriod": {
                    "startDate": (now).isoformat()
                }
            })
            if self.initial_lots:
                data.update({
                    'lots': [
                        {
                            "auctionPeriod": {
                                "startDate": (now).isoformat()
                            }
                        }
                        for _ in self.initial_lots
                    ]
                })
        elif status == 'active.qualification':
            data.update({
                "enquiryPeriod": {
                    "startDate": (now - timedelta(days=15)).isoformat(),
                    "endDate": (now - timedelta(days=8)).isoformat()
                },
                "tenderPeriod": {
                    "startDate": (now - timedelta(days=8)).isoformat(),
                    "endDate": (now - timedelta(days=1)).isoformat()
                },
                "auctionPeriod": {
                    "startDate": (now - timedelta(days=1)).isoformat(),
                    "endDate": (now).isoformat()
                },
                "awardPeriod": {
                    "startDate": (now).isoformat()
                }
            })
            if self.initial_lots:
                data.update({
                    'lots': [
                        {
                            "auctionPeriod": {
                                "startDate": (now - timedelta(days=1)).isoformat(),
                                "endDate": (now).isoformat()
                            }
                        }
                        for i in self.initial_lots
                    ]
                })
        elif status == 'active.awarded':
            data.update({
                "enquiryPeriod": {
                    "startDate": (now - timedelta(days=15)).isoformat(),
                    "endDate": (now - timedelta(days=8)).isoformat()
                },
                "tenderPeriod": {
                    "startDate": (now - timedelta(days=8)).isoformat(),
                    "endDate": (now - timedelta(days=1)).isoformat()
                },
                "auctionPeriod": {
                    "startDate": (now - timedelta(days=1)).isoformat(),
                    "endDate": (now).isoformat()
                },
                "awardPeriod": {
                    "startDate": (now).isoformat(),
                    "endDate": (now).isoformat()
                }
            })
            if self.initial_lots:
                data.update({
                    'lots': [
                        {
                            "auctionPeriod": {
                                "startDate": (now - timedelta(days=1)).isoformat(),
                                "endDate": (now).isoformat()
                            }
                        }
                        for i in self.initial_lots
                    ]
                })
        elif status == 'complete':
            data.update({
                "enquiryPeriod": {
                    "startDate": (now - timedelta(days=25)).isoformat(),
                    "endDate": (now - timedelta(days=18)).isoformat()
                },
                "tenderPeriod": {
                    "startDate": (now - timedelta(days=18)).isoformat(),
                    "endDate": (now - timedelta(days=11)).isoformat()
                },
                "auctionPeriod": {
                    "startDate": (now - timedelta(days=11)).isoformat(),
                    "endDate": (now - timedelta(days=10)).isoformat()
                },
                "awardPeriod": {
                    "startDate": (now - timedelta(days=10)).isoformat(),
                    "endDate": (now - timedelta(days=10)).isoformat()
                }
            })
            if self.initial_lots:
                data.update({
                    'lots': [
                        {
                            "auctionPeriod": {
                                "startDate": (now - timedelta(days=11)).isoformat(),
                                "endDate": (now - timedelta(days=10)).isoformat()
                            }
                        }
                        for i in self.initial_lots
                    ]
                })
        if extra:
            data.update(extra)
        # Write the patch straight into couch, bypassing the API's state machine.
        auction = self.db.get(self.auction_id)
        auction.update(apply_data_patch(auction, data))
        self.db.save(auction)
        # Re-read through the API as the chronograph user, then restore credentials.
        authorization = self.app.authorization
        self.app.authorization = ('Basic', ('chronograph', ''))
        #response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.app.authorization = authorization
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        return response

    def setUp(self):
        super(BaseAuctionWebTest, self).setUp()
        self.create_auction()
        if self.docservice:
            self.setUpDS()

    def set_auction_mode(self, auction_id, mode):
        """Patch the auction's mode as the administrator user and restore credentials."""
        current_auth = self.app.authorization
        self.app.authorization = ('Basic', ('administrator', ''))
        response = self.app.patch_json('/auctions/{}'.format(auction_id),
                                       {'data': {'mode': mode}})
        self.app.authorization = current_auth
        return response

    def use_transfer(self, transfer, auction_id, origin_transfer):
        """Apply ``transfer`` to the auction's ownership and return the transfer's JSON."""
        req_data = {"data": {"id": transfer['data']['id'],
                             'transfer': origin_transfer}}
        self.app.post_json('/auctions/{}/ownership'.format(auction_id), req_data)
        response = self.app.get('/transfers/{}'.format(transfer['data']['id']))
        return response.json

    def create_transfer(self):
        """Create an empty transfer object and return its JSON representation."""
        test_transfer_data = {}
        response = self.app.post_json('/transfers', {"data": test_transfer_data})
        return response.json

    def get_auction(self, auction_id):
        """Return the auction's JSON via the API."""
        response = self.app.get('/auctions/{}'.format(auction_id))
        return response.json

    @staticmethod
    def add_lots_to_auction(auction, need_lots=None):
        """Attach copies of ``need_lots`` (with fresh ids) to ``auction`` in place,
        distributing the auction's items across the lots round-robin."""
        lots = []
        for i in need_lots:
            lot = deepcopy(i)
            lot['id'] = uuid4().hex
            lots.append(lot)
        auction['lots'] = lots
        for i, item in enumerate(auction['items']):
            item['relatedLot'] = lots[i % len(lots)]['id']

    def create_auction_unit(self, auth=None, data=None, lots=None, status=None):
        """POST an auction (optionally under ``auth`` / with ``lots`` / expecting HTTP
        ``status``) and return the raw response JSON; credentials are restored."""
        auth_switch = False
        if auth:
            current_auth = self.app.authorization
            self.app.authorization = auth
            auth_switch = True
        if not data:
            data = deepcopy(self.initial_data)
        if lots:
            self.add_lots_to_auction(data, lots)
        elif self.initial_lots:
            self.add_lots_to_auction(data, self.initial_lots)
        if status:
            response = self.app.post_json('/auctions', {'data': data}, status=status)
        else:
            response = self.app.post_json('/auctions', {'data': data})
        auction = response.json
        if auth_switch:
            self.app.authorization = current_auth
        return auction

    def create_auction(self):
        """Create the fixture auction, register ``initial_bids`` (converted to lot
        values when lots are present) and move it to ``initial_status``."""
        data = deepcopy(self.initial_data)
        if self.initial_lots:
            lots = []
            for i in self.initial_lots:
                lot = deepcopy(i)
                lot['id'] = uuid4().hex
                lots.append(lot)
            data['lots'] = self.initial_lots = lots
            for i, item in enumerate(data['items']):
                item['relatedLot'] = lots[i % len(lots)]['id']
        response = self.app.post_json('/auctions', {'data': data})
        auction = response.json['data']
        self.auction = auction
        self.auction_token = response.json['access']['token']
        self.auction_transfer = response.json['access']['transfer']
        self.auction_id = auction['id']
        status = auction['status']
        if self.initial_bids:
            self.initial_bids_tokens = {}
            # Bids may only be posted while tendering is active.
            response = self.set_status('active.tendering')
            status = response.json['data']['status']
            bids = []
            for i in self.initial_bids:
                if self.initial_lots:
                    # Spread the single bid value across all lots as lotValues.
                    i = i.copy()
                    value = i.pop('value')
                    i['lotValues'] = [
                        {
                            'value': value,
                            'relatedLot': l['id'],
                        }
                        for l in self.initial_lots
                    ]
                response = self.app.post_json('/auctions/{}/bids'.format(self.auction_id), {'data': i})
                self.assertEqual(response.status, '201 Created')
                bids.append(response.json['data'])
                self.initial_bids_tokens[response.json['data']['id']] = response.json['access']['token']
            self.initial_bids = bids
        if self.initial_status != status:
            self.set_status(self.initial_status)

    def tearDownDS(self):
        # Restore the real session request saved by setUpDS().
        SESSION.request = self._srequest

    def tearDown(self):
        if self.docservice:
            self.tearDownDS()
        del self.db[self.auction_id]
        super(BaseAuctionWebTest, self).tearDown()
|
kanzure/eth-testrpc
|
tests/endpoints/test_eth_sendTransaction.py
|
from testrpc.client.utils import force_text
def test_eth_sendTransaction(rpc_client, accounts, hex_accounts):
    """Send a transaction with integer-typed fields and verify it round-trips
    through eth_getTransactionByHash with hex-encoded values."""
    result = rpc_client(
        method="eth_sendTransaction",
        params=[{
            "from": accounts[0],
            "to": accounts[1],
            "value": 1234,
            "data": "0x1234",
            "gas": 100000,
            "gasPrice": 4321,
        }],
    )
    # 0x-prefixed 32-byte transaction hash -> 66 characters.
    assert len(result) == 66
    txn = rpc_client(
        method="eth_getTransactionByHash",
        params=[result],
    )
    assert txn['from'] == force_text(hex_accounts[0])
    assert txn['to'] == force_text(hex_accounts[1])
    assert txn['value'] == hex(1234)
    assert txn['input'] == '0x1234'
    assert txn['gas'] == hex(100000)
    assert txn['gasPrice'] == hex(4321)
def test_eth_sendTransaction_with_hex_values(rpc_client, accounts, hex_accounts):
    """Same as test_eth_sendTransaction, but the numeric request fields are
    already hex strings; the stored transaction must be identical."""
    result = rpc_client(
        method="eth_sendTransaction",
        params=[{
            "from": accounts[0],
            "to": accounts[1],
            "value": hex(1234),
            "data": "0x1234",
            "gas": hex(100000),
            "gasPrice": hex(4321),
        }],
    )
    # 0x-prefixed 32-byte transaction hash -> 66 characters.
    assert len(result) == 66
    txn = rpc_client(
        method="eth_getTransactionByHash",
        params=[result],
    )
    assert txn['from'] == force_text(hex_accounts[0])
    assert txn['to'] == force_text(hex_accounts[1])
    assert txn['value'] == hex(1234)
    assert txn['input'] == '0x1234'
    assert txn['gas'] == hex(100000)
    assert txn['gasPrice'] == hex(4321)
|
KaterinaLupacheva/habit-tracker-pwa
|
src/main/java/io/ramonak/habitTracker/UI/views/LoginView.java
|
<reponame>KaterinaLupacheva/habit-tracker-pwa
package io.ramonak.habitTracker.UI.views;
import com.vaadin.flow.component.Component;
import com.vaadin.flow.component.Tag;
import com.vaadin.flow.component.UI;
import com.vaadin.flow.component.button.Button;
import com.vaadin.flow.component.login.LoginForm;
import com.vaadin.flow.component.login.LoginOverlay;
import com.vaadin.flow.component.orderedlayout.VerticalLayout;
import com.vaadin.flow.router.BeforeEnterEvent;
import com.vaadin.flow.router.BeforeEnterObserver;
import com.vaadin.flow.router.PageTitle;
import com.vaadin.flow.router.Route;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.context.SecurityContextHolder;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@Route(value = LoginView.ROUTE)
@PageTitle("Login")
public class LoginView extends VerticalLayout {
    public static final String ROUTE = "login";

    /**
     * Builds the login page: a LoginForm posting to Spring Security's "/login"
     * endpoint, plus a button that navigates to the registration view.
     * The commented-out blocks below are an alternative manual-authentication
     * flow retained by the author for reference.
     */
    public LoginView() {
        LoginForm login = new LoginForm();
        login.setAction("login"); // delegate authentication to Spring Security
//        getElement().appendChild(login.getElement());
//        login.addLoginListener(e -> {
//            UI.getCurrent().navigate("habit");
//        });
        Button register = new Button("Register", e-> UI.getCurrent().navigate("register"));
        add(login, register);
        setAlignItems(Alignment.CENTER);
//        login.addLoginListener(e -> {
//            try {
//                // try to authenticate with given credentials, should always return not null or throw an {@link AuthenticationException}
//                final Authentication authentication = authenticationManager
//                        .authenticate(new UsernamePasswordAuthenticationToken(e.getUsername(), e.getPassword()));
//
//                // if authentication was successful we will update the security context and redirect to the page requested first
//                SecurityContextHolder.getContext().setAuthentication(authentication);
////                login.close();
//                UI.getCurrent().navigate(requestCache.resolveRedirectUrl());
//
//            } catch (AuthenticationException ex) {
//                // show default error message
//                // Note: You should not expose any detailed information here like "username is known but password is wrong"
//                // as it weakens security.
//                login.setError(true);
//            }
//        });
    }

//    @Override
//    public void beforeEnter(BeforeEnterEvent event) {
//        // inform the user about an authentication error
//        // (yes, the API for resolving query parameters is annoying...)
//        if(!event.getLocation().getQueryParameters().getParameters().getOrDefault("error", Collections.emptyList()).isEmpty()) {
//            login.setError(true);
//        }
//    }
}
|
grain-2/gui
|
chap11/src/book/demo/TableViewerCompositeDemo.java
|
package book.demo;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.layout.TreeColumnLayout;
import org.eclipse.swt.widgets.Table;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.layout.TableColumnLayout;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.jface.viewers.ColumnPixelData;
/**
 * Minimal SWT/JFace demo showing a TableViewer inside a Composite that uses
 * TableColumnLayout to size its single column.
 */
public class TableViewerCompositeDemo {
    protected Shell shell;
    private Table table;
    private TableColumn tblclmnNewColumn;
    private TableViewerColumn tableViewerColumn;

    /**
     * Launch the application.
     * @param args
     */
    public static void main(String[] args) {
        try {
            TableViewerCompositeDemo window = new TableViewerCompositeDemo();
            window.open();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Open the window.
     */
    public void open() {
        Display display = Display.getDefault();
        createContents();
        shell.open();
        shell.layout();
        // Standard SWT event loop: dispatch pending events, otherwise sleep.
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) {
                display.sleep();
            }
        }
    }

    /**
     * Create contents of the window.
     */
    protected void createContents() {
        shell = new Shell();
        shell.setSize(450, 300);
        shell.setText("SWT Application");
        shell.setLayout(new FillLayout(SWT.HORIZONTAL));

        // The composite owns the TableColumnLayout that sizes the table's columns.
        Composite composite = new Composite(shell, SWT.NONE);
        TableColumnLayout tcl_composite = new TableColumnLayout();
        composite.setLayout(tcl_composite);

        TableViewer tableViewer = new TableViewer(composite, SWT.BORDER | SWT.FULL_SELECTION);
        tableViewer.setColumnProperties(new String[] {"用户名", "密码", "身份"});
        table = tableViewer.getTable();
        table.setHeaderVisible(true);
        table.setLinesVisible(true);

        // Single resizable column, 150px wide, grabbing excess space.
        tableViewerColumn = new TableViewerColumn(tableViewer, SWT.CENTER);
        tblclmnNewColumn = tableViewerColumn.getColumn();
        tcl_composite.setColumnData(tblclmnNewColumn, new ColumnPixelData(150, true, true));
        tblclmnNewColumn.setText("New Column");
    }
}
|
mgusevstudio/EldritchHorrorAssistant
|
app/src/main/java/ru/mgusev/eldritchhorror/adapter/FaqAdapter.java
|
<filename>app/src/main/java/ru/mgusev/eldritchhorror/adapter/FaqAdapter.java
package ru.mgusev.eldritchhorror.adapter;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.support.v7.util.DiffUtil;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.text.Html;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.method.LinkMovementMethod;
import android.text.style.ImageSpan;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.ButterKnife;
import ru.mgusev.eldritchhorror.R;
import ru.mgusev.eldritchhorror.api.json_model.Article;
import ru.mgusev.eldritchhorror.app.App;
import ru.mgusev.eldritchhorror.interfaces.OnItemClicked;
import ru.mgusev.eldritchhorror.support.ArticleDiffUtilCallback;
import ru.mgusev.eldritchhorror.util.StatsIcons;
import static android.support.v4.util.Preconditions.checkNotNull;
/**
 * RecyclerView adapter that renders FAQ articles as question/answer cards
 * and errata articles (any article whose title contains "#errata") as plain
 * text cards. Stat-icon placeholders embedded in the article HTML are
 * replaced inline with {@link ImageSpan}s taken from {@link StatsIcons}.
 */
public class FaqAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

    /** View type for a regular question/answer card. */
    private static final int TYPE_FAQ = 0;
    /** View type for an errata card. */
    private static final int TYPE_ERRATA = 1;

    /** Holder for a question/answer card (layout item_faq). */
    static class FaqViewHolder extends RecyclerView.ViewHolder {
        @BindView(R.id.item_faq_card_view)
        CardView faqCardView;
        @BindView(R.id.item_faq_question)
        TextView faqQuestion;
        @BindView(R.id.item_faq_answer)
        TextView faqAnswer;

        FaqViewHolder(View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }
    }

    /** Holder for an errata card (layout item_faq_errata). */
    static class ErrataViewHolder extends RecyclerView.ViewHolder {
        @BindView(R.id.item_faq_errata_card_view)
        CardView errataCardView;
        @BindView(R.id.item_faq_errata_text)
        TextView errataText;

        ErrataViewHolder(View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }
    }

    @Inject
    StatsIcons statsIcons;

    private Context context;
    // NOTE(review): assigned via setOnClick() but never invoked in this
    // class — confirm whether item click handling is still required.
    private OnItemClicked onClick;
    private List<Article> articleList;

    public FaqAdapter(Context context) {
        App.getComponent().inject(this);
        this.articleList = new ArrayList<>();
        this.context = context;
    }

    /**
     * Replaces the backing article list and dispatches fine-grained change
     * notifications computed by {@link ArticleDiffUtilCallback}.
     */
    public void setData(List<Article> list) {
        DiffUtil.DiffResult diffResult = DiffUtil.calculateDiff(new ArticleDiffUtilCallback(this.articleList, list));
        this.articleList = list;
        diffResult.dispatchUpdatesTo(this);
    }

    @Override
    public int getItemViewType(int position) {
        return articleList.get(position).getTitle().contains("#errata") ? TYPE_ERRATA : TYPE_FAQ;
    }

    @NonNull
    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        if (viewType == TYPE_ERRATA) {
            return new ErrataViewHolder(inflater.inflate(R.layout.item_faq_errata, parent, false));
        }
        // TYPE_FAQ and any unexpected type fall back to the FAQ layout,
        // matching the original default branch.
        return new FaqViewHolder(inflater.inflate(R.layout.item_faq, parent, false));
    }

    @Override
    public void onBindViewHolder(@NonNull final RecyclerView.ViewHolder holder, int position) {
        switch (holder.getItemViewType()) {
            case TYPE_FAQ:
                FaqViewHolder faqViewHolder = (FaqViewHolder) holder;
                faqViewHolder.faqQuestion.setText(getSpannableFormText(articleList.get(position).getTitle()), TextView.BufferType.SPANNABLE);
                faqViewHolder.faqAnswer.setText(trimSpannable(getSpannableFormText(articleList.get(position).getIntrotext())), TextView.BufferType.SPANNABLE);
                // Make links inside the answer clickable.
                faqViewHolder.faqAnswer.setMovementMethod(LinkMovementMethod.getInstance());
                break;
            case TYPE_ERRATA:
                ErrataViewHolder errataViewHolder = (ErrataViewHolder) holder;
                errataViewHolder.errataText.setText(trimSpannable(getSpannableFormText(articleList.get(position).getIntrotext())), TextView.BufferType.SPANNABLE);
                errataViewHolder.errataText.setMovementMethod(LinkMovementMethod.getInstance());
                break;
        }
    }

    /**
     * Converts article HTML into a spannable and replaces every stat-icon
     * placeholder (a key of {@code statsIcons.getIconMap()}) with an inline
     * drawable span.
     *
     * @param text raw article HTML
     * @return spannable text with icon placeholders rendered as images
     */
    private SpannableStringBuilder getSpannableFormText(String text) {
        SpannableStringBuilder spannable = (SpannableStringBuilder) Html.fromHtml(text);
        // Plain-text shadow copy used only to locate placeholder positions.
        // Each found occurrence is blanked out in place so the loop advances.
        StringBuilder plain = new StringBuilder(Html.fromHtml(text).toString());
        for (Map.Entry<String, Drawable> entry : statsIcons.getIconMap().entrySet()) {
            String key = entry.getKey();
            Drawable icon = entry.getValue();
            icon.setBounds(0, 0, icon.getIntrinsicWidth(), icon.getIntrinsicHeight());
            int startSpan;
            while ((startSpan = plain.indexOf(key)) != -1) {
                spannable.setSpan(new ImageSpan(icon, ImageSpan.ALIGN_BOTTOM), startSpan, startSpan + key.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
                // Blank the occurrence character-by-character. The original
                // used String#replaceFirst(key, ...), which interprets the key
                // as a regular expression and misbehaves (or throws) for keys
                // containing regex metacharacters such as '[' or '('.
                for (int i = startSpan; i < startSpan + key.length(); i++) {
                    plain.setCharAt(i, ' ');
                }
            }
        }
        return spannable;
    }

    /**
     * Removes leading and trailing newline characters from the spannable
     * while preserving spans on the remaining text.
     */
    private SpannableStringBuilder trimSpannable(SpannableStringBuilder spannable) {
        checkNotNull(spannable);
        int trimStart = 0;
        int trimEnd = 0;
        String text = spannable.toString();
        while (text.length() > 0 && text.startsWith("\n")) {
            text = text.substring(1);
            trimStart += 1;
        }
        while (text.length() > 0 && text.endsWith("\n")) {
            text = text.substring(0, text.length() - 1);
            trimEnd += 1;
        }
        // The second delete runs on the already-shortened builder, so the end
        // index is computed from its current length.
        return spannable.delete(0, trimStart).delete(spannable.length() - trimEnd, spannable.length());
    }

    @Override
    public int getItemCount() {
        return this.articleList.size();
    }

    public void setOnClick(OnItemClicked onClick) {
        this.onClick = onClick;
    }
}
|
intendia-oss/reactivity
|
core/src/main/java/com/intendia/reactivity/client/PlaceNavigator.java
|
<gh_stars>10-100
package com.intendia.reactivity.client;
import static com.intendia.reactivity.client.PlaceManager.HistoryUpdate.ADD;
import static com.intendia.reactivity.client.PlaceManager.HistoryUpdate.NOOP;
import static com.intendia.reactivity.client.PlaceManager.HistoryUpdate.REPLACE;
import com.intendia.reactivity.client.PlaceManager.HistoryUpdate;
/** Strategy interface that picks destination places for common navigation scenarios. */
public interface PlaceNavigator {

    /**
     * Produces the navigation used when the history token is empty and no
     * place handled it. Implementations should return a {@link PlaceNavigation}
     * whose {@link PlaceRequest} points at their default presenter. Prefer
     * {@link HistoryUpdate#NOOP} as the update value; any other value pushes
     * a fresh token into the browser history, so pressing <em>back</em> will
     * not leave the application.
     *
     * <p><b>Important!</b> The returned {@link PlaceRequest} must be valid and
     * accessible to the current user — otherwise an infinite loop can occur.</p>
     */
    PlaceNavigation defaultNavigation();

    /** Produces the navigation used when place resolution fails with an error. */
    PlaceNavigation errorNavigation(Throwable throwable);

    /** Immutable pair of a place request and the history-update policy to apply. */
    class PlaceNavigation {
        public final PlaceRequest placeRequest;
        public final HistoryUpdate update;

        PlaceNavigation(PlaceRequest placeRequest, HistoryUpdate update) {
            this.placeRequest = placeRequest;
            this.update = update;
        }

        public static PlaceNavigation of(PlaceRequest placeRequest, HistoryUpdate update) {
            return new PlaceNavigation(placeRequest, update);
        }

        public static PlaceNavigation noop(PlaceRequest placeRequest) {
            return of(placeRequest, NOOP);
        }

        public static PlaceNavigation add(PlaceRequest placeRequest) {
            return of(placeRequest, ADD);
        }

        public static PlaceNavigation replace(PlaceRequest placeRequest) {
            return of(placeRequest, REPLACE);
        }
    }
}
|
KaoTuz/edk2-stable202108
|
MdePkg/Include/Guid/FirmwareFileSystem3.h
|
/** @file
  Guid used to define the Firmware File System 3.
  Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>
  SPDX-License-Identifier: BSD-2-Clause-Patent
  @par Revision Reference:
  GUIDs introduced in PI Version 1.0.
**/
#ifndef __FIRMWARE_FILE_SYSTEM3_GUID_H__
#define __FIRMWARE_FILE_SYSTEM3_GUID_H__

///
/// The firmware volume header contains a data field for the file system GUID
/// {5473C07A-3DCB-4dca-BD6F-1E9689E7349A}
///
#define EFI_FIRMWARE_FILE_SYSTEM3_GUID \
  { 0x5473c07a, 0x3dcb, 0x4dca, { 0xbd, 0x6f, 0x1e, 0x96, 0x89, 0xe7, 0x34, 0x9a }}

///
/// Declaration only — the GUID instance itself is defined outside this header.
///
extern EFI_GUID gEfiFirmwareFileSystem3Guid;

#endif // __FIRMWARE_FILE_SYSTEM3_GUID_H__
|
dmitryikh/tarantool
|
src/box/vy_regulator.c
|
/*
* Copyright 2010-2018, Tarantool AUTHORS, please see AUTHORS file.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY AUTHORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include "vy_regulator.h"
#include <math.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <tarantool_ev.h>
#include "fiber.h"
#include "histogram.h"
#include "say.h"
#include "trivia/util.h"
#include "vy_quota.h"
#include "vy_stat.h"
/**
 * Regulator timer period, in seconds.
 * Also the sampling interval for the write-rate estimate below.
 */
static const double VY_REGULATOR_TIMER_PERIOD = 1;
/**
 * Time window over which the write rate is averaged,
 * in seconds.
 */
static const double VY_WRITE_RATE_AVG_WIN = 5;
/**
 * Histogram percentile used for estimating dump bandwidth.
 * For details see the comment to vy_regulator::dump_bandwidth_hist.
 */
static const int VY_DUMP_BANDWIDTH_PCT = 10;
/*
 * Until we dump anything, assume bandwidth to be 10 MB/s,
 * which should be fine for initial guess.
 */
static const size_t VY_DUMP_BANDWIDTH_DEFAULT = 10 * 1024 * 1024;
/**
 * Do not take into account small dumps when estimating dump
 * bandwidth, because they have too high overhead associated
 * with file creation.
 */
static const size_t VY_DUMP_SIZE_ACCT_MIN = 1024 * 1024;
/**
 * Number of dumps to take into account for rate limit calculation.
 * Shouldn't be too small to avoid uneven RPS. Shouldn't be too big
 * either - otherwise the rate limit will adapt too slowly to workload
 * changes. 100 feels like a good choice.
 */
static const int VY_RECENT_DUMP_COUNT = 100;
/**
 * Start a memory dump via the trigger_dump_cb callback, unless one is
 * already running or the callback declines, and clamp the memory write
 * rate so incoming writes cannot exhaust the remaining quota before the
 * dump completes.
 */
static void
vy_regulator_trigger_dump(struct vy_regulator *regulator)
{
	if (regulator->dump_in_progress)
		return;
	/* The callback may refuse to dump (non-zero return). */
	if (regulator->trigger_dump_cb(regulator) != 0)
		return;
	regulator->dump_in_progress = true;
	/*
	 * To avoid unpredictably long stalls, we must limit
	 * the write rate when a dump is in progress so that
	 * we don't hit the hard limit before the dump has
	 * completed, i.e.
	 *
	 *    mem_left        mem_used
	 *   ---------- >= --------------
	 *   write_rate    dump_bandwidth
	 */
	struct vy_quota *quota = regulator->quota;
	size_t mem_left = (quota->used < quota->limit ?
			   quota->limit - quota->used : 0);
	size_t mem_used = quota->used;
	/* "+ 1" guards the division when nothing is in use yet. */
	size_t max_write_rate = (double)mem_left / (mem_used + 1) *
				regulator->dump_bandwidth;
	max_write_rate = MIN(max_write_rate, regulator->dump_bandwidth);
	vy_quota_set_rate_limit(quota, VY_QUOTA_RESOURCE_MEMORY,
				max_write_rate);
	say_info("dumping %zu bytes, expected rate %.1f MB/s, "
		 "ETA %.1f s, write rate (avg/max) %.1f/%.1f MB/s",
		 quota->used, (double)regulator->dump_bandwidth / 1024 / 1024,
		 (double)quota->used / (regulator->dump_bandwidth + 1),
		 (double)regulator->write_rate / 1024 / 1024,
		 (double)regulator->write_rate_max / 1024 / 1024);
	/* Restart tracking of the max observed write rate. */
	regulator->write_rate_max = regulator->write_rate;
}
/**
 * Update the exponentially weighted moving average of the write rate
 * from memory quota usage, sampled once per regulator timer period.
 * Also tracks the maximum instantaneous rate observed.
 */
static void
vy_regulator_update_write_rate(struct vy_regulator *regulator)
{
	size_t used_curr = regulator->quota->used;
	size_t used_last = regulator->quota_used_last;
	/*
	 * Memory can be dumped between two subsequent timer
	 * callback invocations, in which case memory usage
	 * will decrease. Ignore such observations - it's not
	 * a big deal, because dump is a rare event.
	 */
	if (used_curr < used_last) {
		regulator->quota_used_last = used_curr;
		return;
	}
	size_t rate_avg = regulator->write_rate;
	size_t rate_curr = (used_curr - used_last) / VY_REGULATOR_TIMER_PERIOD;
	/* EWMA weight derived from the averaging window length. */
	double weight = 1 - exp(-VY_REGULATOR_TIMER_PERIOD /
				VY_WRITE_RATE_AVG_WIN);
	rate_avg = (1 - weight) * rate_avg + weight * rate_curr;
	regulator->write_rate = rate_avg;
	if (regulator->write_rate_max < rate_curr)
		regulator->write_rate_max = rate_curr;
	regulator->quota_used_last = used_curr;
}
/**
 * Recompute the memory usage level at which a dump should be triggered,
 * based on the current memory limit, the estimated dump bandwidth and a
 * pessimistic (1.5x max observed) write rate prediction.
 */
static void
vy_regulator_update_dump_watermark(struct vy_regulator *regulator)
{
	struct vy_quota *quota = regulator->quota;
	/*
	 * Due to log structured nature of the lsregion allocator,
	 * which is used for allocating statements, we cannot free
	 * memory in chunks, only all at once. Therefore we should
	 * configure the watermark so that by the time we hit the
	 * limit, all memory have been dumped, i.e.
	 *
	 *   limit - watermark      watermark
	 *   ----------------- = --------------
	 *       write_rate      dump_bandwidth
	 *
	 * Be pessimistic when predicting the write rate - use the
	 * max observed write rate multiplied by 1.5 - because it's
	 * better to start memory dump early than delay it as long
	 * as possible at the risk of experiencing unpredictably
	 * long stalls.
	 */
	size_t write_rate = regulator->write_rate_max * 3 / 2;
	/* "+ 1" keeps the division defined when both rates are zero. */
	regulator->dump_watermark =
			(double)quota->limit * regulator->dump_bandwidth /
			(regulator->dump_bandwidth + write_rate + 1);
	/*
	 * It doesn't make sense to set the watermark below 50%
	 * of the memory limit because the write rate can exceed
	 * the dump bandwidth under no circumstances.
	 */
	regulator->dump_watermark = MAX(regulator->dump_watermark,
					quota->limit / 2);
}
/**
 * Periodic (VY_REGULATOR_TIMER_PERIOD) ev timer callback: refresh the
 * write-rate estimate and the dump watermark, then trigger a dump if
 * memory usage has crossed the watermark.
 */
static void
vy_regulator_timer_cb(ev_loop *loop, ev_timer *timer, int events)
{
	(void)loop;
	(void)events;
	struct vy_regulator *regulator = timer->data;
	vy_regulator_update_write_rate(regulator);
	vy_regulator_update_dump_watermark(regulator);
	vy_regulator_check_dump_watermark(regulator);
}
/**
 * Initialize a regulator object: allocate the dump bandwidth histogram,
 * wire up the quota and the dump trigger callback, and prepare (but do
 * not start) the periodic timer. Panics if histogram allocation fails.
 */
void
vy_regulator_create(struct vy_regulator *regulator, struct vy_quota *quota,
		    vy_trigger_dump_f trigger_dump_cb)
{
	enum { KB = 1024, MB = KB * KB };
	/* Bucket bounds from 100 KB/s up to 900 MB/s, denser at the low end. */
	static int64_t dump_bandwidth_buckets[] = {
		100 * KB, 200 * KB, 300 * KB, 400 * KB, 500 * KB, 600 * KB,
		700 * KB, 800 * KB, 900 * KB,   1 * MB,   2 * MB,   3 * MB,
		  4 * MB,   5 * MB,   6 * MB,   7 * MB,   8 * MB,   9 * MB,
		 10 * MB,  15 * MB,  20 * MB,  25 * MB,  30 * MB,  35 * MB,
		 40 * MB,  45 * MB,  50 * MB,  55 * MB,  60 * MB,  65 * MB,
		 70 * MB,  75 * MB,  80 * MB,  85 * MB,  90 * MB,  95 * MB,
		100 * MB, 200 * MB, 300 * MB, 400 * MB, 500 * MB, 600 * MB,
		700 * MB, 800 * MB, 900 * MB,
	};
	memset(regulator, 0, sizeof(*regulator));
	regulator->dump_bandwidth_hist = histogram_new(dump_bandwidth_buckets,
					lengthof(dump_bandwidth_buckets));
	if (regulator->dump_bandwidth_hist == NULL)
		panic("failed to allocate dump bandwidth histogram");
	regulator->quota = quota;
	regulator->trigger_dump_cb = trigger_dump_cb;
	ev_timer_init(&regulator->timer, vy_regulator_timer_cb, 0,
		      VY_REGULATOR_TIMER_PERIOD);
	regulator->timer.data = regulator;
	regulator->dump_bandwidth = VY_DUMP_BANDWIDTH_DEFAULT;
	/* Disabled until the first watermark computation. */
	regulator->dump_watermark = SIZE_MAX;
}
/**
 * Start regulation: snapshot current quota usage for write-rate sampling,
 * apply the initial memory rate limit, and arm the periodic timer.
 */
void
vy_regulator_start(struct vy_regulator *regulator)
{
	regulator->quota_used_last = regulator->quota->used;
	vy_quota_set_rate_limit(regulator->quota, VY_QUOTA_RESOURCE_MEMORY,
				regulator->dump_bandwidth);
	ev_timer_start(loop(), &regulator->timer);
}
/** Stop the timer and release resources owned by the regulator. */
void
vy_regulator_destroy(struct vy_regulator *regulator)
{
	ev_timer_stop(loop(), &regulator->timer);
	histogram_delete(regulator->dump_bandwidth_hist);
}
/** Hook invoked when the memory quota is exhausted: force a dump. */
void
vy_regulator_quota_exceeded(struct vy_regulator *regulator)
{
	vy_regulator_trigger_dump(regulator);
}
/** Trigger a memory dump if quota usage has reached the dump watermark. */
void
vy_regulator_check_dump_watermark(struct vy_regulator *regulator)
{
	if (regulator->quota->used >= regulator->dump_watermark)
		vy_regulator_trigger_dump(regulator);
}
/**
 * Hook invoked when a memory dump finishes. Updates the dump bandwidth
 * estimate (ignoring tiny dumps, whose per-file overhead skews it) and
 * restores the memory rate limit to the estimated bandwidth.
 *
 * @param mem_dumped    number of bytes written by the dump
 * @param dump_duration wall-clock dump time, in seconds
 */
void
vy_regulator_dump_complete(struct vy_regulator *regulator,
			   size_t mem_dumped, double dump_duration)
{
	regulator->dump_in_progress = false;
	if (mem_dumped >= VY_DUMP_SIZE_ACCT_MIN && dump_duration > 0) {
		histogram_collect(regulator->dump_bandwidth_hist,
				  mem_dumped / dump_duration);
		/*
		 * To avoid unpredictably long stalls caused by
		 * mispredicting dump time duration, we need to
		 * know the worst (smallest) dump bandwidth so
		 * use a lower-bound percentile estimate.
		 */
		regulator->dump_bandwidth = histogram_percentile_lower(
			regulator->dump_bandwidth_hist, VY_DUMP_BANDWIDTH_PCT);
	}
	/*
	 * Reset the rate limit.
	 *
	 * It doesn't make sense to allow to consume memory at
	 * a higher rate than it can be dumped so we set the rate
	 * limit to the dump bandwidth rather than disabling it
	 * completely.
	 */
	vy_quota_set_rate_limit(regulator->quota, VY_QUOTA_RESOURCE_MEMORY,
				regulator->dump_bandwidth);
	/*
	 * NOTE(review): unlike the histogram update above, this log line is
	 * not guarded by dump_duration > 0; a zero duration would print an
	 * "inf" rate. Harmless with IEEE doubles, but confirm intent.
	 */
	say_info("dumped %zu bytes in %.1f s, rate %.1f MB/s",
		 mem_dumped, dump_duration,
		 mem_dumped / dump_duration / 1024 / 1024);
}
/** Apply a new memory limit and recompute the dump watermark for it. */
void
vy_regulator_set_memory_limit(struct vy_regulator *regulator, size_t limit)
{
	vy_quota_set_limit(regulator->quota, limit);
	vy_regulator_update_dump_watermark(regulator);
}
/**
 * Discard collected dump bandwidth statistics and fall back to the
 * default estimate, optionally capped at @a max (0 means no cap).
 * The memory rate limit is reset to the new estimate.
 */
void
vy_regulator_reset_dump_bandwidth(struct vy_regulator *regulator, size_t max)
{
	histogram_reset(regulator->dump_bandwidth_hist);
	regulator->dump_bandwidth = VY_DUMP_BANDWIDTH_DEFAULT;
	if (max > 0 && regulator->dump_bandwidth > max)
		regulator->dump_bandwidth = max;
	vy_quota_set_rate_limit(regulator->quota, VY_QUOTA_RESOURCE_MEMORY,
				regulator->dump_bandwidth);
}
/** Zero the last-seen scheduler statistics used for rate limit deltas. */
void
vy_regulator_reset_stat(struct vy_regulator *regulator)
{
	memset(&regulator->sched_stat_last, 0,
	       sizeof(regulator->sched_stat_last));
}
/*
* The goal of rate limiting is to ensure LSM trees stay close to
* their perfect shape, as defined by run_size_ratio. When dump rate
* is too high, we have to throttle database writes to ensure
* compaction can keep up with dumps. We can't deduce optimal dump
* bandwidth from LSM configuration, such as run_size_ratio or
* run_count_per_level, since different spaces or different indexes
* within a space can have different configuration settings. The
* workload can also vary significantly from space to space. So,
* when setting the limit, we have to consider dump and compaction
* activities of the database as a whole.
*
* To this end, we keep track of compaction bandwidth and write
* amplification of the entire database, across all LSM trees.
* The idea is simple: observe the current write amplification
* and compaction bandwidth, and set maximal write rate to a value
* somewhat below the implied limit, so as to make room for
* compaction to do more work if necessary.
*
* We use the following metrics to calculate the limit:
* - dump_output - number of bytes dumped to disk over the last
* observation period. The period itself is measured in dumps,
* not seconds, and is defined by constant VY_RECENT_DUMP_COUNT.
* - compaction_output - number of bytes produced by compaction
* over the same period.
* - compaction_rate - total compaction output, in bytes, divided
* by total time spent on doing compaction by compaction threads,
* both measured over the same observation period. This gives an
* estimate of the speed at which compaction can write output.
* In the real world this speed is dependent on the disk write
* throughput, number of dump threads, and actual dump rate, but
* given the goal of rate limiting is providing compaction with
* extra bandwidth, this metric is considered an accurate enough
* approximation of the disk bandwidth available to compaction.
*
* We calculate the compaction rate with the following formula:
*
* compaction_output
* compaction_rate = compaction_threads * -----------------
* compaction_time
*
* where compaction_threads represents the total number of available
* compaction threads and compaction_time is the total time, in
* seconds, spent by all threads doing compaction. You can look at
* the formula this way: compaction_ouptut / compaction_time gives
* the average write speed of a single compaction thread, and by
* multiplying it by the number of compaction threads we get the
* compaction rate of the entire database.
*
* In an optimal system dump rate must be proportional to compaction
* rate and inverse to write amplification:
*
* dump_rate = compaction_rate / (write_amplification - 1)
*
* The latter can be obtained by dividing total output of compaction
* by total output of dumps over the observation period:
*
* dump_output + compaction_output
* write_amplification = ------------------------------- =
* dump_output
*
* = 1 + compaction_output / dump_output
*
* Putting this all together and taking into account data compaction
* during memory dump, we get for the max transaction rate:
*
* dump_input
* tx_rate = dump_rate * ----------- =
* dump_output
*
* compaction_output
* = compaction_threads * ----------------- *
* compaction_time
*
* dump_output dump_input
* * ----------------- * ----------- =
* compaction_output dump_output
*
* = compaction_threads * dump_input / compaction_time
*
* We set the rate limit to 0.75 of the approximated optimal to
* leave the database engine enough room needed to use more disk
* bandwidth for compaction if necessary. As soon as compaction gets
* enough disk bandwidth to keep LSM trees in optimal shape
* compaction speed becomes stable, as does write amplification.
*/
/**
 * Recompute the disk rate limit from scheduler statistics; see the big
 * comment above for the derivation of the formula. Deltas smaller than
 * VY_DUMP_SIZE_ACCT_MIN are skipped to keep the estimate stable.
 */
void
vy_regulator_update_rate_limit(struct vy_regulator *regulator,
			       const struct vy_scheduler_stat *stat,
			       int compaction_threads)
{
	struct vy_scheduler_stat *last = &regulator->sched_stat_last;
	struct vy_scheduler_stat *recent = &regulator->sched_stat_recent;
	/* Deltas accumulated since the previous invocation. */
	int32_t dump_count = stat->dump_count - last->dump_count;
	int64_t dump_input = stat->dump_input - last->dump_input;
	double compaction_time = stat->compaction_time - last->compaction_time;
	*last = *stat;
	if (dump_input < (ssize_t)VY_DUMP_SIZE_ACCT_MIN)
		return;
	recent->dump_count += dump_count;
	recent->dump_input += dump_input;
	recent->compaction_time += compaction_time;
	/* 0.75 factor leaves compaction headroom (see comment above). */
	double rate = 0.75 * compaction_threads * recent->dump_input /
				recent->compaction_time;
	/*
	 * We can't simply use (size_t)MIN(rate, SIZE_MAX) to cast
	 * the rate from double to size_t here, because on a 64-bit
	 * system SIZE_MAX equals 2^64-1, which can't be represented
	 * as double without loss of precision and hence is rounded
	 * up to 2^64, which in turn can't be converted back to size_t.
	 * So we first convert the rate to uint64_t using exp2(64) to
	 * check if it fits and only then cast the uint64_t to size_t.
	 */
	uint64_t rate64;
	if (rate < exp2(64))
		rate64 = rate;
	else
		rate64 = UINT64_MAX;
	vy_quota_set_rate_limit(regulator->quota, VY_QUOTA_RESOURCE_DISK,
				(size_t)MIN(rate64, SIZE_MAX));
	/*
	 * Periodically rotate statistics for quicker adaptation
	 * to workload changes.
	 */
	if (recent->dump_count > VY_RECENT_DUMP_COUNT) {
		recent->dump_count /= 2;
		recent->dump_input /= 2;
		recent->compaction_time /= 2;
	}
}
|
igrigorik/agent
|
spec/queues_spec.rb
|
require "spec_helper"
# Specs for the global Agent::Queues registry: queues can be registered
# under a name, looked up, deleted individually, and cleared in bulk.
describe Agent::Queues do
  after do
    # Reset global registry state so examples stay independent.
    Agent::Queues.clear
  end

  it "should register queues" do
    Agent::Queues.register("foo", String, 10)
    expect(Agent::Queues["foo"]).to be_a(Agent::Queue)
    expect(Agent::Queues["foo"].type).to eq(String)
    expect(Agent::Queues["foo"].max).to eq(10)
  end

  it "should delete queues" do
    Agent::Queues.register("foo", String, 10)
    Agent::Queues.delete("foo")
    expect(Agent::Queues["foo"]).to be_nil
  end

  # Description fixed: original read "remove all queues queues".
  it "should remove all queues" do
    Agent::Queues.register("foo", String, 10)
    Agent::Queues.register("bar", String, 10)
    Agent::Queues.clear
    expect(Agent::Queues["foo"]).to be_nil
    expect(Agent::Queues["bar"]).to be_nil
  end
end
|
syama0731/batch3
|
terasoluna-collector/src/test/java/jp/terasoluna/fw/collector/db/DaoCollectorConfigTest.java
|
<reponame>syama0731/batch3<filename>terasoluna-collector/src/test/java/jp/terasoluna/fw/collector/db/DaoCollectorConfigTest.java
/**
*
*/
package jp.terasoluna.fw.collector.db;
import static org.junit.Assert.*;
import jp.terasoluna.fw.collector.exception.CollectorExceptionHandler;
import jp.terasoluna.fw.collector.exception.CollectorExceptionHandlerStatus;
import jp.terasoluna.fw.collector.validate.ValidateErrorStatus;
import jp.terasoluna.fw.collector.validate.ValidationErrorHandler;
import jp.terasoluna.fw.collector.vo.DataValueObject;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
/**
*
*/
public class DaoCollectorConfigTest {
    /**
     * Class-level setup hook; nothing to prepare for these tests.
     *
     * @throws java.lang.Exception never
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
    }
    /**
     * Class-level teardown hook; nothing to clean up.
     *
     * @throws java.lang.Exception never
     */
    @AfterClass
    public static void tearDownAfterClass() throws Exception {
    }
    /**
     * Per-test setup hook; intentionally empty.
     *
     * @throws java.lang.Exception never
     */
    @Before
    public void setUp() throws Exception {
    }
    /**
     * Per-test teardown hook; intentionally empty.
     *
     * @throws java.lang.Exception never
     */
    @After
    public void tearDown() throws Exception {
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#DaoCollectorConfig(Object, String, Object)}.
     * Verifies that the constructor accepts all-null arguments.
     */
    @Test
    public void testDaoCollectorConfig001() {
        Object queryResultHandleDao = null;
        String methodName = null;
        Object bindParams = null;
        // Exercise the constructor.
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        assertNotNull(config);
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#DaoCollectorConfig(Object, String, Object)}.
     * Verifies that constructor arguments are stored and exposed via getters.
     */
    @Test
    public void testDaoCollectorConfig002() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        // Exercise the constructor.
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
    }
    /**
     * Test method for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addQueueSize(int)}.
     * Verifies the builder-style setter stores the queue size without
     * disturbing the constructor-provided values.
     */
    @Test
    public void testAddQueueSize001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        int queueSize = 123;
        // Exercise the target method.
        config.addQueueSize(queueSize);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(queueSize, config.getQueueSize());
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addExceptionHandler(jp.terasoluna.fw.collector.exception.CollectorExceptionHandler)}.
     * Verifies the handler is stored and other properties are untouched.
     */
    @Test
    public void testAddExceptionHandler001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Stub handler; behavior is irrelevant, only identity is asserted.
        CollectorExceptionHandler exceptionHandler = new CollectorExceptionHandler() {
            public CollectorExceptionHandlerStatus handleException(
                    DataValueObject dataValueObject) {
                return null;
            }
        };
        // Exercise the target method.
        config.addExceptionHandler(exceptionHandler);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(exceptionHandler, config.getExceptionHandler());
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addValidator(org.springframework.validation.Validator)}.
     * Verifies the validator is stored and other properties are untouched.
     */
    @Test
    public void testAddValidator001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Stub validator; only identity is asserted.
        Validator validator = new Validator() {
            @SuppressWarnings("unchecked")
            public boolean supports(Class clazz) {
                return false;
            }
            public void validate(Object target, Errors errors) {
            }
        };
        // Exercise the target method.
        config.addValidator(validator);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(validator, config.getValidator());
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addValidationErrorHandler(jp.terasoluna.fw.collector.validate.ValidationErrorHandler)}.
     * Verifies the error handler is stored and other properties are untouched.
     */
    @Test
    public void testAddValidationErrorHandler001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Stub error handler; only identity is asserted.
        ValidationErrorHandler validationErrorHandler = new ValidationErrorHandler() {
            public ValidateErrorStatus handleValidationError(
                    DataValueObject dataValueObject, Errors errors) {
                return null;
            }
        };
        // Exercise the target method.
        config.addValidationErrorHandler(validationErrorHandler);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(validationErrorHandler, config.getValidationErrorHandler());
    }
    /**
     * Test method for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addRelation1n(boolean)}.
     * Verifies the flag is stored and other properties are untouched.
     */
    @Test
    public void testAddRelation1n001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        boolean relation1n = true;
        // Exercise the target method.
        config.addRelation1n(relation1n);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(relation1n, config.isRelation1n());
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addDaoCollectorPrePostProcess(jp.terasoluna.fw.collector.db.DaoCollectorPrePostProcess)}.
     * Verifies the pre/post processor is stored and other properties are untouched.
     */
    @Test
    public void testAddDaoCollectorPrePostProcess001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Stub processor; only identity is asserted.
        DaoCollectorPrePostProcess daoCollectorPrePostProcess = new DaoCollectorPrePostProcess() {
            public <P> void postprocessComplete(DaoCollector<P> collector) {
            }
            public <P> DaoCollectorPrePostProcessStatus postprocessException(
                    DaoCollector<P> collector, Throwable throwable) {
                return null;
            }
            public <P> void preprocess(DaoCollector<P> collector) {
            }
        };
        // Exercise the target method.
        config.addDaoCollectorPrePostProcess(daoCollectorPrePostProcess);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(daoCollectorPrePostProcess, config
                .getDaoCollectorPrePostProcess());
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#addExecuteByConstructor(boolean)}.
     * Verifies the flag is stored and other properties are untouched.
     */
    @Test
    public void testAddExecuteByConstructor001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        boolean executeByConstructor = true;
        // Exercise the target method.
        config.addExecuteByConstructor(executeByConstructor);
        assertNotNull(config);
        assertEquals(queryResultHandleDao, config.getQueryResultHandleDao());
        assertEquals(methodName, config.getMethodName());
        assertEquals(bindParams, config.getBindParams());
        assertEquals(executeByConstructor, config.isExecuteByConstructor());
    }
    /**
     * Test method for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#getQueryResultHandleDao()}.
     */
    @Test
    public void testGetQueryResultHandleDAO001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Exercise the getter.
        Object result = config.getQueryResultHandleDao();
        assertNotNull(result);
        assertEquals(queryResultHandleDao, result);
    }
    /**
     * Test method for
     * {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#setQueryResultHandleDao(Object)}.
     * Verifies the setter replaces the constructor-provided value.
     */
    @Test
    public void testSetQueryResultHandleDAO001() {
        Object queryResultHandleDao = new Object();
        Object queryResultHandleDao2 = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Exercise the setter.
        config.setQueryResultHandleDao(queryResultHandleDao2);
        assertNotNull(config.getQueryResultHandleDao());
        assertEquals(queryResultHandleDao2, config.getQueryResultHandleDao());
    }
    /**
     * Test method for {@link DaoCollectorConfig#getMethodName()}.
     */
    @Test
    public void testGetMethodName001() {
        Object queryResultHandleDao = new Object();
        String methodName = "hoge";
        Object bindParams = new Object();
        DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
                methodName, bindParams);
        // Exercise the getter.
        String result = config.getMethodName();
        assertNotNull(result);
        assertEquals(methodName, result);
    }
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#setMethodName(String)}.
 */
@Test
public void testSetMethodName001() {
    Object queryResultHandleDao = new Object();
    String methodName = "hoge";
    // Use a value distinct from the constructor argument so the setter is
    // actually verified. (Previously both were "hoge" and the assertion
    // checked the constructor value, so setMethodName was never exercised.)
    String methodName2 = "fuga";
    Object bindParams = new Object();
    DaoCollectorConfig config = new DaoCollectorConfig(queryResultHandleDao,
            methodName, bindParams);
    // exercise
    config.setMethodName(methodName2);
    assertNotNull(config.getMethodName());
    // must now report the newly set name, not the constructor argument
    assertEquals(methodName2, config.getMethodName());
}
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#getBindParams()}.
 */
@Test
public void testGetBindParams001() {
    Object params = new Object();
    DaoCollectorConfig cfg = new DaoCollectorConfig(new Object(), "hoge", params);
    // exercise: the getter must return the constructor argument
    Object result = cfg.getBindParams();
    assertNotNull(result);
    assertEquals(params, result);
}
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#setBindParams(java.lang.Object)}.
 */
@Test
public void testSetBindParams001() {
    Object initialParams = new Object();
    Object newParams = new Object();
    DaoCollectorConfig cfg = new DaoCollectorConfig(new Object(), "hoge", initialParams);
    // exercise: overwrite the bind params and make sure the replacement is reported
    cfg.setBindParams(newParams);
    assertNotNull(cfg.getBindParams());
    assertEquals(newParams, cfg.getBindParams());
}
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#isRelation1n()}.
 */
@Test
public void testIsRelation1n001() {
    DaoCollectorConfig cfg = new DaoCollectorConfig(new Object(), "hoge", new Object());
    cfg.setRelation1n(true);
    // exercise
    boolean result = cfg.isRelation1n();
    // NOTE(review): assertNotNull on a boxed primitive can never fail; kept for parity.
    assertNotNull(result);
    assertEquals(true, result);
}
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#setRelation1n(boolean)}.
 */
@Test
public void testSetRelation1n001() {
    DaoCollectorConfig cfg = new DaoCollectorConfig(new Object(), "hoge", new Object());
    // exercise: the flag set here must be readable back
    cfg.setRelation1n(true);
    assertNotNull(cfg.isRelation1n());
    assertEquals(true, cfg.isRelation1n());
}
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#getDaoCollectorPrePostProcess()}.
 */
@Test
public void testGetDaoCollectorPrePostProcess001() {
    DaoCollectorConfig cfg = new DaoCollectorConfig(new Object(), "hoge", new Object());
    // A do-nothing pre/post processor stub.
    DaoCollectorPrePostProcess prePost = new DaoCollectorPrePostProcess() {
        public <P> void preprocess(DaoCollector<P> collector) {
        }

        public <P> void postprocessComplete(DaoCollector<P> collector) {
        }

        public <P> DaoCollectorPrePostProcessStatus postprocessException(
                DaoCollector<P> collector, Throwable throwable) {
            return null;
        }
    };
    cfg.setDaoCollectorPrePostProcess(prePost);
    // exercise: the getter must hand back the instance set above
    DaoCollectorPrePostProcess result = cfg.getDaoCollectorPrePostProcess();
    assertNotNull(result);
    assertEquals(prePost, result);
}
/**
 * Test for {@link jp.terasoluna.fw.collector.db.DaoCollectorConfig#setDaoCollectorPrePostProcess(jp.terasoluna.fw.collector.db.DaoCollectorPrePostProcess)}.
 */
@Test
public void setDaoCollectorPrePostProcess() {
    DaoCollectorConfig cfg = new DaoCollectorConfig(new Object(), "hoge", new Object());
    // First processor: a do-nothing stub.
    DaoCollectorPrePostProcess first = new DaoCollectorPrePostProcess() {
        public <P> void preprocess(DaoCollector<P> collector) {
        }

        public <P> void postprocessComplete(DaoCollector<P> collector) {
        }

        public <P> DaoCollectorPrePostProcessStatus postprocessException(
                DaoCollector<P> collector, Throwable throwable) {
            return null;
        }
    };
    cfg.setDaoCollectorPrePostProcess(first);
    // Second processor: another do-nothing stub that should replace the first.
    DaoCollectorPrePostProcess second = new DaoCollectorPrePostProcess() {
        public <P> void preprocess(DaoCollector<P> collector) {
        }

        public <P> void postprocessComplete(DaoCollector<P> collector) {
        }

        public <P> DaoCollectorPrePostProcessStatus postprocessException(
                DaoCollector<P> collector, Throwable throwable) {
            return null;
        }
    };
    // exercise: the second setter call must win
    cfg.setDaoCollectorPrePostProcess(second);
    assertNotNull(cfg.getDaoCollectorPrePostProcess());
    assertEquals(second, cfg.getDaoCollectorPrePostProcess());
}
}
|
hmatt1/angelscript-intellij
|
src/main/java/org/intellij/sdk/language/AngelScriptCompletionContributor.java
|
package org.intellij.sdk.language;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.PsiFile;
import com.intellij.util.ProcessingContext;
import org.intellij.sdk.language.psi.AngelScriptElementType;
import org.intellij.sdk.language.psi.AngelScriptElementTypes;
import org.intellij.sdk.language.psi.AngelScriptTokenType;
import org.jetbrains.annotations.NotNull;
public class AngelScriptCompletionContributor extends CompletionContributor {

    public AngelScriptCompletionContributor() {
        // Offer every variable name found in the current file when the caret
        // is on an identifier token.
        CompletionProvider<CompletionParameters> variableProvider =
                new CompletionProvider<>() {
                    @Override
                    public void addCompletions(@NotNull CompletionParameters parameters,
                                               @NotNull ProcessingContext context,
                                               @NotNull CompletionResultSet resultSet) {
                        PsiFile file = parameters.getOriginalFile();
                        AngelScriptUtil.findVars(file).forEach(
                                name -> resultSet.addElement(LookupElementBuilder.create(name)));
                    }
                };
        extend(CompletionType.BASIC,
               PlatformPatterns.psiElement(AngelScriptElementTypes.T_IDENT),
               variableProvider);
    }
}
|
johndpope/echo
|
thirdparty/physx/PhysXSDK/Source/GeomUtils/src/contact/GuContactConvexMeshCommon.h
|
<filename>thirdparty/physx/PhysXSDK/Source/GeomUtils/src/contact/GuContactConvexMeshCommon.h
/*
* Copyright (c) 2008-2015, NVIDIA CORPORATION. All rights reserved.
*
* NVIDIA CORPORATION and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA CORPORATION is strictly prohibited.
*/
// Copyright (c) 2004-2008 AGEIA Technologies, Inc. All rights reserved.
// Copyright (c) 2001-2004 NovodeX AG. All rights reserved.
#ifndef GU_CONTACT_CONVEX_MESH_COMMON_H
#define GU_CONTACT_CONVEX_MESH_COMMON_H
#include "GuTriangleVertexPointers.h"
#include "CmScaling.h"
#include "GuTriangleCache.h"
namespace physx
{
namespace Gu
{
class Container;
}
namespace Gu
{
struct PolygonalData;
class ContactBuffer;
// Fetches the three vertices of triangle `triangleIndex` from `meshDataLS`
// into `localPoints`.
// When `idtMeshScale` is true the caller guarantees the mesh scaling is the
// identity, so the raw vertices are copied directly; otherwise each vertex is
// transformed by `meshScaling` (vertex space -> shape space).
PX_FORCE_INLINE void getVertices(
    PxVec3* PX_RESTRICT localPoints, const Gu::InternalTriangleMeshData* meshDataLS,
    const Cm::FastVertex2ShapeScaling& meshScaling, PxU32 triangleIndex, bool idtMeshScale)
{
    PxVec3 v0, v1, v2;
    Gu::TriangleVertexPointers::getTriangleVerts(meshDataLS, triangleIndex, v0, v1, v2);

    if(idtMeshScale)
    {
        // Identity scale: skip the three transforms.
        localPoints[0] = v0;
        localPoints[1] = v1;
        localPoints[2] = v2;
    }
    else
    {
        localPoints[0] = meshScaling * v0;
        localPoints[1] = meshScaling * v1;
        localPoints[2] = meshScaling * v2;
    }
}
}
}
#endif
|
tongbin102/csr
|
csr-api/src/main/java/com/project/csr/model/po/ChannelPo.java
|
<gh_stars>0
package com.project.csr.model.po;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.project.csr.common.model.BasePo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * <p>
 * Channel table entity (maps the "channel" table).
 * </p>
 *
 * @author bin.tong
 * @since 2020-11-05
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName("channel")
@ApiModel(value = "ChannelPo对象", description = "渠道表")
public class ChannelPo extends BasePo {

    private static final long serialVersionUID = 1L;

    // Channel code.
    @ApiModelProperty(value = "渠道code")
    private String code;

    // Channel name.
    @ApiModelProperty(value = "渠道名称")
    private String name;

    // First-letter prefix used for question numbering.
    @ApiModelProperty(value = "题目首字母")
    private String questionPrefix;

    // Channel type: 1 = assessment (考核), 2 = deduction (扣分).
    @ApiModelProperty(value = "渠道类型(1考核;2扣分)")
    private Integer ctype;
}
|
juangea/B28_boneMaster
|
source/blender/editors/curve/curve_ops.c
|
/*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2009 Blender Foundation.
* All rights reserved.
*/
/** \file
* \ingroup edcurve
*/
#include <stdlib.h>
#include <math.h>
#include "DNA_curve_types.h"
#include "DNA_scene_types.h"
#include "RNA_access.h"
#include "WM_api.h"
#include "WM_types.h"
#include "ED_curve.h"
#include "ED_object.h"
#include "ED_screen.h"
#include "ED_select_utils.h"
#include "ED_transform.h"
#include "curve_intern.h"
/************************* registration ****************************/
/* Register all font and curve/surface edit-mode operator types. Called once
 * at startup from the editor registration code. */
void ED_operatortypes_curve(void)
{
  /* Text (font) object edit-mode operators. */
  WM_operatortype_append(FONT_OT_text_insert);
  WM_operatortype_append(FONT_OT_line_break);
  WM_operatortype_append(FONT_OT_case_toggle);
  WM_operatortype_append(FONT_OT_case_set);
  WM_operatortype_append(FONT_OT_style_toggle);
  WM_operatortype_append(FONT_OT_style_set);
  WM_operatortype_append(FONT_OT_select_all);
  WM_operatortype_append(FONT_OT_text_copy);
  WM_operatortype_append(FONT_OT_text_cut);
  WM_operatortype_append(FONT_OT_text_paste);
  WM_operatortype_append(FONT_OT_text_paste_from_file);
  WM_operatortype_append(FONT_OT_move);
  WM_operatortype_append(FONT_OT_move_select);
  WM_operatortype_append(FONT_OT_delete);
  WM_operatortype_append(FONT_OT_change_character);
  WM_operatortype_append(FONT_OT_change_spacing);
  WM_operatortype_append(FONT_OT_open);
  WM_operatortype_append(FONT_OT_unlink);
  WM_operatortype_append(FONT_OT_textbox_add);
  WM_operatortype_append(FONT_OT_textbox_remove);

  /* Curve/surface edit operators. */
  WM_operatortype_append(CURVE_OT_hide);
  WM_operatortype_append(CURVE_OT_reveal);
  WM_operatortype_append(CURVE_OT_separate);
  WM_operatortype_append(CURVE_OT_split);
  WM_operatortype_append(CURVE_OT_duplicate);
  WM_operatortype_append(CURVE_OT_delete);
  WM_operatortype_append(CURVE_OT_dissolve_verts);
  WM_operatortype_append(CURVE_OT_spline_type_set);
  WM_operatortype_append(CURVE_OT_radius_set);
  WM_operatortype_append(CURVE_OT_spline_weight_set);
  WM_operatortype_append(CURVE_OT_handle_type_set);
  WM_operatortype_append(CURVE_OT_normals_make_consistent);
  WM_operatortype_append(CURVE_OT_decimate);
  WM_operatortype_append(CURVE_OT_shade_smooth);
  WM_operatortype_append(CURVE_OT_shade_flat);
  WM_operatortype_append(CURVE_OT_tilt_clear);

  /* Primitive creation operators (curve and NURBS surface). */
  WM_operatortype_append(CURVE_OT_primitive_bezier_curve_add);
  WM_operatortype_append(CURVE_OT_primitive_bezier_circle_add);
  WM_operatortype_append(CURVE_OT_primitive_nurbs_curve_add);
  WM_operatortype_append(CURVE_OT_primitive_nurbs_circle_add);
  WM_operatortype_append(CURVE_OT_primitive_nurbs_path_add);

  WM_operatortype_append(SURFACE_OT_primitive_nurbs_surface_curve_add);
  WM_operatortype_append(SURFACE_OT_primitive_nurbs_surface_circle_add);
  WM_operatortype_append(SURFACE_OT_primitive_nurbs_surface_surface_add);
  WM_operatortype_append(SURFACE_OT_primitive_nurbs_surface_cylinder_add);
  WM_operatortype_append(SURFACE_OT_primitive_nurbs_surface_sphere_add);
  WM_operatortype_append(SURFACE_OT_primitive_nurbs_surface_torus_add);

  /* Smoothing operators. */
  WM_operatortype_append(CURVE_OT_smooth);
  WM_operatortype_append(CURVE_OT_smooth_weight);
  WM_operatortype_append(CURVE_OT_smooth_radius);
  WM_operatortype_append(CURVE_OT_smooth_tilt);

  /* Selection operators. */
  WM_operatortype_append(CURVE_OT_de_select_first);
  WM_operatortype_append(CURVE_OT_de_select_last);
  WM_operatortype_append(CURVE_OT_select_all);
  WM_operatortype_append(CURVE_OT_select_linked);
  WM_operatortype_append(CURVE_OT_select_linked_pick);
  WM_operatortype_append(CURVE_OT_select_row);
  WM_operatortype_append(CURVE_OT_select_next);
  WM_operatortype_append(CURVE_OT_select_previous);
  WM_operatortype_append(CURVE_OT_select_more);
  WM_operatortype_append(CURVE_OT_select_less);
  WM_operatortype_append(CURVE_OT_select_random);
  WM_operatortype_append(CURVE_OT_select_nth);
  WM_operatortype_append(CURVE_OT_select_similar);
  WM_operatortype_append(CURVE_OT_shortest_path_pick);

  /* Topology / editing operators. */
  WM_operatortype_append(CURVE_OT_switch_direction);
  WM_operatortype_append(CURVE_OT_subdivide);
  WM_operatortype_append(CURVE_OT_make_segment);
  WM_operatortype_append(CURVE_OT_spin);
  WM_operatortype_append(CURVE_OT_vertex_add);
  WM_operatortype_append(CURVE_OT_draw);
  WM_operatortype_append(CURVE_OT_extrude);
  WM_operatortype_append(CURVE_OT_cyclic_toggle);
  WM_operatortype_append(CURVE_OT_match_texture_space);
}
/* Shared tail for the curve modal macros: append TRANSFORM_OT_translate with
 * proportional editing and mirroring disabled. */
static void curve_macro_append_translate(wmOperatorType *ot)
{
  wmOperatorTypeMacro *otmacro = WM_operatortype_macro_define(ot, "TRANSFORM_OT_translate");
  RNA_boolean_set(otmacro->ptr, "use_proportional_edit", false);
  RNA_boolean_set(otmacro->ptr, "mirror", false);
}

/* Register compound (macro) operators for curve edit-mode. */
void ED_operatormacros_curve(void)
{
  wmOperatorType *ot;

  /* Duplicate, then drop into interactive translate. */
  ot = WM_operatortype_append_macro("CURVE_OT_duplicate_move",
                                    "Add Duplicate",
                                    "Duplicate curve and move",
                                    OPTYPE_UNDO | OPTYPE_REGISTER);
  WM_operatortype_macro_define(ot, "CURVE_OT_duplicate");
  curve_macro_append_translate(ot);

  /* Extrude, then drop into interactive translate. */
  ot = WM_operatortype_append_macro("CURVE_OT_extrude_move",
                                    "Extrude Curve and Move",
                                    "Extrude curve and move result",
                                    OPTYPE_UNDO | OPTYPE_REGISTER);
  WM_operatortype_macro_define(ot, "CURVE_OT_extrude");
  curve_macro_append_translate(ot);
}
/* Create the edit-mode keymaps for font and curve objects. */
void ED_keymap_curve(wmKeyConfig *keyconf)
{
  wmKeyMap *km;

  /* Editmode font keymap; activated by the space_view3d listener. */
  km = WM_keymap_ensure(keyconf, "Font", 0, 0);
  km->poll = ED_operator_editfont;

  /* Editmode curve/surface keymap; activated by the space_view3d listener. */
  km = WM_keymap_ensure(keyconf, "Curve", 0, 0);
  km->poll = ED_operator_editsurfcurve;
}
|
Yonas-A/CS-003A
|
Projects/06 Rational/output/output.h
|
#ifndef TEST_RATIONAL_H
#define TEST_RATIONAL_H
/*
***** ALL RESULTS ARE VERIFIED ******
//---------------------------------------------------------------------
// normal run: default values are 0/1 for both rational objects
//---------------------------------------------------------------------
======================TOP======================
Default Values
========================
A: 0/1 B: 0/1
0/1 + 0/1 = 0/1
0/1 - 0/1 = 0/1
0/1 * 0/1 = 0/1
0/1 / 0/1 = UNDEFINED
========================
[A] [B] E[X]IT : a
A: 3/4
========================
A: 3/4 B: 0/1
3/4 + 0/1 = 3/4
3/4 - 0/1 = 3/4
3/4 * 0/1 = 0/4
3/4 / 0/1 = UNDEFINED
========================
[A] [B] E[X]IT : b
B: 5/8
========================
A: 3/4 B: 5/8
3/4 + 5/8 = 44/32
3/4 - 5/8 = 4/32
3/4 * 5/8 = 15/32
3/4 / 5/8 = 24/20
========================
[A] [B] E[X]IT : a
A: 1/0
========================
A: UNDEFINED B: 5/8
UNDEFINED + 5/8 = UNDEFINED
UNDEFINED - 5/8 = UNDEFINED
UNDEFINED * 5/8 = UNDEFINED
UNDEFINED / 5/8 = UNDEFINED
========================
[A] [B] E[X]IT : b
B: 0/0
========================
A: UNDEFINED B: UNDEFINED
UNDEFINED + UNDEFINED = UNDEFINED
UNDEFINED - UNDEFINED = UNDEFINED
UNDEFINED * UNDEFINED = UNDEFINED
UNDEFINED / UNDEFINED = UNDEFINED
========================
[A] [B] E[X]IT : b
B: 1/7
========================
A: UNDEFINED B: 1/7
UNDEFINED + 1/7 = UNDEFINED
UNDEFINED - 1/7 = UNDEFINED
UNDEFINED * 1/7 = UNDEFINED
UNDEFINED / 1/7 = UNDEFINED
========================
[A] [B] E[X]IT : a
A: 0/0
========================
A: UNDEFINED B: 1/7
UNDEFINED + 1/7 = UNDEFINED
UNDEFINED - 1/7 = UNDEFINED
UNDEFINED * 1/7 = UNDEFINED
UNDEFINED / 1/7 = UNDEFINED
========================
[A] [B] E[X]IT : b
B: 0/0
========================
A: UNDEFINED B: UNDEFINED
UNDEFINED + UNDEFINED = UNDEFINED
UNDEFINED - UNDEFINED = UNDEFINED
UNDEFINED * UNDEFINED = UNDEFINED
UNDEFINED / UNDEFINED = UNDEFINED
========================
[A] [B] E[X]IT : a
A: 3/4
========================
A: 3/4 B: UNDEFINED
3/4 + UNDEFINED = UNDEFINED
3/4 - UNDEFINED = UNDEFINED
3/4 * UNDEFINED = UNDEFINED
3/4 / UNDEFINED = UNDEFINED
========================
[A] [B] E[X]IT : b
B: 9/8
========================
A: 3/4 B: 9/8
3/4 + 9/8 = 60/32
3/4 - 9/8 = -12/32
3/4 * 9/8 = 27/32
3/4 / 9/8 = 24/36
========================
[A] [B] E[X]IT : x
======================END======================
Press <RETURN> to close this window...
*/
#endif // TEST_RATIONAL_H
|
shuipi100/kaldi
|
egs/wsj/s5/steps/nnet3/report/convert_model.py
|
#!/usr/bin/env python3
# This script dumps the parameters of (most components of) an nnet3 model as a
# pickled python dict. (see documentation for the function 'read_model' below
# for more details).
#
# It also contains some utility function that you can get access by importing this
# file.
#
# In egs/mini_librispeech/s5/local/chain/diagnostic/report_example.py, you can
# find an example of the use of this script.
#
# Copyright 2017-2018 <NAME>
# Apache 2.0.
# This requires python 3.
import sys
import subprocess
import numpy as np
import pickle
def read_next_token(s, pos):
    """Return (token, new_pos) for the next whitespace-delimited token in `s`
    starting at `pos`; `new_pos` is one past the token's end.  If only
    whitespace remains, returns (None, len(s))."""
    assert isinstance(s, str) and isinstance(pos, int) and pos >= 0
    n = len(s)
    start = pos
    # Skip leading whitespace.
    while start < n and s[start].isspace():
        start += 1
    if start == n:
        # Nothing but whitespace left.
        return (None, n)
    # Scan to the end of the run of non-whitespace characters.
    end = start
    while end < n and not s[end].isspace():
        end += 1
    return (s[start:end], end)
def check_for_newline(s, pos):
    """Consume whitespace in `s` starting at `pos` and report whether a
    newline was among it.  Returns (saw_newline, new_pos); on return either
    new_pos == len(s) or s[new_pos] is non-whitespace."""
    assert isinstance(s, str) and isinstance(pos, int) and pos >= 0
    saw = False
    while pos < len(s) and s[pos].isspace():
        saw = saw or (s[pos] == "\n")
        pos += 1
    return (saw, pos)
def read_float(s, pos):
    """This function, given a string s (probably a long string, like a line or a file)
    and a position 'pos', tries to read a text-format floating point or integer,
    starting from this position, and returns the
    pair (float, new_position).
    If something goes wrong it will print a warning to stderr and return (None, pos)
    """
    orig_pos = pos
    (tok, pos) = read_next_token(s, pos)
    try:
        # float(None) raises TypeError (EOF case); a non-numeric token raises
        # ValueError.  Both are reported the same way.
        return (float(tok), pos)
    except (TypeError, ValueError):
        # Fixed: the message previously used placeholder {1} twice, so the
        # offending token was never shown.
        print("{0}: at file position {1}, expected float but got {2}".format(
            sys.argv[0], orig_pos, tok), file=sys.stderr)
        return (None, pos)
def read_int(s, pos):
    """This function, given a string s (probably a long string, like a line or a
    file) and a position 'pos', tries to read a text-format integer, starting
    from this position, and returns the
    pair (int, new_position).
    If something goes wrong it will print a warning to stderr and return (None, pos)
    """
    orig_pos = pos
    (tok, pos) = read_next_token(s, pos)
    try:
        # int(None) raises TypeError (EOF case); a non-numeric token raises
        # ValueError.
        return (int(tok), pos)
    except (TypeError, ValueError):
        # Fixed: the error path previously called .format() twice on a template
        # containing {0}/{1} with a single argument, raising IndexError instead
        # of printing the intended warning.
        print("{0}: at file position {1}, expected int but got {2}".format(
            sys.argv[0], orig_pos, tok), file=sys.stderr)
        return (None, pos)
def read_vector(s, pos):
    """This function, given a string s (probably a long string, like a line or a file)
    and a position 'pos', tries to read a text-format vector (something like "[ 1.0 2.0 3.0 ]")
    starting from this position, reads it as a 1-dimensional numpy array, and returns
    the pair (vector, new_position).
    If something goes wrong it will print a warning to stderr and return (None, pos)
    """
    (tok, pos) = read_next_token(s, pos)
    if tok != '[':
        # Fixed: this message previously double-called .format(), which raised
        # IndexError instead of printing.
        print("{0}: at file position {1}, expected vector but got {2}".format(
            sys.argv[0], pos, tok), file=sys.stderr)
        return (None, pos)
    v = []
    while True:
        (tok, pos) = read_next_token(s, pos)
        if tok is None or tok == ']':
            break
        try:
            v.append(float(tok))
        except ValueError:
            print("{0}: at file position {1}, reading vector, expected float but got {2}".
                  format(sys.argv[0], pos, tok), file=sys.stderr)
            return (None, pos)
    if tok is None:
        # Hit EOF before the closing ']'.
        print("{0}: encountered EOF while reading vector.".format(sys.argv[0]),
              file=sys.stderr)
        return (None, pos)
    return (np.array(v, dtype=np.float32), pos)
def read_matrix(s, pos):
    """This function, given a string s (probably a long string, like a line or a file)
    and a position 'pos', tries to read a text-format matrix
    (something like "[\n 1.0 2.0\n 3.0 4.0 ]")
    starting from this position, reads it as a 2-dimensional numpy array, and returns
    the pair (matrix, new_position).
    If something goes wrong it will print a warning to stderr and return (None, pos)
    """
    orig_pos = pos
    (tok, pos) = read_next_token(s, pos)
    if tok != '[':
        # Fixed: this message previously double-called .format(), which raised
        # IndexError instead of printing.
        print("{0}: at file position {1}, expected matrix but got {2}".format(
            sys.argv[0], pos, tok), file=sys.stderr)
        return (None, pos)
    # m will be an array of arrays (python arrays, not numpy arrays).
    m = []
    while True:
        # At this point, assume we're ready to read a new row
        # (terminated by newline or by "]").
        v = []
        while True:
            (tok, pos) = read_next_token(s, pos)
            if tok == ']' or tok is None:
                break
            try:
                v.append(float(tok))
            except ValueError:
                print("{0}: at file position {1}, reading matrix, expected float but got {2}".format(
                    sys.argv[0], pos, tok), file=sys.stderr)
                return (None, pos)
            (saw_newline, pos) = check_for_newline(s, pos)
            if saw_newline:  # Newline terminates each row of the matrix.
                break
        if len(v) > 0:
            m.append(v)
        if tok is None:
            # Fixed: this previously compared tok against the *string* 'None',
            # which is never true, so a truncated matrix caused an infinite loop.
            print("{0}: matrix starting at position {1} was unexpectedly terminated by EOF.".format(
                sys.argv[0], orig_pos), file=sys.stderr)
            break
        if tok == ']':
            break
    try:
        return (np.array(m, dtype=np.float32), pos)
    except ValueError:
        # e.g. ragged rows cannot form a rectangular array.
        print("{0}: error converting matrix starting at position {1} into numpy array.".format(
            sys.argv[0], orig_pos), file=sys.stderr)
        return (None, pos)
def is_component_type(component_type):
    """Return True if `component_type` looks like a component type marker of
    the form "<xxxComponent>", otherwise False."""
    if not isinstance(component_type, str):
        return False
    return (len(component_type) >= 13
            and component_type.startswith("<")
            and component_type.endswith("Component>"))
def read_generic(s, pos, terminating_token, action_dict):
    """Generic parser driver.  Reads tokens from `s` starting at `pos` until
    one of the terminating tokens (or EOF) is reached, and returns (d, new_pos)
    where d maps output keys to parsed objects.

    `terminating_token` is either a single token string (e.g.
    "</RectifiedLinearComponent>") or a set of such tokens.
    `action_dict` maps a field token to a pair (reader_function, dict_key);
    e.g. action_dict['<ParameterMatrix>'] = (read_matrix, 'params').
    Tokens not present in `action_dict` are silently skipped.
    """
    if isinstance(terminating_token, str):
        terminating_tokens = {terminating_token}
    else:
        terminating_tokens = terminating_token
    assert isinstance(terminating_tokens, set)
    assert isinstance(action_dict, dict)

    d = dict()  # accumulates the parsed fields
    orig_pos = pos
    while True:
        (tok, pos) = read_next_token(s, pos)
        if tok in terminating_tokens:
            break
        if tok is None:
            print("{0}: error reading object starting at position {1}, got EOF "
                  "while expecting one of: {2}".format(
                      sys.argv[0], orig_pos, terminating_tokens), file=sys.stderr)
            break
        if tok in action_dict:
            p = action_dict[tok]
            assert isinstance(p, tuple) and len(p) == 2
            (func, name) = p
            assert callable(func) and isinstance(name, str)
            (d[name], pos) = func(s, pos)
    return (d, pos)
def get_action_dict(component_type):
    """Return the 'action_dict' used by read_generic to parse a component of
    type `component_type` (e.g. '<SigmoidComponent>'): a map from field token
    to the pair (reader_function, output_key), e.g.
        action_dict['<ParameterMatrix>'] = (read_matrix, 'params').
    Unknown component types yield an empty dict, meaning all of their contents
    are skipped."""
    assert is_component_type(component_type)
    # e.g. '<SigmoidComponent>' -> 'Sigmoid'
    raw_type = component_type[1:-10]

    nonlinearities = {'Sigmoid', 'Tanh', 'RectifiedLinear',
                      'Softmax', 'LogSoftmax', 'NoOp'}
    if raw_type in nonlinearities:
        return {'<Dim>': (read_int, 'dim'),
                '<BlockDim>': (read_int, 'block-dim'),
                '<ValueAvg>': (read_vector, 'value-avg'),
                '<DerivAvg>': (read_vector, 'deriv-avg'),
                '<OderivRms>': (read_vector, 'oderiv-rms'),
                '<Count>': (read_float, 'count'),
                '<OderivCount>': (read_float, 'oderiv-count')}
    if raw_type in {'Affine', 'NaturalGradientAffine'}:
        # '<LinearParams>' is stored under 'params' for compatibility with
        # LinearComponent.
        return {'<LinearParams>': (read_matrix, 'params'),
                '<BiasParams>': (read_vector, 'bias')}
    if raw_type == 'Linear':
        return {'<Params>': (read_matrix, 'params')}
    if raw_type == 'BatchNorm':
        return {'<Dim>': (read_int, 'dim'),
                '<Count>': (read_float, 'count'),
                '<StatsMean>': (read_vector, 'stats-mean'),
                '<StatsVar>': (read_vector, 'stats-var')}
    # Unknown component type: read nothing.
    return {}
def get_stdout_from_command(command):
    """ Executes a command and returns its stdout output as a string. The
    command is executed with shell=True, so it may contain pipes and
    other shell constructs. Raises an exception if the command exits
    with nonzero status.
    """
    p = subprocess.Popen(command, shell=True,
                         stdout=subprocess.PIPE)
    stdout = p.communicate()[0]
    # Fixed: previously `p.returncode is not 0`, an identity comparison with an
    # int literal (a SyntaxWarning on recent Pythons and not guaranteed by the
    # language); use a value comparison.
    if p.returncode != 0:
        raise Exception("Command exited with status {0}: {1}".format(
            p.returncode, command))
    return stdout.decode()
def read_component(s, pos):
    """Reads one component starting at position `pos` in `s`.  At `pos` there
    should be a component type marker, e.g. <RectifiedLinearComponent>; parsing
    continues until after the matching end marker
    (e.g. </RectifiedLinearComponent>), or — on error — until EOF or the next
    '<ComponentName>' token so the caller can resynchronize.

    Returns (d, new_pos) where d maps element names to parsed objects
    (e.g. d['params'] may hold a matrix), or (None, new_pos) on failure.
    """
    (component_type, pos) = read_next_token(s, pos)
    if not is_component_type(component_type):
        print("{0}: error reading Component: at position {1}, expected <xxxxComponent>,"
              " got: {2}".format(sys.argv[0], pos, component_type), file=sys.stderr)
        # Skip ahead to the next component marker (or EOF) and give up.
        while True:
            (tok, pos) = read_next_token(s, pos)
            if tok is None or tok == '<ComponentName>':
                return (None, pos)
    # e.g. '<LinearComponent>' -> '</LinearComponent>'
    end_marker = "</" + component_type[1:]
    (d, pos) = read_generic(s, pos, {end_marker, '<ComponentName>'},
                            get_action_dict(component_type))
    if d is not None:
        d['type'] = component_type          # e.g. '<LinearComponent>'
        d['raw-type'] = component_type[1:-10]  # e.g. 'Linear'
    return (d, pos)
def read_model(filename):
    """Reads an nnet3 model from the provided filename, and returns a dict
    from the component-name to a dict containing things we have read
    in for that component.

    Returns None if the expected markers cannot be found in the converted
    model text."""
    # nnet3-copy converts the (possibly binary) model into text form on stdout.
    command = "nnet3-copy --binary=false {0} -".format(filename)
    s = get_stdout_from_command(command)
    # The model starts with some structural stuff (component-nodes, etc.) that we
    # won't be attempting to parse. We start parsing when we reach
    # <NumComponents>.
    pos = 0
    while True:
        (tok, pos) = read_next_token(s, pos)
        if tok is None:
            print("{0}: unexpected EOF on output of command {1}".format(
                sys.argv[0], command))
            return None
        if tok == "<NumComponents>":
            break
    # we just read <NumComponents>
    (tok, pos) = read_next_token(s, pos)
    # 'd', which we return, will be a dict from component-name
    # (e.g. 'tdnn1.affine'), to a dict containing elements of the component.
    d = dict()
    num_components = int(tok)  # shouldn't fail.
    for c in range(num_components):
        # read the components one by one...
        (tok, pos) = read_next_token(s, pos)
        if tok is None:
            print("{0}: unexpected EOF on output of command {1}".format(
                sys.argv[0], command))
            return None
        # We normally expect that tok will be '<ComponentName>', but if we read in
        # '<ComponentName>' while parsing the previous component (e.g. if its text form was
        # not terminated in the way we expected), then we accept that '<ComponentName>'
        # might not be available to parse.
        if tok == '<ComponentName>':
            component_pos = pos
            (component_name, pos) = read_next_token(s, pos)
            # At this point the type of the component will be printed: something like
            # <NaturalGradientAffineComponent>. We let 'read_component' take it from
            # here, and it will read until the terminating </NaturalGradientAffineComponent>,
            # or, in the case of error, to EOF or the next <ComponentName> string.
            (component, pos) = read_component(s, pos)
            if component != None:
                d[component_name] = component
            else:
                print("{0}: error reading component with name {1} at position {2}".format(
                    sys.argv[0], component_name, component_pos), file=sys.stderr)
    return d
def compute_derived_quantities(model):
    """This function, given a model as returned by 'read_model', computes certain
    potentially-useful derived quantities inside components: things like row
    and column norms of parameter matrices, and standard deviations of
    accumulated stats.  Results are stored back into the per-component dicts.
    """
    assert isinstance(model, dict)
    for c in model.values():
        # 'c' represents the component; it's a dict.
        raw_component_type = c['raw-type']
        if raw_component_type in {'Linear', 'Affine', 'NaturalGradientAffine'}:
            params = c['params']  # the parameter matrix
            # Row and column L2 norms of the parameter matrix.
            c['row-norms'] = np.sqrt(np.sum(params * params, axis=1))
            c['col-norms'] = np.sqrt(np.sum(params * params, axis=0))
            size = c['col-norms'].size
            if size % 3 == 0:
                # If the input-dim is divisible by 3, also compute column norms
                # pooled over 3 equal blocks -- meaningful for TDNNs or wherever
                # Append() was used.
                # Fixed: 'size / 3' is a float under Python 3 and breaks
                # np.reshape; integer division is required.
                third = size // 3
                c['col-norms-3'] = np.sqrt(
                    np.sum(np.power(c['col-norms'], 2).reshape(3, third), axis=0))
                assert c['col-norms-3'].shape == (third,)
        if raw_component_type == 'BatchNorm':
            # Standard deviation of the accumulated variance stats.
            c['stats-stddev'] = np.sqrt(c['stats-var'])
def compute_progress(model1, model2):
    """This function, given two models assumed to come from two successive
    iterations of training, computes certain component-level quantities
    that relate to the rate of change of parameters, and stores them in
    'model1' (keys 'row-change', 'col-change', 'rel-row-change', etc.).

    The relative-change keys are only produced if
    compute_derived_quantities() has already added 'row-norms'/'col-norms'
    to the components of 'model1'.
    """
    for component_name in model1:
        if component_name not in model2:
            continue  # can only compare components present in both models.
        c1 = model1[component_name]
        c2 = model2[component_name]
        raw_component_type = c1['raw-type']
        if raw_component_type in {'Linear', 'Affine', 'NaturalGradientAffine'}:
            params1 = c1['params']
            params2 = c2['params']
            if params1.size != params2.size:
                continue  # can't compare them if sizes differ.
            params_diff = params1 - params2
            c1['row-change'] = np.sqrt(np.sum(params_diff * params_diff, axis=1))
            c1['col-change'] = np.sqrt(np.sum(params_diff * params_diff, axis=0))
            # compute relative change in rows and columns; epsilon guards
            # against division by zero for all-zero rows/columns.
            epsilon = 1.0e-20
            if 'row-norms' in c1:
                c1['rel-row-change'] = c1['row-change'] / (c1['row-norms'] + epsilon)
            if 'col-norms' in c1:
                c1['rel-col-change'] = c1['col-change'] / (c1['col-norms'] + epsilon)
                # The 3-block pooled quantities are only defined when
                # compute_derived_quantities() produced 'col-norms-3'.
                # (Previously this ran outside the 'col-norms' guard and
                # could raise KeyError.)
                size = c1['col-norms'].size
                if size % 3 == 0 and 'col-norms-3' in c1:
                    # if the input-dim of this layer is divisible by 3, then average the
                    # column changes over 3 blocks... this makes sense for TDNNs or
                    # wherever we have used Append().  Integer division is required:
                    # a float shape would raise a TypeError under Python 3.
                    c1['col-change-3'] = np.sum(c1['col-change'].reshape(3, size // 3), axis=0)
                    c1['rel-col-change-3'] = c1['col-change-3'] / (c1['col-norms-3'] + epsilon)
def test():
    """Self-tests for the low-level parsing helpers (read_next_token,
    read_vector, check_for_newline, read_matrix, read_generic), asserting
    exact (value, position) results, followed by a smoke-test that reads a
    real model file.

    NOTE(review): the final read_model() call requires the hard-coded path
    'exp/chain_cleaned/tdnn1c_sp_bi/final.mdl' to exist relative to the
    working directory, so this only passes inside a matching experiment dir.
    """
    assert sys.version_info.major >= 3
    assert read_next_token("", 0) == (None, 0)
    assert read_next_token("hello", 0) == ("hello", 5)
    assert read_next_token("hello there", 0) == ("hello", 5)
    assert read_next_token("hello there", 5) == ("there", 11)
    assert read_next_token("hello there", 6) == ("there", 11)
    # read_vector consumes through the closing ']' (position 10 here).
    (a, pos) = read_vector(" [ 1 2 3 ] ", 0)
    assert pos == 10 and np.array_equal(np.array([1,2,3], dtype=np.float32), a)
    assert check_for_newline("hello ", 4) == (False, 4)
    assert check_for_newline("hello ", 5) == (False, 6)
    assert check_for_newline("hello \n", 5) == (True, 7)
    assert check_for_newline("hello \nthere", 5) == (True, 7)
    (m, pos) = read_matrix(" [\n 1 2 3\n 4 5 6 ] ", 0)
    assert pos == 18 and np.array_equal(np.array([[1,2,3],[4,5,6]], dtype=np.float32), m)
    # read_generic skips unknown markers and dispatches known ones.
    s = " <ignore_this> 1 <some_vec> [ 1 2 3 ] <end>"
    (obj, pos) = read_generic(s, 0, "<end>", { '<some_vec>': (read_vector, 'some_vec') })
    assert pos == len(s)
    assert np.array_equal(obj['some_vec'], np.array([1, 2, 3], dtype=np.float32))
    m = read_model('exp/chain_cleaned/tdnn1c_sp_bi/final.mdl')
    compute_derived_quantities(m)
    print("model is: {0}".format(m))
    print("tested")
if __name__ == '__main__':
    # With no arguments, run the self-tests and exit successfully instead of
    # falling through to the usage error.
    if len(sys.argv) == 1:
        test()
        sys.exit(0)
    if len(sys.argv) != 3:
        print("Usage: {0} <nnet3-model-in> <pickled-model-out>".format(
            sys.argv[0]), file=sys.stderr)
        sys.exit(1)
    m = read_model(sys.argv[1])
    if m is not None:
        try:
            # 'with' guarantees the output file is closed even on failure.
            with open(sys.argv[2], "wb") as f:
                pickle.dump(m, f)
        except Exception as e:
            # Fixed: the original format string had two placeholders but was
            # passed one argument, so the handler itself raised IndexError.
            print("{0}: error writing to {1}: {2}".format(
                sys.argv[0], sys.argv[2], e), file=sys.stderr)
            sys.exit(1)
|
marmitoTH/casa
|
app/models/health.rb
|
<reponame>marmitoTH/casa<filename>app/models/health.rb
class Health < ApplicationRecord
  # The "singleton_guard" column is a unique column which must always be set to 0.
  # Combined with the unique index on the column, this ensures that only one
  # Health row can ever be created.
  # Modernized from the legacy `validates_inclusion_of` helper to the
  # `validates ... inclusion:` form; behavior is identical.
  validates :singleton_guard, inclusion: { in: [0] }

  # Returns the single Health record, creating it on first access.
  def self.instance
    first_or_create!(singleton_guard: 0)
  end
end
# == Schema Information
#
# Table name: healths
#
# id :bigint not null, primary key
# latest_deploy_time :datetime
# singleton_guard :integer
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_healths_on_singleton_guard (singleton_guard) UNIQUE
#
|
jingcao80/Elastos
|
Sources/Elastos/LibCore/src/elastos/security/spec/CRSAOtherPrimeInfo.cpp
|
//=========================================================================
// Copyright (C) 2012 The Elastos Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//=========================================================================
#include "CRSAOtherPrimeInfo.h"
namespace Elastos {
namespace Security {
namespace Spec {
CAR_OBJECT_IMPL(CRSAOtherPrimeInfo)
CAR_INTERFACE_IMPL(CRSAOtherPrimeInfo, Object, IRSAOtherPrimeInfo)
// Out-parameter getter for the stored CRT coefficient.  The returned
// IBigInteger's reference count is incremented for the caller (REFCOUNT_ADD),
// per the CAR out-parameter convention.
ECode CRSAOtherPrimeInfo::GetCrtCoefficient(
    /* [out] */ IBigInteger **crtCoefficient)
{
    VALIDATE_NOT_NULL(crtCoefficient)
    *crtCoefficient = mCrtCoefficient;
    REFCOUNT_ADD(*crtCoefficient)
    return NOERROR;
}
// Out-parameter getter for the stored prime.  The returned IBigInteger's
// reference count is incremented for the caller (REFCOUNT_ADD).
ECode CRSAOtherPrimeInfo::GetPrime(
    /* [out] */ IBigInteger **prime)
{
    VALIDATE_NOT_NULL(prime)
    *prime = mPrime;
    REFCOUNT_ADD(*prime)
    return NOERROR;
}
// Out-parameter getter for the prime exponent.  Note: despite the generic
// name, this returns mPrimeExponent (not mPrime or mCrtCoefficient).  The
// returned IBigInteger's reference count is incremented for the caller.
ECode CRSAOtherPrimeInfo::GetExponent(
    /* [out] */ IBigInteger **exponent)
{
    VALIDATE_NOT_NULL(exponent)
    *exponent = mPrimeExponent;
    REFCOUNT_ADD(*exponent)
    return NOERROR;
}
// Initializes the (prime, primeExponent, crtCoefficient) triplet describing
// one extra RSA prime.  Each argument must be non-NULL; a NULL argument
// yields E_NULL_POINTER_EXCEPTION, mirroring the NullPointerException thrown
// by the original Java constructor (see the commented-out throws below).
ECode CRSAOtherPrimeInfo::constructor(
    /* [in] */ IBigInteger *prime,
    /* [in] */ IBigInteger *primeExponent,
    /* [in] */ IBigInteger *crtCoefficient)
{
    if (prime == NULL) {
        //throw new NullPointerException("prime == null");
        return E_NULL_POINTER_EXCEPTION;
    }
    if (primeExponent == NULL) {
        //throw new NullPointerException("primeExponent == null");
        return E_NULL_POINTER_EXCEPTION;
    }
    if (crtCoefficient == NULL) {
        //throw new NullPointerException("crtCoefficient == null");
        return E_NULL_POINTER_EXCEPTION;
    }
    // All inputs validated; store them (AutoPtr members take a reference).
    mPrime = prime;
    mPrimeExponent = primeExponent;
    mCrtCoefficient = crtCoefficient;
    return NOERROR;
}
}
}
}
|
raeleus/libGDX-Jam-December-2021
|
core/src/main/java/com/ray3k/template/entities/SpinnerEntity.java
|
package com.ray3k.template.entities;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.math.Vector2;
import com.dongbat.jbump.CollisionFilter;
import com.dongbat.jbump.Collisions;
import com.dongbat.jbump.Item;
import com.dongbat.jbump.Response;
import com.dongbat.jbump.Response.Result;
import com.esotericsoftware.spine.AnimationState.AnimationStateAdapter;
import com.esotericsoftware.spine.AnimationState.TrackEntry;
import com.esotericsoftware.spine.Event;
import com.ray3k.template.*;
import com.ray3k.template.Resources.*;
import java.util.ArrayList;
import static com.ray3k.template.Core.*;
import static com.ray3k.template.Resources.*;
import static com.ray3k.template.Resources.SpineSpinner.*;
import static com.ray3k.template.Resources.Values.*;
public class SpinnerEntity extends Entity implements Enemy {
    public float health;
    // Shared static scratch objects; must be fully reset before each use.
    private final static Vector2 temp = new Vector2();
    private static final ArrayList<Item> itemsTemp = new ArrayList<>();
    // Set once the player is detected; the spinner then dashes instead of decelerating.
    private boolean inAttack;

    /**
     * Applies damage and knock-back.  Bug fix: the previous code assigned
     * only temp.x before rotateDeg(), so a stale temp.y left over from
     * act() (or a prior hurt()) leaked into the rotation and skewed the
     * knock-back direction.  temp.set(force, 0) builds the vector cleanly.
     */
    @Override
    public void hurt(float damage, float force, float forceDirection) {
        temp.set(force, 0);
        temp.rotateDeg(forceDirection);
        temp.x *= spinnerHurtForceDampenerX;
        temp.y *= spinnerHurtForceDampenerY;
        deltaX += temp.x;
        deltaY += temp.y;

        health -= damage;
        if (health <= 0) {
            destroy = true;
            // Hand off to a death-animation entity continuing from the current frame.
            var die = new DieAnimEntity(skeletonData, animationData, animationState.getCurrent(0).getAnimation(), animationState.getCurrent(0).getTrackTime(), animationDie, x, y, skeleton.getRootBone().getRotation(), true);
            entityController.add(die);
        }
        animationState.setAnimation(1, animationHurt, false);
    }

    @Override
    public float getX() {
        return x;
    }

    @Override
    public float getY() {
        return y;
    }

    @Override
    public void create() {
        setSkeletonData(skeletonData, animationData);
        setCollisionBox(skeleton.findSlot("bbox"), skeletonBounds, collisionFilter);
        depth = DEPTH_ENEMY;
        health = spinnerHealth;
        animationState.setAnimation(0, animationAnimation, false);
    }

    @Override
    public void actBefore(float delta) {
    }

    @Override
    public void act(float delta) {
        // Cull spinners that drift beyond the level bounds plus a border.
        if (isOutside(0, 0, Core.levelWidth, Core.levelHeight, spinnerDestroyBorder)) destroy = true;

        if (!inAttack) {
            setSpeed(Utils.approach(getSpeed(), 0, spinnerDeceleration * delta));
            // Cast a fan of downward rays looking for the player.
            // NOTE(review): assumes spinnerDetectRays > 1 (divides by rays - 1).
            for (int i = 0; i < spinnerDetectRays; i++) {
                temp.set(1, 0);
                float angle = skeleton.getRootBone().getRotation() + 270 - spinnerDetectAngle / 2 + i * spinnerDetectAngle / (spinnerDetectRays - 1);
                angle %= 360;
                temp.rotateDeg(angle);
                world.queryRay(getBboxCenterX(), getBboxCenterY(), temp.x, temp.y, playerDetectFilter, itemsTemp);
                if (itemsTemp.size() > 0) {
                    // Player spotted: dash along the detecting ray.
                    setMotion(spinnerMoveSpeed, angle);
                    inAttack = true;
                    animationState.setAnimation(0, animationFall, true);
                    break;
                }
            }
        }
    }

    @Override
    public void draw(float delta) {
        // Debug-only visualization of the collision box.
        if (Values.debugging) {
            shapeDrawer.setColor(Color.ORANGE);
            shapeDrawer.filledRectangle(getBboxLeft(), getBboxBottom(), bboxWidth, bboxHeight);
        }
    }

    @Override
    public void destroy() {
        sfx_die.play(sfx);
    }

    @Override
    public void projectedCollision(Result result) {
    }

    @Override
    public void collision(Collisions collisions) {
        // Damage the player on contact, pushing them away from the spinner.
        for (int i = 0; i < collisions.size(); i++) {
            var collision = collisions.get(i);
            if (collision.other.userData instanceof PlayerEntity) {
                var player = (PlayerEntity) collision.other.userData;
                float playerDirection = Utils.pointDirection(getBboxCenterX(),getBboxCenterY(), player.getBboxCenterX(),player.getBboxCenterY());
                player.hurt(spinnerDamage, spinnerForce, playerDirection);
            }
        }
    }

    private static final SpinnerCollisionFilter collisionFilter = new SpinnerCollisionFilter();

    // jbump filter: overlap ("cross") with the player, ignore everything else.
    private static class SpinnerCollisionFilter implements CollisionFilter {
        @Override
        public Response filter(Item item, Item other) {
            if (other.userData instanceof PlayerEntity) return Response.cross;
            return null;
        }
    }

    private static final PlayerDetectFilter playerDetectFilter = new PlayerDetectFilter();

    // Ray-cast filter used in act(): only the player's item counts as a hit.
    private static class PlayerDetectFilter implements CollisionFilter {
        @Override
        public Response filter(Item item, Item other) {
            if (item.userData instanceof PlayerEntity) return Response.cross;
            return null;
        }
    }
}
|
devkat/dotty
|
tests/disabled/java-interop/failing/varargs-bridge/B.scala
|
package test
class B extends A {
  // Scala receives the varargs parameter as a Seq[Int]; result is arity + 1.
  override def foo(x: Int*): Int = {
    val arity = x.length
    arity + 1
  }
}
object B extends App {
  // Exercise the varargs bridge through an explicit instance; prints 4.
  private val instance = new B
  println(instance.foo(1, 2, 3))
}
|
Yash-Wasalwar-07/Boss2D
|
Boss2D/addon/_old/openssl-1.1.0c_for_boss/test/rmdtest.c
|
/*
* Copyright 1995-2016 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the OpenSSL license (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "../e_os.h"
#ifdef OPENSSL_NO_RMD160
/* Build stub used when OpenSSL is configured with OPENSSL_NO_RMD160:
 * report the missing algorithm and exit successfully so the test harness
 * is not broken by the absent cipher. */
int main(int argc, char *argv[])
{
    printf("No ripemd support\n");
    return (0);
}
#else
# include BOSS_OPENSSL_V_openssl__ripemd_h //original-code:<openssl/ripemd.h>
# include BOSS_OPENSSL_V_openssl__evp_h //original-code:<openssl/evp.h>
# ifdef CHARSET_EBCDIC
# include <openssl/ebcdic.h>
# endif
/* RIPEMD-160 known-answer test messages. */
static char test[][100] = {
    { "" },
    { "a" },
    { "abc" },
    { "message digest" },
    { "abcdefghijklmnopqrstuvwxyz" },
    { "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" },
    { "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" },
    { "12345678901234567890123456789012345678901234567890123456789012345678901234567890" }
};
/* Expected lowercase hex digests, index-matched to test[] above. */
static char *ret[] = {
    "9c1185a5c5e9fc54612808977ee8f548b2258d31",
    "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe",
    "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc",
    "5d0689ef49d2fae572b881b123a85ffa21595f36",
    "f71c27109c692c1b56bbdceb5b9d2865b3708dbc",
    "12a053384a9c0c88e405a06c27dcf49ada62eb2b",
    "b0e20b6e3116640286ed3a87a5713079b21f5189",
    "9b752e45573d4b39f4dbd3323cab82bf63326bfb",
};
/* Formats a raw digest as a hex string; defined after main(). */
static char *pt(unsigned char *md);
/* Known-answer test driver: digest each entry of test[] via the EVP
 * interface and compare the hex output against the matching entry of
 * ret[].  Exits with the number of failing vectors (0 == all passed). */
int main(int argc, char *argv[])
{
    unsigned int i;
    int err = 0;
    char **R;
    char *p;
    unsigned char md[RIPEMD160_DIGEST_LENGTH];
    R = ret;
    for (i = 0; i < OSSL_NELEM(test); i++) {
# ifdef CHARSET_EBCDIC
        /* Convert the source-encoded vectors to ASCII on EBCDIC platforms. */
        ebcdic2ascii(test[i], test[i], strlen(test[i]));
# endif
        if (!EVP_Digest(test[i], strlen(test[i]), md, NULL, EVP_ripemd160(),
                        NULL)) {
            printf("EVP Digest error.\n");
            EXIT(1);
        }
        p = pt(md);
        if (strcmp(p, (char *)*R) != 0) {
            printf("error calculating RIPEMD160 on '%s'\n", test[i]);
            printf("got %s instead of %s\n", p, *R);
            err++;
        } else
            printf("test %d ok\n", i + 1);
        R++;
    }
    EXIT(err);
}
/*
 * Format a raw RIPEMD-160 digest as a lowercase hex string.  Returns a
 * pointer to a static buffer that is overwritten on every call (not
 * thread-safe, fine for this single-threaded test).
 */
static char *pt(unsigned char *md)
{
    static char buf[80];
    char *out = buf;
    int i;

    for (i = 0; i < RIPEMD160_DIGEST_LENGTH; i++, out += 2)
        sprintf(out, "%02x", md[i]);
    return buf;
}
#endif
|
ChartsBot/telegram-bots
|
src/bots/chart_general/bot_charts.py
|
import locale
import os
import sys
from gevent import monkey
monkey.patch_all() # REALLY IMPORTANT: ALLOWS ZERORPC AND TG TO WORK TOGETHER
BASE_PATH = os.environ.get('BASE_PATH')
sys.path.insert(1, BASE_PATH + '/telegram-bots/src')
from libraries.models.exchanges import SupportedExchanges, ExchangeInfo
from returns.result import Success, Result
from libraries.controllers.converter import Converter
from libraries.models.price import TokenPrices
from libraries import web3_calls
from libraries.services.twitter_connector import TwitterConnector
from libraries.services.web3_connector import Web3HtppConnector
from graphqlclient import GraphQLClient
import time
from datetime import datetime
import os.path
import re
import io
import concurrent.futures
from telegram import Update, InlineKeyboardButton, InlineKeyboardMarkup, ReplyKeyboardMarkup
from telegram.ext import Updater, CommandHandler, CallbackContext, CallbackQueryHandler, Filters, MessageHandler, \
ConversationHandler, InlineQueryHandler
from telegram.inline.inlinequeryresultarticle import InlineQueryResultArticle
from telegram.inline.inputtextmessagecontent import InputTextMessageContent
from telegram.parsemode import ParseMode
import json
from telegram.error import ChatMigrated, BadRequest
from cachetools import cached, TTLCache
from libraries.general_end_functions import EndFunctionsHelper
from libraries.web3_calls import Web3Helper
import libraries.general_end_functions as general_end_functions
import libraries.commands_util as commands_util
import libraries.requests_util as requests_util
import libraries.time_util as time_util
import libraries.util as util
import libraries.scrap_websites_util as scrap_websites_util
import libraries.queries_parser as queries_parser
import libraries.translation_util as translation_util
from libraries.uniswap import Uniswap
from bots.chart_general.bot_charts_values import start_message, message_faq_empty, symbol_gecko, message_faq_additional, \
emoji_number_dic
from libraries.common_values import *
from web3 import Web3
from threading import Thread
import libraries.protobuf.filehandler.fileHandler_pb2 as filehandler_pb2
import libraries.protobuf.filehandler.fileHandler_pb2_grpc as filehandler_pb2_grpc
import grpc
# from py_w3c.validators.html.validator import HTMLValidator
from uuid import uuid4
import zerorpc
# import wolframalpha
import logging
logging.basicConfig(level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# APP_KEY_WOLFRAM = os.environ.get('WOLFRAM_API')
IS_TEST_ENV = False
SECRETS_PATH = os.environ.get('SECRETS_PATH')
with open(SECRETS_PATH + "fomo-bot-python/config.json") as f:
config = json.load(f)
matic_node_endpoint = config['matic']['endpoint']
bsc_mode_endpoint = config['bsc']['endpoint']
w3_matic = Web3HtppConnector(matic_node_endpoint).web3
w3_bsc = Web3HtppConnector(bsc_mode_endpoint).web3
# w3_matic.middleware_onion.inject(geth_poa_middleware, layer=0)
# try:
# wolfram_client = wolframalpha.Client(APP_KEY_WOLFRAM)
# except Exception:
# logging.info("Worlfram struggling to connect, trying again")
# wolfram_client = wolframalpha.Client(APP_KEY_WOLFRAM)
announcement_channel_id = config['channels']['announcement_channel_id']
# charts delete
charts_time_refresh = {}
# ZERORPC
zerorpc_client_data_aggregator = zerorpc.Client()
zerorpc_client_data_aggregator.connect(config["zerorpc"]["url"])
logging.info(zerorpc_client_data_aggregator.hello("coucou"))
# twitter
twitter = TwitterConnector(config['twitter']).instance
# ENV FILES
TELEGRAM_KEY = config['telegram']['key']
TELEGRAM_WEBHOOK_PRIVATE_KEY_PATH = config['telegram']['webhook']['priv-path']
TELEGRAM_WEBHOOK_CERT_PATH = config['telegram']['webhook']['cert-path']
SERVER_IP = config['telegram']['server-ip']
logging.info("%s, %s, %s, %s", TELEGRAM_KEY, TELEGRAM_WEBHOOK_PRIVATE_KEY_PATH, TELEGRAM_WEBHOOK_CERT_PATH, SERVER_IP)
decimals = 1000000000000000000 # that's 18
TMP_FOLDER = BASE_PATH + 'tmp/'
supply_file_path = BASE_PATH + 'log_files/chart_bot/supply_log_$TICKER.txt'
supply_chart_path = BASE_PATH + 'log_files/boo_bot/supply_chart_$TICKER.png'
pie_chart_wallet_path = BASE_PATH + 'log_files/boo_bot/pie_chart_wallet.png'
# grpc
GRPC_FILE_HANDLER_CA_PATH = config['grpc']['file-handler']['ca-path']
# web3
infura_url = config['web3']['eth']['endpoint-http']
w3 = Web3HtppConnector(infura_url).web3
# web3 uni wrapper
uni_wrapper = Uniswap(web3=w3)
# custom helpers
end_function_helper: EndFunctionsHelper = EndFunctionsHelper(config)
w3_helper: Web3Helper = Web3Helper(config)
converter: Converter = Converter(config, end_function_helper)
# log_file
charts_path = BASE_PATH + 'log_files/chart_bot/'
locale.setlocale(locale.LC_ALL, 'en_US')
graphql_client_uni = GraphQLClient('https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2')
graphql_client_eth = GraphQLClient('https://api.thegraph.com/subgraphs/name/blocklytics/ethereum-blocks')
rejection_no_default_ticker_message = "No default token found for this chat. Please ask an admin to add one with /set_default_token <TICKER>"
# CONFIG OPTION repeated task
check_big_buys_interval_seconds = 60 * 5
# grpc stuff
if os.environ.get('https_proxy'):
del os.environ['https_proxy']
if os.environ.get('http_proxy'):
del os.environ['http_proxy']
with open(GRPC_FILE_HANDLER_CA_PATH, 'rb') as f:
grpc_file_handler_creds = grpc.ssl_channel_credentials(f.read())
grpc_file_handler_channel = grpc.secure_channel('localhost:8081', grpc_file_handler_creds,
options=(('grpc.ssl_target_name_override', 'foo.test.google.fr'),
('grpc.enable_http_proxy', 0),))
# create a stub (client)
grpc_file_handler_client = filehandler_pb2_grpc.FileHandlerAkkaServiceStub(grpc_file_handler_channel)
exchange_info_uniswap = ExchangeInfo(exchange=SupportedExchanges.UNISWAP.value,
message_example_chart="/chart TICKER",
message_exchange_command_example_1="/price kp3r",
message_exchange_command_example_2="/price kp3r",
web3=w3)
exchange_info_cake = ExchangeInfo(exchange=SupportedExchanges.PANCAKESWAP.value,
message_example_chart="/chart_bsc 0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c",
message_exchange_command_example_1="/cake wbtc",
message_exchange_command_example_2="/cake 0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270",
web3=w3_bsc)
exchange_info_qs = ExchangeInfo(exchange=SupportedExchanges.QUICKSWAP.value,
message_example_chart="/chart_polygon 0x1bfd67037b42cf73acf2047067bd4f2c47d9bfd6",
message_exchange_command_example_1="/qs wmatic",
message_exchange_command_example_2="/qs 0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270",
web3=w3_matic)
def get_start_message(update: Update, context: CallbackContext):
    """Handle /start: log the request and send the bot's introduction text."""
    __log_channel(update.message.chat, "start")
    context.bot.send_message(chat_id=update.message.chat_id,
                             text=start_message,
                             parse_mode='html',
                             disable_web_page_preview=True)
def get_chart_exchange(update: Update, context: CallbackContext, chain_info: ExchangeInfo):
    """Render and send a price chart for the requested token on the given
    exchange/chain.

    Parses the token and time window from the command text, resolves the
    token's contract address on chain_info.exchange, draws a basic chart,
    and posts it to both the requesting chat and the announcement channel.
    Replies with a help message if the token cannot be resolved.
    """
    chat_id = update.message.chat_id
    token, start_time, time_period, options = queries_parser.analyze_query_charts(update.message.text,
                                                                                  None)
    time_type, k_hours, k_days = commands_util.get_time_query(start_time, time_period)
    t_to = int(time.time())
    t_from = t_to - (k_days * 3600 * 24) - (k_hours * 3600)
    trending = util.get_banner_txt(zerorpc_client_data_aggregator)
    maybe_addy = requests_util.get_token_contract_address(token, chain_info.exchange)
    if maybe_addy:
        token_info = w3_helper.get_token_info_from_address(maybe_addy, chain_info.web3)
        message, path, reply_markup_chart = general_end_functions.send_basic_chart(token_info, charts_path, k_days,
                                                                                   k_hours, t_from, t_to,
                                                                                   chain_info.exchange.chain,
                                                                                   txt=trending)
        util.create_and_send_vote(token_info.ticker, "chart_" + chain_info.exchange.chain.name,
                                  update.message.from_user.name,
                                  zerorpc_client_data_aggregator)
        # Record when this (chat, token) chart was generated, for the refresh button.
        token_chat_id = str(chat_id) + "_" + token
        charts_time_refresh[token_chat_id] = t_to
        # NOTE(review): open(path, 'rb') handles are never explicitly closed;
        # this relies on GC -- confirm acceptable or use a 'with' block.
        context.bot.send_photo(chat_id=chat_id, photo=open(path, 'rb'), caption=message, parse_mode="html",
                               reply_markup=reply_markup_chart)
        # reply_markup=reply_markup_chart)
        context.bot.send_photo(chat_id=announcement_channel_id, photo=open(path, 'rb'), caption=message,
                               parse_mode="html")
    else:
        context.bot.send_message(chat_id=chat_id, text="Couldn't find your shitcoin on " +
                                                       chain_info.exchange.name +
                                                       ". You can try to past the coin address directly (eg: " +
                                                       chain_info.message_example_chart + ")")
def get_chart_polygon(update: Update, context: CallbackContext):
    """Handle /chart_polygon: chart a token on QuickSwap (Polygon)."""
    __log_channel(update.message.chat, "chart_polygon")
    get_chart_exchange(update, context, exchange_info_qs)
def get_chart_bsc(update: Update, context: CallbackContext):
    """Handle /chart_bsc: chart a token on PancakeSwap (BSC)."""
    __log_channel(update.message.chat, "chart_bsc")
    get_chart_exchange(update, context, exchange_info_cake)
# button refresh: h:int-d:int-t:token
# button refresh: h:int-d:int-t:token
def get_candlestick(update: Update, context: CallbackContext):
    """Handle /chart: render a candlestick chart on Uniswap.

    With no token argument, falls back to the chat's configured default
    token (or "eth").  Records the generation time per (chat, token) in
    charts_time_refresh for the refresh button, and mirrors the chart to
    the announcement channel.
    """
    # sourcery skip: merge-duplicate-blocks, merge-else-if-into-elif, merge-nested-ifs, swap-if-else-branches
    __log_channel(update.message.chat, "chart")
    global charts_time_refresh
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    channel_token = __get_default_token_channel(chat_id)
    if channel_token is not None:
        default_default_token = channel_token[0]
    else:
        default_default_token = "eth"
    if len(query_received) == 1:
        if channel_token is None:
            context.bot.send_message(chat_id=chat_id, text=rejection_no_default_ticker_message)
            return
    token, start_time, time_period, options = queries_parser.analyze_query_charts(update.message.text,
                                                                                  default_default_token)
    options = None if options == [] else options
    # time_type, k_hours, k_days, tokens = commands_util.check_query(query_received, default_default_token)
    time_type, k_hours, k_days = commands_util.get_time_query(start_time, time_period)
    t_to = int(time.time())
    t_from = t_to - (k_days * 3600 * 24) - (k_hours * 3600)
    trending = util.get_banner_txt(zerorpc_client_data_aggregator)
    maybe_bottom_text = text_if_coin_being_watched(token)
    (message, path, reply_markup_chart) = general_end_functions.send_candlestick_pyplot(token, charts_path, k_days,
                                                                                        k_hours, t_from,
                                                                                        t_to, txt=trending,
                                                                                        options=options,
                                                                                        with_ad=maybe_bottom_text)
    # "finance"-mode charts do not count toward the trending vote tally.
    if options is not None:
        if "f" in options or "finance" in options:
            logging.info("finance: not sending vote")
        else:
            util.create_and_send_vote(token, "chart", update.message.from_user.name, zerorpc_client_data_aggregator)
    else:
        util.create_and_send_vote(token, "chart", update.message.from_user.name, zerorpc_client_data_aggregator)
    token_chat_id = str(chat_id) + "_" + token
    charts_time_refresh[token_chat_id] = t_to
    # NOTE(review): open(path, 'rb') handles are never explicitly closed; relies on GC.
    context.bot.send_photo(chat_id=chat_id, photo=open(path, 'rb'), caption=message, parse_mode="html",
                           reply_markup=reply_markup_chart)
    context.bot.send_photo(chat_id=announcement_channel_id, photo=open(path, 'rb'), caption=message, parse_mode="html")
def __analyse_and_send_token_price(maybe_price_message: Result[TokenPrices, str], context: CallbackContext, update: Update, chat_id: int):
    """Unwrap a price-lookup Result and reply to the chat.

    On Success: record a vote, send the formatted price message with a
    'refresh' inline button, and optionally mirror it to the announcement
    channel.  On Failure: send the error string carried by the Result.
    """
    if isinstance(maybe_price_message, Success):
        t_price: TokenPrices = maybe_price_message.unwrap()
        util.create_and_send_vote(t_price.metadata.ticker, "price", update.message.from_user.name,
                                  zerorpc_client_data_aggregator)
        message = t_price.generate_message()
        # Callback data encodes ticker/chain/address so the refresh handler
        # can redo the lookup without extra state.
        callback_data = __generate_callback_data_price(ticker_name=t_price.metadata.ticker,
                                                       chain_name=t_price.exchange_info.chain.abbreviation,
                                                       address=t_price.metadata.address)
        button_list_price = [
            [InlineKeyboardButton('refresh', callback_data=callback_data)]]
        reply_markup_price = InlineKeyboardMarkup(button_list_price)
        context.bot.send_message(chat_id=chat_id, text=message, parse_mode='html', reply_markup=reply_markup_price,
                                 disable_web_page_preview=True)
        __send_message_to_announcement_channel_if_needed(username=update.message.from_user.name,
                                                         method="price", token=t_price.metadata.ticker, context=context,
                                                         message=message)
    else:
        context.bot.send_message(chat_id=chat_id, text=maybe_price_message.unwrap(), parse_mode='html',
                                 disable_web_page_preview=True)
def get_price_oracle(update: Update, context: CallbackContext, exchange_info: ExchangeInfo):
    """Get the price of a token from the price-getter oracle.

    Expects exactly one argument (ticker or contract address); resolves it
    to a contract address on the given exchange, queries the oracle, and
    replies via __analyse_and_send_token_price.  Sends usage/help messages
    on a malformed command or an unresolvable token.
    """
    __log_channel(update.message.chat, "price_" + exchange_info.exchange.name)
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if len(query_received) != 2:
        context.bot.send_message(chat_id=chat_id, text='Please specify the ticker or address the desired token. (ex: /'
                                                       + exchange_info.message_exchange_command_example_1 + ')')
    else:
        token = query_received[1]
        logging.info("Asking for token %s on chain %s", token, exchange_info.exchange.chain.name)
        maybe_addy = requests_util.get_token_contract_address(token, exchange_info.exchange)
        if maybe_addy:
            maybe_price_message: Result[TokenPrices, str] = end_function_helper.get_price_from_oracle(maybe_addy, exchange_info)
            __analyse_and_send_token_price(maybe_price_message, context, update, chat_id)
        else:
            context.bot.send_message(chat_id=chat_id, text="Couldn't find your shitcoin on " +
                                                           exchange_info.exchange.name +
                                                           ". You can try to past the coin address directly (eg: " +
                                                           exchange_info.message_exchange_command_example_2 + ")")
def get_price_quickswap(update: Update, context: CallbackContext):
    """Handle the QuickSwap price command by delegating to the generic oracle."""
    get_price_oracle(update, context, exchange_info_qs)
def get_price_pancakeswap(update: Update, context: CallbackContext):
    """Handle the PancakeSwap price command by delegating to the generic oracle."""
    get_price_oracle(update, context, exchange_info_cake)
def get_price_token(update: Update, context: CallbackContext):
    """Handle /price: report the price of the given ticker, or of the chat's
    default token when no argument is supplied.

    Resolution order for an explicit ticker: the CoinGecko symbol table
    first, then the on-chain oracle via the resolved contract address.
    """
    __log_channel(update.message.chat, "price")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if len(query_received) == 2:
        ticker = query_received[1]
        logging.info("Asking for token %s with /price", ticker,)
        if ticker.upper() in symbol_gecko:
            value = symbol_gecko.get(ticker.upper())
            maybe_token_price = general_end_functions.get_price_gecko(value)
            button_list_price = [
                [InlineKeyboardButton('refresh', callback_data='r_p_' + "null" + "_t_" + ticker)]]
            reply_markup_price = InlineKeyboardMarkup(button_list_price)
            context.bot.send_message(chat_id=chat_id, text=maybe_token_price.generate_message(options=['NO_SCAP']), parse_mode='html', reply_markup=reply_markup_price,
                                     disable_web_page_preview=True)
        else:
            contract_from_ticker = requests_util.get_token_contract_address(ticker)
            if contract_from_ticker is None:
                context.bot.send_message(chat_id=chat_id, text='Contract address for ticker ' + ticker + ' not found.')
            else:
                maybe_token_price = end_function_helper.get_price_from_oracle(contract_from_ticker, exchange_info_uniswap)
                __analyse_and_send_token_price(maybe_token_price, context, update, chat_id)
    elif len(query_received) == 1:  # TODO: merge all those duplicate things
        # Fall back to the chat's default token.  Fixed: the helper can
        # return None (no default configured), which the previous direct
        # tuple-unpack turned into a TypeError before the None check ran.
        channel_token = __get_default_token_channel(chat_id)
        ticker, addr = channel_token if channel_token is not None else (None, None)
        if ticker is None:
            context.bot.send_message(chat_id=chat_id, text=rejection_no_default_ticker_message, parse_mode='html')
        elif addr is None or addr == "":
            context.bot.send_message(chat_id=chat_id, text='Contract address for ticker ' + ticker + ' not found.')
        else:
            maybe_token_price = end_function_helper.get_price_from_oracle(addr, exchange_info_uniswap)
            __analyse_and_send_token_price(maybe_token_price, context, update, chat_id)
    else:
        context.bot.send_message(chat_id=chat_id, text='Please specify the ticker of the desired token.')
def delete_meme(update: Update, context: CallbackContext):
    """Handle /delete_meme: remove a stored meme via the gRPC file handler.

    Permitted in private chats, or in groups only for admins.  Expects the
    meme's stored file name as the single argument.
    """
    __log_channel(update.message.chat, "delete_meme")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    can_delete = False
    if update.message.chat.type == 'private':
        can_delete = True
    elif __is_user_admin(context, update):
        can_delete = True
    if can_delete:
        if len(query_received) == 2:
            meme_to_delete = query_received[1]
            # Infer the stored type from the extension.
            file_type = "image" if ".jpg" in meme_to_delete else "video"  # todo: quick hack, fix
            delRequest = filehandler_pb2.FileDeleteRequest(chatId=chat_id,
                                                           fileClassification="meme",
                                                           fileType=file_type,
                                                           name=meme_to_delete)
            response = grpc_file_handler_client.DeleteFile(delRequest)
            context.bot.send_message(chat_id=chat_id, text=response.message)
        else:
            context.bot.send_message(chat_id=chat_id,
                                     text="Please specify which meme you wish to delete (like /delete_meme EminentOldEgret.jpg) or reply /delete_meme to it directly")
    else:
        context.bot.send_message(chat_id=chat_id,
                                 text="You don't have the rights to delete a meme. Only admins can do that you silly")
def get_meme(update: Update, context: CallbackContext):
    """Handle /get_meme: fetch a stored meme for this chat from the gRPC
    file-handler service and post it as a photo or video.

    Replies with guidance if the meme feature is disabled for this chat or
    no meme is stored.  (Removed the dead 'tmp_meme_path' locals that were
    assigned but never used.)
    """
    __log_channel(update.message.chat, "get_meme")
    chat_id = update.message.chat_id
    if _is_meme_authorized_on_channel(chat_id):
        fileRequest = filehandler_pb2.FileGetRequest(chatId=chat_id,
                                                     fileClassification="meme",
                                                     fileType="image",
                                                     author="whatever")
        response = grpc_file_handler_client.GetFile(fileRequest)
        logging.info("got nice meme")
        if response.status:
            # The service may return either an image or a video payload.
            if response.fileType == "image":
                context.bot.send_photo(chat_id=chat_id,
                                       photo=io.BytesIO(response.file),
                                       caption="Dank meme " + response.name
                                       )
            elif response.fileType == "video":
                context.bot.send_video(chat_id=chat_id,
                                       video=io.BytesIO(response.file),
                                       caption="Dank meme " + response.name
                                       )
            else:
                context.bot.send_message(chat_id=chat_id,
                                         text="file type received: " + response.fileType)
        else:
            context.bot.send_message(chat_id=chat_id,
                                     text="No meme found in this chat")
    else:
        context.bot.send_message(chat_id=chat_id,
                                 text="Memes are not activated on this channel. An admin can turn them on with /set_function meme")
def _upload_meme(file_as_bytes, message, file_type):
    """Build a FileUploadRequest for a meme and send it via gRPC.

    Shared backend for _add_meme_photo / _add_meme_video; returns the gRPC
    response (callers inspect response.status / response.message).
    """
    logging.info("adding dank meme - %s", file_type)
    file = filehandler_pb2.FileUploadRequest(chatId=message.chat_id,
                                             chatTitle=str(message.chat.title),
                                             fileClassification="meme",
                                             fileType=file_type,
                                             author="anon",  # update.message.from_user.name
                                             timeCreation=int(time.time()),
                                             file=bytes(file_as_bytes))
    return grpc_file_handler_client.UploadFile(file)


def _add_meme_video(message, context: CallbackContext):
    """Download the video attached to 'message' and store it as a meme."""
    file_as_bytes, size = general_end_functions.download_video_bytearray(message, context)
    return _upload_meme(file_as_bytes, message, "video")


def _add_meme_photo(message, context: CallbackContext):
    """Download the photo attached to 'message' and store it as a meme."""
    file_as_bytes = general_end_functions.download_image_bytearray(message, context)
    return _upload_meme(file_as_bytes, message, "image")
def _report_meme_upload(response, chat_id, context: CallbackContext):
    """Report a meme upload result (👍 success / 👎 failure) back to the chat."""
    logging.debug(str(response))
    if not response.status:
        context.bot.send_message(chat_id=chat_id, text="👎 Error uploading meme: " + response.message)
    else:
        context.bot.send_message(chat_id=chat_id, text="👍 Added meme as " + response.message)


def add_meme_reply(update: Update, context: CallbackContext):
    """Handle /add_meme issued as a reply: upload the quoted photo/video as a meme.

    Requires the meme feature to be enabled on the channel.

    FIX: the 👍/👎 reporting and the "no accepted media" rejection were each
    duplicated; both are now emitted from a single place.
    """
    chat_id = update.message.chat_id
    if not _is_meme_authorized_on_channel(chat_id):
        context.bot.send_message(chat_id=chat_id,
                                 text="Memes are not activated on this channel. An admin can turn them on with /set_function meme")
        return
    original_message = update.message.reply_to_message
    if original_message is not None:
        logging.debug(str(original_message.photo))
        logging.debug(str(original_message.video))
        if original_message.photo:
            _report_meme_upload(_add_meme_photo(original_message, context), chat_id, context)
            return
        if original_message.video:
            _report_meme_upload(_add_meme_video(original_message, context), chat_id, context)
            return
    context.bot.send_message(chat_id=chat_id,
                             text="Message replied to doesn't seem to contain accepted media (video or photo)")
def handle_new_video(update: Update, context: CallbackContext):
    """Upload a freshly posted video as a meme when its caption is "/add_meme".

    FIXES: the meme-feature authorization check present in handle_new_image
    was missing here (inconsistency); `response.status == False` replaced by
    `not response.status`; the KeyError guard is narrowed to the caption
    access so unrelated KeyErrors are no longer silently swallowed.
    """
    try:
        caption = update['message']['caption']
    except KeyError:
        # Message without a caption — nothing to do.
        return
    if caption != "/add_meme":
        return
    chat_id = update.message.chat_id
    if not _is_meme_authorized_on_channel(chat_id):
        context.bot.send_message(chat_id=chat_id,
                                 text="Memes are not activated on this channel. An admin can turn them on with /set_function meme")
        return
    try:
        response = _add_meme_video(update.message, context)
        logging.debug(str(response))
        if not response.status:
            context.bot.send_message(chat_id=chat_id, text="👎 Error uploading meme: " + response.message)
        else:
            context.bot.send_message(chat_id=chat_id, text="👍 Added meme as " + response.message)
    except IndexError:
        error_msg = "Adding image failed: no image provided. Make sure to send it as a file and not an image."
        context.bot.send_message(chat_id=chat_id, text=error_msg)
def handle_new_image(update: Update, context: CallbackContext):
    """Upload a freshly posted photo as a meme when its caption is "/add_meme".

    Requires the meme feature to be enabled on the channel; messages without a
    caption are ignored.
    """
    try:
        if update['message']['caption'] != "/add_meme":
            return
        chat_id = update.message.chat_id
        if not _is_meme_authorized_on_channel(chat_id):
            context.bot.send_message(chat_id=chat_id,
                                     text="Memes are not activated on this channel. An admin can turn them on with /set_function meme")
            return
        try:
            response = _add_meme_photo(update.message, context)
            logging.debug(str(response))
            if not response.status:
                context.bot.send_message(chat_id=chat_id, text="👎 Error uploading meme: " + response.message)
            else:
                context.bot.send_message(chat_id=chat_id, text="👍 Added meme as " + response.message)
        except IndexError:
            error_msg = "Adding image failed: no image provided. Make sure to send it as a file and not an image."
            context.bot.send_message(chat_id=chat_id, text=error_msg)
    except KeyError:
        # No caption on the message — nothing to do.
        pass
def __send_message_if_ocr(update, context):
    """OCR an incoming image and, when the text matches the known error
    screenshot ("one of the tokens" / "price movement or"), reply with the
    canned answer."""
    try:
        ocr_text = general_end_functions.ocr_image(update, context, TMP_FOLDER)
        markers = ('one of the tokens', 'price movement or')
        if any(marker in ocr_text for marker in markers):
            context.bot.send_message(chat_id=update.message.chat_id,
                                     text=test_error_token,
                                     reply_to_message_id=update.message.message_id)
    except IndexError:
        # Presumably raised when the update carries no usable image — ignored,
        # matching the original behavior.
        pass
def __generate_callback_data_price(ticker_name: str = 'null', chain_name: str = 'null', address: str = 'null'):
return f"r_p_{address}_t_{ticker_name}_c_{chain_name}"
def refresh_price(update: Update, context: CallbackContext):
    """Callback-button handler: re-fetch a token price and edit the message in place.

    The payload is "r_p_<address>_t_<ticker>_c_<chain>" (see
    __generate_callback_data_price). Coingecko-listed tickers go through
    gecko; everything else through the on-chain oracle of its chain.
    """
    __log_channel(update.callback_query.message.chat, "refresh_price")
    logging.info("refreshing price")
    query = update.callback_query.data
    contract_from_ticker = query.split('r_p_')[1].split('_t')[0]
    token_name = query.split('_t_')[1].split('_c')[0]
    chain_name = query.split('_c_')[1]
    if token_name.upper() in symbol_gecko:
        value = symbol_gecko.get(token_name.upper())
        message = general_end_functions.get_price_gecko(value).generate_message(options=['NO_SCAP'])
        callback_message = __generate_callback_data_price(token_name, 'null', 'null')
        button_list_price = [[InlineKeyboardButton('refresh', callback_data=callback_message)]]
    else:
        exchange = __chain_abbreviation_to_exchange(chain_name)
        callback_message = __generate_callback_data_price(token_name, chain_name, address=contract_from_ticker)
        button_list_price = [[InlineKeyboardButton('refresh', callback_data=callback_message)]]
        maybe_message: Result[TokenPrices, str] = end_function_helper.get_price_from_oracle(contract_from_ticker, exchange)
        # BUG FIX: the original tested isinstance(maybe_message.__class__, Success),
        # which is always False (a class object is not an instance of Success),
        # so the oracle price was never rendered. Test the Result value itself.
        if isinstance(maybe_message, Success):
            message = maybe_message.unwrap().generate_message()
        else:
            update.callback_query.answer()
            return
    reply_markup_price = InlineKeyboardMarkup(button_list_price)
    # Telegram rejects edits that don't change the text, so skip identical content.
    if update.callback_query.message.text_html != message:
        update.callback_query.edit_message_text(text=message, parse_mode='html', reply_markup=reply_markup_price,
                                                disable_web_page_preview=True)
    else:
        update.callback_query.answer()
def delete_message(update: Update, context: CallbackContext):
    """Callback-button handler: remove the message the button is attached to."""
    logging.info("deleting chart")
    query_message = update.callback_query.message
    context.bot.delete_message(chat_id=query_message.chat_id, message_id=query_message.message_id)
def _stop_if_refreshing_too_early(context, chat_id, token_chat_id, t_to) -> bool:
    """Rate-limit chart refreshes on large channels (>= 100 members).

    Returns True when the refresh should be BLOCKED (last refresh for this
    token/channel happened less than 30s ago) and False when it may proceed.
    Small channels are never throttled.

    BUG FIX: the original returned False in the "too early" branch and True
    after recording a fresh timestamp — the opposite of its name and of how
    callers use it (`if not _stop_if_refreshing_too_early(...): refresh()`),
    so large channels could only refresh inside the 30s window.
    """
    global charts_time_refresh
    members_count = context.bot.get_chat_members_count(chat_id)
    logging.info("members count: " + str(members_count))
    if members_count < 100:
        return False
    last_time = charts_time_refresh.get(token_chat_id)
    if last_time is not None and t_to - last_time < 30:
        logging.debug("requesting chart refresh too early")
        return True
    charts_time_refresh[token_chat_id] = t_to
    return False
def refresh_chart(update: Update, context: CallbackContext):
    """Callback-button handler: regenerate a candlestick chart and repost it.

    The callback payload packs the window and token as "...h:<hours>...d:<days>
    ...t:<token>...o:<opt//opt>"; the old chart message is deleted after the
    new one is sent. Refreshes are throttled per (chat, token) on large
    channels via _stop_if_refreshing_too_early.
    """
    __log_channel(update.callback_query.message.chat, "refresh_chart")
    logging.info("refreshing chart")
    query = update.callback_query.data
    chat_id = update.callback_query.message.chat_id
    # Pull the chart window and token back out of the packed callback string.
    k_hours = int(re.search(r'\d+', query.split('h:')[1]).group())
    k_days = int(re.search(r'\d+', query.split('d:')[1]).group())
    token = re.search(r'([A-Za-z0-9-]+)', query.split('t:')[1]).group()[:-1]
    options = query.split('o:')[1].split("//")
    token_chat_id = str(chat_id) + "_" + token
    t_to = int(time.time())
    if not _stop_if_refreshing_too_early(context, chat_id, token_chat_id, t_to):
        t_from = t_to - (k_days * 3600 * 24) - (k_hours * 3600)
        message_id = update.callback_query.message.message_id
        trending = util.get_banner_txt(zerorpc_client_data_aggregator)
        # Optional promo line when this coin has a live-actions channel.
        maybe_bottom_text = text_if_coin_being_watched(token)
        (message, path, reply_markup_chart) = general_end_functions.send_candlestick_pyplot(token, charts_path, k_days,
                                                                                            k_hours, t_from, t_to,
                                                                                            txt=trending,
                                                                                            options=options,
                                                                                            with_ad=maybe_bottom_text)
        context.bot.send_photo(chat_id=chat_id, photo=open(path, 'rb'), caption=message, parse_mode="html",
                               reply_markup=reply_markup_chart)
        # Replace rather than edit: drop the stale chart message.
        context.bot.delete_message(chat_id=chat_id, message_id=message_id)
def __chain_abbreviation_to_exchange(abbr: str) -> ExchangeInfo:
    """Return the exchange info for a chain abbreviation.

    polygon -> quickswap, bsc -> pancakeswap, anything else -> uniswap.
    """
    chain = util.chain_abbreviation_to_name(abbr)
    exchange_by_chain = {'polygon': exchange_info_qs, 'bsc': exchange_info_cake}
    return exchange_by_chain.get(chain, exchange_info_uniswap)
def refresh_chart_simple(update: Update, context: CallbackContext):
    """Callback-button handler: regenerate a basic (non-candlestick) chart.

    Like refresh_chart, but the payload also carries a chain abbreviation
    ("_c:<chain>") so the token can be resolved on the right exchange/web3.
    """
    __log_channel(update.callback_query.message.chat, "refresh_chart")
    logging.info("refreshing chart")
    query = update.callback_query.data
    chat_id = update.callback_query.message.chat_id
    # Unpack window, token address and chain from the callback string.
    k_hours = int(re.search(r'\d+', query.split('h:')[1]).group())
    k_days = int(re.search(r'\d+', query.split('d:')[1]).group())
    token = re.search(r'([A-Za-z0-9-]+)', query.split('t:')[1]).group()
    chain = re.search(r'([A-Za-z0-9-]+)', query.split('_c:')[1]).group()[:-1]
    options = query.split('o:')[1].split("//")
    token_chat_id = str(chat_id) + "_" + token
    current_exchange = __chain_abbreviation_to_exchange(chain)
    token_info = w3_helper.get_token_info_from_address(token, current_exchange.web3)
    t_to = int(time.time())
    if not _stop_if_refreshing_too_early(context, chat_id, token_chat_id, t_to):
        t_from = t_to - (k_days * 3600 * 24) - (k_hours * 3600)
        message_id = update.callback_query.message.message_id
        trending = util.get_banner_txt(zerorpc_client_data_aggregator)
        # Optional promo line when this coin has a live-actions channel.
        maybe_bottom_text = text_if_coin_being_watched(token)
        (message, path, reply_markup_chart) = general_end_functions.send_basic_chart(token_info, charts_path, k_days,
                                                                                    k_hours, t_from, t_to,
                                                                                    current_exchange.exchange.chain,
                                                                                    txt=trending,
                                                                                    options=options,
                                                                                    with_ad=maybe_bottom_text)
        context.bot.send_photo(chat_id=chat_id, photo=open(path, 'rb'), caption=message, parse_mode="html",
                               reply_markup=reply_markup_chart)
        # Replace rather than edit: drop the stale chart message.
        context.bot.delete_message(chat_id=chat_id, message_id=message_id)
# sends the current biz threads
def _format_biz_threads(word: str):
    """Search 4channel /biz/ for `word`; return a formatted thread list or
    None when nothing matches. HTML in excerpts is stripped before display."""
    base_url = "boards.4channel.org/biz/thread/"
    # re.escape handles '$' (tickers) plus every other regex metacharacter;
    # the original only escaped '$' by hand.
    threads_ids = scrap_websites_util.get_biz_threads(re.compile(re.escape(word)))
    if not threads_ids:
        return None
    message = """Relevant /biz/ threads:
    """
    for thread_id in threads_ids:
        excerpt = thread_id[2] + " | " + thread_id[1]
        # Strip HTML in every branch (the default-token path forgot to).
        parsed_excerpt = util.cleanhtml(excerpt)
        message += base_url + str(thread_id[0]) + " -- " + parsed_excerpt[0: 100] + "[...] \n"
    return message


def get_biz(update: Update, context: CallbackContext):
    """Handle /biz [WORD]: list current /biz/ threads mentioning WORD (or the
    channel's default ticker when WORD is omitted).

    FIX: the two branches duplicated the thread-formatting loop (marked TODO
    in the original) and disagreed on HTML cleaning; both now share
    _format_biz_threads.
    """
    __log_channel(update.message.chat, "biz")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if len(query_received) == 2:
        word = query_received[-1]
    elif len(query_received) == 1:
        word, _addr = __get_default_token_channel(chat_id)
        if word is None or word.lower() == "null":
            context.bot.send_message(chat_id=chat_id,
                                     text='No default ticker set up for this channel. An admin can add one with the /set_default_token command. In the meantime, you can use /biz by doing /biz KEYWORD')
            return
    else:
        context.bot.send_message(chat_id=chat_id,
                                 text='Please use the format /biz WORD')
        return
    message = _format_biz_threads(word)
    if message is None:
        no_thread_message = "No current /biz/ thread containing the word $WORD. You can make one at https://boards.4channel.org/biz/.".replace(
            "$WORD", word)
        context.bot.send_message(chat_id=chat_id, text=no_thread_message, disable_web_page_preview=True)
    else:
        context.bot.send_message(chat_id=chat_id, text=message, disable_web_page_preview=True)
        context.bot.send_message(chat_id=announcement_channel_id, text=message, disable_web_page_preview=True)
def get_twitter(update: Update, context: CallbackContext):
    """Handle /twitter [TICKER]: post the latest tweets for TICKER (or the
    channel's default ticker when omitted).

    FIX: the fetch-and-send logic was duplicated in both valid branches;
    merged into a single tail.
    """
    __log_channel(update.message.chat, "twitter")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if len(query_received) == 2:
        ticker = query_received[-1]
    elif len(query_received) == 1:
        ticker, _addr = __get_default_token_channel(chat_id)
        if ticker is None or ticker.lower() == "null":
            context.bot.send_message(chat_id=chat_id,
                                     text='No default ticker set up for this channel. An admin can add one with the /set_default_token command. In the meantime, you can use /twitter by doing /twitter TOKEN')
            return
    else:
        context.bot.send_message(chat_id=chat_id, text="Please use the format /twitter TOKEN_TICKER.",
                                 parse_mode='html', disable_web_page_preview=True)
        return
    res = scrap_websites_util.get_last_tweets(twitter, ticker)
    context.bot.send_message(chat_id=chat_id, text=res, parse_mode='html', disable_web_page_preview=True)
    context.bot.send_message(chat_id=announcement_channel_id, text=res, parse_mode='html',
                             disable_web_page_preview=True)
def do_convert(update: Update, context: CallbackContext):
    """Handle /convert: delegate the raw query tokens to the converter and
    post the result."""
    __log_channel(update.message.chat, "convert")
    tokens = update.message.text.split(' ')
    result = converter.convert_to_something(tokens)
    context.bot.send_message(chat_id=update.message.chat_id, text=result,
                             disable_web_page_preview=True, parse_mode='html')
def _send_wallet_balance(context: CallbackContext, chat_id, wallet, ticker):
    """Fetch and report the ticker balance (token amount + USD) of a wallet."""
    amount, amount_usd = general_end_functions.get_balance_token_wallet(w3, wallet, ticker, w3_helper, converter)
    message = "wallet " + str(wallet)[0:3] + '[...]' + " contains <b>" + str(
        util.pretty_number(amount)) + " " + ticker + " = " + str(amount_usd) + " usd</b>."
    context.bot.send_message(chat_id=chat_id, text=message, parse_mode='html')


def balance_token_in_wallet(update: Update, context: CallbackContext):
    """Handle /balance WALLET [TICKER] plus the special "/balance jackpot".

    When TICKER is omitted, the channel's default token is used.

    FIXES: the 2-arg branch silently did nothing when no default token was
    configured; the identical report code in two branches is now shared.
    """
    __log_channel(update.message.chat, "balance")
    query_received = update.message.text.split(' ')
    chat_id = update.message.chat_id
    if len(query_received) == 3:
        _send_wallet_balance(context, chat_id, query_received[1], query_received[2])
    elif len(query_received) == 2 and query_received[1] == "jackpot":
        # Hard-wired Lucky daily-jackpot wallet.
        wallet = "0x9284b7fb2c842666dae4e87ddb49106b72820d26"
        ticker = "LUCKY"
        amount, amount_usd = general_end_functions.get_balance_token_wallet(w3, wallet, ticker, w3_helper, converter)
        message = "<b>🍀 Lucky Daily Jackpot Balance</b>," + str(amount) + " " + ticker + " = <b>" + str(
            amount_usd) + " usd</b>."
        context.bot.send_message(chat_id=chat_id, text=message, parse_mode="html")
    elif len(query_received) == 2:
        channel_token = __get_default_token_channel(chat_id)
        if channel_token is not None:
            _send_wallet_balance(context, chat_id, query_received[1], channel_token[0])
        else:
            # BUG FIX: the original gave no feedback at all in this case.
            context.bot.send_message(chat_id=chat_id,
                                     text="No default token set for this channel. Please use /balance WALLET TOKEN")
    else:
        context.bot.send_message(chat_id=chat_id, text="Wrong arguments. Please use /balance WALLET TOKEN")
def get_gas_average(update: Update, context: CallbackContext):
    """Handle /gas: post the current average gas prices to the chat and the
    announcement channel."""
    __log_channel(update.message.chat, "gas")
    gas_message = general_gas_message()
    for target in (update.message.chat_id, announcement_channel_id):
        context.bot.send_message(chat_id=target, text=gas_message,
                                 disable_web_page_preview=True, parse_mode='html')
def get_burned_eth(update: Update, context: CallbackContext):
    """Handle /burn: post the ETH burn-rate summary to the chat and the
    announcement channel."""
    __log_channel(update.message.chat, "burn")
    burn_message = general_burned_eth_message()
    for target in (update.message.chat_id, announcement_channel_id):
        context.bot.send_message(chat_id=target, text=burn_message,
                                 disable_web_page_preview=True, parse_mode='html')
def general_burned_eth_message():
    """Build the HTML message with ETH burn rates (1h / 24h, in ETH and USD)
    and the total amount burned."""
    price_usd_eth, burn_rate_1_h, burn_rate_24_h, total_burn = general_end_functions.get_burned_eth_data()
    pretty = util.pretty_number
    return (
        "<b>Burn rate:</b><code>"
        + "\n1H : " + pretty(burn_rate_1_h) + 'ETH/min = ' + pretty(burn_rate_1_h * price_usd_eth) + "$/min"
        + "\n24H: " + pretty(burn_rate_24_h) + 'ETH/min = ' + pretty(burn_rate_24_h * price_usd_eth) + "$/min"
        + "\n</code><b>Total burned:</b><code> "
        + "\n" + pretty(total_burn) + 'ETH = ' + pretty(int(total_burn * price_usd_eth)) + '$</code>'
    )
def get_time_to(update: Update, context: CallbackContext):
    """Handle /timeto QUERY: report how far a date/time lies from now.

    "jackpot" and "christmas" are convenience aliases for fixed targets.
    """
    __log_channel(update.message.chat, "timeto")
    chat_id = update.message.chat_id
    query_received = update.message.text[7:]
    if query_received in ["jackpot", " jackpot"]:
        query_received = "7 pm CST"
    elif query_received.lower() in ["christmas", " christmas"]:
        logging.info("requesting timeto christmas")
        query_received = "25 december"
    higher, time_to = time_util.get_time_diff(query_received)
    connector = ' is ' if higher else ' was '
    context.bot.send_message(chat_id=chat_id,
                             text=str(query_received) + connector + str(time_to) + " from now.",
                             disable_web_page_preview=True)
# TODO: fix stuff with default token not being fully used
def get_latest_actions(update: Update, context: CallbackContext):
    """Handle /last_actions [TOKEN ...]: post recent on-chain actions for the
    token's ETH pair (uniswap), mirrored to the announcement channel.

    With no argument the channel's default token is used; otherwise the query
    is parsed for a token and display options.
    NOTE(review): the two branches duplicate the fetch/send logic but differ
    in the contract/options arguments — flagged by the author's own TODO above.
    """
    __log_channel(update.message.chat, "last_actions")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if len(query_received) == 1:
        default_token = __get_default_token_channel(chat_id)
        if default_token is not None:
            latest_actions_pretty = general_end_functions.get_last_actions_token_in_eth_pair(token_ticker=default_token[0],
                                                                                             uni_wrapper=uni_wrapper,
                                                                                             graphql_client_uni=graphql_client_uni,
                                                                                             converter=converter
                                                                                             )
            # Record the lookup for trending stats.
            util.create_and_send_vote(default_token[0], "actions", update.message.from_user.name,
                                      zerorpc_client_data_aggregator)
            context.bot.send_message(chat_id=chat_id, text=latest_actions_pretty, disable_web_page_preview=True,
                                     parse_mode='html')
            context.bot.send_message(chat_id=announcement_channel_id, text=latest_actions_pretty,
                                     disable_web_page_preview=True, parse_mode='html')
        else:
            context.bot.send_message(chat_id=chat_id, text=rejection_no_default_ticker_message)
    else:
        # An explicit query: fall back to the channel default only for parsing.
        default_token = __get_default_token_channel(chat_id)
        if default_token is not None:
            ticker, addr = default_token[0], default_token[1]
        else:
            ticker, addr = None, None
        token, options = queries_parser.analyze_query_last_actions(update.message.text, ticker)
        if token is not None:
            latest_actions_pretty = general_end_functions.get_last_actions_token_in_eth_pair(token_ticker=token,
                                                                                             uni_wrapper=uni_wrapper,
                                                                                             graphql_client_uni=graphql_client_uni,
                                                                                             converter=converter,
                                                                                             contract=None,
                                                                                             options=options)
            util.create_and_send_vote(token, "actions", update.message.from_user.name, zerorpc_client_data_aggregator)
            context.bot.send_message(chat_id=chat_id, text=latest_actions_pretty, disable_web_page_preview=True,
                                     parse_mode='html')
            context.bot.send_message(chat_id=announcement_channel_id, text=latest_actions_pretty,
                                     disable_web_page_preview=True, parse_mode='html')
        else:
            context.bot.send_message(chat_id=chat_id, text=rejection_no_default_ticker_message)
def get_trending(update: Update, context: CallbackContext):
    """Handle /trending: post the aggregator's current trending list."""
    __log_channel(update.message.chat, "trending")
    trending_message = zerorpc_client_data_aggregator.view_trending()
    context.bot.send_message(chat_id=update.message.chat_id, text=trending_message)
    context.bot.send_message(chat_id=announcement_channel_id, text=trending_message)
def get_gas_spent(update: Update, context: CallbackContext):
    """Handle /gas_spent ADDRESS: report the total gas spent by an address."""
    __log_channel(update.message.chat, "gas_spent")
    chat_id = update.message.chat_id
    if len(update.message.text.split(' ')) < 2:
        context.bot.send_message(chat_id=chat_id,
                                 text="Please use the format /gas_spent address (ex: /gas_spent 0xBE0eB53F46cd790Cd13851d5EFf43D12404d33E8)")
        return
    addr, options = queries_parser.analyze_query_gas_spent(update.message.text)
    context.bot.send_message(chat_id=chat_id, text=general_end_functions.get_gas_spent(addr, options))
# ADMIN STUFF
def set_faq(update: Update, context: CallbackContext):
    """Handle /set_faq TEXT (admin only): store TEXT as the channel FAQ.

    BUG FIX: the original sliced `text[8:]`, which keeps the space after
    "/set_faq" at the front of the stored FAQ, and accepted a whitespace-only
    query as a valid FAQ. The query is now stripped before use.
    """
    __log_channel(update.message.chat, "set_faq")
    chat_id = update.message.chat_id
    faq = update.message.text[8:].strip()
    if not __is_user_admin(context, update):
        context.bot.send_message(chat_id=chat_id, text="Only an admin can do that you silly.")
    elif faq:
        res = zerorpc_client_data_aggregator.set_faq(chat_id, faq)
        message_info = res + '\n' + message_faq_additional
        context.bot.send_message(chat_id=chat_id, text=message_info, parse_mode='html',
                                 disable_web_page_preview=True)
    else:
        context.bot.send_message(chat_id=chat_id, text="Please use the format /set_faq FAQ")
def get_the_faq(update: Update, context: CallbackContext):
    """Handle /faq: post the channel FAQ, falling back to raw text when its
    stored HTML markup is rejected by Telegram."""
    __log_channel(update.message.chat, "faq")
    chat_id = update.message.chat_id
    faq = __get_faq_channel(chat_id)
    if faq is None or faq == "null":
        faq = message_faq_empty
    try:
        context.bot.send_message(chat_id=chat_id, text=faq, parse_mode='html', disable_web_page_preview=True)
    except BadRequest:
        # Malformed HTML in the stored FAQ — resend without parse_mode.
        header = "Looks like some html tags are not properly set. Here's the raw faq: \n"
        context.bot.send_message(chat_id=chat_id, text=header + faq, disable_web_page_preview=True)
def __get_faq_channel(channel_id: int):
    """Fetch the stored FAQ for a channel from the data aggregator."""
    return zerorpc_client_data_aggregator.get_faq(channel_id)
def set_default_token(update: Update, context: CallbackContext):
    """Handle /set_default_token TICKER [ADDRESS] (admin only).

    Without ADDRESS, the contract address is resolved from the ticker.

    FIX: the 2-arg and 3-arg branches duplicated the log/store/reply tail and
    only differed in where the address came from; merged.
    """
    __log_channel(update.message.chat, "set_default_token")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if not __is_user_admin(context, update):
        context.bot.send_message(chat_id=chat_id, text="Only an admin can do that you silly.")
        return
    if len(query_received) == 2:
        ticker = query_received[1].upper()
        token_addr = requests_util.get_token_contract_address(ticker)
    elif len(query_received) == 3:
        ticker = query_received[1].upper()
        token_addr = query_received[2].lower()
    else:
        context.bot.send_message(chat_id=chat_id, text="Please use the format /set_default_token TICKER (address)")
        return
    logging.info("setting default channel " + str(chat_id) + " with address " + str(token_addr))
    res = zerorpc_client_data_aggregator.set_default_token(chat_id, ticker, token_addr)
    context.bot.send_message(chat_id=chat_id, text=res)
def get_default_token(update: Update, context: CallbackContext):
    """Handle /get_default_token: show the channel's default ticker/address.

    BUG FIX: the original unpacked the helper's result unconditionally and
    crashed with a TypeError when no default token was configured (the helper
    returns None in that case — see __get_default_token_channel).
    """
    __log_channel(update.message.chat, "default_token")
    chat_id = update.message.chat_id
    default_token = __get_default_token_channel(chat_id)
    if default_token is None:
        context.bot.send_message(chat_id=chat_id,
                                 text="No default token set for this channel. An admin can add one with /set_default_token TICKER")
    else:
        ticker, addr = default_token
        context.bot.send_message(chat_id=chat_id, text="ticker: " + str(ticker) + " - addr: " + str(addr))
def __get_default_token_channel(channel_id: int):
    """Look up the channel's default (ticker, address) pair; None when unset."""
    res = zerorpc_client_data_aggregator.get_default_token(channel_id)
    if res is None:
        logging.debug("Default token channel " + str(channel_id) + " is None")
    else:
        logging.debug("Default token channel " + str(channel_id) + " is " + str(res[0]) + " - " + str(res[1]))
    return res
def set_function(update: Update, context: CallbackContext):
    """Handle /set_function NAME: toggle an optional feature for the channel.

    Only "meme" is currently supported. Restricted to admins, except in
    private chats.

    BUG FIX: the original sent no feedback at all when the argument count was
    wrong (e.g. a bare "/set_function"); that case now gets the same
    "Wrongly formatted query" reply as an unknown feature name.
    """
    channel_type = update.message.chat.type
    __log_channel(update.message.chat, "set_function")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if not (__is_user_admin(context, update) or channel_type == "private"):
        context.bot.send_message(chat_id=chat_id, text="This function is only available to admins or in private chat")
        return
    if len(query_received) != 2 or query_received[1].lower() != "meme":
        context.bot.send_message(chat_id=chat_id, text="Wrongly formatted query")
        return
    # Toggle the stored flag for the meme feature.
    enabled = not _is_meme_authorized_on_channel(chat_id)
    _update_meme_status_on_channel(chat_id, enabled)
    if enabled:
        context.bot.send_message(chat_id=chat_id,
                                 text="Memes are now activated. You can now:\nAdd some with /add_meme\nView one random with /get_meme\nRemove one with /delete_meme (only for admins).")
    else:
        context.bot.send_message(chat_id=chat_id,
                                 text="Memes are now de-activated. You can always go back with /set_function meme (only for admins)")
def set_monitor(update: Update, context: CallbackContext):
    """Handle /set_monitor TICKER [ADDRESS] (admin or private chat): watch a
    token for large buys (> 10 eth) and report them to this channel.

    FIXES: the original had a single trailing else carrying the "Only admins"
    text, so one of the two failure paths (non-admin vs. bad argument count)
    got no or misleading feedback; both now answer explicitly. The two valid
    branches also duplicated the store/reply tail; merged.
    """
    channel_type = update.message.chat.type
    __log_channel(update.message.chat, "set_monitor")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    if not (__is_user_admin(context, update) or channel_type == "private"):
        context.bot.send_message(chat_id=chat_id, text="Only admins can do that you silly")
        return
    if len(query_received) == 2:
        token_addr = requests_util.get_token_contract_address(query_received[1].upper())
    elif len(query_received) == 3:
        token_addr = query_received[2].lower()
    else:
        context.bot.send_message(chat_id=chat_id, text="Please use the format /set_monitor TICKER (address)")
        return
    message = "setting watcher for token actions (buys > 10eth) with address " + str(
        token_addr) + ". If it is not the correct address, please define it explicitly with /set_default_token TICKER ADDRESS"
    zerorpc_client_data_aggregator.add_monitor(chat_id, token_addr, "buy")
    context.bot.send_message(chat_id=chat_id, text=message)
def __is_user_admin(context, update):
    """True when the sender is an admin/creator of the chat (or the hard-coded
    maintainer account 'rotted_ben')."""
    member = context.bot.get_chat_member(update.effective_chat.id, update.message.from_user.id)
    return member.status in ('administrator', 'creator') or member.user.username == 'rotted_ben'
def get_chart_supply(update: Update, context: CallbackContext):
    """Handle /chart_supply [TOKEN ...]: render and post the token-supply
    chart over the requested window (defaults come from commands_util).

    With no argument the channel's default token is used; without one, the
    command is silently ignored (`ok` stays False).
    """
    __log_channel(update.message.chat, "chart_supply")
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    default_ticker_channel = __get_default_token_channel(chat_id)
    ok = True
    # NOTE(review): the helper returns a (ticker, addr) tuple or None, never
    # the string "null" — this comparison looks like it was meant for the
    # ticker element; confirm intent.
    if len(query_received) == 1 and (default_ticker_channel is None or default_ticker_channel == "null"):
        ok = False
    if default_ticker_channel is None or default_ticker_channel == "":
        default_ticker_channel = ""
    else:
        # Keep only the ticker part of the (ticker, addr) pair.
        default_ticker_channel = default_ticker_channel[0]
    if ok:
        time_type, k_hours, k_days, tokens = commands_util.check_query(query_received, default_ticker_channel)
        if isinstance(tokens, list):
            tokens = tokens[0]
        # Per-ticker data/chart paths are derived from the $TICKER templates.
        ticker_supply_file_path = supply_file_path.replace("$TICKER", tokens.upper())
        ticker_supply_chart_path = supply_chart_path.replace("$TICKER", tokens.upper())
        current_token_nbr = general_end_functions.send_supply_single_pyplot(ticker_supply_file_path,
                                                                            k_days,
                                                                            k_hours,
                                                                            tokens,
                                                                            ticker_supply_chart_path)
        current_token_str = util.number_to_beautiful(current_token_nbr)
        msg_time = " " + str(k_days) + " day(s) " if k_days > 0 else " last " + str(k_hours) + " hour(s) "
        caption = "Supply of the last " + msg_time + ".\nCurrent supply: \n<b>" + tokens + ":</b> <pre>" + current_token_str + "</pre>"
        context.bot.send_photo(chat_id=chat_id,
                               photo=open(ticker_supply_chart_path, 'rb'),
                               caption=caption,
                               parse_mode="html")
def _is_meme_authorized_on_channel(channel_id: int) -> bool:
    """Whether the meme feature is enabled for this channel."""
    meme_enabled = zerorpc_client_data_aggregator.get_meme_channel_value(channel_id)
    return meme_enabled
def _update_meme_status_on_channel(channel_id: int, status: bool):
    """Persist the meme-feature flag for this channel via the aggregator."""
    result = zerorpc_client_data_aggregator.update_meme_channel_value(channel_id, status)
    return result
@cached(cache=TTLCache(maxsize=1024, ttl=120))
def _is_coin_being_watched(ticker: str):
    """Whether a live-actions channel exists for `ticker` (result cached 120s).

    The ticker is uppercased so lookups are case-insensitive.
    """
    return zerorpc_client_data_aggregator.is_coin_being_watched(ticker.upper())
def text_if_coin_being_watched(ticker: str, small=False):
    """Return the promo line for a watched coin's live-actions channel, or
    None when the coin is not being watched. `small` selects the short form."""
    if not _is_coin_being_watched(ticker):
        return None
    logging.info(ticker + " is being watched")
    handle = "@TheFomoBot_" + ticker.upper() + "_actions"
    if small:
        return "➡ " + handle + " ⬅"
    return "Live $" + ticker.upper() + " actions ➡ " + handle + " ⬅"
def __log_channel(chat, method):
    """Log a command invocation locally and forward it to the data aggregator."""
    now = datetime.now().strftime('%Y-%m-%d, %H')
    logging.info("chat_id = " + str(chat.id) + " - type = " + str(chat.type) + " - chat_name = " + str(
        chat.title) + " - method " + method)
    # Chat title may be None (e.g. private chats) — cast to str before sending.
    zerorpc_client_data_aggregator.log_action(chat.id, chat.type, str(chat.title), now, method)
def __send_message_to_announcement_channel_if_needed(username: str, method: str, token: str, message: str, context: CallbackContext) -> None:
    """Mirror a reply to the announcement channel unless this user has been
    spamming the same method/token pair (rate-limited via the aggregator)."""
    if __did_user_vote_too_much(username, method, token):
        return
    context.bot.send_message(chat_id=announcement_channel_id, text=message, parse_mode='html',
                             disable_web_page_preview=True)
def __did_user_vote_too_much(username, method, token) -> bool:
    """Ask the aggregator whether this (hashed) user hit the rate limit for
    the given method/token pair."""
    return zerorpc_client_data_aggregator.did_user_vote_too_much(
        util.get_hashed_uname(username), method, token.upper())
def callback_minute(context: CallbackContext):
    """Periodic job: fan out recent large buys for every monitored token to
    the channels watching it. Disabled in the test environment.

    FIXES: `logging.info("CHANNEL ID CHANGED: ", err)` passed `err` as a
    %-format argument to a string with no placeholder (the id was dropped);
    dead commented-out code removed.
    """
    if IS_TEST_ENV:
        return
    channels_to_check = zerorpc_client_data_aggregator.get_all_monitors()
    logging.info("checking monitors")
    if channels_to_check is None:
        return
    now = round(time.time())
    # Look slightly further back than the 5-minute period to avoid gaps.
    last_min = now - (60 * 5) - 20
    # Group monitors by token address: {token_address: [channel_id, ...]}.
    channels_by_coin = {}
    for c in channels_to_check:
        channels_by_coin.setdefault(c[1].lower(), []).append(c[0])
    for coin, channels in channels_by_coin.items():
        options = ["buy", "whale"]
        pair = web3_calls.does_pair_token_eth_exist(coin, uni_wrapper)
        latest_actions_pretty = requests_util.pretty_print_monitor_last_actions(last_min, coin, pair.lower(),
                                                                                graphql_client_uni,
                                                                                uni_wrapper, converter, options)
        if latest_actions_pretty is None:
            continue
        maybe_bottom_text = text_if_coin_being_watched(coin)
        follow_up_message = "\n" + maybe_bottom_text if maybe_bottom_text else ""
        logging.debug("follow up message: " + follow_up_message)
        message = latest_actions_pretty + follow_up_message
        for channel in channels:
            logging.info("sent latest actions to channel: " + str(channel))
            try:
                context.bot.send_message(chat_id=channel, text=message, disable_web_page_preview=True,
                                         parse_mode='html')
            except ChatMigrated as err:
                # BUG FIX: use a real %s placeholder so the new id is logged.
                logging.info("CHANNEL ID CHANGED: %s", err)
def translate_text(update: Update, context: CallbackContext):
    """Handle /translate: translate either the quoted message or inline text.

    Usage: reply to a message with "/translate [LANG]" (default: en), or send
    "/translate LANG TEXT".

    BUG FIX: the original crashed with a TypeError (string concat with None)
    when the quoted message carried no text, e.g. a reply to a photo.
    """
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    quoted = update.message.reply_to_message
    if quoted is not None:
        original_message = quoted.text
        if original_message is None:
            context.bot.send_message(chat_id=chat_id,
                                     text="The quoted message has no text to translate.")
            return
        language_to = query_received[1] if len(query_received) > 1 else "en"
    elif len(query_received) > 2:
        language_to = query_received[1]
        original_message = ' '.join(query_received[2:])
    else:
        message = "To use this endpoint, either quote a message that you wish to translate, or do /translate LANGUAGE TEXT"
        context.bot.send_message(chat_id=chat_id, text=message, parse_mode='html', disable_web_page_preview=True)
        return
    logging.info("translating " + original_message + " to " + language_to)
    translation = translation_util.pretty_translate(original_message, language_to)
    context.bot.send_message(chat_id=chat_id, text=translation, parse_mode='html', disable_web_page_preview=True)
# def ask_wolfram(update: Update, context: CallbackContext):
# chat_id = update.message.chat_id
# query_received = update.message.text.split(' ')
# if len(query_received) == 1:
# context.bot.send_message(chat_id=chat_id, text="To use this method, please use /ask YOUR QUESTION")
# else:
# query = ' '.join(query_received[1:])
# res = wolfram_queries.ask_wolfram_raw(query, wolfram_client)
# context.bot.send_message(chat_id=chat_id, text=res[:4055], parse_mode='html', disable_web_page_preview=True)
def get_price_direct(update: Update, context: CallbackContext):
    """Catch-all command handler: treat an unknown /xxx command as a price
    lookup for ticker 'xxx' (e.g. /btc).

    Known bot commands, commands containing '_' and words of 6+ characters
    are ignored so real commands are never misread as tickers.
    """
    # Commands handled elsewhere; never interpret these as tickers.
    command_list = ["p", "qs", "quickswap", "start", "charts", "chart", "c", "price", "twitter", "t", "biz", "b",
                    "convert", "gas", "g",
                    "balance", "timeto", "last_actions", "l", "trending", "gas_spent", "tr", "translate", "ask",
                    "set_default_token", "get_default_token", "set_faq", "faq", "chart_supply", "set_monitor",
                    "restart", "ban", "cp", "cb"]
    chat_id = update.message.chat_id
    # First word of the message minus the leading '/'.
    ticker = update.message.text.split(' ')[0][1:]
    if ticker not in command_list and '_' not in ticker and len(
            ticker) < 6:  # should not be needed but keeping it just in case
        __log_channel(update.message.chat, "price_direct")
        logging.info("Asking for token %s on chain %s", ticker, 'eth')
        if ticker.upper() in symbol_gecko:
            # Known CoinGecko symbol: Gecko price path, with a refresh button.
            value = symbol_gecko.get(ticker.upper())
            message = general_end_functions.get_price_gecko(value).generate_message(options=['NO_SCAP'])
            button_list_price = [
                [InlineKeyboardButton('refresh', callback_data='r_p_' + "null" + "_t_" + ticker + '_c_null')]]
            reply_markup_price = InlineKeyboardMarkup(button_list_price)
            context.bot.send_message(chat_id=chat_id, text=message, parse_mode='html', reply_markup=reply_markup_price,
                                     disable_web_page_preview=True)
        else:
            # Unknown symbol: resolve to a contract address and price on-chain.
            # NOTE(review): when no contract is found the command fails silently.
            contract_from_ticker = requests_util.get_token_contract_address(ticker)
            if contract_from_ticker is not None:
                maybe_price_message: Result[TokenPrices, str] = end_function_helper.get_price_from_oracle(contract_from_ticker, exchange_info_uniswap)
                __analyse_and_send_token_price(maybe_price_message, context, update, chat_id)
def add_channel(update: Update, context: CallbackContext):
    """Admin command: register a channel/token pair with the data aggregator.

    Expected syntax (6 tokens):
    /add_channel CHANNEL_ID TICKER CONTRACT PAIR_CONTRACT BOT
    """
    chat_id = update.message.chat_id
    args = update.message.text.split(' ')
    if len(args) != 6:
        context.bot.send_message(chat_id=chat_id, text="wrong number of args")
        return
    channel_id, ticker, contract, pair_contract, bot_assigned = args[1:6]
    zerorpc_client_data_aggregator.assign_bot_to(channel_id, ticker, contract, pair_contract, bot_assigned)
    context.bot.send_message(chat_id=chat_id, text="added channel")
def analyze_wallet(update: Update, context: CallbackContext):
    """Analyze an Ethereum wallet and reply with a summary plus a pie chart.

    Usage: /analyze_wallet WALLET [-simple]. Validates the address first;
    the text reply is truncated to fit Telegram's 4096-char message limit.
    """
    chat_id = update.message.chat_id
    query_received = update.message.text.split(' ')
    logging.info("Analyzing wallet ")
    if len(query_received) < 2:
        context.bot.send_message(chat_id=chat_id,
                                 text="To use this command, please use the syntax /analyze_wallet wallet (option: -simple), eg: /analyze_wallet 0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B")
        return
    wallet = query_received[1]
    if not Web3.isAddress(wallet.lower()):
        context.bot.send_message(chat_id=chat_id,
                                 text="Provided wallet " + wallet + " is not a valid Ethereum address.")
        return
    # Single call instead of the previous duplicated True/False branches:
    # the third argument simply toggles the simplified report.
    simple = '-simple' in query_received
    res = general_end_functions.get_balance_wallet(wallet.lower(), pie_chart_wallet_path, simple)
    logging.debug(res)
    context.bot.send_message(chat_id=chat_id, text=res[:4093], parse_mode='MarkdownV2',
                             disable_web_page_preview=True)
    # Close the chart file deterministically (the old code leaked the handle).
    with open(pie_chart_wallet_path, 'rb') as chart_file:
        context.bot.send_photo(chat_id=chat_id, photo=chart_file)
def error_callback(update, context):
    # Global dispatcher error handler: log the exception and swallow it so
    # the bot keeps processing subsequent updates.
    logging.error(context.error)
# Stages
FIRST, SECOND = range(2)
# Callback data
ONE, TWO = range(2)
# Callback-data identifiers for the start-menu inline buttons.
TRENDING = 'TRENDING'
GAS = 'SHOW_GAS_PRICE'
# Labels shown on the reply keyboard (also matched by MessageHandler filters).
TRENDING_TXT = "🔥 Trending"
GAS_TXT = "⛽ Gas"
# Inline keyboard used when the menu is attached to a message.
HOME_KEYBOARD = [
    [
        InlineKeyboardButton("🔥 Trending", callback_data=TRENDING),
        InlineKeyboardButton("⛽ Gas", callback_data=GAS),
    ]
]
# Persistent reply keyboard for private conversations.
REPLY_HOME_KEYBOARD = [
    [
        TRENDING_TXT,
        GAS_TXT
    ]
]
def send_chart_trending(update: Update, context: CallbackContext) -> None:
    """Handle a 'TRD:<token>' callback: render and send a 3-day candlestick
    chart (with RSI overlay) for the selected trending token."""
    # Get CallbackQuery from Update
    query = update.callback_query
    chat_id = query.message.chat_id
    # Callback data is 'TRD:<token>'; strip the 4-char prefix.
    text_query = query.data[4:]
    logging.info("Sending chart in private from query: " + str(text_query))
    token = text_query
    # Chart window: last 3 days (k_days=3, k_hours=0).
    time_type, k_hours, k_days = 'd', 0, 3
    t_to = int(time.time())
    t_from = t_to - (k_days * 3600 * 24) - (k_hours * 3600)
    trending = util.get_banner_txt(zerorpc_client_data_aggregator)
    maybe_bottom_text = text_if_coin_being_watched(token)
    (message, path, reply_markup_chart) = general_end_functions.send_candlestick_pyplot(token, charts_path, k_days,
                                                                                       k_hours, t_from,
                                                                                       t_to, txt=trending,
                                                                                       options=["rsi"],
                                                                                       with_ad=maybe_bottom_text)
    # Record a 'chart' vote for trending statistics.
    util.create_and_send_vote(token, "chart", update.callback_query.message.from_user.name,
                              zerorpc_client_data_aggregator)
    # Remember when this chat/token chart was last rendered (used by refresh).
    token_chat_id = str(chat_id) + "_" + token
    charts_time_refresh[token_chat_id] = t_to
    context.bot.send_photo(chat_id=chat_id, photo=open(path, 'rb'), caption=message, parse_mode="html",
                           reply_markup=reply_markup_chart)
    query.answer()
    # Instead of sending a new message, edit the message that
    # originated the CallbackQuery. This gives the feeling of an
    # interactive menu.
    return FIRST
def _get_button_name(position, position_list):
if position == 0:
return "🥇 " + position_list[position]
elif position == 1:
return "🥈 " + position_list[position]
elif position == 2:
return "🥉 " + position_list[position]
else:
return emoji_number_dic.get(position + 1) + " " + position_list[position]
def view_trending(update: Update, context: CallbackContext):
    """Reply with an inline keyboard of the currently trending tokens.

    Each button carries callback data 'TRD:<token>' handled by
    send_chart_trending. Buttons are laid out three per row.
    """
    logging.info("Viewing trending charts")
    chat_id = update.message.chat_id
    res = zerorpc_client_data_aggregator.view_trending_raw()
    logging.info("res trending charts: " + str(res))
    # Four rows of up to three buttons each (supports up to 12 entries).
    rows = [[], [], [], []]
    for idx in range(len(res)):
        rows[idx // 3].append(InlineKeyboardButton(_get_button_name(idx, res), callback_data="TRD:" + res[idx]))
    reply_markup = InlineKeyboardMarkup(rows)
    context.bot.send_message(text="Here's what's trending", chat_id=chat_id, reply_markup=reply_markup)
    return FIRST
def general_gas_message():
    """Build the HTML-formatted gas price summary used by /gas and the menu."""
    (asap, fast, average, low,
     price_one_tx_asap_eth, price_one_tx_asap_usd,
     price_one_swap_asap_eth, price_one_swap_asap_usd) = general_end_functions.get_gas_price(True)
    # Assemble the exact same concatenation the bot has always sent.
    pieces = [
        "<b>Gas price:</b><code>",
        "\nASAP: " + str(asap),
        "\nFast: " + str(fast),
        "\nAvg : " + str(average),
        "\nSlow: " + str(low),
        "\nASAP tx : Ξ" + str(price_one_tx_asap_eth)[0:8] + " | $" + str(price_one_tx_asap_usd)[0:4],
        "\nUni swap~ Ξ" + str(price_one_swap_asap_eth)[0:8] + " | $" + str(price_one_swap_asap_usd)[0:4] + "</code>",
    ]
    return "".join(pieces)
def view_gas(update: Update, context: CallbackContext):
    """Reply to the 'Gas' menu button with the current gas price summary."""
    logging.info("Viewing gas price")
    context.bot.send_message(text=general_gas_message(),
                             chat_id=update.message.chat_id,
                             parse_mode="html")
    return FIRST
def start_menu_private_conv(update: Update, context: CallbackContext) -> None:
    """Send message on `/start`.

    In group chats (more than two members) falls back to the plain start
    message and ends the conversation; in a private chat shows the reply
    keyboard menu and enters state FIRST.
    """
    # Get user that sent /start and log his name
    user = update.message.from_user
    logging.info("User %s started the conversation.", user.name)
    # Build InlineKeyboard where each button has a displayed text
    # and a string as callback_data
    # The keyboard is a list of button rows, where each row is in turn
    # a list (hence `[[...]]`).
    reply_markup = ReplyKeyboardMarkup(REPLY_HOME_KEYBOARD, resize_keyboard=True)
    # Send message with text and appended InlineKeyboard
    # members_count > 2 means this is a group chat, not a 1:1 conversation.
    members_count = context.bot.get_chat_members_count(update.message.chat_id)
    if members_count > 2:
        get_start_message(update, context)
        return ConversationHandler.END
    else:
        update.message.reply_text("Choose your path", reply_markup=reply_markup)
        # Tell ConversationHandler that we're in state `FIRST` now
        return FIRST
def get_token_price_inline_query(ticker):
    """Build the price message for an inline query on `ticker`.

    Known CoinGecko symbols go through the Gecko price path; otherwise the
    ticker is resolved to a contract address and priced via the on-chain
    oracle. Returns 'Ticker not found' when no contract can be resolved.
    """
    if ticker.upper() in symbol_gecko:
        value = symbol_gecko.get(ticker.upper())
        message = general_end_functions.get_price_gecko(value).generate_message(options=['NO_SCAP'])
    else:
        contract_from_ticker = requests_util.get_token_contract_address(ticker)
        logging.info("get_token_price_inline_query contract from ticker: " + str(contract_from_ticker))
        if contract_from_ticker is None:
            message = "Ticker not found"
        else:
            # NOTE(review): get_price_from_oracle returns a Result elsewhere
            # (see get_price_direct); here its raw return value is used as the
            # message text — confirm it stringifies as intended.
            message = end_function_helper.get_price_from_oracle(contract_from_ticker, exchange_info_uniswap)
    return message
def get_token_price_inline_query_full(ticker, title, url) -> InlineQueryResultArticle:
    """Wrap a ticker's price message in an InlineQueryResultArticle with a thumbnail."""
    price_message = get_token_price_inline_query(ticker)
    content = InputTextMessageContent(price_message, parse_mode=ParseMode.HTML,
                                      disable_web_page_preview=True)
    return InlineQueryResultArticle(
        id=str(uuid4()),
        title=title,
        input_message_content=content,
        thumb_url=url
    )
def get_gas_price_future():
    """Build the inline-query article showing the current gas price summary."""
    message_gas = "Query <code>@TheFomo_Bot gas</code>:\n" + general_gas_message()
    content = InputTextMessageContent(message_gas, parse_mode=ParseMode.HTML,
                                      disable_web_page_preview=True)
    return InlineQueryResultArticle(
        id=str(uuid4()),
        title="Gas price",
        input_message_content=content,
        thumb_url="https://miro.medium.com/max/512/1*9NXyQgPke0RG_w-X3kGNXw.png"
    )
def inline_query(update: Update, context: CallbackContext) -> None:
    """Answer inline queries (@bot <text>).

    'gas' returns the gas-price card; a ticker of 3+ characters returns that
    token's price; anything shorter returns a default card set
    (BTC/ETH/LINK/DOT plus gas), built concurrently.
    """
    query = update.inline_query.query
    logging.info("inline query: " + str(query))
    ticker = query.lower()
    if ticker == "gas":
        # Reuse the shared article builder instead of duplicating it inline
        # (the previous copy built a byte-identical article by hand).
        update.inline_query.answer([get_gas_price_future()], cache_time=60)
    elif len(ticker) > 2:
        message = get_token_price_inline_query(ticker)
        results = [
            InlineQueryResultArticle(
                id=str(uuid4()),
                title=ticker.upper(),
                input_message_content=InputTextMessageContent(
                    message, parse_mode=ParseMode.HTML, disable_web_page_preview=True
                )
            )
        ]
        update.inline_query.answer(results, cache_time=60)
    else:
        # Default suggestions: a few majors plus the gas card, fetched in parallel.
        coins_to_watch = [["btc", "Bitcoin", "https://lebitcoin.fr/logos/BC_Logo_.png"],
                          ["eth", "Ethereum", "https://www.bitladon.fr/img/currency/ETH_groot.png"],
                          ["link", "Chainlink", "https://firebounty.com/image/939-chainlink"],
                          ["dot", "Polkadot",
                           "https://assets.coingecko.com/coins/images/12171/small/aJGBjJFU_400x400.jpg?1597804776"]]
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(get_token_price_inline_query_full, sym, title, url)
                       for sym, title, url in coins_to_watch]
            futures.append(executor.submit(get_gas_price_future))
            results = [future.result() for future in concurrent.futures.as_completed(futures)]
        update.inline_query.answer(results, cache_time=60)
def main():
    """Wire up the Telegram bot: configure the webhook, register every
    command / callback-query / message / inline-query handler, then block
    until shutdown.

    With exactly one CLI argument the bot runs in test mode: the argument
    replaces TELEGRAM_KEY and the webhook moves to port 88.
    """
    global TELEGRAM_KEY
    global IS_TEST_ENV
    webhook_port = 8443
    webhook_url = 'https://' + SERVER_IP + ':' + str(webhook_port) + '/' + TELEGRAM_KEY
    if len(sys.argv) == 2:
        # Test environment: token passed on the command line, alternate port.
        TELEGRAM_KEY = sys.argv[1]
        webhook_port = 88
        webhook_url = 'https://' + SERVER_IP + ':' + str(webhook_port) + '/' + TELEGRAM_KEY
        IS_TEST_ENV = True
    logging.info("webhook url is: " + webhook_url + " on port: " + str(webhook_port))
    updater = Updater(TELEGRAM_KEY, use_context=True, workers=16)
    updater.start_webhook(listen='0.0.0.0',
                          port=webhook_port,
                          url_path=TELEGRAM_KEY,
                          key=TELEGRAM_WEBHOOK_PRIVATE_KEY_PATH,
                          cert=TELEGRAM_WEBHOOK_CERT_PATH,
                          webhook_url=webhook_url)
    dp = updater.dispatcher

    def stop_and_restart():
        """Gracefully stop the Updater and replace the current process with a new one"""
        updater.stop()
        os.execl(sys.executable, sys.executable, *sys.argv)

    def restart(update, context):
        # Admin-only /restart: acknowledge in chat, then restart from a
        # thread so this handler returns before the process is replaced.
        update.message.reply_text('Bot is restarting...')
        Thread(target=stop_and_restart).start()

    # Private-chat start-menu conversation (trending / gas buttons).
    conv_handler = ConversationHandler(
        entry_points=[CommandHandler('start', start_menu_private_conv),
                      MessageHandler(Filters.text([TRENDING_TXT]), view_trending, run_async=True),
                      MessageHandler(Filters.text([GAS_TXT]), view_gas, run_async=True),
                      ],
        states={
            FIRST: [
                # CommandHandler(TRENDING_TXT, view_trending),
                # CommandHandler(GAS_TXT, view_gas),
                CallbackQueryHandler(refresh_chart, pattern='refresh_chart(.*)'),
                CallbackQueryHandler(refresh_price, pattern='r_p_(.*)'),
                CallbackQueryHandler(delete_message, pattern='delete_message'),
                MessageHandler(Filters.text(TRENDING_TXT), view_trending, run_async=True),
                MessageHandler(Filters.text(GAS_TXT), view_gas, run_async=True),
                CallbackQueryHandler(send_chart_trending, pattern='TRD:(.*)'),
            ]
        },
        fallbacks=[CommandHandler('start', get_start_message)],
        allow_reentry=True
    )
    dp.add_handler(conv_handler)
    # dp.add_error_handler(error_callback)
    dp.add_handler(CommandHandler('start', get_start_message))
    # Chart / price commands.
    dp.add_handler(CommandHandler(['charts', 'chart', 'c'], get_candlestick, run_async=True))
    dp.add_handler(CommandHandler(['charts_polygon', 'chart_polygon', 'cp'], get_chart_polygon, run_async=True))
    dp.add_handler(CommandHandler(['charts_bsc', 'chart_bsc', 'cb'], get_chart_bsc, run_async=True))
    dp.add_handler(CommandHandler(['price', 'p'], get_price_token, run_async=True))
    dp.add_handler(CommandHandler(['quickswap', 'qs'], get_price_quickswap, run_async=True))
    dp.add_handler(CommandHandler(['pancakeswap', 'cake'], get_price_pancakeswap, run_async=True))
    # Social / utility commands.
    dp.add_handler(CommandHandler(['twitter', 't'], get_twitter, run_async=True))
    dp.add_handler(CommandHandler(['biz', 'b'], get_biz, run_async=True))
    dp.add_handler(CommandHandler('convert', do_convert, run_async=True))
    dp.add_handler(CommandHandler(['gas', 'g'], get_gas_average, run_async=True))
    dp.add_handler(CommandHandler(['burned_eth', 'burn'], get_burned_eth, run_async=True))
    dp.add_handler(CommandHandler('balance', balance_token_in_wallet, run_async=True))
    dp.add_handler(CommandHandler('timeto', get_time_to))
    dp.add_handler(CommandHandler(['last_actions', 'l'], get_latest_actions, run_async=True))
    dp.add_handler(CommandHandler('trending', get_trending, run_async=True))
    dp.add_handler(CommandHandler('gas_spent', get_gas_spent, run_async=True))
    dp.add_handler(CommandHandler(['tr', 'translate'], translate_text, run_async=True))
    # dp.add_handler(CommandHandler(['ask'], ask_wolfram, run_async=True))
    dp.add_handler(CommandHandler(['analyze_wallet'], analyze_wallet, run_async=True))
    # dank memes
    dp.add_handler(CommandHandler(['get_meme'], get_meme, run_async=True))
    dp.add_handler(CommandHandler(['add_meme'], add_meme_reply, run_async=True))
    dp.add_handler(CommandHandler(['delete_meme'], delete_meme, run_async=True))
    # customoization stuff
    dp.add_handler(CommandHandler('set_default_token', set_default_token))
    dp.add_handler(CommandHandler('get_default_token', get_default_token))
    dp.add_handler(CommandHandler('set_faq', set_faq))
    dp.add_handler(CommandHandler('faq', get_the_faq, run_async=True))
    dp.add_handler(CommandHandler('chart_supply', get_chart_supply, run_async=True))
    dp.add_handler(CommandHandler('set_monitor', set_monitor, run_async=False))
    dp.add_handler(CommandHandler('set_function', set_function, run_async=False))
    # dp.add_handler(CommandHandler('stop_monitor', stop_monitor, run_async=False))
    # callbacks queries
    dp.add_handler(CallbackQueryHandler(refresh_chart, pattern='refresh_chart(.*)', run_async=True))
    dp.add_handler(CallbackQueryHandler(refresh_price, pattern='r_p_(.*)', run_async=True))
    dp.add_handler(CallbackQueryHandler(refresh_chart_simple, pattern='rcs_(.*)', run_async=True))
    dp.add_handler(CallbackQueryHandler(delete_message, pattern='delete_message', run_async=True))
    dp.add_handler(MessageHandler(Filters.photo, handle_new_image, run_async=True))
    dp.add_handler(MessageHandler(Filters.video, handle_new_video, run_async=True))
    # admin stuff
    dp.add_handler(CommandHandler('restart', restart, filters=Filters.user(username='@rotted_ben')))
    dp.add_handler(CommandHandler('add_channel', add_channel, filters=Filters.user(username='@rotted_ben')))
    # inline query
    dp.add_handler(InlineQueryHandler(inline_query))
    # Catch-all: treat any unknown /command as a direct price lookup.
    dp.add_handler(MessageHandler(Filters.command, get_price_direct, run_async=True))
    logging.info("All handlers added")
    j = updater.job_queue
    # if not IS_TEST_ENV:
    #     j.run_repeating(callback_minute, interval=check_big_buys_interval_seconds, first=15)
    # NOTE(review): both start_webhook (above) and start_polling are invoked;
    # normally only one update-fetching mode should run — confirm intent.
    updater.start_polling()
    updater.idle()
# Script entry point.
if __name__ == '__main__':
    main()
# Master list of bot commands and one-line descriptions, formatted for
# pasting into BotFather's /setcommands prompt. Not referenced at runtime.
commands = """
chart - <TICKER> Display charts of the TICKER.
twitter - <TICKER> Get latests twitter containing $<TICKER>.
price - <TICKER> get price of the <TICKER> token.
quickswap - <TICKER OR ADDRESS> get the price on quickswap of the desired token.
pancakeswap - <TICKER OR ADDRESS> get the price on pancakeswap of the desired token.
biz - <WORD> get 4chan/biz threads containing <WORD>.
gas - Get ETH gas price.
burn - See how much eth has been burned by IEP 1559.
gas_spent - (/gas_spent WALLET (optional: number of days)) Shows how much gas a wallet has spent on a transaction
faq - Print the FAQ.
convert - <AMOUNT> <TICKER> option(<TICKER>) convert amount of ticker to usd (and to the second ticker if specified).
balance - <WALLET> <TICKER> check how much an address has of a specific coin.
timeto - time until date passed as argument.
last_actions - <TICKER> get the last trades / liq events of the coin.
trending - See which coins are trending in dextrends.
analyze_wallet - Provides analytics about a wallet (eg /analyze_wallet 0xbA1504000B5aC6cE413A1626d4833857Dd7311a0)
translate - <LANGUAGE_TO> <TEXT> Translate a text into the desired language.
add_meme - Add a meme
get_meme - Get a random meme
set_function - Admin functionalities (like /set_function meme)
"""
|
Sansiff/Coding-Practice
|
AtCoder/ABC 235/D.cpp
|
// Competitive-programming prelude: common typedefs, macros and fast input.
#include <bits/stdc++.h>
#define int long long          // all 'int's are 64-bit in this solution
#define lowbit(x) (x&-x)
#define rep(i, l, r) for(int i = l; i < r; i ++)
#define all(x) (x).begin(),(x).end()
#define fi first
#define se second
using namespace std;
typedef long long LL;
typedef pair<int,int> PII;
typedef vector<int> VI;
typedef vector<vector<int>> VII;
typedef vector<PII> VPII;

// Reads every element of an already-sized vector from stdin.
void read(VI& a){
    for(int& x : a) cin >> x;
}

int a, n;                      // problem inputs: multiplier A and target N
string f[1000010];             // NOTE(review): unused in this file — likely leftover
signed main(){
ios::sync_with_stdio(false);
cin.tie(nullptr);
cin >> a >> n;
string s2 = to_string(n);
queue<int> q; q.push(1);
map<int, int> mp;
mp[1] = 0;
while(q.size()){
int t = q.front(); q.pop();
string s1 = to_string(t);
if(s1.length() > s2.length()) continue;
if(t == n){
cout << mp[t]; return 0;
}
int tf = t * a; string stf = to_string(tf);
if(!mp[tf]) q.push(tf), mp[tf] = mp[t] + 1;
if(s1[s1.length() - 1] != '0' && s1.length() >= 2){
string ts = s1[s1.size() - 1] + s1.substr(0, s1.length() - 1);
if(!mp[stoi(ts)]){
q.push(stoi(ts)); mp[stoi(ts)] = mp[t] + 1;
}
}
}
cout << -1;
return 0;
}
|
Jimmyee/Endless-Online-Awaken
|
server_engine/src/config.cpp
|
<gh_stars>10-100
// Endless Online Awaken
#include "config.hpp"
#include <fstream>
#include <iostream>
#include <string.h>
#include <stdexcept>
// Default constructor: creates an empty configuration (no entries).
Config::Config()
{
}
// Convenience constructor: immediately loads entries from the given file.
// Throws std::runtime_error if the file cannot be opened.
Config::Config(std::string filename)
{
    this->Load(filename);
}
// Loads configuration entries from `filename`.
//
// File format (line-oriented): lines starting with '#' are comments; the
// remaining lines are consumed in pairs — a key line followed by a value
// line. Pairs whose value line is empty are dropped.
// NOTE(review): a trailing key with no value line is silently discarded,
// and blank lines are treated as keys/values rather than skipped — confirm
// this matches the intended file format.
void Config::Load(std::string filename)
{
    std::ifstream file(filename);

    if(!file.is_open())
    {
        throw std::runtime_error("Config: could not load file.");
    }

    std::vector<std::string> lines;
    std::string line;

    // Read the whole file into memory first.
    while(std::getline(file, line))
    {
        lines.push_back(line);
    }

    file.close();

    while(lines.size() > 0)
    {
        // Skip comment lines.
        if(lines[0][0] == '#')
        {
            lines.erase(lines.begin());
            continue;
        }

        std::string key = lines[0];
        lines.erase(lines.begin());
        if(lines.size() == 0) continue;
        std::string value = lines[0];
        lines.erase(lines.begin());

        if(value.size() > 0)
        {
            this->entries.push_back(Entry(key, value));
        }
    }
}
// Writes all entries to `filename`, truncating any existing content.
//
// NOTE(review): the previous implementation wrote "[key=value]" lines — a
// format Load() cannot parse (Load expects alternating key/value lines).
// Save now emits the same line-oriented format Load reads, so a saved file
// round-trips through Load(). Confirm no external consumer relied on the
// old bracketed format.
void Config::Save(std::string filename)
{
    std::ofstream file(filename, std::ios::trunc);

    if(!file.is_open())
    {
        throw std::runtime_error("Config: could not open file.");
    }

    for(auto &entry : this->entries)
    {
        // One line for the key, one line for the value (Load's format).
        file << entry.key << '\n' << entry.value << '\n';
    }

    // Check the stream after writing (the old code checked before any write).
    if(!file.good())
    {
        throw std::runtime_error("Config: data stream error.");
    }

    file.close();
}
// Returns the entry whose key matches `key`, or an Entry with empty
// key/value when no such entry exists.
Config::Entry Config::GetEntry(std::string key)
{
    for(auto &entry : this->entries)
    {
        if(entry.key == key) return entry;
    }
    return Entry("", "");
}
std::string Config::GetValue(std::string key)
{
Entry entry = this->GetEntry(key);
return entry.value;
}
// Appends `entry` unless an entry with the same key already exists
// (existing entries are never overwritten).
void Config::AddEntry(Entry entry)
{
    bool already_present = this->GetEntry(entry.key).key != "";
    if(!already_present)
    {
        this->entries.push_back(entry);
    }
}
// Convenience overload: builds an Entry from key/value and delegates.
void Config::AddEntry(std::string key, std::string value)
{
    this->AddEntry(Entry(key, value));
}
|
venkateshprasad123/wb
|
Java_GUI/src/com/ibm/safr/we/data/transfer/ViewTransfer.java
|
package com.ibm.safr.we.data.transfer;
/*
* Copyright Contributors to the GenevaERS Project. SPDX-License-Identifier: Apache-2.0 (c) Copyright IBM Corporation 2008.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Date;
/**
 * Data-transfer object carrying the persisted attributes of a SAFR View
 * between the database layer and the model layer. Pure field holder:
 * every method is a plain accessor with no logic.
 */
public class ViewTransfer extends SAFRActiveComponentTransfer {

	// --- General view metadata ---
	private String statusCode;            // view status code
	private String typeCode;              // view type code
	private Integer workFileNumber;

	// --- Output configuration ---
	private String outputFormatCode;
	private Integer outputLRId;           // output logical record id
	private Integer extractFileAssocId;   // extract file association id
	private Integer pageSize;
	private Integer lineSize;
	private Boolean zeroSuppressInd;      // suppress zero-valued records
	private Boolean headerRow;

	// --- Extract phase settings ---
	private Integer extractMaxRecCount;
	private Boolean extractSummaryIndicator;  // aggregate by sort key
	private Integer extractSummaryBuffer;     // aggregation buffer size
	private Integer outputMaxRecCount;
	private Integer controlRecId;

	// --- User exits ---
	private Integer writeExitId;
	private String writeExitParams;
	private Integer formatExitId;
	private String formatExitParams;

	// --- Delimited-output settings ---
	private String fieldDelimCode;
	private String stringDelimCode;

	private Date effectiveDate;
	private String formatFilterlogic;
	private String compilerVersion;

	// This field is to store the logic text of type Blob for a view from E_LOGICTBL.
	private byte[] logicTextBytes;
	// this will store the compiled version of logic text.
	private byte[] compiledLogicTextBytes;

	public void setStatusCode(String viewStatusCode) {
		this.statusCode = viewStatusCode;
	}

	public String getStatusCode() {
		return statusCode;
	}

	public void setTypeCode(String viewTypeCode) {
		this.typeCode = viewTypeCode;
	}

	public String getTypeCode() {
		return typeCode;
	}

	public Integer getWorkFileNumber() {
		return workFileNumber;
	}

	public void setWorkFileNumber(Integer workFileNumber) {
		this.workFileNumber = workFileNumber;
	}

	public void setOutputFormatCode(String viewOutputFormat) {
		this.outputFormatCode = viewOutputFormat;
	}

	public String getOutputFormatCode() {
		return outputFormatCode;
	}

	public Integer getOutputLRId() {
		return outputLRId;
	}

	public void setOutputLRId(Integer outputLRId) {
		this.outputLRId = outputLRId;
	}

	public Integer getExtractFileAssocId() {
		return extractFileAssocId;
	}

	public void setExtractFileAssocId(Integer extractFileAssocId) {
		this.extractFileAssocId = extractFileAssocId;
	}

	public Integer getPageSize() {
		return pageSize;
	}

	public void setPageSize(Integer pageSize) {
		this.pageSize = pageSize;
	}

	public Integer getLineSize() {
		return lineSize;
	}

	public void setLineSize(Integer lineSize) {
		this.lineSize = lineSize;
	}

	public Boolean isSuppressZeroRecords() {
		return zeroSuppressInd;
	}

	public void setZeroSuppressInd(Boolean zeroSuppressInd) {
		this.zeroSuppressInd = zeroSuppressInd;
	}

	public Boolean isHeaderRow() {
		return headerRow;
	}

	public void setHeaderRow(Boolean headerRow) {
		this.headerRow = headerRow;
	}

	public Integer getExtractMaxRecCount() {
		return extractMaxRecCount;
	}

	public void setExtractMaxRecCount(Integer extractMaxRecCnt) {
		this.extractMaxRecCount = extractMaxRecCnt;
	}

	public Boolean isAggregateBySortKey() {
		return extractSummaryIndicator;
	}

	public void setExtractSummaryIndicator(Boolean aggregateBySortKey) {
		this.extractSummaryIndicator = aggregateBySortKey;
	}

	public Integer getExtractSummaryBuffer() {
		return extractSummaryBuffer;
	}

	public void setExtractSummaryBuffer(Integer aggregateBufferSize) {
		this.extractSummaryBuffer = aggregateBufferSize;
	}

	public Integer getOutputMaxRecCount() {
		return outputMaxRecCount;
	}

	public void setOutputMaxRecCount(Integer outputMaxRecCnt) {
		this.outputMaxRecCount = outputMaxRecCnt;
	}

	public Integer getControlRecId() {
		return controlRecId;
	}

	public void setControlRecId(Integer controlRecId) {
		this.controlRecId = controlRecId;
	}

	public Integer getWriteExitId() {
		return writeExitId;
	}

	public void setWriteExitId(Integer writeExitId) {
		this.writeExitId = writeExitId;
	}

	public String getWriteExitParams() {
		return writeExitParams;
	}

	public void setWriteExitParams(String writeExitParams) {
		this.writeExitParams = writeExitParams;
	}

	public Integer getFormatExitId() {
		return formatExitId;
	}

	public void setFormatExitId(Integer formatExitId) {
		this.formatExitId = formatExitId;
	}

	public String getFormatExitParams() {
		return formatExitParams;
	}

	public void setFormatExitParams(String formatExitParams) {
		this.formatExitParams = formatExitParams;
	}

	public String getFieldDelimCode() {
		return fieldDelimCode;
	}

	public void setFieldDelimCode(String fieldDelimCode) {
		this.fieldDelimCode = fieldDelimCode;
	}

	public String getStringDelimCode() {
		return stringDelimCode;
	}

	public void setStringDelimCode(String stringDelimCode) {
		this.stringDelimCode = stringDelimCode;
	}

	public byte[] getLogicTextBytes() {
		return logicTextBytes;
	}

	public void setLogicTextBytes(byte[] logicTextBytes) {
		this.logicTextBytes = logicTextBytes;
	}

	public void setCompiledLogicTextBytes(byte[] compiledLogicTextBytes) {
		this.compiledLogicTextBytes = compiledLogicTextBytes;
	}

	public byte[] getCompiledLogicTextBytes() {
		return compiledLogicTextBytes;
	}

	public void setEffectiveDate(Date effectiveDate) {
		this.effectiveDate = effectiveDate;
	}

	public Date getEffectiveDate() {
		return effectiveDate;
	}

	public String getFormatFilterlogic() {
		return formatFilterlogic;
	}

	public void setFormatFilterlogic(String formatFilterlogic) {
		this.formatFilterlogic = formatFilterlogic;
	}

	public String getCompilerVersion() {
		return compilerVersion;
	}

	public void setCompilerVersion(String compilerVersion) {
		this.compilerVersion = compilerVersion;
	}
}
|
zhangkn/iOS14Header
|
System/Library/PrivateFrameworks/OfficeImport.framework/WDAnnotationData.h
|
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 11:54:57 AM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/OfficeImport.framework/OfficeImport
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
@class WDText, NSDate, NSString;
// Model object for a Word document annotation (comment): the comment body,
// the date it was made, and the author ("owner") name.
// Header reconstructed by classdump; implementations live in OfficeImport.
@interface WDAnnotationData : NSObject {

	WDText* mText;      // annotation body text
	NSDate* mDate;      // creation date
	NSString* mOwner;   // author name

}
-(id)owner;                     // author name accessor
-(void)setDate:(id)arg1 ;
-(id)date;                      // creation date accessor
-(void)setOwner:(id)arg1 ;
-(id)text;                      // annotation body accessor (WDText)
-(id)initWithText:(id)arg1 ;    // initializer taking the annotation body
@end
|
HerrB92/obp
|
OpenBeaconPackage/libraries/hibernate-release-4.2.7.SP1/project/hibernate-core/src/test/java/org/hibernate/test/legacy/MapTest.java
|
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2006-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.test.legacy;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.hibernate.EntityMode;
import org.hibernate.Session;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.criterion.Example;
import org.hibernate.criterion.Restrictions;
import static org.junit.Assert.assertTrue;
/**
 * Legacy tests exercising Hibernate's dynamic-map entity mode, where
 * entities are represented as {@code java.util.Map} instances (with a
 * {@code $type$} key naming the entity) instead of mapped POJOs.
 */
public class MapTest extends LegacyTestCase {
	@Override
	public String[] getMappings() {
		return new String[] { "legacy/Map.hbm.xml", "legacy/Commento.hbm.xml", "legacy/Marelo.hbm.xml" };
	}

	@Override
	public void configure(Configuration cfg) {
		super.configure( cfg );
		// Switch the session factory to dynamic-map entity mode for all tests.
		cfg.setProperty( Environment.DEFAULT_ENTITY_MODE, EntityMode.MAP.toString() );
	}

	/**
	 * Round-trips a dynamic-map entity: save, reload by id, query-by-example,
	 * HQL load, mutate (including a self-referencing parent/children cycle),
	 * re-query and finally delete.
	 */
	@Test
	public void testMap() throws Exception {
		// Persist a TestMap entity with a component ("cmp") sub-map.
		Session s = openSession();
		s.beginTransaction();
		Map map = new HashMap();
		map.put("$type$", "TestMap");
		map.put("name", "foo");
		map.put("address", "bar");
		Map cmp = new HashMap();
		cmp.put( "a", new Integer(1) );
		cmp.put( "b", new Float(1.0) );
		map.put("cmp", cmp);
		s.save(map);
		s.getTransaction().commit();
		s.close();

		// Reload by id and verify query-by-example finds exactly one match.
		s = openSession();
		s.beginTransaction();
		map = (Map) s.get( "TestMap", (Serializable) map.get("id") );
		assertTrue( map!=null && "foo".equals( map.get("name") ) );
		assertTrue( map.get("$type$").equals("TestMap") );
		int size = s.createCriteria("TestMap").add( Example.create(map) ).list().size();
		assertTrue(size==1);
		s.getTransaction().commit();
		s.close();

		// HQL load; mutate and build a self-referencing parent/children cycle.
		s = openSession();
		s.beginTransaction();
		List list = s.createQuery("from TestMap").list();
		map = (Map) list.get(0);
		assertTrue( "foo".equals( map.get("name") ) );
		assertTrue( "bar".equals( map.get("address") ) );
		cmp = (Map) map.get("cmp");
		assertTrue( new Integer(1).equals( cmp.get("a") ) && new Float(1.0).equals( cmp.get("b") ) );
		assertTrue( null==map.get("parent") );
		map.put("name", "foobar");
		map.put("parent", map);
		List bag = (List) map.get("children");
		bag.add(map);
		s.getTransaction().commit();
		s.close();

		// Verify the mutations persisted, then clean up.
		s = openSession();
		s.beginTransaction();
		list = s.createQuery("from TestMap tm where tm.address = 'bar'").list();
		map = (Map) list.get(0);
		assertTrue( "foobar".equals( map.get("name") ) );
		assertTrue( "bar".equals( map.get("address") ) );
		assertTrue( map==map.get("parent") );
		bag = (List) map.get("children");
		assertTrue( bag.size()==1 );
		size = s.createCriteria("TestMap")
			.add( Restrictions.eq("address", "bar") )
				.createCriteria("parent")
				.add( Restrictions.eq("name", "foobar") )
			.list()
			.size();
		assertTrue(size==1);
		s.delete(map);
		s.getTransaction().commit();
		s.close();

	}

	/**
	 * One-to-one between two dynamic-map entities, cascading the save from
	 * either side (parent first, then child first).
	 */
	@Test
	public void testMapOneToOne() throws Exception {
		Map child = new HashMap();
		Map parent = new HashMap();

		// Save from the parent side.
		Session s = openSession();
		s.beginTransaction();
		child.put("parent", parent);
		child.put("$type$", "ChildMap");
		parent.put("child", child);
		parent.put("$type$", "ParentMap");
		s.save(parent);
		s.getTransaction().commit();
		s.close();

		s = openSession();
		s.beginTransaction();
		Map cm = (Map) s.createQuery("from ChildMap cm where cm.parent is not null").uniqueResult();
		s.delete(cm);
		s.delete( cm.get("parent") );
		s.getTransaction().commit();
		s.close();

		// Repeat, saving from the child side.
		child = new HashMap();
		parent = new HashMap();

		s = openSession();
		s.beginTransaction();
		child.put("parent", parent);
		child.put("$type$", "ChildMap");
		parent.put("child", child);
		parent.put("$type$", "ParentMap");
		s.save(child);
		s.getTransaction().commit();
		s.close();

		s = openSession();
		s.beginTransaction();
		Map pm = (Map) s.createQuery("from ParentMap cm where cm.child is not null").uniqueResult();
		s.delete(pm);
		s.delete( pm.get("child") );
		s.getTransaction().commit();
		s.close();
	}

	/**
	 * Smoke-tests HQL path navigation over a property-ref one-to-one mapping;
	 * passes as long as every query parses and executes.
	 */
	@Test
	public void testOneToOnePropertyRef() throws Exception {
		Session s = openSession();
		s.beginTransaction();
		s.createQuery("from Commento c where c.marelo.mlmag = 0").list();
		s.createQuery("from Commento c where c.marelo.commento.mcompr is null").list();
		s.createQuery("from Commento c where c.marelo.mlink = 0").list();
		s.createQuery("from Commento c where c.marelo.commento = c").list();
		s.createQuery("from Commento c where c.marelo.id.mlmag = 0").list();
		s.createQuery("from Commento c where c.marelo.commento.id = c.id").list();
		s.createQuery("from Commento c where c.marelo.commento.mclink = c.mclink").list();
		s.createQuery("from Marelo m where m.commento.id > 0").list();
		s.createQuery("from Marelo m where m.commento.marelo.commento.marelo.mlmag is not null").list();
		s.getTransaction().commit();
		s.close();
	}

}
|
fabriziobertoglio1987/surf-rails
|
db/migrate/20190807093952_change_column_forecast_default_for_locations.rb
|
# Removes the default value on locations.forecast (jsonb).
class ChangeColumnForecastDefaultForLocations < ActiveRecord::Migration[5.1]
  # Forward: new rows get no forecast default.
  def up
    change_column :locations, :forecast, :jsonb, default: nil
  end

  # Rollback: restore the previous default of an empty array.
  def down
    change_column :locations, :forecast, :jsonb, default: []
  end
end
|
sudarsun/c48
|
evaluation/IncrementalEstimator.h
|
<reponame>sudarsun/c48
#ifndef _INCREMENTALESTIMATOR_
#define _INCREMENTALESTIMATOR_

/**
 * Interface for incremental probability estimators.
 *
 * Implementations accumulate a distribution estimate one weighted
 * observation at a time via addValue().
 */
class IncrementalEstimator {
public:
    /**
     * Virtual destructor so that deleting an implementation through an
     * IncrementalEstimator* is well-defined (the class is used
     * polymorphically; without this, such a delete is undefined behavior).
     */
    virtual ~IncrementalEstimator() {}

    /**
     * Add a new data value to the current estimator.
     *
     * @param data the new data value
     * @param weight the weight assigned to the data value
     */
    virtual void addValue(double data, const double weight) = 0;
};
#endif //_INCREMENTALESTIMATOR_
|
kayinli/bk-bcs
|
bcs-ui/backend/container_service/observability/metric/views/pod.py
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from typing import Callable, Dict
from rest_framework.decorators import action
from rest_framework.response import Response
from backend.bcs_web.viewsets import SystemViewSet
from backend.components import bcs_monitor as prom
from backend.container_service.observability.metric.constants import METRICS_DEFAULT_NAMESPACE
from backend.container_service.observability.metric.serializers import FetchPodMetricSLZ
class PodMetricViewSet(SystemViewSet):
    """Endpoints serving Prometheus-backed metrics for a set of Pods."""

    serializer_class = FetchPodMetricSLZ

    def _common_query_handler(self, query_metric_func: Callable, cluster_id: str) -> Dict:
        """Shared logic for Pod metric queries.

        :param query_metric_func: function that performs the metric query
        :param cluster_id: cluster ID
        :return: metric query result
        """
        validated = self.params_validate(self.serializer_class)
        return query_metric_func(
            cluster_id,
            validated['namespace'],
            validated['pod_name_list'],
            validated['start_at'],
            validated['end_at'],
            bk_biz_id=self.request.project.cc_app_id,
        )

    @action(methods=['POST'], url_path='cpu_usage', detail=False)
    def cpu_usage(self, request, project_id, cluster_id):
        """CPU usage over time for the requested Pods."""
        return Response(self._common_query_handler(prom.get_pod_cpu_usage_range, cluster_id))

    @action(methods=['POST'], url_path='memory_usage', detail=False)
    def memory_usage(self, request, project_id, cluster_id):
        """Memory usage over time for the requested Pods."""
        return Response(self._common_query_handler(prom.get_pod_memory_usage_range, cluster_id))

    @action(methods=['POST'], url_path='network_receive', detail=False)
    def network_receive(self, request, project_id, cluster_id):
        """Inbound network traffic for the requested Pods."""
        return Response(self._common_query_handler(prom.get_pod_network_receive, cluster_id))

    @action(methods=['POST'], url_path='network_transmit', detail=False)
    def network_transmit(self, request, project_id, cluster_id):
        """Outbound network traffic for the requested Pods."""
        return Response(self._common_query_handler(prom.get_pod_network_transmit, cluster_id))
|
khartig/assimilator
|
rio-lib/src/main/java/org/rioproject/system/measurable/memory/SystemMemoryMonitor.java
|
<reponame>khartig/assimilator
/*
* Copyright 2008 to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.system.measurable.memory;
import org.rioproject.system.measurable.MeasurableMonitor;
import org.rioproject.system.measurable.SigarHelper;
import org.rioproject.watch.ThresholdValues;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.NumberFormat;
/**
* The <code>SystemMemoryMonitor</code> object provides feedback information to
* the {@link SystemMemory} object, providing memory usage information for the
* system using SIGAR. If SIGAR is not available, this utility returns a -1 for
* system memory utilization.
*
* <p><b>Note:</b>
* <a href="http://www.hyperic.com/products/sigar.html">Hyperic SIGAR</a>
* is licensed under the GPL with a FLOSS license exception, allowing it to be
* included with the Rio Apache License v2 distribution. If for some reason the
* GPL cannot be used with your distribution of Rio,
* remove the <tt>RIO_HOME/lib/hyperic</tt> directory.
*
* @author <NAME>
*/
public class SystemMemoryMonitor implements MeasurableMonitor<SystemMemoryUtilization> {
    private String id;
    private ThresholdValues tVals;
    private SigarHelper sigar;
    // Unit-conversion constants; declared final so they cannot be reassigned
    // (they were previously mutable statics).
    private static final double KB = 1024;
    private static final double MB = Math.pow(KB, 2);
    static final Logger logger = LoggerFactory.getLogger(SystemMemoryMonitor.class);

    public SystemMemoryMonitor() {
        sigar = SigarHelper.getInstance();
    }

    /** No resources to release. */
    public void terminate() {
    }

    public void setID(String id) {
        this.id = id;
    }

    public void setThresholdValues(ThresholdValues tVals) {
        this.tVals = tVals;
    }

    /**
     * Samples system memory via SIGAR and returns a utilization snapshot.
     * If SIGAR is unavailable, returns a default-constructed
     * SystemMemoryUtilization (per the class doc, utilization reads as -1).
     */
    public SystemMemoryUtilization getMeasuredResource() {
        SystemMemoryUtilization smu;
        if(sigar==null) {
            smu = new SystemMemoryUtilization(id, tVals);
        } else {
            long total = sigar.getTotalSystemMemory();
            long free = sigar.getFreeSystemMemory();
            long used = sigar.getUsedSystemMemory();
            // Fraction in [0,1], not a percentage.
            double utilization = (double)used/(double)total;
            if(logger.isTraceEnabled()) {
                StringBuilder builder = new StringBuilder();
                NumberFormat nf = NumberFormat.getInstance();
                nf.setMaximumFractionDigits(2);
                long ram = sigar.getRam();
                double d = ((double)total)/MB;
                double e = ((double)used)/MB;
                double f = ((double)ram)/KB;
                double g = ((double)free)/MB;
                String usedPerc = nf.format(sigar.getUsedSystemMemoryPercent());
                String freePerc = nf.format(sigar.getFreeSystemMemoryPercent());
                builder.append("\nTotal: ").append(nf.format(d)).append(" MB\n");
                builder.append("Used: ").append(nf.format(e)).append(" MB, ").append(usedPerc).append(" %\n");
                builder.append("Free: ").append(nf.format(g)).append(" MB, ").append(freePerc).append(" %\n");
                builder.append("RAM: ").append(nf.format(f)).append(" GB\n");
                // NOTE(review): utilization is a 0-1 fraction but is logged with
                // a " %" suffix — confirm intended units before changing output.
                builder.append("Utilization: ").append(utilization).append(" %");
                logger.trace(builder.toString());
            }
            smu = new SystemMemoryUtilization(id,
                                              utilization,
                                              ((double)total)/MB,
                                              ((double)free)/MB,
                                              ((double)used)/MB,
                                              sigar.getFreeSystemMemoryPercent(),
                                              sigar.getUsedSystemMemoryPercent(),
                                              sigar.getRam(),
                                              tVals);
        }
        return smu;
    }
}
|
zhaoxiangchun/jdcloud-sdk-go
|
services/vod/models/TranscodeTaskObject.go
|
<filename>services/vod/models/TranscodeTaskObject.go
// Copyright 2018 JDCLOUD.COM
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// NOTE: This class is auto generated by the jdcloud code generator program.
package models
// TranscodeTaskObject describes one transcode output of a VOD job.
type TranscodeTaskObject struct {

    /* Task ID (Optional) */
    TaskId int64 `json:"taskId"`

    /* Bitrate profile name (Optional) */
    Name string `json:"name"`

    /* Media type (Optional) */
    MediaType int `json:"mediaType"`

    /* Status (Optional) */
    Status int `json:"status"`

    /* Progress (Optional) */
    Progress string `json:"progress"`

    /* File size (Optional) */
    Size int64 `json:"size"`

    /* Transcode template ID (Optional) */
    TemplateId int `json:"templateId"`

    /* Bitrate (Optional) */
    Bitrate int64 `json:"bitrate"`

    /* Codec (Optional) */
    Codec string `json:"codec"`

    /* Container format (Optional) */
    Format string `json:"format"`

    /* Width (Optional) */
    Width int `json:"width"`

    /* Height (Optional) */
    Height int `json:"height"`

    /* Frame rate (Optional) */
    Fps string `json:"fps"`

    /* Creation time (Optional) */
    CreateTime string `json:"createTime"`

    /* Update time (Optional) */
    UpdateTime string `json:"updateTime"`
}
|
debanik1997/corona-aid
|
controllers/user.controller.js
|
<gh_stars>1-10
const Users = require('../models/user.model');
const Association = require('../models/association.model');
const passport = require('passport');
const emailer = require("../util/emailer");
const spreadsheets = require('../util/spreadsheet_tools');
const distance_tools = require('../util/distance_tools');
const asyncWrapper = require("../util/asyncWrapper")
var jwt = require('jwt-simple');
const UserService = require('../services/user.service');
// Helper function to determine whether an email is valid
// Resolves to true when no existing account uses the given email address.
function validateEmailAccessibility(email) {
    return Users.findOne({ email: email }).then((existing) => existing === null);
}
// Notify the volunteer's association (or the default inbox when unaffiliated)
// that the volunteer offered to help with matching.
const sendHelpMatchEmail = async (assocID, volunteerName, volunteerEmail, helpDetails) => {
    // Bug fix: this variable was previously assigned without a declaration,
    // creating an implicit global (a crash under strict mode and a data race
    // across concurrent requests).
    let associationEmail = '<EMAIL>';
    if (assocID !== '') {
        const assoc = await Association.findById(assocID);
        associationEmail = assoc.email;
    }
    const data = {
        sender: "<EMAIL>",
        receiver: associationEmail,
        name: volunteerName,
        email: volunteerEmail,
        details: helpDetails,
        templateName: "help_match"
    };
    console.log(data);
    emailer.sendHelpMatchEmail(data);
}
// Email the volunteer a verification link (or a Google Form for Baltimore).
const sendVerifyEmail = (userID, user) => {
    let mode = "localhost:3000";
    if (process.env.PROD) {
        mode = "covaid.co";
    }
    let message = `http://${mode}/verify?ID=${userID}`;
    // Baltimore users will receive a Google Form
    if (user.association == '5e8439ad9ad8d24834c8edbe') {
        message = "https://forms.gle/aTxAbGVC49ff18R1A";
    }
    const data = {
        //sender's and receiver's email
        sender: "<EMAIL>",
        receiver: user.email,
        link: message,
        templateName: "verification",
    };
    emailer.sendVerificationEmail(data);
}
/**
* Get users by ID list
*/
exports.getUsersByIds = asyncWrapper(async (req, res) => {
try {
const ids = req.query.ids;
const users = await UserService.getUsersByUserIDs(ids.split(","));
return res.status(200).send(users);
} catch (e) {
console.log(e);
return res.status(400).send(e);
}
});
/**
* Handle requests to register a user
*/
/**
 * Handle requests to register a user.
 * Flow: validate email presence -> ensure email is unused -> create user
 * (unverified, available, agreed to terms) -> side effects (spreadsheet for
 * Pittsburgh, verification email, optional help-match email).
 * NOTE(review): the validateEmailAccessibility(...) promise chain has no
 * .catch — a database error here is silently unhandled; confirm and add one.
 */
exports.register = function (req, res) {
    const { body: { user } } = req;
    if(!user.email) {
        return res.status(422).json({
            errors: {
                email: 'is required',
            },
        });
    }
    // Validate that an email address is unique
    validateEmailAccessibility(user.email).then(function(valid) {
        if (valid) {
            if(!user.password) {
                return res.status(422).json({
                    errors: {
                        password: '<PASSWORD>',
                    },
                });
            }
            const finalUser = new Users(user);
            // NOTE(review): redacted placeholder below — original presumably
            // passed user.password; confirm against repo history.
            finalUser.setPassword(<PASSWORD>);
            // New accounts start unverified and must opt in via email link.
            finalUser.preVerified = false;
            finalUser.verified = false;
            finalUser.agreedToTerms = true;
            finalUser.availability = true;
            finalUser.notes = '';
            finalUser.save(function(err, result) {
                if (err) {
                    return res.status(422).send(err);
                }
                const userID = result._id;
                // Save Pittsburgh users to respective spreadsheets
                if (user.association == "5e843ab29ad8d24834c8edbf") { // Pittsburgh
                    spreadsheets.addUserToSpreadsheet(finalUser, userID, '1l2kVGLjnk-XDywbhqCut8xkGjaGccwK8netaP3cyJR0');
                }
                // Send verification email to volunteer
                sendVerifyEmail(userID, user);
                // Sending email if volunteer marked they could help match
                if (user.offer.canHelp) {
                    sendHelpMatchEmail(user.association, user.first_name, user.email, user.offer.helpDetails);
                }
                return (userID === null) ? res.sendStatus(500) : res.status(201).send({'id': userID});
            });
        } else {
            return res.status(403).json({
                errors: {
                    email: 'Already Exists',
                },
            });
        }
    });
};
/**
* Handle requests to login a user
*/
/**
 * Handle requests to login a user.
 * Validates presence of email/password, then authenticates via the
 * passport 'userLocal' strategy (session-less). Only pre-verified accounts
 * receive a JWT; unverified accounts get 403, bad credentials get 401.
 */
exports.login = function (req, res, next) {
    const { body: { user } } = req;
    if(!user.email) {
        return res.status(422).json({
            errors: {
                email: 'is required',
            },
        });
    }
    if(!user.password) {
        return res.status(422).json({
            errors: {
                password: '<PASSWORD>',
            },
        });
    }
    // passport.authenticate returns a middleware; invoke it immediately.
    return passport.authenticate('userLocal', { session: false }, (err, passportUser, info) => {
        if (err) {
            return next(err);
        }
        if(passportUser) {
            const user = passportUser;
            if (passportUser.preVerified) {
                // Attach a fresh JWT for subsequent authenticated requests.
                user.token = passportUser.generateJWT();
                return res.json({ user: user.toAuthJSON() });
            } else {
                return res.status(403).json({
                    errors: {
                        verifed: "unverifed",
                    },
                });
            }
        } else {
            return res.status(401).json({
                errors: {
                    password: "<PASSWORD>",
                },
            });
        }
    })(req, res, next);
};
/**
* Handle requests to get the current logged in user
*/
exports.current = function (req, res) {
const id = req.token.id;
return Users.findById(id)
.then((user) => {
if(!user) {
return res.sendStatus(400);
}
return res.json(user.toJSON());
});
};
/**
* Handle requests to update user notes
*/
exports.set_notes = (req, res) => {
const user_id = req.body.user_id;
const note = req.body.note;
Users.findByIdAndUpdate(user_id,
{$set: {
"note": note
}
}, function (err, request) {
if (err) return next(err);
res.send('User updated.');
});
};
/**
* Handle requests to update user's verification status
*/
exports.update_verify = (req, res) => {
const user_id = req.body.user_id;
const preVerified = req.body.preVerified;
Users.findByIdAndUpdate(user_id,
{$set: {
"preVerified": preVerified
}
}, function (err, request) {
if (err) return next(err);
res.send('User updated.');
});
};
/**
* Handle requests to verify a user
*/
exports.verify = function(req, res) {
Users.findByIdAndUpdate(req.query.ID,
{"preVerified": true}, function(err, result){
if(err){
console.log("ERROR");
res.sendStatus(500);
}
else{
console.log("Success");
res.sendStatus(200);
}
})
}
/**
* Handle requests to get all users of a specific association
*/
/**
 * Handle requests to get all users of a specific association.
 * Three branches:
 *  1) a real association: its users, annotated+sorted by distance;
 *  2) the unaffiliated id WITH a latitude: unaffiliated + empty-association
 *     users, annotated+sorted by distance;
 *  3) the unaffiliated id WITHOUT a latitude: same set, unsorted.
 * NOTE(review): distance is attached directly onto the mongoose documents
 * (users[i]['distance']) — confirm it survives serialization in res.send.
 */
exports.all_users_of_an_association = function (req, res) {
    var assoc = req.query.association;
    if (assoc !== '5e88cf8a6ea53ef574d1b80c') { // If association is not unaffiliated (i.e. Covaid)
        Users.find({'association': assoc}).then(function (users) {
            for (var i = 0; i < users.length; i++) {
                // coordinates are stored [longitude, latitude] (GeoJSON order)
                const coords = users[i].location.coordinates;
                const distance = distance_tools.calcDistance(req.query.latitude, req.query.longitude, coords[1], coords[0]);
                users[i]['distance'] = distance;
            }
            users.sort(function(a, b){return a['distance'] - b['distance']});
            res.send(users);
        });
        return;
    } else { // If association is unaffiliated (i.e. Covaid)
        if (req.query.latitude) {
            Users.find({$or: [{'association': assoc}, {'association': ""}]})
                .then(function (users) {
                    for (var i = 0; i < users.length; i++) {
                        const coords = users[i].location.coordinates;
                        const distance = distance_tools.calcDistance(req.query.latitude, req.query.longitude, coords[1], coords[0]);
                        users[i]['distance'] = distance;
                    }
                    users.sort(function(a, b){return a['distance'] - b['distance']});
                    res.send(users);
                });
        } else {
            // No caller location available: return unsorted.
            Users.find({$or: [{'association': assoc}, {'association': ""}]}).then(function (users) {
                res.send(users);
            });
        }
    };
};
/**
* Handle requests to find a user by ID
*/
exports.find_user = function (req, res) {
var id = req.query.id;
Users.find({
'_id': id
}).then(function (user) {
res.send(user);
});
}
/**
* Handle requests to get all users within a 20 mile radius of a lat, long
*/
/**
 * Handle requests to get all users within a 20 mile radius of a lat, long.
 * Filters to available, pre-verified volunteers; $centerSphere takes the
 * radius in radians, hence 20 / 3963.2 (miles over Earth's radius in miles).
 */
exports.all_users = function (req, res) {
    Users.find({'availability': true,
        'preVerified': true,
        'location':
            { $geoWithin:
                { $centerSphere:
                    [[ req.query.longitude, req.query.latitude],
                        20 / 3963.2]
                }
            }
    }).then(function (users) {
        for (var i = 0; i < users.length; i++) {
            // coordinates are [longitude, latitude]; calcDistance wants lat first
            const coords = users[i].location.coordinates;
            const distance = distance_tools.calcDistance(req.query.latitude, req.query.longitude, coords[1], coords[0]);
            users[i]['distance'] = distance;
        }
        // Nearest volunteers first.
        users.sort(function(a, b){return a['distance'] - b['distance']});
        res.send(users);
    });
}
/**
* Handle requests to get all users
*/
exports.actual_all_users = function (req, res) {
Users.find({}).then(function (users) {
res.send(users);
});
}
/**
* Handle requests to get the count of total users
*/
exports.total_users = function (req, res) {
Users.find({}).count(function(err, count) {
res.send({'count': count});
});
}
/**
* Handle requests to update a user
*/
exports.update = function (req, res) {
const id = req.token.id;
Users.findByIdAndUpdate(id, {$set: req.body}, function (err, offer) {
if (err) return next(err);
res.send('User updated.');
});
};
/**
* Handle requests to delete a user
*/
exports.delete = function (req, res) {
const userID = req.token.id;
Users.findByIdAndRemove(userID, function (err) {
if (err) return next(err);
res.send('Successfully opted out!');
});
};
/**
 * Email a password-reset link. The JWT is signed with the user's current
 * password hash as the secret, so the token self-invalidates once the
 * password changes.
 * NOTE(review): `expirationDate` is computed but never used — the token has
 * no enforced expiry; confirm whether a 5-minute expiry was intended.
 */
exports.emailPasswordResetLink = asyncWrapper(async (req, res) => {
    if (req.body.email !== undefined) {
        var emailAddress = req.body.email;
        Users.findOne({email: emailAddress}, function (err, user) {
            if (err) {
                return res.sendStatus(403)
            }
            const today = new Date();
            const expirationDate = new Date(today);
            expirationDate.setMinutes(today.getMinutes() + 5);
            if (user) {
                var payload = {
                    id: user._id, // User ID from database
                    email: emailAddress,
                };
                // Secret is the stored password hash (see note above).
                var secret = user.hash;
                var token = jwt.encode(payload, secret);
                emailer.sendPasswordLink(emailAddress, payload.id, token);
                res.sendStatus(200)
            } else {
                return res.status(403).send('No accounts with that email')
            }
        })
    } else {
        return res.status(422).send('Email address is missing.')
    }
});
/**
 * Verify a password-reset token by decoding it against the user's current
 * password hash; 200 when it decodes, 403 when it does not.
 * NOTE(review): if the id does not match any user, `user` is null and
 * user.hash throws — presumably caught by asyncWrapper; confirm.
 */
exports.verifyPasswordResetLink = asyncWrapper(async (req, res) => {
    const user = await Users.findById(req.params.id)
    var secret = user.hash;
    try {
        // Decoding fails (throws) when the token was signed with a different
        // hash, i.e. the password already changed or the token is forged.
        var payload = jwt.decode(req.params.token, secret);
        res.sendStatus(200)
    } catch(error){
        console.log(error.message);
        res.sendStatus(403);
    }
});
/**
 * Set a new password for the user identified by req.body.id.
 * NOTE(review): redacted placeholder below — setPassword was presumably
 * called with `newPassword`; confirm against repo history. No token check
 * happens here; verification is expected to occur beforehand.
 */
exports.resetPassword = asyncWrapper(async (req, res) => {
    var newPassword = req.body.newPassword;
    // update password
    const user = await Users.findById(req.body.id)
    user.setPassword(<PASSWORD>)
    user.save(function(err, result) {
        if (err) {
            return res.status(422).send(err);
        }
        res.sendStatus(200)
    })
});
|
dyna-mis/Hilabeling
|
src/app/3d/qgspoint3dsymbolwidget.h
|
/***************************************************************************
qgspoint3dsymbolwidget.h
--------------------------------------
Date : July 2017
Copyright : (C) 2017 by <NAME>
Email : wonder dot sk at <EMAIL> dot <EMAIL>
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#ifndef QGSPOINT3DSYMBOLWIDGET_H
#define QGSPOINT3DSYMBOLWIDGET_H

#include <QWidget>

#include "ui_point3dsymbolwidget.h"

class QgsPoint3DSymbol;

//! A widget for configuration of 3D symbol for points
class QgsPoint3DSymbolWidget : public QWidget, private Ui::Point3DSymbolWidget
{
    Q_OBJECT
  public:
    explicit QgsPoint3DSymbolWidget( QWidget *parent = nullptr );

    //! Loads the given symbol's settings into the widget controls
    void setSymbol( const QgsPoint3DSymbol &symbol );
    //! Returns a symbol built from the current widget state
    QgsPoint3DSymbol symbol() const;

  signals:
    //! Notifies owners that the widget configuration changed
    void changed();

  private slots:
    // Internal UI reactions (shape combo, model file chooser,
    // material-overwrite checkbox).
    void onShapeChanged();
    void onChooseModelClicked( bool checked = false );
    void onOverwriteMaterialChecked( int state );
};

#endif // QGSPOINT3DSYMBOLWIDGET_H
|
Manny27nyc/azure-sdk-for-java
|
sdk/mediaservices/azure-resourcemanager-mediaservices/src/main/java/com/azure/resourcemanager/mediaservices/models/H265Layer.java
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.mediaservices.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import java.time.Duration;
/**
* Describes the settings to be used when encoding the input video into a desired output bitrate layer with the H.265
* video codec.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata\\.type")
@JsonTypeName("#Microsoft.Media.H265Layer")
@JsonFlatten
@Fluent
// NOTE: auto-generated class — keep manual edits minimal.
public class H265Layer extends H265VideoLayer {
    // NOTE(review): logger appears unused within this class; retained as
    // generated.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(H265Layer.class);

    /*
     * We currently support Main. Default is Auto.
     */
    @JsonProperty(value = "profile")
    private H265VideoProfile profile;

    /*
     * We currently support Level up to 6.2. The value can be Auto, or a number
     * that matches the H.265 profile. If not specified, the default is Auto,
     * which lets the encoder choose the Level that is appropriate for this
     * layer.
     */
    @JsonProperty(value = "level")
    private String level;

    /*
     * The VBV buffer window length. The value should be in ISO 8601 format.
     * The value should be in the range [0.1-100] seconds. The default is 5
     * seconds (for example, PT5S).
     */
    @JsonProperty(value = "bufferWindow")
    private Duration bufferWindow;

    /*
     * The number of reference frames to be used when encoding this layer. If
     * not specified, the encoder determines an appropriate number based on the
     * encoder complexity setting.
     */
    @JsonProperty(value = "referenceFrames")
    private Integer referenceFrames;

    /**
     * Get the profile property: We currently support Main. Default is Auto.
     *
     * @return the profile value.
     */
    public H265VideoProfile profile() {
        return this.profile;
    }

    /**
     * Set the profile property: We currently support Main. Default is Auto.
     *
     * @param profile the profile value to set.
     * @return the H265Layer object itself.
     */
    public H265Layer withProfile(H265VideoProfile profile) {
        this.profile = profile;
        return this;
    }

    /**
     * Get the level property: We currently support Level up to 6.2. The value can be Auto, or a number that matches the
     * H.265 profile. If not specified, the default is Auto, which lets the encoder choose the Level that is appropriate
     * for this layer.
     *
     * @return the level value.
     */
    public String level() {
        return this.level;
    }

    /**
     * Set the level property: We currently support Level up to 6.2. The value can be Auto, or a number that matches the
     * H.265 profile. If not specified, the default is Auto, which lets the encoder choose the Level that is appropriate
     * for this layer.
     *
     * @param level the level value to set.
     * @return the H265Layer object itself.
     */
    public H265Layer withLevel(String level) {
        this.level = level;
        return this;
    }

    /**
     * Get the bufferWindow property: The VBV buffer window length. The value should be in ISO 8601 format. The value
     * should be in the range [0.1-100] seconds. The default is 5 seconds (for example, PT5S).
     *
     * @return the bufferWindow value.
     */
    public Duration bufferWindow() {
        return this.bufferWindow;
    }

    /**
     * Set the bufferWindow property: The VBV buffer window length. The value should be in ISO 8601 format. The value
     * should be in the range [0.1-100] seconds. The default is 5 seconds (for example, PT5S).
     *
     * @param bufferWindow the bufferWindow value to set.
     * @return the H265Layer object itself.
     */
    public H265Layer withBufferWindow(Duration bufferWindow) {
        this.bufferWindow = bufferWindow;
        return this;
    }

    /**
     * Get the referenceFrames property: The number of reference frames to be used when encoding this layer. If not
     * specified, the encoder determines an appropriate number based on the encoder complexity setting.
     *
     * @return the referenceFrames value.
     */
    public Integer referenceFrames() {
        return this.referenceFrames;
    }

    /**
     * Set the referenceFrames property: The number of reference frames to be used when encoding this layer. If not
     * specified, the encoder determines an appropriate number based on the encoder complexity setting.
     *
     * @param referenceFrames the referenceFrames value to set.
     * @return the H265Layer object itself.
     */
    public H265Layer withReferenceFrames(Integer referenceFrames) {
        this.referenceFrames = referenceFrames;
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withBitrate(int bitrate) {
        super.withBitrate(bitrate);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withMaxBitrate(Integer maxBitrate) {
        super.withMaxBitrate(maxBitrate);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withBFrames(Integer bFrames) {
        super.withBFrames(bFrames);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withFrameRate(String frameRate) {
        super.withFrameRate(frameRate);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withSlices(Integer slices) {
        super.withSlices(slices);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withAdaptiveBFrame(Boolean adaptiveBFrame) {
        super.withAdaptiveBFrame(adaptiveBFrame);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withWidth(String width) {
        super.withWidth(width);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withHeight(String height) {
        super.withHeight(height);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public H265Layer withLabel(String label) {
        super.withLabel(label);
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    @Override
    public void validate() {
        super.validate();
    }
}
|
tzpBingo/algorithms-learning
|
algorithms-lecture/src/main/java/com/learning/algorithms/lectures/sort/QuickSort.java
|
<reponame>tzpBingo/algorithms-learning
package com.learning.algorithms.lectures.sort;
import java.util.Random;
/**
* Created by admin on 16/11/3.
*
* 快速排序
*/
public class QuickSort {
private static Random random = new Random();
public int[] quickSort(int[] A, int n) {
if (n == 0 || A == null) {
return null;
}
sort(A, 0, n - 1);
return A;
}
private void sort(int[] a, int lo, int hi) {
if (lo >= hi) return;
int mid = partition(a, lo, hi);
sort(a, lo, mid - 1);
sort(a, mid + 1, hi);
}
private int partition(int[] a, int lo, int hi) {
int key = a[lo];
int left = lo + 1;
int right = hi;
while (true) {
while (a[left] <= key && left < hi) {
left++;
}
while (a[right] >= key && right > lo) {
right--;
}
if (left >= right) {
break;
}
swap(a, left, right);
}
swap(a, lo, right);
return right;
}
/*
//partition另一种写法
private int partition(int[] a, int lo, int hi) {
int key = a[lo];
int left = lo;
int right = hi;
//assert left < right;
while (left < right) {
while (left < right && a[right] > key) {
right--;
}
if (left < right) {
a[left++] = a[right];
}
while (left < right && a[left] < key) {
left++;
}
if (left < right) {
a[right--] = a[left];
}
}
a[left] = key;
return left;
}
*/
private void swap(int[] a, int i, int j) {
int tmp = a[i];
a[i] = a[j];
a[j] = tmp;
}
public static void main(String[] args) {
int[] a = {54, 35, 48, 36, 27, 12, 44, 44, 8, 14, 26, 17, 28};
int[] b = new QuickSort().quickSort(a, 13);
for (int i : b) {
System.out.print(i + ",");
}
System.out.println();
int[] c = {32, 103, 24, 88, 95, 70, 97, 15, 102, 6, 79, 46, 51, 37, 93, 108, 9, 58, 53, 58, 79, 36, 58, 91, 78, 58, 61, 81};
for (int i : new QuickSort().quickSort(c, 28)) {
System.out.print(i + ",");
}
}
}
|
AlTavares/babos.ios-server
|
sockets/sockets.js
|
module.exports = function(io) {
io.on('connection', function(socket) {
socket.on('disconnect', function() { });
});
}
|
shoukailiang/community
|
community-util/src/main/java/com/shoukailiang/community/util/exception/CommunityException.java
|
<filename>community-util/src/main/java/com/shoukailiang/community/util/exception/CommunityException.java<gh_stars>1-10
package com.shoukailiang.community.util.exception;
import com.shoukailiang.community.util.enums.ResultEnum;
import lombok.Getter;
@Getter
public class CommunityException extends RuntimeException {

    // Business error code returned to API clients (lombok generates getCode()).
    private Integer code;

    /** Build from a predefined ResultEnum (its message becomes the exception message). */
    public CommunityException(ResultEnum resultEnum) {
        super(resultEnum.getMessage());
        this.code = resultEnum.getCode();
    }

    /** Build from an explicit code/message pair. */
    public CommunityException(Integer code, String message) {
        super(message);
        this.code = code;
    }
}
|
dsiuta/Comps
|
src/Outputs/Netcdf.h
|
#ifndef OUTPUT_NETCDF_H
#define OUTPUT_NETCDF_H
#include "Output.h"

#include <netcdf.hh>
#include <vector>
class OutputNetcdf : public Output {
public:
OutputNetcdf(const Options& iOptions, const Data& iData);
private:
void writeCore() const;
std::string getFilename(int iDate, int iInit, std::string iVariable, std::string iConfiguration) const;
void writeVariable(NcVar* iVariable, const std::map<float,int>& iValues) const;
void writeVariable(NcVar* iVariable, const std::vector<std::string>& iValues) const;
template <class T> void writeVariable(NcVar* iVariable, const std::vector<T>& iValues) const {
int N = (int) iValues.size();
T* values = new T[N];
for(int i = 0; i < (int) iValues.size(); i++) {
values[i] = iValues[i];
}
long int pos = 0;
iVariable->set_cur(&pos);
iVariable->put(values, N);
delete[] values;
};
bool mDontWriteProb;
};
#endif
|
waliens/weakseg
|
training/pannuke.py
|
<filename>training/pannuke.py
def main(argv):
    """Placeholder entry point; command-line arguments are currently ignored."""


if __name__ == "__main__":
    import sys

    main(sys.argv[1:])
|
lambdaxymox/barrelfish
|
usr/eclipseclp/Alog/src/adjlogs.c
|
/*
* adjustlogs.c: Program to take multiple alog logfiles, extract events
* for synchronizing the clocks, and generating adjusted times.
* The files are replaced, allowing the use of other alog tools.
*
* -e n defines synchronization events
* -a1 n -a2 m -b1 k define pair-exchange events used to compute clock offsets
* (There are predefined values; these allow the user to define their own)
*
* Algorithm:
* Build a matrix of time events; solve it for the offset and skew for
* each clock. For the first pass, this "matrix" will have just the
* "synchronization" events.
*
* This is the formula:
* Processor 0 has the standard clock.
* At the end of each sync, the clock are re-synchronized.
* Thus, the global time for processor p is
* Find the interval I in synctime that contains the local time
* The adjusted gtime is:
*
* stime[0][I+1]-stime[0][I]
* gtime = ------------------------- (time - stime[p][I]) + stime[0][I]
* stime[p][I+1]-stime[p][I]
*
* The current implementation uses a single interval.
*
* Just to keep things more interesting, the timer is really a 64 bit clock,
* with the field "time_slot" containing the high bits.
*
*/
#include <stdio.h>
#include <ctype.h> /* for calling isdigit() */
#include "alog_evntdfs.h" /* Logfile definitions */
#define FALSE 0
#define TRUE !FALSE
#define MAX(x,y) ( (x) > (y) ? (x) : (y) )
#define C_DATA_LEN 50
#define DO_NEGATIVE 1
#define IGNORE_NEGATIVE 2

/* One parsed alog logfile record. */
struct log_entry
{
  int proc_id;
  int task_id;
  int event;
  int i_data;
  char c_data[C_DATA_LEN];
  /* The timer is a 64-bit clock: time_slot carries the high bits,
     time the low 32 bits (see the file header comment). */
  int time_slot;
  unsigned long time;
};

#define MAX_NSYNC 2
typedef struct {
  unsigned long *time; /* time values that were recorded */
} SyncTime;
SyncTime synctime[MAX_NSYNC];

/* For now, we just handle a set of timing events (np-1 of them)
   between processor i and i+1 (processor 0 participates in only
   1 event) */
typedef struct {
  unsigned long a1, b1, a2; /* Times for the events */
  int p0, p1; /* processors that participated in
                 this time-exchange */
} OffsetEvents;
OffsetEvents *offsetevents;
int noffsetevents = 0;

/* A local offset is used to compensate for the time_slot (upper 32 bits) */
/* NOT YET IMPLEMENTED */
long local_offset;

/* The global time is found by adding an offset and scaling by
   a fraction that is represented by numer[i]/denom[i] on the i'th
   processor */
unsigned long *numer;
unsigned long *denom;
long *globaloffset;

/* mintime holds the mintime for ALL runs; this can be used to
   offset the values */
unsigned long mintime;

/* These hold user-defined synchronization events */
#define MAX_USERETYPES 100
static int syncevent[MAX_USERETYPES];
static int syncep=0;

/* These hold the 3 event types used to adjust the individual offsets
   (if not present, the synchronization events are used to compute the
   offsets)
*/
static int a1event[MAX_USERETYPES],
           a2event[MAX_USERETYPES],
           b1event[MAX_USERETYPES];
static int a1p = 0, a2p = 0, b1p = 0;
/* Mask of the low-order 16 bits of a timestamp
   (usage not visible in this chunk — confirm against the full file). */
static unsigned long lowmask = 0xFFFF;
void ComputeOffsets();
/*
 * main - option parsing and overall driver.
 *
 * Command line: [-e n] [-a1 n] [-a2 n] [-b1 n] file1 file2 ... fileN
 * with one alog logfile per processor.  The steps are:
 *   1. record any user-defined event numbers,
 *   2. scan every logfile for sync / pair-exchange events,
 *   3. derive per-processor clock transforms (offset and numer/denom),
 *   4. rewrite each logfile as "<name>.new" with adjusted timestamps.
 *
 * All error paths exit(0) after a message on stderr (historical
 * behavior; the exit status does not distinguish success from failure).
 */
main(argc,argv)
int argc;
char *argv[];
{
    FILE *headerfp, *fd;
    int np, i, nsync, nlsync;
    char headerfile[255];
    int pid;
    int firstfile;

    if ( argc <= 1 )
        usage( argv[0] );

    /* Look for user-defined events */
    for (i=1; i<argc; i++) {
        if (strcmp(argv[i],"-e") == 0)
            /* Test on MAX_USERTYPES */
            syncevent[syncep++] = atoi(argv[++i]);
        else if (strcmp(argv[i],"-a1") == 0)
            a1event[a1p++] = atoi(argv[++i]);
        else if (strcmp(argv[i],"-a2") == 0)
            a2event[a2p++] = atoi(argv[++i]);
        else if (strcmp(argv[i],"-b1") == 0)
            b1event[b1p++] = atoi(argv[++i]);
        else
            break;
    }

    /* Figure out how many processors there are (one logfile each) */
    np = argc - i;
    firstfile = i;

    /* These could be allocated on demand */
    for (i=0; i<MAX_NSYNC; i++) {
        synctime[i].time = (unsigned long *)
            malloc( np * sizeof(unsigned long) );
    }
    globaloffset = (long *) malloc( np * sizeof(long) );
    numer        = (unsigned long *) malloc( np * sizeof(unsigned long) );
    denom        = (unsigned long *) malloc( np * sizeof(unsigned long) );
    offsetevents = (OffsetEvents *) malloc( np * sizeof(OffsetEvents) );
    /* NOTE(review): none of the malloc results are checked */
    mintime      = (unsigned long)(~0);

    /* Loop through each file, looking for the synchronization events */
    for (i=0; i<np; i++) {
        /* NOTE(review): fopen is unchecked on this first pass (it IS
           checked on the rewrite pass below) */
        fd = fopen( argv[firstfile+i], "r" );
        nsync = extract_timing( i, fd );
        if (i > 0 && nsync != nlsync) {
            fprintf( stderr, "Found differing numbers of syncs\n" );
            exit(0);
        }
        nlsync = nsync;
        fclose( fd );
    }

    /* If we didn't find enough events, we exit */
    if (nsync < 2) {
        fprintf( stderr,
                 "Not enough synchronization events to adjust logs\n" );
        exit(0);
    }

    /* Compute a "global clock" time */
    /* NOTE: if numer is changed, ComputeOffsets must be changed as well */
    for (i=0; i<np; i++) {
        /* numer/denom give the clock-rate ratio relative to processor 0,
           measured between the first and last recorded sync events */
        numer[i] = (synctime[1].time[0] - synctime[0].time[0]);
        denom[i] = (synctime[1].time[i] - synctime[0].time[i]);
        /* Using mintime here fails for some log files (since some of the
           computed/scaled times can then be negative. We have to pick
           a value that makes the minimum COMPUTED time positive */
        globaloffset[i] = synctime[0].time[i]; /* - mintime; */
    }

    fprintf( stderr, "Summary of clock transformations:\n" );
    if (noffsetevents == np - 1) {
        /* Print out the initial globaloffsets */
        fprintf( stderr, "Global offsets from sync events are:\n" );
        for (i=0; i<np; i++) {
            fprintf( stderr, "%4d %12ld\n", i, globaloffset[i] );
        }
    }

    /* Use adjust events to compute a modified offset (if such events
       are not present, the globaloffset values above will be used) */
    ComputeOffsets( np );

    /* Write a summary */
    for (i=0; i<np; i++) {
        fprintf( stderr, "%4d (t - %12ld)  (%lu/%lu)\n",
                 i, globaloffset[i], numer[i], denom[i] );
    }

    /* Rewrite the log files using the clock adjustment */
    for (i=0; i<np; i++) {
        /* NOTE(review): pid is now unused; it belongs to the
           commented-out temp-file scheme below */
        pid = getpid();
        sprintf( headerfile, "%s.new", argv[firstfile+i] );
        /* sprintf(headerfile,"log.header.%d",pid); */
        if ( (headerfp=fopen(headerfile,"w")) == NULL ) {
            fprintf(stderr,"%s: unable to create temp file %s.\n",
                    argv[0], headerfile );
            exit(0);
        }
        fd = fopen( argv[firstfile+i], "r" );
        if (!fd) {
            fprintf( stderr, "%s: Unable to open log file %s\n",
                     argv[0], argv[firstfile+i] );
            exit(0);
        }
        adjust_file( i, fd, headerfp, 0, nsync, argv[firstfile+i] );
        fclose( fd );
        fclose( headerfp );
        /* move filename */
        /* unlink( argv[firstfile+i] );
           link( headerfile, argv[firstfile+i] );
           unlink( headerfile ); */
    }
} /* main */
/*
Extract timing data for the i'th log file
*/
/*
 * extract_timing - scan the log stream fd for processor i and record
 * the timing events needed for clock adjustment:
 *   - sync events  -> synctime[0..MAX_NSYNC-1].time[i]
 *   - a1/a2 events -> offsetevents[i]      (this processor originated them)
 *   - b1 events    -> offsetevents[i_data] (partner index comes from i_data)
 * Also tracks the minimum timestamp over all ordinary (event > 0) records
 * in the file-level global mintime.
 *
 * Returns the number of sync events captured (at most MAX_NSYNC).
 */
int extract_timing( i, fd )
int i;
FILE *fd;
{
    struct log_entry entry;
    int nsync = -1;

    while (1) {
        read_logentry(fd,&entry,DO_NEGATIVE);
        if ( feof(fd) ) break;
        if (is_sync_event(entry.event)) {
            /* We do this so that we save the LAST sync event */
            if (nsync + 1 < MAX_NSYNC) nsync++;
            synctime[nsync].time[i] = entry.time;
            /* fprintf( stdout, "Event type %d at time %d on proc %d\n",
               entry.event, entry.time, i ); */
        }
        /* For the offset events, the assumption is that each processor
           (except for processor 0) is the ORIGINATOR of one offsetevent.
           It MAY participate as the respondent (b1 event) for multiple
           events, including having processor 0 respond to EVERYONE.
           Finally, the (b1) processor has processor number SMALLER than
           the (a1,a2) processor.  This makes the equations that need
           to be solved for the offsets TRIANGULAR and easy.
        */
        else if (is_a1_event(entry.event)) {
            offsetevents[i].a1 = entry.time;
            offsetevents[i].p0 = entry.i_data;
        }
        else if (is_a2_event(entry.event)) {
            offsetevents[i].a2 = entry.time;
            offsetevents[i].p0 = entry.i_data;
            /* a2 closes an exchange, so count it here */
            noffsetevents++;
        }
        else if (is_b1_event(entry.event)) {
            if (entry.i_data < i) {
                fprintf( stderr,
                         "Improper offset event (originating processor %d\n", i );
                fprintf( stderr, "higher numbered than partner %d)\n",
                         entry.i_data );
                exit(0);
            }
            offsetevents[entry.i_data].b1 = entry.time;
            offsetevents[entry.i_data].p1 = i;
        }
        else if (entry.event > 0) {
            if (mintime > entry.time) mintime = entry.time;
        }
    }
    /* nsync started at -1, so this is the count of syncs seen */
    return nsync + 1;
}
/*
 * adjust_file - copy the log stream fin to fout, rewriting every
 * non-negative event's timestamp through GlobalTime() for processor p.
 * The built-in bookkeeping events (sync/a1/a2/b1) are dropped unless
 * leave_events is nonzero.
 * NOTE(review): only the built-in event numbers are filtered here;
 * user-defined -e/-a1/-a2/-b1 event numbers are copied through.
 * Aborts (exit(0)) if the adjusted times ever run backwards, since that
 * would silently corrupt the log.  fname is used only for messages.
 */
adjust_file( p, fin, fout, leave_events, nsync, fname )
FILE *fin, *fout;
int p, leave_events, nsync;
char *fname;
{
    struct log_entry entry;
    unsigned long GlobalTime(), gtime;
    unsigned long lasttime;

    /* lasttime is used to make sure that we don't mess up the log files
       without knowing it */
    lasttime = 0;
    while (1) {
        read_logentry(fin,&entry,DO_NEGATIVE);
        if ( feof(fin) ) break;
        if (!leave_events && (entry.event == ALOG_EVENT_SYNC ||
                              entry.event == ALOG_EVENT_PAIR_A1 ||
                              entry.event == ALOG_EVENT_PAIR_A2 ||
                              entry.event == ALOG_EVENT_PAIR_B1)) continue;
        /* adjust to the global clock time */
        gtime = GlobalTime(entry.time,p,nsync);
        if (entry.event > 0) {
            if (gtime < lasttime) {
                fprintf( stderr, "Error computing global times\n" );
                fprintf( stderr, "Times are not properly sorted\n" );
                fprintf( stderr, "Last time was %lu, current time is %lu\n",
                         lasttime, gtime );
                fprintf( stderr, "(original new time is %lu)\n", entry.time );
                fprintf( stderr, "processing file %s\n", fname );
                exit(0);
            }
            else
                lasttime = gtime;
        }
        /* negative events are unchanged. */
        fprintf(fout,"%d %d %d %d %d %lu %s\n",entry.event,
                entry.proc_id,entry.task_id,entry.i_data,
                entry.time_slot, (entry.event >= 0) ? gtime : entry.time,
                entry.c_data);
    }
}
/*
 * usage - print a help message on stderr and exit.
 *
 * a is the program name (argv[0]).  Never returns.
 *
 * Fix: the previous message did not mention the -e/-a1/-a2/-b1 options
 * that main() actually parses (they are documented only in the source
 * header comment); list them so users can discover them.
 */
usage( a )
char *a;
{
    fprintf(stderr,"%s: %s [-e n] [-a1 n] [-a2 n] [-b1 n] infile1 infile2 ...\n",a,a);
    fprintf(stderr,"  -e n   treat event type n as a clock synchronization event\n");
    fprintf(stderr,"  -a1 n  treat event type n as a pair-exchange send (a1) event\n");
    fprintf(stderr,"  -a2 n  treat event type n as a pair-exchange receive (a2) event\n");
    fprintf(stderr,"  -b1 n  treat event type n as a pair-exchange reply (b1) event\n");
    fprintf(stderr," updates files with synchronized clocks\n");
    exit(0);
}
/*
 * read_logentry - read one log record from fp into *table.
 * The record layout is:
 *     event proc_id task_id i_data time_slot time  c_data...
 * where c_data is the remainder of the line (leading blanks/tabs
 * skipped, truncated to C_DATA_LEN-1 characters).
 * When do_negs == IGNORE_NEGATIVE, records with negative event numbers
 * are skipped and the next record is returned instead.
 * On EOF the contents of *table are unspecified; callers must test
 * feof(fp) after the call (as the existing callers do).
 */
read_logentry(fp,table,do_negs)
FILE *fp;
struct log_entry *table;
int do_negs;
{
    char buf[81];   /* NOTE(review): buf is unused */
    char *cp;
    int i;

    do
    {
        fscanf(fp,"%d %d %d %d %d %lu",
               &(table->event),&(table->proc_id),&(table->task_id),
               &(table->i_data),&(table->time_slot),&(table->time));
        cp = table->c_data;
        i = 0;
        /* skip blanks/tabs before the free-text field */
        do
        {
            fscanf(fp,"%c",cp);
        }
        while ( *cp == ' ' || *cp == '\t' );
        i++;
        /* copy the rest of the line, bounded by C_DATA_LEN */
        while ( *cp != '\n' && i < C_DATA_LEN )
        {
            fscanf(fp,"%c",++cp);
            i++;
        }
        *cp = '\0';
        /*
          if ( !feof(fp) && table->event == 0 )
          fprintf(stderr,"0 reading in.\n");
        */
    }
    while( table->event < 0 && do_negs == IGNORE_NEGATIVE && !feof(fp) );
}
/* This routine allows use to define MANY sync events */
/* Return 1 when TYPE is a clock-synchronization event: either the
   built-in ALOG_EVENT_SYNC or one of the user-supplied -e events. */
int is_sync_event( type )
int type;
{
    int k = 0;

    if (type == ALOG_EVENT_SYNC)
        return 1;
    while (k < syncep) {
        if (syncevent[k] == type)
            return 1;
        k++;
    }
    return 0;
}
/* Return 1 when TYPE is a pair-exchange "a1" (send) event: either the
   built-in ALOG_EVENT_PAIR_A1 or one of the user-supplied -a1 events. */
int is_a1_event( type )
int type;
{
    int k = 0;

    if (type == ALOG_EVENT_PAIR_A1)
        return 1;
    while (k < a1p) {
        if (a1event[k] == type)
            return 1;
        k++;
    }
    return 0;
}
/* Return 1 when TYPE is a pair-exchange "a2" (receive) event: either the
   built-in ALOG_EVENT_PAIR_A2 or one of the user-supplied -a2 events. */
int is_a2_event( type )
int type;
{
    int k = 0;

    if (type == ALOG_EVENT_PAIR_A2)
        return 1;
    while (k < a2p) {
        if (a2event[k] == type)
            return 1;
        k++;
    }
    return 0;
}
/* Return 1 when TYPE is a pair-exchange "b1" (reply) event: either the
   built-in ALOG_EVENT_PAIR_B1 or one of the user-supplied -b1 events. */
int is_b1_event( type )
int type;
{
    int k = 0;

    if (type == ALOG_EVENT_PAIR_B1)
        return 1;
    while (k < b1p) {
        if (b1event[k] == type)
            return 1;
        k++;
    }
    return 0;
}
/*
 * GlobalTime - map a local timestamp from processor p onto the global
 * (processor 0) clock:
 *
 *    gtime = (time - globaloffset[p]) * (numer[p]/denom[p]) + globaloffset[0]
 *
 * ScaleLong() does the rational scaling in extended precision so the
 * product cannot overflow.  The two branches keep every unsigned
 * subexpression non-negative.
 * NOTE(review): globaloffset[] is a long while time is unsigned long,
 * so the comparisons mix signedness -- presumably offsets are always
 * non-negative; confirm for logs with very large timestamps.
 * (nsync is accepted for call-site symmetry but is currently unused;
 * stime1/stime2 are also unused.)
 */
unsigned long GlobalTime( time, p, nsync )
unsigned long time;
int p, nsync;
{
    unsigned long gtime, stime1, stime2;
    unsigned long frac;
    unsigned long tdiff;
    unsigned long ScaleLong();

    /* Problem: since times are UNSIGNED, we have to be careful about how
       they are adjusted.  time - synctime may not be positive.  We make
       sure that all of the subexpressions are unsigned longs */
    if (time >= globaloffset[p]) {
        tdiff = time - globaloffset[p];
        frac  = ScaleLong( numer[p], denom[p], tdiff );
        gtime = frac + globaloffset[0];
    }
    else {
        /* local time precedes this processor's epoch: scale the gap and
           subtract it from processor 0's epoch instead */
        tdiff = globaloffset[p] - time;
        frac  = ScaleLong( numer[p], denom[p], tdiff );
        if (frac > globaloffset[0]) printf( "Oops!\n" );
        gtime = globaloffset[0] - frac;
    }
    return gtime;
}
/*
This routine takes offset events and solves for the offsets. The
approach is:
Let the global time be given by (local_time - offset)*scale ,
with a different offset and scale on each processor. Each processor
originates exactly one communication event (except processor 0),
generating an a1 and a2 event. A corresponding number of b2 events
are generated, but note that one processor may have more than 1 b2
event (if using Dunnigan's synchronization, there will be np-1 b2 events
on processor 0, and none anywhere else).
These events are:
pi a1 (send to nbr) (recv) a2
pj (recv) b1 (send back)
We base the analysis on the assumption that in the GLOBAL time
repreresentation, a2-a1 is twice the time to do a (send) and
a (recv). This is equivalent to assuming that global((a1+a2)/2) ==
global(b1). Then, with the unknowns the offsets (the scales
are assumed known from the syncevent calculation), the matrix is
1
-s0 s1
....
-sj ... si
where si is the scale for the i'th processor (note s0 = 1).
The right hand sides are (1/2)(a1(i)+a2(i)) *s(i) - b1(j)*s(j).
Because of the triangular nature of the matrix, this reduces to
o(i) = (a1(i)+a2(i))/2 - (s(j)/s(i)) * (b1(j)-o(j))
Note that if s(i)==s(j) and b1 == (a1+a2)/2, this gives o(i)==o(j).
*/
/*
 * ComputeOffsets - refine globaloffset[] using the pair-exchange
 * (a1/a2/b1) events.  Requires exactly np-1 exchanges, one originated
 * by each processor 1..np-1 against a LOWER-numbered partner, which
 * makes the offset equations triangular (see the derivation above):
 *
 *     o(i) = (a1(i)+a2(i))/2 - (s(j)/s(i)) * (b1(j) - o(j))
 *
 * If the events are absent or miscounted, the sync-event offsets
 * already stored in globaloffset[] are left untouched.
 */
void ComputeOffsets( np )
int np;
{
    int i, j;
    unsigned long d1, delta;
    unsigned long ScaleLong();

    /* If there aren't enough events, return */
    if (noffsetevents != np - 1) {
        if (noffsetevents != 0)
            fprintf( stderr,
          "Incorrect number of offset events to compute clock offsets\n" );
        else
            fprintf( stderr, "No clock offset events\n" );
        return;
    }
    /* Take globaloffset[0] from sync */
    for (i=1; i<np; i++) {
        /* o(i) = (a1(i)+a2(i))/2 - (s(j)/s(i)) * (b1(j)-o(j)) */
        j = offsetevents[i].p1;
        /* Compute a1(i)+a2(i)/2. Do this by adding half the difference;
           this insures that we avoid overflow */
        d1 = (offsetevents[i].a2 - offsetevents[i].a1)/2;
        d1 = offsetevents[i].a1 + d1;
        /* We form (b1-o(j))(s(j)/s(i)) by noting that
           s(j)/s(i) == denom(i)/denom(j) (since numer(i)==numer(j)) */
        delta = ScaleLong( denom[i], denom[j],
                           offsetevents[i].b1 - globaloffset[j] );
        /* triangular solve: o(j) for j < i is already final */
        globaloffset[i] = d1 - delta;
    }
}
#include <mp.h>
static MINT *prod, *qq, *rr;
static int mpallocated = 0;
/*
 * ScaleLong - compute (n * v) / d exactly using the BSD multiple-
 * precision (mp) library, so the product n*v cannot overflow native
 * arithmetic.  Returns the truncated quotient.
 *
 * The scratch mp integers prod/qq/rr are allocated once and reused
 * across calls; the per-call operands are freed before returning.
 *
 * Bug fix: the hex conversions previously used "%x" with unsigned long
 * arguments.  On LP64 platforms (sizeof(long) > sizeof(int)) that is
 * undefined behavior and truncates the values; "%lx" is the correct
 * conversion for unsigned long in both sprintf and sscanf.
 */
unsigned long ScaleLong( n, d, v )
unsigned long n, d, v;
{
    char buf[40];
    char *s;
    MINT *nn, *dd, *vv;
    unsigned long q, r;

    if (!mpallocated) {
        prod = itom(0);
        if (!prod) {
            fprintf( stderr, "Could not allocate mp int\n" );
            exit(0);
        }
        qq = itom(0);
        if (!qq) {
            fprintf( stderr, "Could not allocate mp int\n" );
            exit(0);
        }
        rr = itom(0);
        if (!rr) {
            fprintf( stderr, "Could not allocate mp int\n" );
            exit(0);
        }
        mpallocated = 1;
    }
    /* convert the three operands to mp integers via hex strings */
    sprintf( buf, "%lx", n );
    nn = xtom(buf);
    if (!nn) {
        fprintf( stderr, "Could not allocate mp int\n" );
        exit(0);
    }
    sprintf( buf, "%lx", v );
    vv = xtom(buf);
    if (!vv) {
        fprintf( stderr, "Could not allocate mp int\n" );
        exit(0);
    }
    sprintf( buf, "%lx", d );
    dd = xtom(buf);
    if (!dd) {
        fprintf( stderr, "Could not allocate mp int\n" );
        exit(0);
    }
    mult(nn,vv,prod);
    mdiv(prod,dd,qq,rr);
    /* mtox returns a malloc'ed hex string; parse and free it */
    s = mtox(qq);
    sscanf( s, "%lx", &q );
    free( s );
    s = mtox(rr);
    sscanf( s, "%lx", &r );
    free( s );
    /* Free the locals */
    mfree( nn );
    mfree( dd );
    mfree( vv );
    return q;
}
/* Here is not-quite working code for multiple precision arithmetic */
#ifdef DO_MULTIPLE_ARITH
/*
This routine takes a value v and scales it by (n/d). This
routine handles integer overflow by using the following formulas:
Let h(u) = high 16 bits of u, and l(u) = low 16 bits of u.
Then v *(n/d) =
(l(v)+h(v))*(l(u)+h(u))/d
= l(v)l(n)/d + (l(n)h(v)+l(v)h(n))/d + h(v)h(n)/d
== a1/d + a2/d + a3/d
In order to keep the values in-range, we define low(u)=l(u) and
high(u) = h(u) >> 16. Then this formula becomes (with high substituted
for h):
a1/d + (a2<<16)/d + (a3<<32)/d
Now, when doing the integer division, we need to propagate the remainders.
Let the result be r. Then
rd = a1 + (a2<<16) + (a3<<32)
if a1 = k1 d + b1, (a2 << 16) = (k2 d + b2), and (a3 << 32) = (k3 d + b3),
then
r d = (k1 d + b1) + (k2 d + b2) + (k3 d + b3);
= (k1 + k2 + k3) d + (b1 + b2 + b3)
and so
r = (k1 + k2 + k3) + (b1 + b2 + b3) / d
To compute (k2,b2) and (k3,b3), we do:
(a2<<16)/d:
a2 = p2 d + c2
a2 << 16 = p2 d << 16 + c2 << 16
= (p2 << 16) d + c2 << 16
Let c2 << 16 = r2 d + s2, then (finally!)
a2 << 16 = (p2 << 16 + r2) d + s2
(a3 << 32)/d:
a3 = p3 d + c3
a3 << 32 = p3 d << 32 + c3 << 32
= (p3 << 32) d + c3 << 32
Computing c3 << 32 = r3 d + s3 is a challange, particularly
since we need only the low 32 bits (the high 32 bits will be 0)
We do this in stages as well:
c3 << 32 = (c3 << 16) << 16;
(c3 << 16) = t3 d + u3
(c3 << 32) = (t3 << 16)d + u3 << 16
= (t3 << 16 + y3)d + z3,
== r3 d + s3
where u3 << 16 = y3 d + z3
Then
a3 << 32 = (p3 << 32 + r3) d + s3
*/
void DivLong();
/*
ScaleDecomp - convert (a << p) = alpha d + beta, with beta < d
This works by recursively:
a = b d + r,
a<<p = (b << p)d + (r<<p)
then process r<<p to bd + r' etc until b == 0
*/
/*
 * ScaleDecomp - decompose (a << p) as alpha*d + beta with beta < d,
 * trying to avoid 32-bit overflow.
 * NOTE: this lives inside #ifdef DO_MULTIPLE_ARITH and is described
 * above as "not-quite working"; it is reference code only and is not
 * compiled into the tool.
 */
void ScaleDecomp( a, p, d, alpha, beta )
int p;
unsigned long a, d, *alpha, *beta;
{
    unsigned long b, r;
    unsigned long Alpha, Beta;
    int p1;

    Alpha = 0; Beta = 0;
    b = a / d;
    r = a % d;
    Alpha = b << p;
    /* We need to gingerly deal with r, since shifting it by much
       may make it too large, particularly if d is nearly 32 bits.
       What we need is r << p = gamma d + delta, with r < d. This
       is really the hard part. We can not assume that d is much
       smaller than 32 bits, so this is tricky. */
    DivLong( r, d, (unsigned long)(1 << p), &b, &r );
    Alpha += b;
    *beta = r;
    /* NOTE(review): *alpha is never stored outside the FOO branch */
#ifdef FOO
    while (p > 1 && r > 0) {
        p1 = p/2;
        r = (r << p1);
        b = r / d;
        r = r % d;
        Alpha += b << (p-p1);
        p = (p - p1);
    }
    *alpha = Alpha;
    *beta = r << p;
#endif
}
#define LOWBITS(a) (unsigned long)((a)&lowmask)
#define HIGHBITS(a) (unsigned long)( ((a) >> 16 ) & lowmask )
#include <mp.h>
/*
 * Reference implementation of ScaleLong kept under DO_MULTIPLE_ARITH;
 * not compiled (the mp-library version above is used instead).
 * The FOO branch sketches an overflow-free decomposition; the fallback
 * simply routes through the mp library like the live version.
 */
unsigned long ScaleLong( n, d, v )
unsigned long n, d, v;
{
#ifdef FOO
#define LOWBITS(a) (unsigned long)((a)&lowmask)
#define HIGHBITS(a) (unsigned long)( ((a) >> 16 ) & lowmask )
    unsigned long a1, a21, a22, a3, k1, k21, k22, k3, b1, b21, b22, b3;

    DivLong( n, d, v, &k1, &b1 );
    return k1 + b1/d;
    /* NOTE(review): everything below the return is unreachable */
    a1 = LOWBITS(v)*LOWBITS(n);
    a21 = LOWBITS(v)*HIGHBITS(n);
    a22 = LOWBITS(n)*HIGHBITS(v);
    a3 = HIGHBITS(v)*HIGHBITS(n);
    k1 = a1 / d;
    b1 = a1 % d;
    ScaleDecomp( a21, 16, d, &k21, &b21 );
    ScaleDecomp( a22, 16, d, &k22, &b22 );
    ScaleDecomp( a3, 32, d, &k3, &b3 );
    return (k1 + k21 + k22 + k3) + (b1 + b21 + b22 + b3) / d;
#else
    char buf[40];
    MINT *nn, *dd, *vv, *prod, *qq, *rr;
    unsigned long q, r;

    sprintf( buf, "%x", n );
    nn = xtom(buf);
    sprintf( buf, "%x", v );
    vv = xtom(buf);
    sprintf( buf, "%x", d );
    dd = xtom(buf);
    prod = itom(0);
    qq = itom(0);
    rr = itom(0);
    mult(nn,vv,prod);
    mdiv(prod,dd,qq,rr);
    sscanf( mtox(qq), "%x", &q );
    sscanf( mtox(rr), "%x", &r );
    /* NOTE(review): this variant leaks every mp value and the mtox
       strings, and has the same %x-vs-%lx issue as the live version */
    return q;
#endif
}
/*
Represent nv = alpha d + beta
*/
/*
 * DivLong - represent the product n*v as alpha*d + beta by splitting
 * the operands into 16-bit halves (beta may still exceed d; callers
 * finish the reduction).  Part of the disabled DO_MULTIPLE_ARITH
 * reference code; not compiled into the tool.
 */
void DivLong( n, d, v, alpha, beta )
unsigned long n, d, v;
unsigned long *alpha, *beta;
{
    unsigned long a1, a21, a22, a3, k1, k21, k22, k3, b1, b21, b22, b3;

    /* split n*v into four partial products of 16-bit halves */
    a1 = LOWBITS(v)*LOWBITS(n);
    a21 = LOWBITS(v)*HIGHBITS(n);
    a22 = LOWBITS(n)*HIGHBITS(v);
    a3 = HIGHBITS(v)*HIGHBITS(n);
    k1 = a1 / d;
    b1 = a1 % d;
    ScaleDecomp( a21, 16, d, &k21, &b21 );
    ScaleDecomp( a22, 16, d, &k22, &b22 );
    ScaleDecomp( a3, 32, d, &k3, &b3 );
    *alpha = k1 + k21 + k22 + k3;
    *beta = b1 + b21 + b22 + b3;
}
#endif
|
glimmerhq/glimmerhq
|
db/migrate/20200830201204_add_index_to_package_creator.rb
|
<gh_stars>1-10
# frozen_string_literal: true
# Adds an index and a foreign key on packages_packages.creator_id.
#
# Both operations run concurrently (outside a DDL transaction) so the
# migration avoids long-lived locks on the packages_packages table.
class AddIndexToPackageCreator < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  # Online-safe operations only; no downtime required.
  DOWNTIME = false

  # Concurrent index/FK creation cannot run inside a transaction block.
  disable_ddl_transaction!

  INDEX_NAME = 'index_packages_packages_on_creator_id'

  def up
    add_concurrent_index :packages_packages, :creator_id, name: INDEX_NAME
    # Nullify creator_id when the referenced user is deleted.
    add_concurrent_foreign_key(:packages_packages, :users, column: :creator_id, on_delete: :nullify)
  end

  def down
    # Reverse order of up: drop the FK first, then the index.
    remove_foreign_key_if_exists(:packages_packages, :users, column: :creator_id)
    remove_concurrent_index_by_name(:packages_packages, INDEX_NAME)
  end
end
|
banggibima/react_bitcloud
|
src/screens/LearnCryptoDetails/Catalog/index.js
|
import React from 'react';
import cn from 'classnames';
import styles from './Catalog.module.sass';
import Icon from '../../../components/Icon';
import Card from '../../../components/Card';
// data
import { articles } from "../../../mocks/articles";
const Catalog = () => {
return (
<div className={cn("section", styles.section)}>
<div className={cn("container", styles.container)}>
<div className={styles.list}>
{articles.map((x, index) => (
<Card className={styles.card} item={x} key={index} />
))}
</div>
<div className={styles.btns}>
<button className={cn("button-stroke button-small", styles.button)}>
<span>Learn more</span>
<Icon name="arrow-down" size="16" />
</button>
</div>
</div>
</div>
);
};
export default Catalog;
|
lechium/iOS1351Headers
|
usr/libexec/storebookkeeperd/SBDDomainSyncServiceHandler.h
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import "SBDDomainServiceHandler.h"
#import "SBCDomainSyncServiceProtocol-Protocol.h"
#import "SBCDomainSyncServiceProtocol_Internal-Protocol.h"
#import "SBDDomainSyncedStorageObserver-Protocol.h"
@class NSObject, NSString, SBDDomainSyncedStorageController;
@protocol OS_dispatch_queue;
// Service handler bridging the SBCDomainSyncServiceProtocol XPC interface
// to a SBDDomainSyncedStorageController; work appears to be serialized on
// the private _queue (generated class-dump header -- the "// N = 0xN"
// annotations are ivar offsets and "IMP=..." are dumped method addresses).
@interface SBDDomainSyncServiceHandler : SBDDomainServiceHandler <SBCDomainSyncServiceProtocol_Internal, SBDDomainSyncedStorageObserver, SBCDomainSyncServiceProtocol>
{
    _Bool _hasChangesToPush;	// 8 = 0x8
    _Bool _isActive;	// 9 = 0x9
    NSObject<OS_dispatch_queue> *_queue;	// 16 = 0x10
    SBDDomainSyncedStorageController *_syncedStorageController;	// 24 = 0x18
    double _preferredSynchronizeInterval;	// 32 = 0x20
}

+ (id)_sharedSystemSyncHandlers;	// IMP=0x00000001000130b8
+ (void)enumerateSystemSyncHandlersWithBlock:(CDUnknownBlockType)arg1;	// IMP=0x000000010001211c
- (void).cxx_destruct;	// IMP=0x0000000100013390
@property(readonly, nonatomic) _Bool isActive; // @synthesize isActive=_isActive;
@property(readonly, nonatomic) _Bool hasChangesToPush; // @synthesize hasChangesToPush=_hasChangesToPush;
@property(readonly, nonatomic) double preferredSynchronizeInterval; // @synthesize preferredSynchronizeInterval=_preferredSynchronizeInterval;
@property(readonly, nonatomic) SBDDomainSyncedStorageController *syncedStorageController; // @synthesize syncedStorageController=_syncedStorageController;
@property(readonly, nonatomic) NSObject<OS_dispatch_queue> *queue; // @synthesize queue=_queue;
- (void)_onQueueSynchronizeWithInterval:(double)arg1 isCheckpoint:(_Bool)arg2;	// IMP=0x00000001000132b4
- (void)_onQueue_clearHasChanges;	// IMP=0x00000001000132a4
- (void)bookkeeperStorageControllerDidSync:(id)arg1;	// IMP=0x0000000100013048
- (void)synchronizeImmediatelyWithCompletionHandler:(CDUnknownBlockType)arg1;	// IMP=0x0000000100012f6c
- (void)synchronizeForChangedAccountStatus;	// IMP=0x0000000100012e34
- (void)synchronizeIfNeedsInitialSynchronization;	// IMP=0x0000000100012d10
- (void)synchronizeForChangedAccountByResetInvalidatingLocalChanges:(_Bool)arg1;	// IMP=0x0000000100012bf8
- (void)synchronizeForUpdatedRemoteDomainVersion:(id)arg1;	// IMP=0x0000000100012a94
- (id)lastSynchronizationFailureAccountIdentifier;	// IMP=0x0000000100012a7c
- (id)lastSynchronizedAccountIdentifier;	// IMP=0x0000000100012a64
- (oneway void)pullLocalPlaybackPositionForEntityIdentifiers:(id)arg1 completionBlock:(CDUnknownBlockType)arg2;	// IMP=0x00000001000129c8
- (oneway void)synchronizeImmediately;	// IMP=0x0000000100012950
- (oneway void)updateForeignDatabaseWithValuesFromPlaybackPositionEntity:(id)arg1;	// IMP=0x0000000100012938
- (oneway void)deletePlaybackPositionEntities;	// IMP=0x0000000100012934
- (oneway void)deletePlaybackPositionEntity:(id)arg1;	// IMP=0x000000010001291c
- (oneway void)persistPlaybackPositionEntity:(id)arg1 isCheckpoint:(_Bool)arg2 completionBlock:(CDUnknownBlockType)arg3;	// IMP=0x0000000100012588
- (oneway void)endAccessingPlaybackPositionEntities;	// IMP=0x00000001000124f4
- (oneway void)beginAccessingPlaybackPositionEntities;	// IMP=0x000000010001237c
- (void)dealloc;	// IMP=0x0000000100012320
- (id)initWithDomain:(id)arg1;	// IMP=0x0000000100012260

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;

@end
|
egmnklc/Egemen-Files
|
CS204/Hw 4/DynStackHW4.cpp
|
<reponame>egmnklc/Egemen-Files<filename>CS204/Hw 4/DynStackHW4.cpp
//Do not forget to rename this file according to the file naming rules
#include <iostream>
#include "DynStackHW4.h"
using namespace std;
//************************************************
// Constructor to generate an empty stack. *
//************************************************
DynStack::DynStack() : top(nullptr)
{
    // An empty stack is simply a null top pointer; nothing else to do.
}
//************************************************
// Member function push pushes the argument onto *
// the stack. *
//************************************************
void DynStack::push(char symbol, int line_number)
{
    // Allocate a new node and store the payload.
    StackNode *newNode = new StackNode;
    newNode->symbol = symbol;
    newNode->line_number = line_number;

    // Link the new node in front of the current top.  This also covers
    // the empty-stack case: when the stack is empty, top is already
    // NULL, so newNode->next becomes NULL exactly as required.  (The
    // original code special-cased isEmpty() with identical effect, so
    // the branch was redundant.)
    newNode->next = top;
    top = newNode;
}
//****************************************************
// Member function to delete all the nodes in the *
// stack *
//****************************************************
void DynStack::clear(){
StackNode * temp = top;
while (top!=nullptr){
top = top->next;
delete temp;
temp = top;
}
top = nullptr;
}
//****************************************************
// Destructor to delete the elements in the stack. *
//****************************************************
DynStack::~DynStack(){
    // Release every node still on the stack.
    clear();
}
//************************************************
// Copy constructor to copy the stack `rhs` into *
// the current stack. *
//************************************************
DynStack::DynStack(const DynStack& rhs){
    // Deep-copy rhs's node chain so the two stacks share no storage.
    top = rhs.clone();
}
//************************************************
// Create a clone of the StackNodes of the *
// calling object and return a pointer to the *
// first one.
//************************************************
StackNode* DynStack::clone() const{
    // Deep-copy the calling object's node chain, preserving order, and
    // return a pointer to the first copied node (nullptr when empty).
    //
    // Bug fix: the original left the `next` pointer of the LAST node
    // appended inside the loop uninitialized (only the head node got an
    // explicit nullptr), so a cloned multi-node stack ended in a garbage
    // pointer and any later traversal/clear() had undefined behavior.
    // Every copied node now has its `next` set.
    if (top == nullptr){
        return nullptr;
    } else {
        StackNode * existing_list_ptr = top;       // walks the source chain
        StackNode * new_list_ptr = new StackNode;  // head of the copy
        new_list_ptr->line_number = existing_list_ptr->line_number;
        new_list_ptr->symbol = existing_list_ptr->symbol;
        new_list_ptr->next = nullptr;
        StackNode* ptr = new_list_ptr;             // tail of the copy so far
        while(existing_list_ptr->next!=nullptr){
            ptr->next = new StackNode;
            ptr=ptr->next;
            existing_list_ptr = existing_list_ptr->next;
            ptr->symbol = existing_list_ptr->symbol;
            ptr->line_number = existing_list_ptr->line_number;
            ptr->next = nullptr;   // terminate the chain (was uninitialized)
        }
        return new_list_ptr;
    }
}
//************************************************
// Assignment operator to copy the stack `rhs` *
// into the current stack. *
//************************************************
const DynStack& DynStack::operator=(const DynStack& rhs){
    // Guard against self-assignment: clearing first would otherwise
    // destroy the very nodes we are about to clone.
    if (&rhs == this){
        return *this;
    }
    // Free the current contents, then deep-copy rhs's chain.
    this->clear();
    top = rhs.clone();
    return *this;
}
//****************************************************
// Member function pop pops the value at the top *
// of the stack off, and copies it into the variable *
// passed as an argument. *
//****************************************************
void DynStack::pop(char& symbol, int &line_number)
{
StackNode *temp;
if (isEmpty())
{
cout << "Attempted to pop from an empty stack. Program terminates. If you got this message, then there is a problem in your program." << endl;
exit(1);
}
else // pop value off top of stack
{
symbol = top->symbol;
line_number = top->line_number;
temp = top->next;
delete top;
top = temp;
}
}
//****************************************************
// Member funciton isEmpty returns true if the stack *
// is empty, or false otherwise. *
//****************************************************
bool DynStack::isEmpty(void)
{
    // The stack is empty exactly when there is no top node.
    return top == NULL;
}
|
real-slim-chadi/Python_Master-the-Art-of-Design-Patterns
|
Module 1/Chapter3/3_24_house_rental.py
|
class HouseRental(Rental, House):
    """A rentable house: combines House's attributes with Rental's terms."""

    @staticmethod
    def prompt_init():
        """Merge the House and Rental initialization prompts into one dict.

        Returns the combined keyword arguments used to construct a
        HouseRental; Rental keys override House keys on collision.
        """
        # Modernized: use the @staticmethod decorator instead of the
        # legacy ``prompt_init = staticmethod(prompt_init)`` rebinding
        # (same runtime behavior, same class attribute).
        init = House.prompt_init()
        init.update(Rental.prompt_init())
        return init
|
fflorens/portofolio42
|
42sh/libft/src/ft_memcmp.c
|
<reponame>fflorens/portofolio42
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_memcmp.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: nmokrane <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2013/11/19 18:55:34 by nmokrane #+# #+# */
/* Updated: 2014/03/19 19:30:52 by nmokrane ### ########.fr */
/* */
/* ************************************************************************** */
#include "libft.h"
/*
** - Compare byte string s1 against byte string s2.
** Both strings are assumed to be n bytes long.
** - The function returns zero if the two strings are identical, otherwise
** returns the difference between the first two differing bytes
** (treated as unsigned char values, so that `\200' is greater than
** `\0', for example). Zero-length strings are always identical.
*/
/*
** Compare the first n bytes of s1 and s2 as unsigned chars.
** Returns 0 when the regions are identical (or n == 0, or both
** pointers are the same), the difference of the first differing
** bytes otherwise, and -1 when either pointer is NULL (libft
** extension over the standard memcmp contract).
*/
int ft_memcmp(const void *s1, const void *s2, size_t n)
{
    const unsigned char *p1;
    const unsigned char *p2;
    size_t i;

    if (s1 == s2 || n == 0)
        return (0);
    if (s1 == NULL || s2 == NULL)
        return (-1);
    p1 = (const unsigned char*)s1;
    p2 = (const unsigned char*)s2;
    i = 0;
    while (i < n)
    {
        if (p1[i] != p2[i])
            return ((int)(p1[i] - p2[i]));
        i++;
    }
    return ((int)0);
}
|
caoshenghui/DataStructures
|
Chapter1-Overview/src/exercise1_zhangming/1-3-2.cpp
|
// File: 1-3-2.cpp
// Author: csh
// Date: 2020/05/01
// ===================
#include <iostream>
// Find every position of the maximum value in A[0..N-1].
//
// On return, ret[0] holds the index of the FIRST occurrence of the
// maximum; ret[1..] hold the indices of any later duplicates of that
// maximum, terminated by a -1 sentinel (the sentinel is omitted only
// when all N slots of ret are used).  Returns ret for convenience.
int* solution(double *A, int N, int *ret)
{
    // ret records the positions of the maximum value
    int position = 0;    // start by assuming the first element is the maximum
    int j = 1;           // next free slot in ret (slot 0 is reserved for position)
    for(int i = 1; i < N; i++)
    {
        if(A[i] > A[position])
        {
            position = i;   // new maximum: remember its position
            j = 1;          // discard previously recorded duplicate positions
        }else if(A[i] == A[position])
            ret[j++] = i;   // record a repeated occurrence of the maximum
    }
    ret[0] = position;
    if(j < N)
        ret[j] = -1;  // -1 sentinel: the first j entries are maximum positions
    return ret;
}
// Demo driver: finds and prints the maximum of a fixed array and every
// position where it occurs.
int main()
{
    using namespace std;
    double Num[] = {4529.9, 88, 333, 22, 100, 232.3, 4529.9};
    // Fix: declare N const.  sizeof arithmetic is a constant expression,
    // so "int ret[N]" below becomes a standard fixed-size array; with a
    // non-const N it was a variable-length array, which is only a
    // compiler extension in C++.
    const int N = sizeof(Num)/sizeof(double);
    int ret[N];
    solution(Num, N, ret);
    cout << "MaxNumber:" << Num[*ret] << endl;
    cout << "MaxPosition:";
    // ret is -1-terminated whenever fewer than N maxima were found.
    for(int i = 0; ret[i] != -1; i++)
        cout << ret[i] << " ";
    cout << endl;
    return 0;
}
|
noscripter/h
|
h/presenters.py
|
<gh_stars>0
"""A class that wraps h.api.models.Annotation and adds some HTML properties."""
from __future__ import unicode_literals
import urlparse
import urllib2
from dateutil import parser
import jinja2
from h._compat import text_type
def _format_document_link(href, title, link_text, hostname):
    """Build the HTML link for a document from its components.

    Helper for AnnotationHTMLPresenter.document_link.

    :returns: A document link as an HTML string, escaped and safe for
        rendering.  The result is a Markup object so that it won't be
        double-escaped.
    """
    # Suppress the hostname when it already appears in the link text.
    if hostname and hostname in link_text:
        hostname = ""

    def truncate(content, length=60):
        """Truncate the given string to at most length chars."""
        if len(content) <= length:
            return content
        return content[:length] + jinja2.Markup("&hellip;")

    hostname = truncate(hostname)
    link_text = truncate(link_text)

    # Choose the template based on which pieces are actually present.
    if href and hostname:
        template = '<a href="{href}" title="{title}">{link_text}</a><br>({hostname})'
    elif hostname:
        template = '<a title="{title}">{link_text}</a><br>({hostname})'
    elif href:
        template = '<a href="{href}" title="{title}">{link_text}</a>'
    else:
        template = '<a title="{title}">{link_text}</a>'

    markup = template.format(
        href=jinja2.escape(href),
        title=jinja2.escape(title),
        link_text=jinja2.escape(link_text),
        hostname=jinja2.escape(hostname),
    )
    return jinja2.Markup(markup)
class AnnotationHTMLPresenter(object):
"""Wraps h.api.models.Annotation and adds some HTML properties."""
    def __init__(self, annotation):
        # Wrap the raw h.api.models.Annotation; unknown attribute and
        # item access is delegated to it (see __getattr__/__getitem__).
        self.annotation = annotation
    def __getattr__(self, attr):
        # Delegate attribute access to the wrapped annotation so the
        # presenter can be used wherever the model object is expected.
        return getattr(self.annotation, attr)
    def __getitem__(self, key):
        # Delegate dict-style access to the wrapped annotation.
        return self.annotation[key]
    @property
    def uri(self):
        # The annotated document's URI, escaped for safe HTML rendering
        # (returned as a Markup object, so it won't be double-escaped).
        return jinja2.escape(self.annotation.uri)
@property
def filename(self):
"""Return the filename of this annotation's document, or "".
If the annotated URI is a file:// URI then return the filename part
of it, otherwise return "".
The filename is escaped and safe to be rendered.
If it contains escaped characters then the filename will be a
Markup object so it won't be double-escaped.
"""
if self.uri.lower().startswith("file:///"):
return jinja2.escape(self.uri.split("/")[-1])
else:
return ""
@property
def hostname_or_filename(self):
"""Return the hostname of this annotation's document.
Returns the hostname part of the annotated document's URI, e.g.
"www.example.com" for "http://www.example.com/example.html".
If the URI is a file:// URI then return the filename part of it
instead.
The returned hostname or filename is escaped and safe to be rendered.
If it contains escaped characters the returned value will be a Markup
object so that it doesn't get double-escaped.
"""
if self.filename:
return jinja2.escape(self.filename)
else:
hostname = urlparse.urlparse(self.uri).hostname
# urlparse()'s .hostname is sometimes None.
hostname = hostname or ""
return jinja2.escape(hostname)
@property
def title(self):
"""Return a title for this annotation.
Return the annotated document's title or if the document has no title
then return its filename (if it's a file:// URI) or its URI for
non-file URIs.
The title is escaped and safe to be rendered.
If it contains escaped characters then the title will be a
Markup object, so that it won't be double-escaped.
"""
document_ = self.annotation.document
if document_:
try:
title = document_["title"]
except (KeyError, TypeError):
# Sometimes document_ has no "title" key or isn't a dict at
# all.
title = ""
if title:
# Convert non-string titles into strings.
# We're assuming that title cannot be a byte string.
title = text_type(title)
return jinja2.escape(title)
if self.filename:
return jinja2.escape(urllib2.unquote(self.filename))
else:
return jinja2.escape(urllib2.unquote(self.uri))
@property
def href(self):
"""Return an href for this annotation's document, or "".
Returns a value suitable for use as the value of the href attribute in
an <a> element in an HTML document.
Returns an empty string if the annotation doesn't have a document with
an http(s):// URI.
The href is escaped and safe to be rendered.
If it contains escaped characters the returned value will be a
Markup object so that it doesn't get double-escaped.
"""
uri = self.uri
if (uri.lower().startswith("http://") or
uri.lower().startswith("https://")):
return jinja2.escape(uri)
else:
return ""
    @property
    def link_text(self):
        """Return some link text for this annotation's document.

        Return a text representation of this annotation's document suitable
        for use as the link text in a link like <a ...>{link_text}</a>.

        Returns the document's title if it has one, or failing that uses part
        of the annotated URI if the annotation has one.
        """
        title = jinja2.escape(self.title)

        # Sometimes self.title is the annotated document's URI (if the document
        # has no title). In those cases we want to remove the http(s):// from
        # the front and unquote it for link text.
        lower = title.lower()
        if lower.startswith("http://") or lower.startswith("https://"):
            parts = urlparse.urlparse(title)
            # NOTE(review): this branch returns the unquoted string without
            # re-escaping it -- confirm the template escapes it on render.
            return urllib2.unquote(parts.netloc + parts.path)
        else:
            return title
    @property
    def document_link(self):
        """Return a link to this annotation's document.

        Returns HTML strings like:

            <a href="{href}" title="{title}">{link_text}</a> ({hostname})

        where:

        - {href} is the uri of the annotated document,
          if it has an http(s):// uri
        - {title} is the title of the document.
          If the document has no title then its uri will be used instead.
          If it's a local file:// uri then only the filename part is used,
          not the full path.
        - {link_text} is the same as {title}, but truncated with … if
          it's too long
        - {hostname} is the hostname name of the document's uri without
          the scheme (http(s)://) and www parts, e.g. "example.com".
          If it's a local file:// uri then the filename is used as the
          hostname.
          If the hostname is too long it is truncated with ….

        The ({hostname}) part will be missing if it wouldn't be any different
        from the {link_text} part.

        The href="{href}" will be missing if there's no http(s) uri to link to
        for this annotation's document.

        User-supplied values are escaped so the string is safe for raw
        rendering (the returned string is actually a Markup object and
        won't be escaped by Jinja2 when rendering).
        """
        # The four pieces are each already escaped by their own properties;
        # the module-level helper only assembles and truncates them.
        return _format_document_link(
            self.href, self.title, self.link_text, self.hostname_or_filename)
    @property
    def description(self):
        """An HTML-formatted description of this annotation.

        The description contains the target text that the user selected to
        annotate, as a <blockquote>, and the body text of the annotation
        itself.  Both parts are escaped; either may be absent.
        """
        def get_selection():
            # Walk target -> selector looking for the first "exact" quote,
            # defensively skipping malformed (non-list / non-dict) entries.
            targets = self.annotation.get("target")
            if not isinstance(targets, list):
                return
            for target in targets:
                if not isinstance(target, dict):
                    continue
                selectors = target.get("selector")
                if not isinstance(selectors, list):
                    continue
                for selector in selectors:
                    if not isinstance(selector, dict):
                        continue
                    if "exact" in selector:
                        return selector["exact"]

        description = ""

        selection = get_selection()
        if selection:
            selection = jinja2.escape(selection)
            description += "<blockquote>{selection}</blockquote>".format(
                selection=selection)

        text = self.annotation.get("text")
        if text:
            text = jinja2.escape(text)
            description += "{text}".format(text=text)

        return description
@property
def created_day_string(self):
"""A simple created day string for this annotation.
Returns a day string like '2015-03-11' from the annotation's 'created'
date.
"""
created_string = jinja2.escape(self.annotation["created"])
return parser.parse(created_string).strftime("%Y-%m-%d")
|
washingtonxr/AndroidSensor4testing
|
CySmart_Android_1.2.0.156_Source_Code_0/src/com/cypress/cysmart/RDKEmulatorView/KeyBoardAttributes.java
|
<reponame>washingtonxr/AndroidSensor4testing
/*
* Copyright Cypress Semiconductor Corporation, 2014-2018 All rights reserved.
*
* This software, associated documentation and materials ("Software") is
* owned by Cypress Semiconductor Corporation ("Cypress") and is
* protected by and subject to worldwide patent protection (UnitedStates and foreign), United States copyright laws and international
* treaty provisions. Therefore, unless otherwise specified in a separate license agreement between you and Cypress, this Software
* must be treated like any other copyrighted material. Reproduction,
* modification, translation, compilation, or representation of this
* Software in any other form (e.g., paper, magnetic, optical, silicon)
* is prohibited without Cypress's express written permission.
*
* Disclaimer: THIS SOFTWARE IS PROVIDED AS-IS, WITH NO WARRANTY OF ANY
* KIND, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
* NONINFRINGEMENT, IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE. Cypress reserves the right to make changes
* to the Software without notice. Cypress does not assume any liability
* arising out of the application or use of Software or any product or
* circuit described in the Software. Cypress does not authorize its
* products for use as critical components in any products where a
* malfunction or failure may reasonably be expected to result in
* significant injury or death ("High Risk Product"). By including
* Cypress's product in a High Risk Product, the manufacturer of such
* system or application assumes all risk of such use and in doing so
* indemnifies Cypress against all liability.
*
* Use of this Software may be limited by and subject to the applicable
* Cypress software license agreement.
*
*
*/
package com.cypress.cysmart.RDKEmulatorView;
import java.util.HashMap;
/**
 * Maps HID keyboard usage IDs (received through the HID Keyboard Report)
 * to human-readable key descriptions, per the USB HID Usage Tables,
 * Keyboard/Keypad usage page (0x07).
 */
public class KeyBoardAttributes {

    /** Usage-ID -> description lookup table, populated once at class load. */
    private static final HashMap<Integer, String> m_keyCodes = new HashMap<Integer, String>();

    // Usage IDs 0..164 (contiguous).
    private static final String[] KEYS_0_TO_164 = {
            "Reserved", "ErrorRollOver", "POSTFail", "ErrorUndefined",
            "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M",
            "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
            "1", "2", "3", "4", "5", "6", "7", "8", "9", "0",
            "Enter", "Escape", "Delete", "Tab", "Space",
            "- Minus", "= Equals", "[ Left Bracket", "] Right Bracket",
            "\\ Backslash", "Non-US # NonUS Pound", "; Semicolon", "' Quote",
            "` Grave", ", Comma", ". Period", "/ Slash", "Caps Lock",
            "F1", "F2", "F3", "F4", "F5", "F6", "F7", "F8", "F9", "F10",
            "F11", "F12",
            "Print Screen", "Scroll Lock", "Pause", "Insert", "Home",
            "Page Up", "Delete Forward", "End", "Page Down",
            "Right", "Left", "Down", "Up",
            "Keypad NumLock", "Keypad / Keypad Divide",
            "Keypad * Keypad Multiply", "Keypad - Keypad Subtract",
            // Fixed garbled names: HID 0x57 is "Keypad Add" (was "Keypad put").
            "Keypad + Keypad Add", "Keypad Enter",
            "Keypad 1", "Keypad 2", "Keypad 3", "Keypad 4", "Keypad 5",
            "Keypad 6", "Keypad 7", "Keypad 8", "Keypad 9", "Keypad 0",
            "Keypad . Keypad Point", "Non-US \\ NonUS Backslash",
            "Application", "Power", "Keypad = Keypad Equals",
            "F13", "F14", "F15", "F16", "F17", "F18", "F19", "F20", "F21",
            "F22", "F23", "F24",
            "Execute", "Help", "Menu", "Select", "Stop", "Again", "Undo",
            "Cut", "Copy", "Paste", "Find", "Mute",
            "Volume Up", "Volume Down",
            "Locking Caps Lock", "Locking Num Lock", "Locking Scroll Lock",
            "Keypad Comma", "Keypad Equal Sign",
            "International 1", "International 2", "International 3",
            "International 4", "International 5", "International 6",
            "International 7", "International 8", "International 9",
            "Lang 1", "Lang 2", "Lang 3", "Lang 4", "Lang 5", "Lang 6",
            "Lang 7", "Lang 8", "Lang 9",
            "Alternate Erase", "SysReq/Attention SysReq", "Cancel", "Clear",
            "Prior", "Return", "Separator", "Out", "Oper",
            "Clear/Again Clear", "CrSel/Props CrSel", "ExSel",
    };

    // Usage IDs 176..221 (contiguous; 165..175 are reserved).
    private static final String[] KEYS_176_TO_221 = {
            "Keypad 00", "Keypad 000", "Thousands Separator",
            "Decimal Separator", "Currency Unit",
            "Currency Sub-unit Currency Subunit",
            "Keypad ( Keypad Left Paren", "Keypad ) Keypad Right Paren",
            "Keypad { Keypad Left Brace", "Keypad } Keypad Right Brace",
            "Keypad Tab", "Keypad Backspace",
            "Keypad A", "Keypad B", "Keypad C", "Keypad D", "Keypad E",
            "Keypad F",
            "Keypad XOR", "Keypad ^ Keypad Caret", "Keypad % Keypad Percent",
            "Keypad < Keypad Less Than", "Keypad > Keypad Greater Than",
            "Keypad & Keypad And", "Keypad && Keypad Double And",
            "Keypad | Keypad Pipe", "Keypad || Keypad Double Pipe",
            "Keypad : Keypad Colon", "Keypad # Keypad Pound", "Keypad Space",
            "Keypad @ Keypad At Sign", "Keypad ! Keypad Exclamation",
            "Keypad Memory Store", "Keypad Memory Recall",
            // Fixed garbled name: HID 0xD3 is "Keypad Memory Add".
            "Keypad Memory Clear", "Keypad Memory Add",
            "Keypad Memory Subtract", "Keypad Memory Multiply",
            "Keypad Memory Divide", "Keypad +/- Keypad Plus Minus",
            "Keypad Clear", "Keypad Clear Entry", "Keypad Binary",
            "Keypad Octal", "Keypad Decimal", "Keypad Hexadecimal",
    };

    // Usage IDs 224..231 (modifier keys; 222..223 are reserved).
    private static final String[] KEYS_224_TO_231 = {
            "Left Control", "Left Shift", "Left Alt", "Left GUI",
            "Right Control", "Right Shift", "Right Alt", "Right GUI",
    };

    static {
        addRange(0, KEYS_0_TO_164);
        addRange(176, KEYS_176_TO_221);
        addRange(224, KEYS_224_TO_231);
    }

    /** Registers names[i] under usage ID firstCode + i. */
    private static void addRange(int firstCode, String[] names) {
        for (int i = 0; i < names.length; i++) {
            m_keyCodes.put(firstCode + i, names[i]);
        }
    }

    /**
     * Returns the human-readable description for a keycode, or the decimal
     * keycode itself (as a string) when the code is unknown.
     */
    public static String lookupKeycodeDescription(Integer keycode) {
        String name = m_keyCodes.get(keycode);
        return name == null ? "" + keycode : name;
    }
}
|
Foltik/Shrimpa
|
app/util/canonicalize.js
|
// Normalizes, decomposes, and lowercases a utf-8 string
exports.canonicalize = displayname => displayname.normalize('NFKD').toLowerCase();
exports.canonicalizeRequest =
(req, res, next) => {
if (req.body.displayname)
req.body.username = exports.canonicalize(req.body.displayname);
else if (req.body.username)
req.body.username = exports.canonicalize(req.body.username);
next();
};
|
parzulpan/demo
|
SpringBoot/1.x/src/data-jpa/src/main/java/cn/parzulpan/controller/UserController.java
|
package cn.parzulpan.controller;
import cn.parzulpan.entity.User;
import cn.parzulpan.repository.UserRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class UserController {

    @Autowired
    UserRepository userRepository;

    /** Fetches a user by id, e.g. GET http://localhost:8080/user/1 */
    @GetMapping("/user/{id}")
    public User getUser(@PathVariable("id") Integer id) {
        return userRepository.findOne(id);
    }

    /**
     * Saves the user bound from query parameters,
     * e.g. GET http://localhost:8080/user/?lastName=parzul&amp;email=...
     * NOTE(review): inserting via GET is non-idiomatic REST; consider
     * {@code @PostMapping} — mapping kept as-is to preserve the interface.
     */
    @GetMapping("/user")
    public User insertUser(User user) {
        return userRepository.save(user);
    }
}
|
murkhog/emistoolbox
|
libs/joriki/src/info/joriki/awt/image/jp2/Precinct.java
|
/*
* Copyright 2003 <NAME>. All rights reserved.
* Use is subject to license terms.
*/
package info.joriki.awt.image.jp2;
import java.io.DataInput;
import java.io.IOException;
import java.awt.Rectangle;
import info.joriki.io.BitSource;
// One precinct of a JPEG2000 resolution level: a rectangular patch together
// with the code-block bands it contributes to each subband.
class Precinct extends Rectangle
{
  Band [] bands;

  Precinct (Rectangle patch,Rectangle [] [] subbands,CodingStyle codingStyle)
  {
    super (patch);
    // n is the subband grid size: 1 at the lowest level (LL only), else 2.
    int n = subbands.length;
    // Scale the patch coordinates down into subband space.
    int x = patch.x / n;
    int y = patch.y / n;
    int width = patch.width / n;
    int height = patch.height / n;
    // One band at the lowest level, otherwise three (presumably HL/LH/HH).
    bands = new Band [n == 1 ? 1 : 3];
    for (int suby = 0,sub = 0;suby < n;suby++)
      for (int subx = 0;subx < n;subx++)
        // Take only (0,0) when n == 1; take everything but (0,0) when n == 2.
        if ((subx == 0 && suby == 0) == (n == 1))
        {
          Rectangle subband = subbands [subx] [suby];
          // Clip the shifted patch against the subband's own extent.
          bands [sub++] = new Band
            (subband.intersection
             (new Rectangle (subband.x + x,subband.y + y,width,height)),
             codingStyle);
        }
  }

  // Decodes this precinct's packet header for the given quality layer.
  void decodePacketHeader (BitSource source,int layer) throws IOException
  {
    for (int i = 0;i < bands.length;i++)
      bands [i].decodePacketHeader (source,layer);
  }

  // Reads the packet body data for all bands.
  void readPacketData (DataInput in) throws IOException
  {
    for (int i = 0;i < bands.length;i++)
      bands [i].readPacketData (in);
  }
}
|
siy/reactive-toolbox
|
async-io/src/main/java/org/reactivetoolbox/io/async/net/Inet6Address.java
|
/*
* Copyright (c) 2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.reactivetoolbox.io.async.net;
import org.reactivetoolbox.core.lang.functional.Result;
import static org.reactivetoolbox.io.NativeFailureType.EFAULT;
/**
 * Immutable wrapper for a raw 16-byte IPv6 address.
 */
public class Inet6Address implements InetAddress {
    /** Length in bytes of a raw IPv6 address. */
    public static final int SIZE = 16;

    private final byte[] address;

    private Inet6Address(final byte[] address) {
        this.address = address;
    }

    /**
     * Validates and wraps a raw IPv6 address.
     * Returns an EFAULT failure result when the array is not 16 bytes long.
     */
    public static Result<Inet6Address> inet6Address(final byte[] address) {
        if (address.length == SIZE) {
            return Result.ok(new Inet6Address(address));
        }
        return EFAULT.asResult();
    }

    /**
     * Returns the raw address bytes.
     * NOTE(review): exposes the internal array — callers must not mutate it.
     */
    @Override
    public byte[] asBytes() {
        return address;
    }
}
|
PacktPublishing/The-Complete-Python-Course-including-Django-Web-Framework
|
Python 101/comparison_operators.py
|
<filename>Python 101/comparison_operators.py<gh_stars>1-10
# Comparison-operator lesson examples; all snippets intentionally commented out.
# can_code = True
# if can_code == True:
# # do a thing
# print("You can code!")
# else:
# # do something else
# print("You dont know how to code yet")
# teacher = "<NAME>"
# if teacher.lower() == "<NAME>":
# print("Show the teacher portal")
# else:
# print("You are a student. Welcome to Python 101")
# name = input("What is your name? ")
# if name == "Bob":
# print("Welcome Bob!")
# bring_food = "Pizza"
# elif name == "Kalob":
# print("Welcome to your teacher portal")
# bring_food = "Tacos"
# elif name == "Nathan":
# print("Welcome to the Gym")
# bring_food = "Weigh protein"
# else:
# print("You're not bob get outta here")
# bring_food = "Salmon"
# print(f"You are eating {bring_food}")
# name = input("What is your name? ")
# name = name.lower()
# if name != "bob":
# print("You're not bob, get out of here")
# else:
# print("Welcome Bobby boy")
# Comparison operator cheat sheet (previously bare lines — a SyntaxError):
# >    greater than
# >=   greater than or equal to
# <    less than
# <=   less than or equal to
# ==   is the same
# !=   is not the same
|
dbsystel/kewl
|
testing/corev1_test/example_pods.go
|
<filename>testing/corev1_test/example_pods.go<gh_stars>1-10
package corev1_test
import (
"github.com/dbsystel/kewl/testing"
"github.com/dbsystel/kewl/testing/json_test"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
)
// Namespace for the pods
const Namespace = "test"
// PodTypeMeta is the metav1.TypeMeta for corev1.Pod
var PodTypeMeta = metav1.TypeMeta{
Kind: "Pod",
APIVersion: corev1.SchemeGroupVersion.Group + "/" + corev1.SchemeGroupVersion.Version,
}
// PodKind is the metav1.GroupVersionKind for corev1.Pod
var PodKind = metav1.GroupVersionKind{Group: corev1.SchemeGroupVersion.Group, Version: corev1.SchemeGroupVersion.Version, Kind: "Pod"}
var _ testing.Reviewable = &Pod{}
var _ runtime.Object = &Pod{}
// Pod extends corev1.Pod for testing
type Pod corev1.Pod
// MustMarshal JSON-marshals the pod, panicking on failure (test helper).
func (t *Pod) MustMarshal() []byte {
	return json_test.MarshalJSONOrPanic((*corev1.Pod)(t))
}
// RawExtension marshals the pod and creates a runtime.RawExtension for which
// Raw is set to the marshaled JSON. Panics on marshal failure (see MustMarshal).
func (t *Pod) RawExtension() runtime.RawExtension {
	return runtime.RawExtension{Raw: t.MustMarshal()}
}
// DeepCopyObject implements runtime.Object by deep-copying the underlying corev1.Pod.
func (t *Pod) DeepCopyObject() runtime.Object {
	return (*Pod)((*corev1.Pod)(t).DeepCopy())
}
// AsCoreV1 converts the test Pod back to a *corev1.Pod (nil-safe).
func (t *Pod) AsCoreV1() *corev1.Pod {
	if t == nil {
		return nil
	}
	return (*corev1.Pod)(t)
}
// NewPod creates a new Pod with the provided name
func NewPod(name string) *Pod {
	return &Pod{TypeMeta: PodTypeMeta, ObjectMeta: metav1.ObjectMeta{Name: name, Namespace: Namespace}}
}

// NewBrokenPod creates a new broken Pod with the provided name
// (its Kind "BrokenPod" does not match the registered Pod kind).
func NewBrokenPod(name string) *Pod {
	return &Pod{TypeMeta: metav1.TypeMeta{
		Kind:       "BrokenPod",
		APIVersion: PodTypeMeta.APIVersion,
	}, ObjectMeta: metav1.ObjectMeta{Name: name, Namespace: Namespace}}
}

// NewDetachedPod creates a new Pod with the provided name but no namespace set.
func NewDetachedPod(name string) *Pod {
	return &Pod{TypeMeta: PodTypeMeta, ObjectMeta: metav1.ObjectMeta{Name: name}}
}
// ErrorPod is a pod which should create an error on handling
var ErrorPod = NewPod("error")
// PanicPod is a pod which should create a panic on handling
var PanicPod = NewPod("panic")
// ValidPod is a pod which is considered valid on handling
var ValidPod = NewPod("valid")
// InvalidPod is a pod which is considered invalid on handling
var InvalidPod = NewPod("invalid")
// DetachedPod is a pod which is not attached to a namespace
var DetachedPod = NewDetachedPod("detached")
// BadPod is a pod which does not serialize correctly
var BadPod = NewBrokenPod("broken")
|
ricofehr/enginepp
|
src/nextfloor/core/game_file_io.cc
|
<reponame>ricofehr/enginepp
/**
* @file game_file_io.cc
* @brief File I/O Operations
* @author <NAME> (<EMAIL>, github: ricofehr)
*/
#include "nextfloor/core/game_file_io.h"
#include <cassert>
#include <fstream>
#include <sstream>
#include "nextfloor/core/common_services.h"
namespace nextfloor {
namespace core {
namespace {
static bool sInstanciated = false;
} // anonymous namespace
// Singleton-style guard: only one GameFileIO instance may exist at a time.
GameFileIO::GameFileIO()
{
    assert(!sInstanciated);
    sInstanciated = true;
}
// Reads a whole text file into a string; logs and exits the process when the
// file cannot be opened.
std::string GameFileIO::ReadFile(const std::string& file_path) const
{
    std::string file_str;

    std::ifstream file_stream(file_path, std::ios::in);
    if (file_stream.is_open()) {
        std::string line = "";
        while (getline(file_stream, line)) {
            // NOTE(review): every line (including the first) is prefixed
            // with '\n', so the result starts with a newline and has no
            // trailing one — presumably tolerated by consumers; confirm
            // before changing.
            file_str += "\n" + line;
        }
        file_stream.close();
    }
    else {
        // Fatal: logs the failure and terminates via ExitOnError().
        HandleErrorOnReadFile(file_path);
    }

    return file_str;
}
// Logs an "unable to open" message for file_path and exits the process.
void GameFileIO::HandleErrorOnReadFile(const std::string& file_path) const
{
    std::ostringstream message;
    message << "Unable to open " << file_path;
    CommonServices::getLog()->WriteLine(std::move(message));
    CommonServices::getExit()->ExitOnError();
}
// Releases the singleton guard so a new instance may be created later.
GameFileIO::~GameFileIO() noexcept
{
    assert(sInstanciated);
    sInstanciated = false;
}
} // namespace core
} // namespace nextfloor
|
henriquegemignani/urde
|
Runtime/MP1/CPauseScreenBlur.cpp
|
#include "Runtime/MP1/CPauseScreenBlur.hpp"
#include "Runtime/CSimplePool.hpp"
#include "Runtime/GameGlobalObjects.hpp"
#include "Runtime/Audio/CSfxManager.hpp"
namespace urde::MP1 {
// Loads the quarter-light map texture used by the pause filter.
CPauseScreenBlur::CPauseScreenBlur() : x4_mapLightQuarter(g_SimplePool->GetObj("TXTR_MapLightQuarter")) {}

// Maps the in-game GUI state onto this blur's internal state machine.
void CPauseScreenBlur::OnNewInGameGuiState(EInGameGuiState state, CStateManager& stateMgr) {
  switch (state) {
  case EInGameGuiState::Zero:
  case EInGameGuiState::InGame:
    SetState(EState::InGame);
    break;
  case EInGameGuiState::MapScreen:
    SetState(EState::MapScreen);
    break;
  case EInGameGuiState::PauseSaveGame:
    SetState(EState::SaveGame);
    break;
  case EInGameGuiState::PauseHUDMessage:
    SetState(EState::HUDMessage);
    break;
  case EInGameGuiState::PauseGame:
  case EInGameGuiState::PauseLogBook:
    SetState(EState::Pause);
    break;
  default:
    break;
  }
}
// Requests a transition of the blur state machine, switching the SFX channel
// and seeding x18_blurAmt with the fade direction for Update().
void CPauseScreenBlur::SetState(EState state) {
  // Leaving gameplay for a pause-type screen: start blurring in
  // (tiny positive seed so Update() ramps toward 1).
  if (x10_prevState == EState::InGame && state != EState::InGame) {
    CSfxManager::SetChannel(CSfxManager::ESfxChannels::PauseScreen);
    if (state == EState::HUDMessage)
      CSfxManager::SfxStart(SFXui_into_hud_message, 1.f, 0.f, false, 0x7f, false, kInvalidAreaId);
    else if (state == EState::MapScreen)
      CSfxManager::SfxStart(SFXui_into_map_screen, 1.f, 0.f, false, 0x7f, false, kInvalidAreaId);
    x18_blurAmt = FLT_EPSILON;
  }
  // Returning to gameplay: negative seed ramps back up to 0 (un-blur).
  if (state == EState::InGame && (x10_prevState != EState::InGame || x14_nextState != EState::InGame)) {
    CSfxManager::SetChannel(CSfxManager::ESfxChannels::Game);
    if (x10_prevState == EState::HUDMessage)
      CSfxManager::SfxStart(SFXui_outof_hud_message, 1.f, 0.f, false, 0x7f, false, kInvalidAreaId);
    else if (x10_prevState == EState::MapScreen)
      CSfxManager::SfxStart(SFXui_outof_map_screen, 1.f, 0.f, false, 0x7f, false, kInvalidAreaId);
    x18_blurAmt = -1.f;
  }
  x14_nextState = state;
}
// Commits the pending state once a fade finishes; when fully back in
// gameplay, re-enables world drawing.
void CPauseScreenBlur::OnBlurComplete(bool b) {
  if (x14_nextState == EState::InGame && !b)
    return;

  x10_prevState = x14_nextState;
  if (x10_prevState == EState::InGame)
    x50_25_gameDraw = true;
}
// Advances the blur fade: negative amounts ramp up to 0 (un-blur), positive
// amounts ramp up to 1 (blur); hitting either endpoint commits the state.
void CPauseScreenBlur::Update(float dt, const CStateManager& stateMgr, bool b) {
  if (x10_prevState == x14_nextState)
    return;

  if (x18_blurAmt < 0.f)
    x18_blurAmt = std::min(0.f, 2.f * dt + x18_blurAmt);
  else
    x18_blurAmt = std::min(1.f, 2.f * dt + x18_blurAmt);

  if (x18_blurAmt == 0.f || x18_blurAmt == 1.f)
    OnBlurComplete(b);

  if (x18_blurAmt == 0.f && b) {
    x1c_camBlur.DisableBlur(0.f);
  } else {
    // Blur strength scales with |amt| so fade-in and fade-out look the same.
    x1c_camBlur.SetBlur(EBlurType::HiBlur, g_tweakGui->GetPauseBlurFactor() * std::fabs(x18_blurAmt), 0.f);
    x50_24_blurring = true;
  }
}
// Renders the camera blur plus the pause overlay filters (quarter tint and
// scan lines), both faded by the current blur amount.
void CPauseScreenBlur::Draw(const CStateManager&) {
  SCOPED_GRAPHICS_DEBUG_GROUP("CPauseScreenBlur::Draw", zeus::skPurple);
  x1c_camBlur.Draw(true);
  const float t = std::fabs(x18_blurAmt);
  if (x1c_camBlur.GetCurrType() != EBlurType::NoBlur) {
    const auto filterColor = zeus::CColor::lerp(zeus::skWhite, g_tweakGuiColors->GetPauseBlurFilterColor(), t);
    m_quarterFilter.DrawFilter(EFilterShape::FullscreenQuarters, filterColor, t);
    const auto scanLinesColor = zeus::CColor::lerp(zeus::skWhite, zeus::CColor(0.75f, 1.f), t);
    m_linesFilter.draw(scanLinesColor);
  }
  // One-shot flags consumed after the blur pass has been drawn.
  if (x50_24_blurring /*&& x1c_camBlur.x2d_noPersistentCopy*/) {
    x50_24_blurring = false;
    x50_25_gameDraw = false;
  }
}
} // namespace urde::MP1
|
HongminWu/bnpy
|
tests/zzz_deprecated_unmaintained/obsmodel/TestZeroMeanGaussLocalStepSpeed.py
|
<filename>tests/zzz_deprecated_unmaintained/obsmodel/TestZeroMeanGaussLocalStepSpeed.py
import numpy as np
import scipy.linalg
import argparse
import time
from contextlib import contextmanager
def measureTime(f, nTrial=3):
    """Decorator: run ``f`` ``nTrial`` times, print per-trial and summary
    timings, and return the result of the last run.

    Note: when applied bare (``@measureTime``) ``f`` is passed positionally,
    so the trial count is always the default.
    """
    def f_timer(*args, **kwargs):
        times = list()
        result = None  # defined even if nTrial == 0
        for rep in range(nTrial):
            start = time.time()
            result = f(*args, **kwargs)
            end = time.time()
            times.append(end - start)
            # print() with a single argument behaves identically under
            # Python 2 and 3; the original `print expr` form was 2-only.
            if rep == 0:
                print("trial %2d/%2d: %.3f sec %s" % (
                    rep + 1, nTrial, times[-1], f.__name__))
            else:
                print("trial %2d/%2d: %.3f sec" % (
                    rep + 1, nTrial, times[-1]))
        print("mean of %2d: %.3f sec" % (nTrial, np.mean(times)))
        print("median of %2d: %.3f sec" % (nTrial, np.median(times)))
        print('')
        return result
    return f_timer
@measureTime
def mahalDist_np_solve(X=None, B=None, cholB=None):
    ''' Solve chol(B) Q = X.T with numpy's general-purpose solver.
    '''
    chol = np.linalg.cholesky(B) if B is not None else cholB
    return np.linalg.solve(chol, X.T)
@measureTime
def mahalDist_scipy_solve(X=None, B=None, cholB=None):
    ''' Solve chol(B) Q = X.T with scipy's general-purpose solver.
    '''
    chol = np.linalg.cholesky(B) if B is not None else cholB
    return scipy.linalg.solve(chol, X.T)
@measureTime
def mahalDist_scipy_solve_triangular(X=None, B=None, cholB=None):
    ''' Solve chol(B) Q = X.T exploiting the triangular structure.
    '''
    chol = np.linalg.cholesky(B) if B is not None else cholB
    return scipy.linalg.solve_triangular(chol, X.T, lower=True)
@measureTime
def mahalDist_scipy_solve_triangular_nocheck(
        X=None, B=None, cholB=None):
    ''' Solve chol(B) Q = X.T, triangular and skipping the finiteness check.
    '''
    chol = np.linalg.cholesky(B) if B is not None else cholB
    return scipy.linalg.solve_triangular(
        chol, X.T, lower=True, check_finite=False)
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # default was 1e5 (a float); use an int literal so N is always an int.
    parser.add_argument('--N', type=int, default=100000)
    parser.add_argument('--D', type=int, default=64)
    args = parser.parse_args()
    N = args.N
    D = args.D

    print("TIMING TEST: N=%d D=%d" % (N, D))

    # Build a random SPD matrix: R.T R is PSD, adding I makes it definite.
    X = np.random.randn(N, D)
    R = np.random.randn(D, D)
    B = np.dot(R.T, R) + np.eye(D, D)
    cholB = np.linalg.cholesky(B)

    mahalDist_np_solve(X=X, cholB=cholB)
    mahalDist_scipy_solve(X=X, cholB=cholB)
    mahalDist_scipy_solve_triangular(X=X, cholB=cholB)
    mahalDist_scipy_solve_triangular_nocheck(X=X, cholB=cholB)
"""
In [41]: Qs = scipy.linalg.solve_triangular(cholB, X.T, lower=True, check_finite=False)
In [42]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True, check_finite=False)
1 loops, best of 1: 625 ms per loop
In [43]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True, check_finite=False)
1 loops, best of 1: 623 ms per loop
In [44]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True)
1 loops, best of 1: 790 ms per loop
In [45]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True)
1 loops, best of 1: 799 ms per loop
In [46]: %timeit -n1 -r1 Q = scipy.linalg.solve(cholB, X.T)
1 loops, best of 1: 1.26 s per loop
In [47]: %timeit -n1 -r1 Q = scipy.linalg.solve(cholB, X.T)
1 loops, best of 1: 1.26 s per loop
"""
|
dotenx/dotenx
|
ao-api/models/runner_job.go
|
package models
import (
	"fmt"
	"sort"
	"strings"

	"github.com/dotenx/dotenx/ao-api/config"
)
type Job struct {
ExecutionId int `json:"executionId"`
TaskId int `json:"taskId"`
Timeout int `json:"timeout"`
Name string `json:"name"`
Type string `json:"type"`
Image string `json:"image"`
AccountId string `json:"account_id"`
Body map[string]interface{} `json:"body"`
MetaData TaskDefinition `json:"task_meta_data"`
ResultEndpoint string `json:"result_endpoint"`
WorkSpace string `json:"workspace"`
}
// NewJob creates a new job dto for the runner based on the given task for a
// certain execution.
func NewJob(task TaskDetails, executionId int, accountId string) *Job {
	// Resolve the runner image from the task-type registry.
	image := AvaliableTasks[task.Type].Image
	return &Job{
		ExecutionId: executionId,
		TaskId:      task.Id,
		Type:        task.Type,
		Timeout:     task.Timeout,
		Image:       image,
		Body:        task.Body,
		Name:        task.Name,
		AccountId:   accountId,
		MetaData:    AvaliableTasks[task.Type],
		// Endpoint the runner calls back with the task result.
		ResultEndpoint: fmt.Sprintf("%s/execution/id/%d/task/%d/result", config.Configs.Endpoints.AoApi, executionId, task.Id),
	}
}
// SetIntegration copies the integration's secrets into the job body (under
// an INTEGRATION_ prefix) and registers matching text fields in the metadata.
func (job *Job) SetIntegration(integration Integration) {
	for key, value := range integration.Secrets {
		fieldKey := "INTEGRATION_" + key
		job.Body[fieldKey] = value
		job.MetaData.Fields = append(job.MetaData.Fields, TaskField{Key: fieldKey, Type: "text"})
	}
}
// SetRunCodeFields registers every body key except "code" and "dependency"
// as a text field and stores the comma-separated key list under "VARIABLES".
func (job *Job) SetRunCodeFields() {
	varNames := make([]string, 0, len(job.Body))
	for key := range job.Body { // `for key, _ :=` is non-idiomatic
		if key == "code" || key == "dependency" {
			continue
		}
		varNames = append(varNames, key)
	}
	// Map iteration order is random; sort so VARIABLES and the field list
	// are deterministic between runs.
	sort.Strings(varNames)
	for _, key := range varNames {
		job.MetaData.Fields = append(job.MetaData.Fields, TaskField{Key: key, Type: "text"})
	}
	job.MetaData.Fields = append(job.MetaData.Fields, TaskField{Key: "VARIABLES", Type: "text"})
	job.Body["VARIABLES"] = strings.Join(varNames, ",")
}
|
byteskeptical/salt
|
salt/modules/salt_proxy.py
|
# -*- coding: utf-8 -*-
'''
Salt proxy module
.. versionadded:: 2015.8.3
Module to deploy and manage salt-proxy processes
on a minion.
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
import os
import logging
# Import Salt libs
import salt.utils.files
import salt.syspaths
# Import 3rd-party libs
import salt.ext.six.moves
log = logging.getLogger(__name__)
def _write_proxy_conf(proxyfile):
    '''
    Write the salt-proxy configuration (the master address from grains) to
    ``proxyfile`` and return a status message.

    If ``proxyfile`` is falsy nothing is written and the "invalid value"
    message is returned instead of raising.
    '''
    msg = 'Invalid value for proxy file provided!, Supplied value = {0}' \
        .format(proxyfile)

    log.trace('Salt Proxy Module: write proxy conf')

    if proxyfile:
        log.debug('Writing proxy conf file')
        with salt.utils.files.fopen(proxyfile, 'w') as proxy_conf:
            # NOTE(review): salt.utils.stringutils is used here but only
            # salt.utils.files is imported at the top of this module —
            # presumably relies on package import side effects; confirm.
            proxy_conf.write(salt.utils.stringutils.to_str('master: {0}'
                             .format(__grains__['master'])))
        msg = 'Wrote proxy file {0}'.format(proxyfile)
        log.debug(msg)

    return msg
def _proxy_conf_file(proxyfile, test):
    '''
    Check if the proxy conf file exists and create it when missing.

    In test mode only reports that an update is required.  Returns a
    ``(success, new_changes, old_changes)`` tuple.
    '''
    changes_old = []
    changes_new = []
    success = True
    if not os.path.exists(proxyfile):
        try:
            if not test:
                changes_new.append(_write_proxy_conf(proxyfile))
                msg = 'Salt Proxy: Wrote proxy conf {0}'.format(proxyfile)
            else:
                msg = 'Salt Proxy: Update required to proxy conf {0}' \
                    .format(proxyfile)
        except (OSError, IOError) as err:
            success = False
            msg = 'Salt Proxy: Error writing proxy file {0}'.format(err)
            log.error(msg)
        # Record the outcome exactly once (previously the error message was
        # appended to changes_new twice on the failure path).
        changes_new.append(msg)
        log.debug(msg)
    else:
        msg = 'Salt Proxy: {0} already exists, skipping'.format(proxyfile)
        changes_old.append(msg)
        log.debug(msg)
    return success, changes_new, changes_old
def _is_proxy_running(proxyname):
    '''
    Check if a salt-proxy process for this proxy name is running.
    '''
    quoted_name = salt.ext.six.moves.shlex_quote(proxyname)
    cmd = ('ps ax | grep "salt-proxy --proxyid={0}" | grep -v grep'
           .format(quoted_name))
    cmdout = __salt__['cmd.run_all'](
        cmd,
        timeout=5,
        python_shell=True)
    # Any matching ps output means the proxy process exists.
    return bool(cmdout['stdout'])
def _proxy_process(proxyname, test):
    '''
    Start the salt-proxy daemon for ``proxyname`` unless one is already
    running.  In test mode only report the pending start.

    Returns a ``(True, changes_new, changes_old)`` tuple of change
    messages.
    '''
    new_changes = []
    old_changes = []
    if _is_proxy_running(proxyname):
        old_changes.append('Salt Proxy: already running for {0}'
                           .format(proxyname))
    elif test:
        new_changes.append('Salt Proxy: process {0} will be started'
                           .format(proxyname))
    else:
        __salt__['cmd.run_all'](
            'salt-proxy --proxyid={0} -l info -d'.format(
                salt.ext.six.moves.shlex_quote(proxyname)),
            timeout=5)
        new_changes.append('Salt Proxy: Started proxy process for {0}'
                           .format(proxyname))
    return True, new_changes, old_changes
def configure_proxy(proxyname, start=True):
    '''
    Create the salt proxy file and start the proxy process
    if required

    Parameters:
        proxyname:
            Name to be used for this proxy (should match entries in pillar)
        start:
            Boolean indicating if the process should be started
            default = True

    CLI Example:

    .. code-block:: bash

        salt deviceminion salt_proxy.configure_proxy p8000
    '''
    changes_new = []
    changes_old = []
    test = __opts__['test']

    # Write the proxy config file if it does not exist yet.
    proxyfile = os.path.join(salt.syspaths.CONFIG_DIR, 'proxy')
    status_file, msg_new, msg_old = _proxy_conf_file(proxyfile, test)
    changes_new.extend(msg_new)
    changes_old.extend(msg_old)

    # Start the proxy process when requested.  When ``start`` is False
    # the process step is deliberately skipped and must not drag the
    # overall result down: previously ``status_proc`` stayed False, so
    # ``result`` was always False with start=False even though nothing
    # had failed.
    status_proc = True
    if start:
        status_proc, msg_new, msg_old = _proxy_process(proxyname, test)
        changes_old.extend(msg_old)
        changes_new.extend(msg_new)
    else:
        changes_old.append('Start is False, not starting salt-proxy process')
        log.debug('Process not started')

    return {
        'result': status_file and status_proc,
        'changes': {
            'old': '\n'.join(changes_old),
            'new': '\n'.join(changes_new),
        },
    }
def is_running(proxyname):
    '''
    Check if the salt-proxy process associated
    with this proxy (name) is running.

    Returns True if the process is running
    False otherwise

    Parameters:
        proxyname:
            String name of the proxy (p8000 for example)

    CLI Example:

    .. code-block:: bash

        salt deviceminion salt_proxy.is_running p8000
    '''
    # Thin public wrapper: wrap the boolean in the module's standard
    # ``{'result': ...}`` return shape.
    return {'result': _is_proxy_running(proxyname)}
|
zhouxl/J2ObjC-Framework
|
Scripts/Template/Headers/org/mockito/internal/util/ObjectMethodsGuru.h
|
//
// Generated by the J2ObjC translator. DO NOT EDIT!
// source: /Users/antoniocortes/j2objcprj/relases/j2objc/testing/mockito/build_result/java/org/mockito/internal/util/ObjectMethodsGuru.java
//
#include "../../../../J2ObjC_header.h"
#pragma push_macro("INCLUDE_ALL_OrgMockitoInternalUtilObjectMethodsGuru")
#ifdef RESTRICT_OrgMockitoInternalUtilObjectMethodsGuru
#define INCLUDE_ALL_OrgMockitoInternalUtilObjectMethodsGuru 0
#else
#define INCLUDE_ALL_OrgMockitoInternalUtilObjectMethodsGuru 1
#endif
#undef RESTRICT_OrgMockitoInternalUtilObjectMethodsGuru
#pragma clang diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#if __has_feature(nullability)
#pragma clang diagnostic push
#pragma GCC diagnostic ignored "-Wnullability"
#pragma GCC diagnostic ignored "-Wnullability-completeness"
#endif
#if !defined (OrgMockitoInternalUtilObjectMethodsGuru_) && (INCLUDE_ALL_OrgMockitoInternalUtilObjectMethodsGuru || defined(INCLUDE_OrgMockitoInternalUtilObjectMethodsGuru))
#define OrgMockitoInternalUtilObjectMethodsGuru_
#define RESTRICT_JavaIoSerializable 1
#define INCLUDE_JavaIoSerializable 1
#include "../../../../java/io/Serializable.h"
@class JavaLangReflectMethod;
@protocol OrgMockitoInternalInvocationMockitoMethod;
// Classifier for the java.lang.Object / Comparable contract methods
// (equals, hashCode, compareTo, toString) given reflected Method objects.
// NOTE(review): per-method semantics inferred from the translated Mockito
// method names -- confirm against the original Java source; this header is
// J2ObjC-generated and marked DO NOT EDIT.
@interface OrgMockitoInternalUtilObjectMethodsGuru : NSObject < JavaIoSerializable >

#pragma mark Public

- (instancetype __nonnull)init;

// Presumably true when `method` is compareTo(...).
- (jboolean)isCompareToMethodWithJavaLangReflectMethod:(JavaLangReflectMethod *)method;

// Presumably true when `method` is equals(Object).
- (jboolean)isEqualsMethodWithJavaLangReflectMethod:(JavaLangReflectMethod *)method;

// Presumably true when `method` is hashCode().
- (jboolean)isHashCodeMethodWithJavaLangReflectMethod:(JavaLangReflectMethod *)method;

// Presumably true when `method` is toString().
- (jboolean)isToStringWithJavaLangReflectMethod:(JavaLangReflectMethod *)method;

// Overload taking Mockito's own method abstraction.
- (jboolean)isToStringWithOrgMockitoInternalInvocationMockitoMethod:(id<OrgMockitoInternalInvocationMockitoMethod>)method;

@end
J2OBJC_EMPTY_STATIC_INIT(OrgMockitoInternalUtilObjectMethodsGuru)
FOUNDATION_EXPORT void OrgMockitoInternalUtilObjectMethodsGuru_init(OrgMockitoInternalUtilObjectMethodsGuru *self);
FOUNDATION_EXPORT OrgMockitoInternalUtilObjectMethodsGuru *new_OrgMockitoInternalUtilObjectMethodsGuru_init(void) NS_RETURNS_RETAINED;
FOUNDATION_EXPORT OrgMockitoInternalUtilObjectMethodsGuru *create_OrgMockitoInternalUtilObjectMethodsGuru_init(void);
J2OBJC_TYPE_LITERAL_HEADER(OrgMockitoInternalUtilObjectMethodsGuru)
#endif
#if __has_feature(nullability)
#pragma clang diagnostic pop
#endif
#pragma clang diagnostic pop
#pragma pop_macro("INCLUDE_ALL_OrgMockitoInternalUtilObjectMethodsGuru")
|
ceekay1991/AliPayForDebug
|
AliPayForDebug/AliPayForDebug/AlipayWallet_Headers/LIVETRADEPRODGetDynamicIdReq.h
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import "LIVETRADEPRODToString.h"
@class LIVETRADEPRODPayChannelModel, NSString;
// Request model for fetching a dynamic id in the live-trade payment flow.
// NOTE(review): this is a class-dump reconstructed header; field semantics
// are inferred from names only -- confirm against actual call sites.
@interface LIVETRADEPRODGetDynamicIdReq : LIVETRADEPRODToString
{
    NSString *_userId;       // backing ivar for userId
    NSString *_type;         // backing ivar for type
    NSString *_extInfos;     // backing ivar for extInfos
    NSString *_bizType;      // backing ivar for bizType
    LIVETRADEPRODPayChannelModel *_payChannelModel; // backing ivar for payChannelModel
}

@property(retain, nonatomic) LIVETRADEPRODPayChannelModel *payChannelModel; // @synthesize payChannelModel=_payChannelModel;
@property(retain, nonatomic) NSString *bizType; // @synthesize bizType=_bizType;
@property(retain, nonatomic) NSString *extInfos; // @synthesize extInfos=_extInfos;
@property(retain, nonatomic) NSString *type; // @synthesize type=_type;
@property(retain, nonatomic) NSString *userId; // @synthesize userId=_userId;

- (void).cxx_destruct;

@end
|
ScottLiao920/noisepage
|
src/self_driving/planning/action/create_index_action.cpp
|
#include "self_driving/planning/action/create_index_action.h"
#include "self_driving/planning/mcts/action_state.h"
namespace noisepage::selfdriving::pilot {
void CreateIndexAction::ModifyActionState(ActionState *action_state) {
  // Record this create-index action (by its name and id) on the action state.
  action_state->AddIndex(index_name_, id_);
}
} // namespace noisepage::selfdriving::pilot
|
acatalfano/chord-dht
|
src/app/node_data/node_data.py
|
<reponame>acatalfano/chord-dht<filename>src/app/node_data/node_data.py
from abc import ABC, abstractmethod, abstractproperty
from typing import Union
class NodeData(ABC):
    """Abstract node record holding an id plus predecessor/successor links.

    Subclasses supply :meth:`null_object`, the sentinel node (id ``-1``)
    used in place of missing neighbours.
    """

    def __init__(
        self,
        id_value: int,
        predecessor: Union['NodeData', None] = None,
        successor: Union['NodeData', None] = None,
        recurse: bool = True
    ) -> None:
        self.id = id_value
        # ``recurse=False`` is used when building the null object itself,
        # so its construction does not spawn further null objects.
        if recurse:
            self._predecessor: NodeData = (
                self.null_object if predecessor is None else predecessor
            )
            self._successor: NodeData = (
                self.null_object if successor is None else successor
            )

    def update(self, id_value: int) -> None:
        """Replace this node's id."""
        self.id = id_value

    def is_nil(self) -> bool:
        """True when this node is the null-object sentinel (id == -1)."""
        return self.id == -1

    @property
    def successor(self) -> 'NodeData':
        return self._successor

    @successor.setter
    def successor(self, node: 'NodeData') -> None:
        self._successor = node

    @property
    def predecessor(self) -> 'NodeData':
        return self._predecessor

    @predecessor.setter
    def predecessor(self, node: 'NodeData') -> None:
        self._predecessor = node

    @property
    @abstractmethod
    def null_object(self) -> 'NodeData':
        """Return the sentinel node representing "no node"."""
|
pulumi/pulumi-aws-native
|
sdk/go/aws/robomaker/getRobot.go
|
<reponame>pulumi/pulumi-aws-native
// Code generated by the Pulumi SDK Generator DO NOT EDIT.
// *** WARNING: Do not edit by hand unless you're certain you know what you are doing! ***
package robomaker
import (
"context"
"reflect"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// AWS::RoboMaker::Robot resource creates an AWS RoboMaker Robot.
// LookupRobot resolves an AWS::RoboMaker::Robot by ARN through the
// aws-native provider's invoke machinery.
func LookupRobot(ctx *pulumi.Context, args *LookupRobotArgs, opts ...pulumi.InvokeOption) (*LookupRobotResult, error) {
	var rv LookupRobotResult
	// Delegate to the provider; rv is populated on success.
	err := ctx.Invoke("aws-native:robomaker:getRobot", args, &rv, opts...)
	if err != nil {
		return nil, err
	}
	return &rv, nil
}
// LookupRobotArgs are the plain arguments for the getRobot invoke.
type LookupRobotArgs struct {
	// ARN of the robot to look up.
	Arn string `pulumi:"arn"`
}

// LookupRobotResult is the data returned by the getRobot invoke.
type LookupRobotResult struct {
	Arn  *string    `pulumi:"arn"`
	Tags *RobotTags `pulumi:"tags"`
}
// LookupRobotOutput is the Output-returning variant of LookupRobot, usable
// when the arguments are themselves pulumi Output values.
func LookupRobotOutput(ctx *pulumi.Context, args LookupRobotOutputArgs, opts ...pulumi.InvokeOption) LookupRobotResultOutput {
	return pulumi.ToOutputWithContext(context.Background(), args).
		ApplyT(func(v interface{}) (LookupRobotResult, error) {
			// Once the inputs resolve, perform the plain lookup.
			args := v.(LookupRobotArgs)
			r, err := LookupRobot(ctx, &args, opts...)
			var s LookupRobotResult
			if r != nil {
				s = *r
			}
			return s, err
		}).(LookupRobotResultOutput)
}
// LookupRobotOutputArgs mirrors LookupRobotArgs with pulumi Input types.
type LookupRobotOutputArgs struct {
	Arn pulumi.StringInput `pulumi:"arn"`
}

// ElementType reports the plain-args type these output-args resolve to.
func (LookupRobotOutputArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*LookupRobotArgs)(nil)).Elem()
}

// LookupRobotResultOutput is an Output wrapper around LookupRobotResult.
type LookupRobotResultOutput struct{ *pulumi.OutputState }

func (LookupRobotResultOutput) ElementType() reflect.Type {
	return reflect.TypeOf((*LookupRobotResult)(nil)).Elem()
}

func (o LookupRobotResultOutput) ToLookupRobotResultOutput() LookupRobotResultOutput {
	return o
}

func (o LookupRobotResultOutput) ToLookupRobotResultOutputWithContext(ctx context.Context) LookupRobotResultOutput {
	return o
}

// Arn projects the robot's ARN from the result output.
func (o LookupRobotResultOutput) Arn() pulumi.StringPtrOutput {
	return o.ApplyT(func(v LookupRobotResult) *string { return v.Arn }).(pulumi.StringPtrOutput)
}

// Tags projects the robot's tags from the result output.
func (o LookupRobotResultOutput) Tags() RobotTagsPtrOutput {
	return o.ApplyT(func(v LookupRobotResult) *RobotTags { return v.Tags }).(RobotTagsPtrOutput)
}

// init registers the output type with the Pulumi runtime.
func init() {
	pulumi.RegisterOutputType(LookupRobotResultOutput{})
}
|
irov/hummingbird
|
src/hb_events/hb_events.h
|
#ifndef HB_EVENTS_H_
#define HB_EVENTS_H_
#include "hb_config/hb_config.h"
#include "hb_cache/hb_cache.h"
#include "hb_db/hb_db.h"
/* Opaque handle to the events subsystem. */
typedef struct hb_events_handle_t hb_events_handle_t;

/* Allocate and initialize an events handle; returns an hb_result_t status. */
hb_result_t hb_events_create( hb_events_handle_t ** _handle );

/* Destroy a handle previously created with hb_events_create. */
void hb_events_destroy( hb_events_handle_t * _handle );

/* Create a new topic named _name with the given delay, writing the new
 * topic uid to _tuid.
 * NOTE(review): _puid presumably identifies the owning project/parent --
 * confirm against the implementation. */
hb_result_t hb_events_new_topic( hb_events_handle_t * _handle, const hb_db_client_handle_t * _client, hb_uid_t _puid, const char * _name, uint32_t _delay, hb_uid_t * _tuid );

/* A fetched topic entry: message payload plus its sequence index. */
typedef struct hb_events_topic_t
{
    uint32_t index;
    const char * message;
}hb_events_topic_t;

/* Fetch topic _tuid into _topic; _code receives an error code on failure. */
hb_result_t hb_events_get_topic( hb_events_handle_t * _handle, const hb_cache_handle_t * _cache, const hb_db_client_handle_t * _client, hb_uid_t _puid, hb_uid_t _tuid, hb_events_topic_t * _topic, hb_error_code_t * _code );
#endif
|
hainguyen81/nlh4j
|
modules/core/src/main/java/org/nlh4j/core/text/numberinwords/ScaleUnit.java
|
/*
* @(#)ScaleUnit.java 1.0 Aug 28, 2015
* Copyright 2015 by GNU Lesser General Public License (LGPL). All rights reserved.
*/
package org.nlh4j.core.text.numberinwords;
import java.io.Serializable;
import org.apache.commons.lang3.ArrayUtils;
import org.springframework.util.Assert;
/**
* Number scale unit definition
*
* @author <NAME> (<EMAIL>)
*/
/**
 * Number scale unit definition: pairs a power-of-ten exponent with the
 * word(s) used for that scale (e.g. thousand, million).
 *
 * Instances are immutable.
 *
 * @author <NAME> (<EMAIL>)
 */
public final class ScaleUnit implements Serializable {

    /** default serial uid **/
    private static final long serialVersionUID = 1L;

    /**
     * The number exponent (to translate number to word)
     */
    private final int exponent;

    /**
     * The number to word definition (such as 1 - one; 2 - two; 10 - ten, etc...)
     */
    private final String[] names;

    /**
     * Initialize a new instance of {@link ScaleUnit}
     *
     * @param exponent exponent
     * @param names unit names; must contain at least one entry
     * @throws IllegalArgumentException if no names are supplied
     */
    public ScaleUnit(int exponent, String... names) {
        // Use a meaningful assertion message instead of the bare "names".
        Assert.notEmpty(names, "names must not be empty");
        this.exponent = exponent;
        // Defensive copy: the varargs array is caller-owned, so copying
        // keeps this instance immutable even if the caller mutates it.
        this.names = names.clone();
    }

    /**
     * Get the number exponent
     *
     * @return the number exponent
     */
    public int getExponent() {
        return exponent;
    }

    /**
     * Get the number to word definition
     *
     * @param index the definition index
     *
     * @return the number to word definition, or an empty string when the
     *         index is out of range
     */
    public String getName(int index) {
        return (ArrayUtils.isEmpty(names) || index < 0 || index >= names.length ? "" : names[index]);
    }
}
|
profoundsoul/vue_webpack
|
vue-single/src/router.js
|
<filename>vue-single/src/router.js
// Registers the application's routes on the provided router instance
// (vue-router 0.x/1.x `router.map` API).
export default (router)=>{
    router.map({
        '/':{
            name:'index',
            // Async component: the AMD-style require is a webpack split
            // point, so the index view is loaded on first navigation.
            component:(resolve)=>{
                require(['./views/index.vue'], resolve)
            }
        }
    });
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.