repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
cstom4994/SourceEngineRebuild
|
src/game/shared/tf/entity_halloween_pickup.cpp
|
<reponame>cstom4994/SourceEngineRebuild
//========= Copyright Valve Corporation, All rights reserved. ============//
//
// Purpose: CTF HealthKit.
//
//=============================================================================//
#include "cbase.h"
#include "entity_halloween_pickup.h"
#ifdef GAME_DLL
#include "items.h"
#include "tf_gamerules.h"
#include "tf_player.h"
#include "tf_team.h"
#include "engine/IEngineSound.h"
#include "entity_halloween_pickup.h"
#include "tf_fx.h"
#include "tf_logic_halloween_2014.h"
#endif // GAME_DLL
#ifdef CLIENT_DLL
#include "c_tf_player.h"
#endif
#include "tf_shareddefs.h"
#include "tf_duckleaderboard.h"
#define TF_HALLOWEEN_PICKUP_RETURN_DELAY 10
#ifdef GAME_DLL
IMPLEMENT_AUTO_LIST( IHalloweenGiftSpawnAutoList );
#endif // GAME_DLL
//=============================================================================
//
// CTF Halloween Pickup defines.
IMPLEMENT_NETWORKCLASS_ALIASED( HalloweenPickup, DT_CHalloweenPickup )
BEGIN_NETWORK_TABLE( CHalloweenPickup, DT_CHalloweenPickup )
END_NETWORK_TABLE()
BEGIN_DATADESC( CHalloweenPickup )
DEFINE_KEYFIELD( m_iszSound, FIELD_STRING, "pickup_sound" ),
DEFINE_KEYFIELD( m_iszParticle, FIELD_STRING, "pickup_particle" ),
#ifdef GAME_DLL
DEFINE_OUTPUT( m_OnRedPickup, "OnRedPickup" ),
DEFINE_OUTPUT( m_OnBluePickup, "OnBluePickup" ),
#endif
END_DATADESC();
LINK_ENTITY_TO_CLASS( tf_halloween_pickup, CHalloweenPickup );
// ************************************************************************************
BEGIN_DATADESC( CBonusDuckPickup )
// DEFINE_KEYFIELD( m_iszSound, FIELD_STRING, "pickup_sound" ),
// DEFINE_KEYFIELD( m_iszParticle, FIELD_STRING, "pickup_particle" ),
END_DATADESC();
IMPLEMENT_NETWORKCLASS_ALIASED( BonusDuckPickup, DT_CBonusDuckPickup )
BEGIN_NETWORK_TABLE( CBonusDuckPickup, DT_CBonusDuckPickup )
#ifdef GAME_DLL
SendPropBool( SENDINFO( m_bSpecial ) ),
#else
RecvPropBool( RECVINFO( m_bSpecial ) ),
#endif
END_NETWORK_TABLE()
LINK_ENTITY_TO_CLASS( tf_bonus_duck_pickup, CBonusDuckPickup );
// ************************************************************************************
#ifdef GAME_DLL
LINK_ENTITY_TO_CLASS( tf_halloween_gift_spawn_location, CHalloweenGiftSpawnLocation );
#endif
// ************************************************************************************
IMPLEMENT_NETWORKCLASS_ALIASED( HalloweenGiftPickup, DT_CHalloweenGiftPickup )
BEGIN_NETWORK_TABLE( CHalloweenGiftPickup, DT_CHalloweenGiftPickup )
#ifdef CLIENT_DLL
RecvPropEHandle( RECVINFO( m_hTargetPlayer ) ),
#else
SendPropEHandle( SENDINFO( m_hTargetPlayer ) ),
#endif
END_NETWORK_TABLE()
BEGIN_DATADESC( CHalloweenGiftPickup )
END_DATADESC();
LINK_ENTITY_TO_CLASS( tf_halloween_gift_pickup, CHalloweenGiftPickup );
// ************************************************************************************
// ************************************************************************************
ConVar tf_halloween_gift_lifetime( "tf_halloween_gift_lifetime", "240", FCVAR_CHEAT | FCVAR_REPLICATED );
#ifdef STAGING_ONLY
ConVar tf_halloween_gift_soul_value( "tf_halloween_gift_soul_value", "10", FCVAR_CHEAT | FCVAR_REPLICATED );
#endif
//=============================================================================
//
// CTF Halloween Pickup functions.
//
//-----------------------------------------------------------------------------
// Purpose: Constructor. Server side: the pickup starts on no team. Both sides
//          default to the quack sound and halloween explosion particle.
//-----------------------------------------------------------------------------
CHalloweenPickup::CHalloweenPickup()
{
#ifdef GAME_DLL
	ChangeTeam( TEAM_UNASSIGNED );
#endif
	// Defaults; map may override via the "pickup_sound"/"pickup_particle" keyfields
	m_iszSound = MAKE_STRING( "Halloween.Quack" );
	m_iszParticle = MAKE_STRING( "halloween_explosion" );
}
//-----------------------------------------------------------------------------
// Purpose: Destructor. Nothing to release — string handles and outputs are
//          managed by the engine.
//-----------------------------------------------------------------------------
CHalloweenPickup::~CHalloweenPickup()
{
}
//-----------------------------------------------------------------------------
// Purpose: Precaches the pickup's sound and particle assets, including any
//          map-supplied overrides. Late precaching is deliberately permitted
//          because these entities can be created mid-round.
//-----------------------------------------------------------------------------
void CHalloweenPickup::Precache( void )
{
	// We deliberately allow late precaches here
	bool bAllowPrecache = CBaseEntity::IsPrecacheAllowed();
	CBaseEntity::SetAllowPrecache( true );

	// Always cache the fallback sound; MyTouch uses it when m_iszSound is unset
	PrecacheScriptSound( TF_HALLOWEEN_PICKUP_DEFAULT_SOUND );
	if ( m_iszSound != NULL_STRING )
	{
		PrecacheScriptSound( STRING( m_iszSound ) );
	}
	if ( m_iszParticle != NULL_STRING )
	{
		PrecacheParticleSystem( STRING( m_iszParticle ) );
	}
	BaseClass::Precache();

	// Restore the previous precache policy
	CBaseEntity::SetAllowPrecache( bAllowPrecache );
}
#ifdef GAME_DLL
//-----------------------------------------------------------------------------
// Purpose: Network transmit policy — always send this entity to every client
//          so the pickup is visible map-wide.
//-----------------------------------------------------------------------------
int CHalloweenPickup::UpdateTransmitState()
{
	return SetTransmitState( FL_EDICT_ALWAYS );
}
//-----------------------------------------------------------------------------
// Purpose: Per-client transmit check — unconditionally transmit, regardless
//          of the receiving client's PVS (pInfo is ignored).
//-----------------------------------------------------------------------------
int CHalloweenPickup::ShouldTransmit( const CCheckTransmitInfo *pInfo )
{
	return FL_EDICT_ALWAYS;
}
//-----------------------------------------------------------------------------
// Purpose: Handles a player touching the pickup. Fires the per-team map
//          outputs, plays the pickup sound/particle, scores a point in the
//          2014 Halloween minigame if one is active, and awards the Doomsday
//          duck-collection achievement. Returns true if the touch was valid.
//-----------------------------------------------------------------------------
bool CHalloweenPickup::MyTouch( CBasePlayer *pPlayer )
{
	bool bSuccess = false;
	if ( ValidTouch( pPlayer ) )
	{
		bSuccess = true;

		// Fire the map-logic output matching the toucher's team
		switch( pPlayer->GetTeamNumber() )
		{
		case TF_TEAM_BLUE:
			m_OnBluePickup.FireOutput( this, this );
			break;
		case TF_TEAM_RED:
			m_OnRedPickup.FireOutput( this, this );
			break;
		}

		// Feedback effects play slightly above the pickup's origin
		Vector vecOrigin = GetAbsOrigin() + Vector( 0, 0, 32 );
		CPVSFilter filter( vecOrigin );
		if ( m_iszSound != NULL_STRING )
		{
			EmitSound( filter, entindex(), STRING( m_iszSound ) );
		}
		else
		{
			EmitSound( filter, entindex(), TF_HALLOWEEN_PICKUP_DEFAULT_SOUND );
		}
		if ( m_iszParticle != NULL_STRING )
		{
			TE_TFParticleEffect( filter, 0.0, STRING( m_iszParticle ), vecOrigin, vec3_angle );
		}

		// Increment score directly during 2014 halloween
		if ( CTFMinigameLogic::GetMinigameLogic() && CTFMinigameLogic::GetMinigameLogic()->GetActiveMinigame() )
		{
			// Build a synthetic input to score 1 point for the toucher's team
			inputdata_t inputdata;
			inputdata.pActivator = NULL;
			inputdata.pCaller = NULL;
			inputdata.value.SetInt( 1 );
			inputdata.nOutputID = 0;
			if ( pPlayer->GetTeamNumber() == TF_TEAM_RED )
			{
				CTFMinigameLogic::GetMinigameLogic()->GetActiveMinigame()->InputScoreTeamRed( inputdata );
			}
			else
			{
				CTFMinigameLogic::GetMinigameLogic()->GetActiveMinigame()->InputScoreTeamBlue( inputdata );
			}
		}

		// Doomsday scenario: achievement plus a game event for other listeners
		if ( TFGameRules() && TFGameRules()->IsHalloweenScenario( CTFGameRules::HALLOWEEN_SCENARIO_DOOMSDAY ) )
		{
			CTFPlayer *pTFPlayer = ToTFPlayer( pPlayer );
			if ( pTFPlayer )
			{
				pTFPlayer->AwardAchievement( ACHIEVEMENT_TF_HALLOWEEN_DOOMSDAY_COLLECT_DUCKS );

				IGameEvent *pEvent = gameeventmanager->CreateEvent( "halloween_duck_collected" );
				if ( pEvent )
				{
					pEvent->SetInt( "collector", pTFPlayer->GetUserID() );
					gameeventmanager->FireEvent( pEvent, true );
				}
			}
		}
	}
	return bSuccess;
}
//-----------------------------------------------------------------------------
// Purpose: A touch is valid unless the toucher is a TF player currently in
//          Halloween ghost mode; otherwise defer to the base item rules.
//-----------------------------------------------------------------------------
bool CHalloweenPickup::ValidTouch( CBasePlayer *pPlayer )
{
	// Ghosts can't collect pickups
	CTFPlayer *pTFPlayer = ToTFPlayer( pPlayer );
	if ( pTFPlayer && pTFPlayer->m_Shared.InCond( TF_COND_HALLOWEEN_GHOST_MODE ) )
		return false;

	return BaseClass::ValidTouch( pPlayer );
}
// Seconds before a collected pickup rematerializes (TF_HALLOWEEN_PICKUP_RETURN_DELAY).
float CHalloweenPickup::GetRespawnDelay( void )
{
	return TF_HALLOWEEN_PICKUP_RETURN_DELAY;
}
//-----------------------------------------------------------------------------
// Purpose: Do everything that our base does, but don't change our origin.
//          Hides the pickup, makes it non-solid except as a trigger, and
//          schedules Materialize after the respawn delay.
//-----------------------------------------------------------------------------
CBaseEntity* CHalloweenPickup::Respawn( void )
{
	// Go dormant: invisible, no physics, trigger-only collision
	SetTouch( NULL );
	AddEffects( EF_NODRAW );
	VPhysicsDestroyObject();
	SetMoveType( MOVETYPE_NONE );
	SetSolid( SOLID_BBOX );
	AddSolidFlags( FSOLID_TRIGGER );
	m_bRespawning = true;

	// Unlike the base class we intentionally keep our current origin
	//UTIL_SetOrigin( this, g_pGameRules->VecItemRespawnSpot( this ) );// blip to whereever you should respawn.
	SetAbsAngles( g_pGameRules->VecItemRespawnAngles( this ) );// set the angles.

#if !defined( TF_DLL )
	UTIL_DropToFloor( this, MASK_SOLID );
#endif
	RemoveAllDecals(); //remove any decals

	// Reappear after the respawn delay
	SetThink ( &CItem::Materialize );
	SetNextThink( gpGlobals->curtime + GetRespawnDelay() );
	return this;
}
// While the thrower-touch grace period is active, nobody may pick this up;
// otherwise defer to the base item rules.
bool CHalloweenPickup::ItemCanBeTouchedByPlayer( CBasePlayer *pPlayer )
{
	if ( m_flThrowerTouchTime > 0.f && gpGlobals->curtime < m_flThrowerTouchTime )
	{
		return false;
	}

	return BaseClass::ItemCanBeTouchedByPlayer( pPlayer );
}
#endif // GAME_DLL
// ***********************************************************************************************
ConVar tf_duck_allow_team_pickup( "tf_duck_allow_team_pickup", "1", FCVAR_REPLICATED | FCVAR_DEVELOPMENTONLY );
// Constructor. Server side: no team and no creator/victim/assister attribution
// yet. Client side: no glow effect until OnDataChanged creates one.
CBonusDuckPickup::CBonusDuckPickup()
{
#ifdef GAME_DLL
	ChangeTeam( TEAM_UNASSIGNED );
	// -1 means "no such player"; filled in by whoever spawns the duck
	m_iCreatorId = -1;
	m_iVictimId = -1;
	m_iAssisterId = -1;
	m_iFlags = 0;
#else
	pGlowEffect = NULL;
#endif
	m_bSpecial = false;
	m_iszSound = MAKE_STRING( BONUS_DUCK_CREATED_SOUND );
	m_iszParticle = MAKE_STRING( "duck_pickup" );
}
//-----------------------------------------------------------------------------
// Purpose: Destructor. Client side stops the glow particle if one is running.
//-----------------------------------------------------------------------------
CBonusDuckPickup::~CBonusDuckPickup()
{
#ifdef GAME_DLL
	m_flLifeTime = 0;
#else
	if ( pGlowEffect )
	{
		ParticleProp()->StopEmission( pGlowEffect );
		pGlowEffect = NULL;
	}
#endif
}
//-----------------------------------------------------------------------------
// Purpose: Precaches the duck's glow/trail particles (normal and special
//          variants for both teams) and its creation sound.
//-----------------------------------------------------------------------------
void CBonusDuckPickup::Precache( void )
{
	// We deliberately allow late precaches here
	bool bAllowPrecache = CBaseEntity::IsPrecacheAllowed();
	CBaseEntity::SetAllowPrecache( true );

	PrecacheParticleSystem( BONUS_DUCK_GLOW );
	PrecacheParticleSystem( BONUS_DUCK_TRAIL_RED );
	PrecacheParticleSystem( BONUS_DUCK_TRAIL_BLUE );
	PrecacheParticleSystem( BONUS_DUCK_TRAIL_SPECIAL_RED );
	PrecacheParticleSystem( BONUS_DUCK_TRAIL_SPECIAL_BLUE );
	PrecacheScriptSound( BONUS_DUCK_CREATED_SOUND );

	BaseClass::Precache();
	CBaseEntity::SetAllowPrecache( bAllowPrecache );
}
#ifdef GAME_DLL
//-----------------------------------------------------------------------------
// Purpose: A duck may be collected by a live player of the opposing team who
//          is not in Halloween ghost mode. Note this does NOT call the base
//          class — it fully replaces the base item rules.
//-----------------------------------------------------------------------------
bool CBonusDuckPickup::ValidTouch( CBasePlayer *pPlayer )
{
	// Is the item enabled?
	if ( IsDisabled() )
		return false;

	// Only touch a live player.
	if ( !pPlayer || !pPlayer->IsPlayer() || !pPlayer->IsAlive() )
		return false;

	// Players may not collect their own team's ducks
	if ( ( GetTeamNumber() >= FIRST_GAME_TEAM ) && ( pPlayer->GetTeamNumber() == GetTeamNumber() ) )
		return false;

	// Ghosts can't collect pickups
	CTFPlayer *pTFPlayer = ToTFPlayer( pPlayer );
	if ( pTFPlayer && pTFPlayer->m_Shared.InCond( TF_COND_HALLOWEEN_GHOST_MODE ) )
		return false;

	return true;
}
//-----------------------------------------------------------------------------
#define DUCK_BLINK_TIME 3.0f
// Spawns the duck with a random yaw, records its kill time, and schedules the
// pre-expiry blink think plus a delayed collision-bounds expansion.
void CBonusDuckPickup::Spawn( void )
{
	BaseClass::Spawn();
	//SetCycle( RandomFloat(0, 60.0f) );
	//align to the ground so we're not standing on end
	QAngle angle = vec3_angle;
	// rotate randomly in yaw
	angle[1] = random->RandomFloat( 0, 360 );
	SetAbsAngles( angle );

	float flLifeTime = GetLifeTime();
	m_flKillTime = gpGlobals->curtime + flLifeTime;
	m_nBlinkCount = 0;
	// Start blinking DUCK_BLINK_TIME seconds before the duck expires
	SetContextThink( &CBonusDuckPickup::BlinkThink, gpGlobals->curtime + flLifeTime - DUCK_BLINK_TIME, "BonusDuckBlinkThink" );
	// Enlarge the touch bounds shortly after spawn (see UpdateCollisionBounds)
	SetContextThink( &CBonusDuckPickup::UpdateCollisionBounds, gpGlobals->curtime + 2.0f, "UpdateCollisionBoundsThink" );
}
//-----------------------------------------------------------------------------
bool CBonusDuckPickup::MyTouch( CBasePlayer *pPlayer )
{
bool bSuccess = false;
if ( tf_duck_allow_team_pickup.GetBool() || ValidTouch( pPlayer ) )
{
bSuccess = true;
Vector vecOrigin = GetAbsOrigin();
CPVSFilter pvsFilter( vecOrigin );
if ( m_iszSound != NULL_STRING )
{
EmitSound( pvsFilter, entindex(), STRING( m_iszSound ) );
}
else
{
EmitSound( pvsFilter, entindex(), TF_HALLOWEEN_PICKUP_DEFAULT_SOUND );
}
if ( m_iszParticle != NULL_STRING )
{
TE_TFParticleEffect( pvsFilter, 0.0, STRING( m_iszParticle ), vecOrigin, vec3_angle );
}
if ( m_bSpecial )
{
CSingleUserRecipientFilter userfilter( pPlayer );
UserMessageBegin( userfilter, "BonusDucks" );
WRITE_BYTE( pPlayer->entindex() );
WRITE_BYTE( true );
MessageEnd();
}
// Notify User that they picked up a EOTL duck if the holiday is active
if ( pPlayer && TFGameRules() && TFGameRules()->IsHolidayActive( kHoliday_EOTL ) && !TFGameRules()->HaveCheatsBeenEnabledDuringLevel() )
{
int iFlags = m_iFlags;
if ( m_bSpecial )
{
iFlags |= DUCK_FLAG_BONUS;
}
// Send Message to Toucher and Creator if Creator is same team as toucher
// Tell your team you picked up a duck
// IsCreated, ID of Creator, ID of Victim, Count, IsGolden
// Message to Toucher
{
CSingleUserRecipientFilter userfilter( pPlayer );
UserMessageBegin( userfilter, "EOTLDuckEvent" );
WRITE_BYTE( false );
WRITE_BYTE( m_iCreatorId );
WRITE_BYTE( m_iVictimId );
WRITE_BYTE( pPlayer->entindex() );
WRITE_BYTE( GetTeamNumber() );
WRITE_BYTE( 1 );
WRITE_BYTE( iFlags );
MessageEnd();
}
// Notify Creator
if ( m_iCreatorId != pPlayer->entindex() )
{
CBasePlayer *pCreator = UTIL_PlayerByIndex( m_iCreatorId );
if ( pCreator && pCreator->InSameTeam( pPlayer ) )
{
CSingleUserRecipientFilter userfilter( pCreator );
UserMessageBegin( userfilter, "EOTLDuckEvent" );
WRITE_BYTE( false );
WRITE_BYTE( m_iCreatorId );
WRITE_BYTE( m_iVictimId );
WRITE_BYTE( pPlayer->entindex() );
WRITE_BYTE( GetTeamNumber() );
WRITE_BYTE( 1 );
WRITE_BYTE( iFlags );
MessageEnd();
}
}
// Notify Assister someone picked up their duck as well
if ( m_iAssisterId != -1 && m_iAssisterId != pPlayer->entindex() )
{
CBasePlayer *pAssister = UTIL_PlayerByIndex( m_iAssisterId );
if ( pAssister && pAssister->InSameTeam( pPlayer ) )
{
CSingleUserRecipientFilter userfilter( pAssister );
UserMessageBegin( userfilter, "EOTLDuckEvent" );
WRITE_BYTE( false );
WRITE_BYTE( m_iAssisterId );
WRITE_BYTE( m_iVictimId );
WRITE_BYTE( pPlayer->entindex() );
WRITE_BYTE( GetTeamNumber() );
WRITE_BYTE( 1 );
WRITE_BYTE( iFlags );
MessageEnd();
}
}
}
}
return bSuccess;
}
//-----------------------------------------------------------------------------
// Launches the duck into the world and schedules NotifyFadeOut when its
// lifetime elapses, then defers to the base drop behavior.
void CBonusDuckPickup::DropSingleInstance( Vector &vecLaunchVel, CBaseCombatCharacter *pThrower, float flThrowerTouchDelay, float flResetTime /*= 0.1f*/ )
{
	// Remove ourselves after some time
	SetContextThink( &CBonusDuckPickup::NotifyFadeOut, gpGlobals->curtime + GetLifeTime(), "CBonusDuckPreRemoveThink" );

	BaseClass::DropSingleInstance( vecLaunchVel, pThrower, flThrowerTouchDelay, flResetTime );
}
//-----------------------------------------------------------------------------
// Called when the duck's lifetime expires without being collected.
// Intentionally a no-op: the team-wide expiry notification below was disabled
// and is kept only for reference.
void CBonusDuckPickup::NotifyFadeOut( void )
{
	//// Notify User that they picked up a EOTL duck if the holiday is active
	//if ( TFGameRules() && TFGameRules()->IsHolidayActive( kHoliday_EOTL ) )
	//{
	//	int iFlags = 0;
	//	if ( m_bSpecial )
	//	{
	//		iFlags |= DUCK_FLAG_BONUS;
	//	}
	//	// Tell your team you picked up a duck
	//	// IsCreated, ID of Creator, ID of Victim, Count, IsGolden
	//	CTeamRecipientFilter userfilter( GetTeamNumber(), true );
	//	UserMessageBegin( userfilter, "EOTLDuckEvent" );
	//	WRITE_BYTE( false );
	//	WRITE_BYTE( m_iCreatorId );
	//	WRITE_BYTE( m_iVictimId );
	//	WRITE_BYTE( 0 );
	//	WRITE_BYTE( GetTeamNumber() );
	//	WRITE_BYTE( 1 );
	//	WRITE_BYTE( iFlags );
	//	MessageEnd();
	//}
}
//-----------------------------------------------------------------------------
// Expands the trigger bounds to a 100-unit cube around the duck's origin
// (scheduled 2s after spawn — see Spawn).
void CBonusDuckPickup::UpdateCollisionBounds()
{
	CollisionProp()->SetCollisionBounds( Vector( -50, -50, -50 ), Vector( 50, 50, 50 ) );
}
//-----------------------------------------------------------------------------
// Flashes the duck's alpha as its lifetime runs out, blinking faster the
// closer it gets to the kill time.
void CBonusDuckPickup::BlinkThink()
{
	const float flRemaining = m_flKillTime - gpGlobals->curtime;

	// Blink interval shrinks from 0.3s down to 0.05s as expiry approaches
	const float flInterval = RemapValClamped( flRemaining, DUCK_BLINK_TIME, 0.f, 0.3f, 0.05f );
	SetContextThink( &CBonusDuckPickup::BlinkThink, gpGlobals->curtime + flInterval, "BonusDuckBlinkThink" );

	SetRenderMode( kRenderTransAlpha );

	// Alternate between mostly-transparent and fully opaque on each think
	SetRenderColorA( ( ++m_nBlinkCount % 2 == 0 ) ? 50 : 255 );
}
#else
//-----------------------------------------------------------------------------
// Purpose: Client-side creation hook. On first network update, attaches the
//          glow (special ducks only) and team-colored trail particles, and
//          plays the creation sound.
//-----------------------------------------------------------------------------
void CBonusDuckPickup::OnDataChanged( DataUpdateType_t updateType )
{
	BaseClass::OnDataChanged( updateType );

	if ( updateType == DATA_UPDATE_CREATED )
	{
		if ( !IsDormant() )
		{
			// Restart the glow cleanly if one is somehow already running
			if ( pGlowEffect )
			{
				ParticleProp()->StopEmission( pGlowEffect );
				pGlowEffect = NULL;
			}
			if ( m_bSpecial )
			{
				pGlowEffect = ParticleProp()->Create( BONUS_DUCK_GLOW, PATTACH_ABSORIGIN_FOLLOW, 0, Vector( 0, 0, 10 ) );
				// these are fire and forget
				ParticleProp()->Create( ( GetTeamNumber() == TF_TEAM_RED ) ? BONUS_DUCK_TRAIL_SPECIAL_RED : BONUS_DUCK_TRAIL_SPECIAL_BLUE, PATTACH_ABSORIGIN_FOLLOW );
			}
			else
			{
				// these are fire and forget
				ParticleProp()->Create( ( GetTeamNumber() == TF_TEAM_RED ) ? BONUS_DUCK_TRAIL_RED : BONUS_DUCK_TRAIL_BLUE, PATTACH_ABSORIGIN_FOLLOW );
			}

			CPVSFilter filter( GetAbsOrigin() );
			EmitSound( filter, entindex(), BONUS_DUCK_CREATED_SOUND );
		}
	}
}
#endif // GAME_DLL
//-----------------------------------------------------------------------------
// Purpose: Halloween Gift Spawn
//-----------------------------------------------------------------------------
#ifdef GAME_DLL
// Constructor. A gift spawn location is a pure marker entity; registration
// happens via the IHalloweenGiftSpawnAutoList auto list.
CHalloweenGiftSpawnLocation::CHalloweenGiftSpawnLocation()
{
}
#endif
//-----------------------------------------------------------------------------
// Constructor. The gift starts with no target player; the client also clears
// its cached previous target used for spawn/despawn notifications.
CHalloweenGiftPickup::CHalloweenGiftPickup()
{
	m_hTargetPlayer = NULL;
#ifdef CLIENT_DLL
	m_pPreviousTargetPlayer = NULL;
#endif
}
//-----------------------------------------------------------------------------
// Precaches the gargoyle voice lines played on gift spawn/expiry/collection.
void CHalloweenGiftPickup::Precache( void )
{
	BaseClass::Precache();
	PrecacheScriptSound( "sf15.Merasmus.Gargoyle.Spawn" );
	PrecacheScriptSound( "sf15.Merasmus.Gargoyle.Gone" );
	PrecacheScriptSound( "sf15.Merasmus.Gargoyle.Got" );
}
//------------------------------------------------------------------------
// Spawns the gift. Server side: schedules automatic despawn after
// tf_halloween_gift_lifetime seconds and marks the gift as never respawning.
void CHalloweenGiftPickup::Spawn( void )
{
	BaseClass::Spawn();
#ifdef GAME_DLL
	// Set a timer
	SetContextThink( &CHalloweenGiftPickup::DespawnGift, gpGlobals->curtime + tf_halloween_gift_lifetime.GetInt(), "DespawnGift" );
	AddSpawnFlags( SF_NORESPAWN );
#endif // GAME_DLL
}
#ifdef GAME_DLL
//------------------------------------------------------------------------
// Despawn (and notify client) and then remove
//------------------------------------------------------------------------
void CHalloweenGiftPickup::DespawnGift()
{
	// Clearing the target lets the client show the "gift disappeared" notice
	SetTargetPlayer( NULL );
	// Give the networked change a moment to reach clients before removal
	SetContextThink( &CHalloweenGiftPickup::RemoveGift, gpGlobals->curtime + 1.0, "RemoveGift" );
}
//------------------------------------------------------------------------
// Final removal step scheduled by DespawnGift.
void CHalloweenGiftPickup::RemoveGift()
{
	UTIL_Remove( this );
}
//------------------------------------------------------------------------
// Sets (or clears, with NULL) the one player allowed to collect this gift.
// The handle is networked, so clients react in OnDataChanged.
void CHalloweenGiftPickup::SetTargetPlayer( CTFPlayer *pTarget )
{
	m_hTargetPlayer = pTarget;
}
//------------------------------------------------------------------------
// Only the targeted player's touches count. Note this replaces the base item
// rules entirely rather than calling BaseClass::ValidTouch.
bool CHalloweenGiftPickup::ValidTouch( CBasePlayer *pPlayer )
{
	CTFPlayer *pTFPlayer = ToTFPlayer( pPlayer );
	if ( pTFPlayer && pTFPlayer != m_hTargetPlayer.Get() )
		return false;

	return true;
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Purpose: Awards the gift to its targeted player: plays the collect
//          particle/sounds, drops soul packs, and grants the Mann Manor
//          achievement when that scenario is active.
//------------------------------------------------------------------------
bool CHalloweenGiftPickup::MyTouch( CBasePlayer *pPlayer )
{
	CTFPlayer *pTFPlayer = ToTFPlayer( pPlayer );
	// Only the targeted TF player may collect this gift. Rejecting a NULL
	// pTFPlayer here also protects the unconditional dereference below —
	// the original guard let a non-TF toucher fall through.
	if ( !pTFPlayer || pTFPlayer != m_hTargetPlayer.Get() )
		return false;

	// TODO: Give contract points

	// Visual effects
	Vector vecOrigin = GetAbsOrigin();
	CPVSFilter filter( vecOrigin );
	TE_TFParticleEffect( filter, 0.0, "duck_collect_green", vecOrigin, vec3_angle );

	// Sound effects (only the collecting player hears them)
	CSingleUserRecipientFilter touchingFilter( pPlayer );
	EmitSound( touchingFilter, entindex(), "Halloween.PumpkinPickup" );
	EmitSound( touchingFilter, entindex(), "sf15.Merasmus.Gargoyle.Got" );

	// Guard TFGameRules() once for both the soul drop and the achievement;
	// the original called it unchecked for the drop but checked for the
	// achievement.
	if ( TFGameRules() )
	{
		// Give souls to the collecting player
#ifdef STAGING_ONLY
		for( int i=0; i<tf_halloween_gift_soul_value.GetInt(); ++i )
#else
		for( int i=0; i<10; ++i )
#endif // STAGING_ONLY
		{
			TFGameRules()->DropHalloweenSoulPack( 1, vecOrigin, pPlayer, TEAM_SPECTATOR );
		}

		// Achievement
		if ( TFGameRules()->IsHalloweenScenario( CTFGameRules::HALLOWEEN_SCENARIO_MANN_MANOR ) )
		{
			pTFPlayer->AwardAchievement( ACHIEVEMENT_TF_HALLOWEEN_COLLECT_GOODY_BAG );
		}
	}
	return true;
}
#endif // GAME_DLL
#ifdef CLIENT_DLL
//-----------------------------------------------------------------------------
// Client-side reaction to the networked target-player handle changing:
// shows an on-screen notification and plays a gargoyle line when a gift
// appears for (or disappears from) the local player.
void CHalloweenGiftPickup::OnDataChanged( DataUpdateType_t updateType )
{
	BaseClass::OnDataChanged( updateType );

	if ( updateType == DATA_UPDATE_DATATABLE_CHANGED )
	{
		C_TFPlayer *pLocalPlayer = C_TFPlayer::GetLocalTFPlayer();
		if ( pLocalPlayer )
		{
			// Gift Added: target went NULL -> local player
			if ( m_hTargetPlayer.Get() != NULL && m_pPreviousTargetPlayer == NULL && m_hTargetPlayer.Get() == pLocalPlayer )
			{
				// Notification
				CEconNotification *pNotification = new CEconNotification();
				pNotification->SetText( "#TF_HalloweenItem_SoulAppeared" );
				pNotification->SetLifetime( 5.0f );
				pNotification->SetSoundFilename( "ui/halloween_loot_spawn.wav" );
				NotificationQueue_Add( pNotification );
				pLocalPlayer->EmitSound( "sf15.Merasmus.Gargoyle.Spawn" );
			}

			// Gift Despawned: target went local player -> NULL
			if ( m_hTargetPlayer.Get() == NULL && m_pPreviousTargetPlayer != NULL && m_pPreviousTargetPlayer == pLocalPlayer )
			{
				// Notification
				CEconNotification *pNotification = new CEconNotification();
				pNotification->SetText( "#TF_HalloweenItem_SoulDisappeared" );
				pNotification->SetLifetime( 5.0f );
				pNotification->SetSoundFilename( "ui/halloween_loot_found.wav" );
				NotificationQueue_Add( pNotification );
				pLocalPlayer->EmitSound( "sf15.Merasmus.Gargoyle.Gone" );
			}

			// Remember the target so the next update can detect transitions
			m_pPreviousTargetPlayer = m_hTargetPlayer.Get();
		}
	}
}
//------------------------------------------------------------------------
// The gift is only rendered for the player it was spawned for.
bool CHalloweenGiftPickup::ShouldDraw()
{
	CTFPlayer *pOwner = m_hTargetPlayer.Get();
	if ( pOwner != C_TFPlayer::GetLocalTFPlayer() )
		return false;

	return BaseClass::ShouldDraw();
}
#endif
|
Synclavier/Software
|
Code/Includes/FlatCarbon/CMPRComponent.h
|
#warning CMPRComponent.h is not available on Mac OS X
|
jensdietrich/se-teaching
|
visitor/src/main/java/nz/ac/vuw/jenz/visitor/iteration3/Expression.java
|
package nz.ac.vuw.jenz.visitor.iteration3;
/**
* Abstract visitable type, implemented by Term and Condition.
* @author <NAME>
*/
public interface Expression {
    // Visitor-pattern entry point: implementations call back the visitor
    // with their concrete type.
    public void accept(ExpressionVisitor visitor);
}
|
persequor-com/valqueries-sql
|
core/src/test/java/com/valqueries/DatabaseTest.java
|
/* Copyright (C) Persequor ApS - All Rights Reserved
* Unauthorized copying of this file, via any medium is strictly prohibited
* Proprietary and confidential
* Written by Persequor Development Team <<EMAIL>>, 2020-12-02
*/
package com.valqueries;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLTransactionRollbackException;
import java.time.Duration;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.class)
public class DatabaseTest {
	private Database database;
	@Mock
	private DataSource dataSource;
	@Mock
	private Connection connection;
	@Mock
	private ITransaction tx;
	// MySQL deadlock: SQLSTATE 40001, vendor code 1213 — the retryable failure
	private SQLTransactionRollbackException deadlockEx = new SQLTransactionRollbackException("reason", "40001", 1213);

	@Before
	public void setup() throws Exception {
		// By default every transaction attempt fails with a deadlock
		doThrow(deadlockEx).when(tx).execute(any());
		when(dataSource.getConnection()).thenReturn(connection);
		database = new Database(dataSource);
	}

	// All 3 attempts deadlock: the call gives up and wraps the cause in OrmException.
	@Test
	public void doInRetryableTransaction_retries3times_reachesMaximum() throws Exception {
		try {
			database.doInTransaction( 3, Duration.ofMillis(100), tx);
			fail("Should fail at this point");
		} catch (OrmException e) {
			assertTrue(e.getCause() instanceof SQLTransactionRollbackException);
		}
		verify(tx, times(3)).execute(any());
	}

	// First attempt deadlocks, second succeeds: no exception, exactly 2 attempts.
	@Test
	public void doInRetryableTransaction_retries2times_doesntReachMaximum() throws Exception {
		doThrow(deadlockEx)
				.doNothing()
				.when(tx).execute(any());
		database.doInTransaction(3, Duration.ofMillis(100), tx);
		verify(tx, times(2)).execute(any());
	}

	// A non-deadlock exception is not retried: single attempt, wrapped and rethrown.
	@Test
	public void doInRetryableTransaction_doesntThrowDeadlock_doesntRetry() throws Exception {
		doThrow(new IllegalArgumentException())
				.doNothing()
				.when(tx).execute(any());
		try {
			database.doInTransaction(3, Duration.ofMillis(100), tx);
			fail("Should fail at this point");
		} catch (OrmException e) {
			assertEquals(e.getCause().getClass(), IllegalArgumentException.class);
		}
		verify(tx).execute(any());
	}
}
|
ignmiz/ATC_Console
|
test/suits/nav/test_atcairport.h
|
#ifndef TEST_ATCAIRPORT_H
#define TEST_ATCAIRPORT_H
#include "atcairport.h"
#include <QTest>
#include <QObject>
// QTest suite for ATCAirport: construction validation, runway management,
// and display-related getters/setters.
class Test_ATCAirport : public QObject
{
    Q_OBJECT

private slots:
    // Construction: valid coordinates and rejection of bad lat/lon
    void test_constructObject_correct();
    void test_constructObject_incorrectLat();
    void test_constructObject_incorrectLon();

    // Runway container management
    void test_deleteAllRunways();
    void test_isRunwayVectorEmpty();
    void test_appendGetRunway_correct();

    // Simple accessors
    void test_getName();
    void test_setName();
    void test_setGetScenePosition();
    void test_setGetSymbol();
    void test_setGetLabel();
    void test_setColor();

    // Visibility toggling
    void test_show();
    void test_hide();
    void test_isVisible();
};
#endif // TEST_ATCAIRPORT_H
|
zerowind1997/bookstore
|
src/main/java/edu/uc/action/BaseAction.java
|
package edu.uc.action;
import com.opensymphony.xwork2.ActionContext;
/**
 * Common base for Struts2 actions: captures the request URI/query string and
 * the request/session/application scopes, and provides shared login and
 * permission checks.
 */
public abstract class BaseAction extends com.opensymphony.xwork2.ActionSupport {
	/**
	 *
	 */
	private static final long serialVersionUID = 2509226703864561709L;
	private final String AREANAME_KEY = "areaName";
	protected String areaName = UIConst.AREANAME; // name of the current area
	protected String requestURI;
	protected String queryString;
	protected java.util.Map<String, Object> request;
	protected java.util.Map<String, Object> session;
	protected java.util.Map<String, Object> application;

	@SuppressWarnings("unchecked")
	public BaseAction() {
		// Publish the area name to the session once, the first time any action runs
		if (ActionContext.getContext().getSession().get(AREANAME_KEY) == null) {
			ActionContext.getContext().getSession().put(AREANAME_KEY, areaName);
		}
		requestURI = org.apache.struts2.ServletActionContext.getRequest().getRequestURI();
		queryString = org.apache.struts2.ServletActionContext.getRequest().getQueryString();
		request = (java.util.Map<String, Object>) ActionContext.getContext().get("request");
		session = com.opensymphony.xwork2.ActionContext.getContext().getSession();
		application = com.opensymphony.xwork2.ActionContext.getContext().getApplication();
		/*
		 * System.out.println(requestURI); System.out.println(queryString);
		 * System.out.println(pageNum); System.out.println(pageSize);
		 */
	}

	/**
	 * Operation type: holds the "oper" URL parameter.
	 */
	protected String oper;

	public String getOper() {
		return oper;
	}

	public void setOper(String oper) {
		this.oper = oper;
	}

	/**
	 * Checks whether a user is logged in. Returns "login" when no user is in
	 * the session; otherwise returns "view". (The original comment incorrectly
	 * said "login" for both cases.)
	 *
	 * @return "login" or "view"
	 */
	public String checkLogin() {
		//System.out.println("checkLogin()");
		if (ActionContext.getContext().getSession().get(UIConst.BG_LOGINUSER_KEY) == null) {
			return "login";
		}
		return "view";
	}

	/**
	 * Clears login state by removing all session data.
	 */
	public void clearLogin() {
		ActionContext.getContext().getSession().clear();
	}

	/**
	 * Checks whether the current user has admin rights.
	 *
	 * @return "login" if not logged in, "go_noright" if logged in but not an
	 *         admin, otherwise the empty string (allowed)
	 */
	public String checkRight() {
		System.out.println("checkRight");
		if (ActionContext.getContext().getSession().get(UIConst.BG_LOGINUSER_KEY) == null) {
			System.out.println("checkLogin:no");
			return "login";
		}
		if (ActionContext.getContext().getSession().get(UIConst.BG_ISADMIN_KEY) == null) {
			System.out.println("checkRight:no");
			return "go_noright";
		}
		return "";
	}

	// Paging parameters bound from the request (kept as Strings by design)
	protected String pageNum;
	protected String pageSize;

	public String getPageNum() {
		return pageNum;
	}

	public void setPageNum(String pageNum) {
		this.pageNum = pageNum;
	}

	public String getPageSize() {
		return pageSize;
	}

	public void setPageSize(String pageSize) {
		this.pageSize = pageSize;
	}
}
|
npocmaka/Windows-Server-2003
|
shell/comctl32/v6/tvpaint.c
|
<gh_stars>10-100
#include "ctlspriv.h"
#include "treeview.h"
#include "image.h"
extern void TruncateString(char *sz, int cch);
// Refreshes pTree->hbrBk from the parent window, but only when no explicit
// background color is set (clrBk == -1): disabled trees ask for the static
// control color, enabled trees for the edit control color.
void TV_GetBackgroundBrush(PTREE pTree, HDC hdc)
{
    if (pTree->clrBk == (COLORREF)-1)
    {
        if (pTree->ci.style & WS_DISABLED)
            pTree->hbrBk = FORWARD_WM_CTLCOLORSTATIC(pTree->ci.hwndParent, hdc, pTree->ci.hwnd, SendMessage);
        else
            pTree->hbrBk = FORWARD_WM_CTLCOLOREDIT(pTree->ci.hwndParent, hdc, pTree->ci.hwnd, SendMessage);
    }
}
// ----------------------------------------------------------------------------
//
// Draws a horizontal or vertical dotted line from the given (x,y) location
// for the given length (c).
//
// ----------------------------------------------------------------------------
// Draws a dotted line of length c starting at (x, y), vertical when fVert is
// set, horizontal otherwise. Uses the brush currently selected into hdc.
void TV_DrawDottedLine(HDC hdc, int x, int y, int c, BOOL fVert)
{
    int i;

    // Plot every other pixel along the requested axis.
    for (i = 0; i < c; i += 2)
    {
        if (fVert)
            PatBlt(hdc, x, y + i, 1, 1, PATCOPY);
        else
            PatBlt(hdc, x + i, y, 1, 1, PATCOPY);
    }
}
// ----------------------------------------------------------------------------
//
// Draws a plus or minus sign centered around the given (x,y) location and
// extending out from that location the given distance (c).
//
// ----------------------------------------------------------------------------
// TV_DrawPlusMinus is theme aware
// Draws a [+] or [-] expand button centered at (x, y), extending c pixels in
// each direction. Uses the current visual-styles theme glyph when available;
// otherwise hand-draws the background, sign, and box with the given brushes.
void TV_DrawPlusMinus(PTREE pTree, HDC hdc, int x, int y, int c, HBRUSH hbrSign, HBRUSH hbrBox, HBRUSH hbrBk, BOOL fPlus)
{
    HRESULT hr = E_FAIL;
    int n;
    int p = (c * 7) / 10;       // half-length of the sign strokes (70% of c)

    n = p * 2 + 1;              // full stroke length (odd, so it centers)

    if (pTree->hTheme)
    {
        // Themed path: let uxtheme render the open/closed glyph
        RECT rc = { x - c, y - c, x + c + 1, y + c + 1 };
        hr = DrawThemeBackground(pTree->hTheme, hdc, TVP_GLYPH, fPlus ? GLPS_CLOSED : GLPS_OPENED, &rc, 0);
    }

    if (FAILED(hr))
    {
        // Unthemed fallback: clear the button interior first
        SelectObject(hdc, hbrBk);
        PatBlt(hdc, x - c, y - c, 2*c, 2*c, PATCOPY);

        SelectObject(hdc, hbrSign);
        if (p >= 5)
        {
            // Large buttons get 3-pixel-thick strokes, then shrink p/n so the
            // 1-pixel pass below finishes the ends
            PatBlt(hdc, x - p, y - 1, n, 3, PATCOPY);
            if (fPlus)
                PatBlt(hdc, x - 1, y - p, 3, n, PATCOPY);
            p--;
            n -= 2;
        }
        // 1-pixel horizontal stroke; vertical stroke only for "+"
        PatBlt(hdc, x - p, y, n, 1, PATCOPY);
        if (fPlus)
            PatBlt(hdc, x, y - p, 1, n, PATCOPY);

        // Outline box around the button
        n = c * 2 + 1;
        SelectObject(hdc, hbrBox);
        PatBlt(hdc, x - c, y - c, n, 1, PATCOPY);
        PatBlt(hdc, x - c, y - c, 1, n, PATCOPY);
        PatBlt(hdc, x - c, y + c, n, 1, PATCOPY);
        PatBlt(hdc, x + c, y - c, 1, n, PATCOPY);
    }
}
// ----------------------------------------------------------------------------
//
// Create the bitmaps for the indent area of the tree as follows
// if fHasLines && fHasButtons --> 7 bitmaps
// if fHasLines && !fHasButtons --> 3 bitmaps
// if !fHasLines && fHasButtons --> 2 bitmaps
//
// sets hStartBmp, hBmp, hdcBits
//
// If "has lines" then there are three basic bitmaps.
//
// | | |
// | +--- +---
// | |
//
// (The plan vertical line does not get buttons.)
//
// Otherwise, there are no lines, so the basic bitmaps are blank.
//
// If "has buttons", then the basic bitmaps are augmented with buttons.
//
// [+] [-]
//
// And if you have "lines at root", you get
//
// __
//
//
// And if you have "lines at root" with "has buttons", then you also get
//
// --[+] --[-]
//
// So, there are twelve image types. Here they are, with the code names
// written underneath.
//
// | | | | | | |
// | +--- +--- [+]-- [+]-- [-]-- [-]--
// | | | |
//
// "|" "|-" "L" "|-+" "L+" "|--" "L-"
//
// --- [+]-- [-]-- [+] [-]
//
// ".-" ".-+" ".--" "+" "-"
//
// And the master table of which styles get which images.
//
//
// LINES BTNS ROOT | |- L |-+ L+ |-- L- .- .-+ .-- + -
//
// x 0 1
// x 0 1 2 3
// x 0 1 2 3
// x x 0 1 2 3 4 5 6
// x x 0 1 2 3
// x x x 0 1 2 3 4 5 6 7 8 9
//
// ----------------------------------------------------------------------------
// Draws the "|" connector: a full-item-height vertical dotted line down the
// middle of the indent column.
void TV_DrawV(PTREE pTree, HDC hdc, int x, int y) // "|"
{
    // Horizontal midpoint of the indent; image lists shift it
    int xMid = pTree->hImageList ? (pTree->cxImage - MAGIC_INDENT) / 2
                                 : pTree->cxIndent / 2;
    HBRUSH hbrOld = SelectObject(hdc, pTree->hbrLine);

    TV_DrawDottedLine(hdc, x + xMid, y, pTree->cyItem, TRUE);

    SelectObject(hdc, hbrOld);
}
// Draw a tee ("|-"): full-height vertical plus a horizontal stub reaching
// the next indent column.
void TV_DrawT(PTREE pTree, HDC hdc, int x, int y) // "|-"
{
    HBRUSH hbrPrev = SelectObject(hdc, pTree->hbrLine);
    int xCenter = pTree->hImageList ? (pTree->cxImage - MAGIC_INDENT) / 2
                                    : pTree->cxIndent / 2;
    // Snap the midline to an even pixel so the dot pattern stays aligned.
    int yCenter = ((pTree->cyItem / 2) + 1) & ~1;
    TV_DrawDottedLine(hdc, x + xCenter, y, pTree->cyItem, TRUE);
    TV_DrawDottedLine(hdc, x + xCenter, y + yCenter, pTree->cxIndent - xCenter, FALSE);
    SelectObject(hdc, hbrPrev);
}
// Draw an "L" connector: a half-height vertical joined to a horizontal stub.
// For the first root item (bRoot) the vertical half hangs downward instead,
// producing the upside-down "L".
void TV_DrawL(PTREE pTree, HDC hdc, int x, int y, BOOL bRoot) // "L"
{
    HBRUSH hbrPrev = SelectObject(hdc, pTree->hbrLine);
    int xCenter = pTree->hImageList ? (pTree->cxImage - MAGIC_INDENT) / 2
                                    : pTree->cxIndent / 2;
    int yCenter = ((pTree->cyItem / 2) + 1) & ~1;
    if (bRoot)
        TV_DrawDottedLine(hdc, x + xCenter, y + yCenter, yCenter, TRUE);
    else
        TV_DrawDottedLine(hdc, x + xCenter, y, yCenter, TRUE);
    TV_DrawDottedLine(hdc, x + xCenter, y + yCenter, pTree->cxIndent - xCenter, FALSE);
    SelectObject(hdc, hbrPrev);
}
// Shared worker for all connector glyphs that carry a [+]/[-] button.
//   fPlus  - TRUE draws [+] (collapsed), FALSE draws [-] (expanded)
//   fL     - TRUE draws an "L" shaped connector, FALSE a tee ("|-")
//   bRoot  - only meaningful with fL: flips the vertical half for the
//            first root item
// The line segments deliberately stop short of the button by `c` pixels so
// the box is not overdrawn.
void TV_DrawPML(PTREE pTree, HDC hdc, int x, int y, BOOL fPlus, BOOL fL, BOOL bRoot)
{
    int xMid, yMid, c;
    HBRUSH hbrOld = SelectObject(hdc, pTree->hbrLine);
    if (pTree->hImageList)
        xMid = (pTree->cxImage - MAGIC_INDENT) / 2;
    else
        xMid = pTree->cxIndent / 2;
    yMid = ((pTree->cyItem / 2) + 1) & ~1;
    // Button half-size: half of the smaller of the two half-extents.
    c = (min(xMid, yMid)) / 2;
    if (fL)
    {
        if (!bRoot)
        {
            TV_DrawDottedLine(hdc, x + xMid, y, yMid - c, TRUE);
        } else
        {
            // Upside-down "L" for the first root item.
            TV_DrawDottedLine(hdc, x + xMid, y + yMid + c, yMid - c, TRUE);
        }
        TV_DrawDottedLine(hdc, x + xMid + c, y + yMid, pTree->cxIndent - xMid - c, FALSE);
    }
    else
    {
        // Tee: verticals above and below the button plus the right stub.
        TV_DrawDottedLine(hdc, x + xMid, y, yMid - c, TRUE);
        TV_DrawDottedLine(hdc, x + xMid + c, y + yMid, pTree->cxIndent - xMid - c, FALSE);
        TV_DrawDottedLine(hdc, x + xMid, y + yMid + c, yMid - c, TRUE);
    }
    TV_DrawPlusMinus(pTree, hdc, x + xMid, y + yMid, c, pTree->hbrText, pTree->hbrLine, pTree->hbrBk, fPlus);
    SelectObject(hdc, hbrOld);
}
// Tee connector with a collapsed [+] button.
void TV_DrawTP(PTREE pTree, HDC hdc, int x, int y) // "|-+"
{
    TV_DrawPML(pTree, hdc, x, y, TRUE, FALSE, FALSE);
}
// "L" connector with a collapsed [+] button; bRoot flips it for the first root.
void TV_DrawLP(PTREE pTree, HDC hdc, int x, int y, BOOL bRoot) // "L+"
{
    TV_DrawPML(pTree, hdc, x, y, TRUE, TRUE, bRoot);
}
// Tee connector with an expanded [-] button.
void TV_DrawTM(PTREE pTree, HDC hdc, int x, int y) // "|--"
{
    TV_DrawPML(pTree, hdc, x, y, FALSE, FALSE, FALSE);
}
// "L" connector with an expanded [-] button; bRoot flips it for the first root.
void TV_DrawLM(PTREE pTree, HDC hdc, int x, int y, BOOL bRoot) // "L-"
{
    TV_DrawPML(pTree, hdc, x, y, FALSE, TRUE, bRoot);
}
// Draw the bare horizontal root stub (".-"): a single dotted line from the
// glyph center to the next indent column.
void TV_DrawH(PTREE pTree, HDC hdc, int x, int y) // ".-"
{
    HBRUSH hbrPrev = SelectObject(hdc, pTree->hbrLine);
    int xCenter = pTree->hImageList ? (pTree->cxImage - MAGIC_INDENT) / 2
                                    : pTree->cxIndent / 2;
    int yCenter = ((pTree->cyItem / 2) + 1) & ~1;
    TV_DrawDottedLine(hdc, x + xCenter, y + yCenter, pTree->cxIndent - xCenter, FALSE);
    SelectObject(hdc, hbrPrev);
}
// Root stub with a collapsed [+] button (".-+"): used for a lone root item
// when TVS_LINESATROOT and TVS_HASBUTTONS are both set.
void TV_DrawHP(PTREE pTree, HDC hdc, int x, int y) // ".-+"
{
    int xMid, yMid, c;
    HBRUSH hbrOld = SelectObject(hdc, pTree->hbrLine);
    if (pTree->hImageList)
        xMid = (pTree->cxImage - MAGIC_INDENT) / 2;
    else
        xMid = pTree->cxIndent / 2;
    yMid = ((pTree->cyItem / 2) + 1) & ~1;
    c = (min(xMid, yMid)) / 2;
    // Line starts at the button edge and runs to the next indent column.
    TV_DrawDottedLine(hdc, x + c, y + yMid, pTree->cxIndent - xMid - c, FALSE);
    TV_DrawPlusMinus(pTree, hdc, x + xMid, y + yMid, c, pTree->hbrText, pTree->hbrLine, pTree->hbrBk, TRUE);
    SelectObject(hdc, hbrOld);
}
// Root stub with an expanded [-] button (".--"); mirror of TV_DrawHP.
void TV_DrawHM(PTREE pTree, HDC hdc, int x, int y) // ".--"
{
    int xMid, yMid, c;
    HBRUSH hbrOld = SelectObject(hdc, pTree->hbrLine);
    if (pTree->hImageList)
        xMid = (pTree->cxImage - MAGIC_INDENT) / 2;
    else
        xMid = pTree->cxIndent / 2;
    yMid = ((pTree->cyItem / 2) + 1) & ~1;
    c = (min(xMid, yMid)) / 2;
    TV_DrawDottedLine(hdc, x + c, y + yMid, pTree->cxIndent - xMid - c, FALSE);
    TV_DrawPlusMinus(pTree, hdc, x + xMid, y + yMid, c, pTree->hbrText, pTree->hbrLine, pTree->hbrBk, FALSE);
    SelectObject(hdc, hbrOld);
}
// Draw only a collapsed [+] button ("+") -- the no-lines case.
void TV_DrawP(PTREE pTree, HDC hdc, int x, int y) // "+"
{
    int xCenter = pTree->hImageList ? (pTree->cxImage - MAGIC_INDENT) / 2
                                    : pTree->cxIndent / 2;
    int yCenter = ((pTree->cyItem / 2) + 1) & ~1;
    int cHalf = min(xCenter, yCenter) / 2;
    TV_DrawPlusMinus(pTree, hdc, x + xCenter, y + yCenter, cHalf,
                     pTree->hbrText, pTree->hbrLine, pTree->hbrBk, TRUE);
}
// Draw only an expanded [-] button ("-") -- the no-lines case.
void TV_DrawM(PTREE pTree, HDC hdc, int x, int y) // "-"
{
    int xMid, yMid, c;
    if (pTree->hImageList)
        xMid = (pTree->cxImage - MAGIC_INDENT) / 2;
    else
        xMid = pTree->cxIndent / 2;
    yMid = ((pTree->cyItem / 2) + 1) & ~1;
    c = (min(xMid, yMid)) / 2;
    TV_DrawPlusMinus(pTree, hdc, x + xMid, y + yMid, c, pTree->hbrText, pTree->hbrLine, pTree->hbrBk, FALSE);
}
// Dispatch a glyph index (see the image table in the big comment above) to
// the matching drawing routine.  Note that for states 0, 1 and 3 the
// meaning depends on the window style: without TVS_HASLINES indices 0/1 are
// the bare [+]/[-] buttons, and without TVS_HASBUTTONS index 3 degrades to
// the plain root stub.
void TV_DrawState2(PTREE pTree, int iState, HDC hdc, int x, int y, BOOL bRoot)
{
    switch (iState)
    {
    case 0:
        // "|" with lines, bare "+" without.
        if (pTree->ci.style & TVS_HASLINES)
        {
            TV_DrawV(pTree, hdc, x, y);
        }
        else
        {
            TV_DrawP(pTree, hdc, x, y);
        }
        break;
    case 1:
        // "|-" with lines, bare "-" without.
        if (pTree->ci.style & TVS_HASLINES)
        {
            TV_DrawT(pTree, hdc, x, y);
        }
        else
        {
            TV_DrawM(pTree, hdc, x, y);
        }
        break;
    case 2:
        TV_DrawL(pTree, hdc, x, y, bRoot);
        break;
    case 3:
        // ".-" without buttons, "|-+" with.
        if (!(pTree->ci.style & TVS_HASBUTTONS))
        {
            TV_DrawH(pTree, hdc, x, y);
        }
        else
        {
            TV_DrawTP(pTree, hdc, x, y);
        }
        break;
    case 4:
        TV_DrawLP(pTree, hdc, x, y, bRoot);
        break;
    case 5:
        TV_DrawTM(pTree, hdc, x, y);
        break;
    case 6:
        TV_DrawLM(pTree, hdc, x, y, bRoot);
        break;
    case 7:
        TV_DrawH(pTree, hdc, x, y);
        break;
    case 8:
        TV_DrawHP(pTree, hdc, x, y);
        break;
    case 9:
        TV_DrawHM(pTree, hdc, x, y);
        break;
    }
}
// Convenience wrapper: draw a glyph for a non-root position.
void TV_DrawState(PTREE pTree, int iState, HDC hdc, int x, int y)
{
    TV_DrawState2(pTree, iState, hdc, x, y, FALSE);
}
// TV_CreateIndentBmps is theme aware
// The indent glyphs are now drawn directly (see TV_DrawState2), so this
// routine only needs to force a repaint when redraw is enabled.
void TV_CreateIndentBmps(PTREE pTree)
{
    if (!pTree->fRedraw)
        return;
    InvalidateRect(pTree->ci.hwnd, NULL, TRUE);
}
// ----------------------------------------------------------------------------
//
// fills in a TVITEM structure by copying data from the item or
// by calling the callback to get it.
//
// in:
// hItem item to get TVITEM struct for
// mask which bits of the TVITEM struct you want (TVIF_ flags)
// out:
// lpItem TVITEM filled in
//
// ----------------------------------------------------------------------------
void TV_GetItem(PTREE pTree, HTREEITEM hItem, UINT mask, LPTVITEMEX lpItem)
{
    TV_DISPINFO nm;
    if (!hItem || !lpItem)
        return;
    DBG_ValidateTreeItem(hItem, FALSE);
    // nm.item.mask accumulates the TVIF_ bits that must be satisfied by a
    // TVN_GETDISPINFO callback to the parent.
    nm.item.mask = 0;
    // We need to check the mask to see if lpItem->pszText is valid
    // And even then, it might not be, so be paranoid
    if ((mask & TVIF_TEXT) && lpItem->pszText && lpItem->cchTextMax)
    {
        if (hItem->lpstr == LPSTR_TEXTCALLBACK)
        {
            nm.item.mask |= TVIF_TEXT;
            // caller had to fill in pszText and cchTextMax with valid data
            nm.item.pszText = lpItem->pszText;
            nm.item.cchTextMax = lpItem->cchTextMax;
            nm.item.pszText[0] = 0;
#ifdef DEBUG
            pTree->fInTextCallback = TRUE;
#endif
        }
        else
        {
            ASSERT(hItem->lpstr);
            // we could do this but this is dangerous (when responding
            // to TVM_GETITEM we would be giving the app a pointer to our data)
            // lpItem->pszText = hItem->lpstr;
            StringCchCopy(lpItem->pszText, lpItem->cchTextMax, hItem->lpstr);
        }
    }
    if (mask & TVIF_IMAGE)
    {
        if (hItem->iImage == (WORD)I_IMAGECALLBACK)
            nm.item.mask |= TVIF_IMAGE;
        else
            lpItem->iImage = hItem->iImage;
    }
    if (mask & TVIF_SELECTEDIMAGE)
    {
        if (hItem->iSelectedImage == (WORD)I_IMAGECALLBACK)
            nm.item.mask |= TVIF_SELECTEDIMAGE;
        else
            lpItem->iSelectedImage = hItem->iSelectedImage;
    }
    if (mask & TVIF_INTEGRAL)
    {
        // Integral height is never callback-driven.
        lpItem->iIntegral = hItem->iIntegral;
    }
    if (mask & TVIF_CHILDREN)
    {
        switch (hItem->fKids)
        {
        case KIDS_COMPUTE:
            lpItem->cChildren = hItem->hKids ? 1 : 0;// the actual count doesn't matter
            break;
        case KIDS_FORCE_YES:
            lpItem->cChildren = 1;// the actual count doesn't matter
            break;
        case KIDS_FORCE_NO:
            lpItem->cChildren = 0;
            break;
        case KIDS_CALLBACK:
            nm.item.mask |= TVIF_CHILDREN;
            break;
        }
    }
    // copy out constant parameters (and prepare for callback)
    // IE4 and IE5.0 did this unconditionally
    lpItem->state = nm.item.state = hItem->state;
    //
    // NOTICE! We do not set TVIF_STATE nm.item.mask and we do not
    // check for TVIF_STATE in the "any items need to be filled in
    // by callback?" test a few lines below. This is necessary for
    // backwards compat. IE5 and earlier did not call the app back
    // if the only thing you asked for was TVIF_STATE. You can't
    // change this behavior unless you guard it with a version check, or
    // apps will break. (They'll get callbacks when they didn't used to.)
    // Besides, nobody knows that they can customize the state, so it's
    // not like we're missing out on anything.
    //
    lpItem->lParam = nm.item.lParam = hItem->lParam;
    // any items need to be filled in by callback?
    if (nm.item.mask & (TVIF_TEXT | TVIF_IMAGE | TVIF_SELECTEDIMAGE | TVIF_CHILDREN))
    {
        nm.item.hItem = hItem;
        CCSendNotify(&pTree->ci, TVN_GETDISPINFO, &nm.hdr);
#ifdef DEBUG
        pTree->fInTextCallback = FALSE;
#endif
        // copy out things that may have been filled in on the callback
        if (nm.item.mask & TVIF_CHILDREN)
            lpItem->cChildren = nm.item.cChildren;
        if (nm.item.mask & TVIF_IMAGE)
            lpItem->iImage = nm.item.iImage;
        if (nm.item.mask & TVIF_SELECTEDIMAGE)
            lpItem->iSelectedImage = nm.item.iSelectedImage;
        // callback may have redirected pszText to point into its own buffer
        if (nm.item.mask & TVIF_TEXT)
            lpItem->pszText = CCReturnDispInfoText(nm.item.pszText, lpItem->pszText, lpItem->cchTextMax);
        if (nm.item.mask & TVIF_STATE)
        {
            // Merge only the bits the app declared in stateMask.
            lpItem->state = (nm.item.state & nm.item.stateMask) | (lpItem->state & ~nm.item.stateMask);
            // Lazily create the bold font the first time any item asks for it.
            if ((lpItem->state & TVIS_BOLD) && !pTree->hFontBold)
                TV_CreateBoldFont(pTree);
        }
        // TVIF_DI_SETITEM: the app asked us to cache the callback results
        // back into the item so we never ask again.
        if (nm.item.mask & TVIF_DI_SETITEM)
        {
            if (nm.item.mask & TVIF_TEXT)
                if (nm.item.pszText)
                {
                    Str_Set(&hItem->lpstr, nm.item.pszText);
                }
            if (nm.item.mask & TVIF_STATE)
            {
                // if the bold bit changed, then the width changed
                if ((hItem->state ^ lpItem->state) & TVIS_BOLD)
                    hItem->iWidth = 0;
                hItem->state = (WORD) lpItem->state;
            }
            if (nm.item.mask & TVIF_IMAGE)
                hItem->iImage = (WORD) lpItem->iImage;
            if (nm.item.mask & TVIF_SELECTEDIMAGE)
                hItem->iSelectedImage = (WORD) lpItem->iSelectedImage;
            if (nm.item.mask & TVIF_CHILDREN)
            {
                switch(nm.item.cChildren)
                {
                case I_CHILDRENCALLBACK:
                    hItem->fKids = KIDS_CALLBACK;
                    break;
                case I_CHILDRENAUTO:
                    hItem->fKids = KIDS_COMPUTE;
                    break;
                case 0:
                    hItem->fKids = KIDS_FORCE_NO;
                    break;
                default:
                    hItem->fKids = KIDS_FORCE_YES;
                    break;
                }
            }
        }
    }
}
// ----------------------------------------------------------------------------
//
// Draws the given item starting at the given (x,y) and extending down and to
// the right.
//
// ----------------------------------------------------------------------------
// An item paints in the highlight ("blue") colors when it is the drop
// hilite target, or when it is selected in a focused, enabled tree with no
// drag/drop in progress.
BOOL TV_ShouldItemDrawBlue(PTREE pTree, TVITEMEX *ti, UINT flags)
{
    if (ti->state & TVIS_DROPHILITED)
        return TRUE;
    if (pTree->hDropTarget || (flags & TVDI_GRAYCTL))
        return FALSE;
    return (ti->state & TVIS_SELECTED) && pTree->fFocus;
}
#define TV_ShouldItemDrawDisabled(pTree, pti, flags) (flags & TVDI_GRAYCTL)
//
// Caution: Depending on the user's color scheme, a Gray item may
// end up looking Blue if Gray would otherwise be invisible. So make
// sure that there are other cues that the user can use to tell whether
// the item is "Really Blue" or "Gray masquerading as Blue".
//
// For example, you might get both is if the treeview is
// participating in drag/drop while it is not the active window,
// because the selected item gets "Gray masquerading as Blue" and
// the drop target gets "Really Blue". But we special-case that
// and turn off the selection while we are worrying about drag/drop,
// so there is no confusion after all.
//
BOOL TV_ShouldItemDrawGray(PTREE pTree, TVITEMEX *pti, UINT flags)
{
    // A disabled control always draws gray.
    if (flags & TVDI_GRAYCTL)
        return TRUE;
    // Selection feedback is suppressed entirely during drag/drop.
    if (pTree->hDropTarget)
        return FALSE;
    // Selected-but-unfocused items stay visibly marked only with
    // TVS_SHOWSELALWAYS.
    return (pti->state & TVIS_SELECTED) &&
           !pTree->fFocus && (pTree->ci.style & TVS_SHOWSELALWAYS);
}
//
// Draw a descender line for the item. It is the caller's job to
// draw the appropriate glyph at level 0.
//
// Extend the vertical connector down through the extra rows of a
// multi-row (iIntegral > 1) item by replicating the cached "|" cell.
void
TV_DrawDescender(PTREE pTree, HDC hdc, int x, int y, HTREEITEM hItem)
{
    int iRow;
    for (iRow = 1; iRow < hItem->iIntegral; iRow++)
        BitBlt(hdc, x, y + iRow * pTree->cyItem, pTree->cxIndent, pTree->cyItem,
               pTree->hdcBits, 0, 0, SRCCOPY);
}
//
// Erase any previous descender line for the item.
//
// Erase any previously drawn descender below the first row of a
// multi-row item by filling it with the background brush.
void
TV_EraseDescender(PTREE pTree, HDC hdc, int x, int y, HTREEITEM hItem)
{
    RECT rcFill;
    SetRect(&rcFill, x, y + pTree->cyItem,
            x + pTree->cxIndent, y + hItem->iIntegral * pTree->cyItem);
    FillRect(hdc, &rcFill, pTree->hbrBk);
}
//
// Draw (or erase) descenders for siblings and children.
//
// Draw (or erase) descenders for siblings and children of a multi-row item.
void TV_DrawKinDescender(PTREE pTree, HDC hdc, int x, int y, HTREEITEM hItem, UINT state)
{
    if (hItem->hNext) // Connect to next sibling
        TV_DrawDescender(pTree, hdc, x, y, hItem);
    else
        TV_EraseDescender(pTree, hdc, x, y, hItem);
    // If any bonus images, then need to connect the image to the kids.
    if (pTree->himlState || pTree->hImageList) {
        if (state & (TVIS_EXPANDED | TVIS_EXPANDPARTIAL)) // Connect to expanded kids
            TV_DrawDescender(pTree, hdc, x + pTree->cxIndent, y, hItem);
        else
            TV_EraseDescender(pTree, hdc, x + pTree->cxIndent, y, hItem);
    }
}
// TV_DrawItem is theme aware
// TV_DrawItem is theme aware
//
// Draws one item -- background, state/normal images, label, focus rect and
// the indent connectors/buttons -- with the row's top-left at (x, y).
// `flags` is a combination of TVDI_* bits that lets callers (normal paint,
// drag-image generation) suppress parts of the rendering.
void TV_DrawItem(PTREE pTree, HTREEITEM hItem, HDC hdc, int x, int y, UINT flags)
{
    UINT cxIndent = pTree->cxIndent;
    COLORREF rgbOldBack = 0, rgbOldText;
    COLORREF clrBk = CLR_DEFAULT;
    RECT rc;
    int iBack, iText;
    HTREEITEM hItemSave = hItem;
    LPTSTR lpstr;
    int cch;
    UINT etoFlags = ETO_OPAQUE | ETO_CLIPPED;
    TVITEMEX ti;
    TCHAR szTemp[MAX_PATH];
    int iState = 0;
    HFONT hFont; //$BOLD
    DWORD dwRet;
    NMTVCUSTOMDRAW nmcd;
    BOOL fItemFocused = ((pTree->fFocus) && (hItem == pTree->hCaret));
    DWORD clrTextTemp, clrTextBkTemp;
    BOOL fSelectedIcon = FALSE;
    int iOldBkMode = GetBkMode(hdc);
    rc.top = y;
    rc.bottom = rc.top + (pTree->cyItem * hItem->iIntegral);
    rc.left = 0;
    rc.right = pTree->cxWnd;
    if (flags & TVDI_ERASE)
    {
        // Opaque the whole item
        FillRect(hdc, &rc, pTree->hbrBk);
    }
    // make sure the callbacks don't invalidate this item
    pTree->hItemPainting = hItem;
    ti.pszText = szTemp;
    ti.cchTextMax = ARRAYSIZE(szTemp);
    ti.stateMask = TVIS_OVERLAYMASK | TVIS_CUT | TVIS_BOLD; //$BOLD
    TV_GetItem(pTree, hItem, TVIF_IMAGE | TVIF_STATE | TVIF_SELECTEDIMAGE | TVIF_TEXT | TVIF_CHILDREN | TVIF_PARAM, &ti);
    pTree->hItemPainting = NULL;
    ////////////////
    // set up the HDC
    if (TV_ShouldItemDrawBlue(pTree,&ti,flags))
    {
        // selected
        iBack = COLOR_HIGHLIGHT;
        iText = COLOR_HIGHLIGHTTEXT;
    }
    // BUGFIX: was TV_ShouldItemDrawDisabled(pTree, &pti, flags) -- `pti` is
    // not declared in this function; it only compiled because the macro
    // ignores its first two arguments.  Pass &ti like every sibling call.
    else if (TV_ShouldItemDrawDisabled(pTree, &ti, flags))
    {
        iBack = COLOR_3DFACE;
        iText = COLOR_GRAYTEXT;
        SetBkMode(hdc, TRANSPARENT);
        etoFlags &= ~ ETO_OPAQUE;
    }
    else if (TV_ShouldItemDrawGray(pTree, &ti, flags))
    {
        // On some color schemes, the BTNFACE color equals the WINDOW color,
        // and our gray comes out invisible. In such case, change from gray
        // to blue so you can see it at all.
        if (GetSysColor(COLOR_WINDOW) != GetSysColor(COLOR_BTNFACE))
        {
            iBack = COLOR_BTNFACE;
            iText = COLOR_BTNTEXT;
        }
        else
        {
            iBack = COLOR_HIGHLIGHT;
            iText = COLOR_HIGHLIGHTTEXT;
        }
    }
    else
    {
        // not selected
        iBack = COLOR_WINDOW;
        iText = COLOR_WINDOWTEXT;
        if (hItem == pTree->hHot)
        {
            iText = COLOR_HOTLIGHT;
        }
    }
    // App-specified colors override the system defaults.
    if (iBack == COLOR_WINDOW && (pTree->clrBk != (COLORREF)-1))
        nmcd.clrTextBk = clrTextBkTemp = pTree->clrBk;
    else
        nmcd.clrTextBk = clrTextBkTemp = GetSysColor(iBack);
    if (iText == COLOR_WINDOWTEXT && (pTree->clrText != (COLORREF)-1))
        nmcd.clrText = clrTextTemp = pTree->clrText;
    else
        nmcd.clrText = clrTextTemp = GetSysColor(iText);
    // if forcing black and transparent, do so. dc's BkMode should
    // already be set to TRANSPARENT by caller
    if (flags & TVDI_TRANSTEXT)
    {
        nmcd.clrText = clrTextTemp = 0x000000;
        etoFlags = 0; // don't opaque nothin'
    }
    rgbOldBack = SetBkColor(hdc, nmcd.clrTextBk);
    rgbOldText = SetTextColor(hdc, nmcd.clrText);
    if (pTree->ci.style & TVS_RTLREADING)
        etoFlags |= ETO_RTLREADING;
    // Figure out which font to use.
    if (ti.state & TVIS_BOLD)
    {
        hFont = pTree->hFontBold;
        if (hItem == pTree->hHot)
        {
            hFont = CCGetHotFont(pTree->hFontBold, &pTree->hFontBoldHot);
        }
    }
    else
    {
        hFont = pTree->hFont;
        if (hItem == pTree->hHot)
        {
            hFont = CCGetHotFont(pTree->hFont, &pTree->hFontHot);
        }
    }
    hFont = SelectObject(hdc, hFont);
    // End HDC setup
    ////////////////
    // notify on custom draw then do it!
    nmcd.nmcd.hdc = hdc;
    nmcd.nmcd.dwItemSpec = (DWORD_PTR)hItem;
    nmcd.nmcd.uItemState = 0;
    nmcd.nmcd.rc = rc;
    if (flags & TVDI_NOTREE)
        nmcd.iLevel = 0;
    else
        nmcd.iLevel = hItem->iLevel;
    if (ti.state & TVIS_SELECTED)
    {
        fSelectedIcon = TRUE;
        if (pTree->fFocus || (pTree->ci.style & TVS_SHOWSELALWAYS))
            nmcd.nmcd.uItemState |= CDIS_SELECTED;
    }
    if (fItemFocused)
        nmcd.nmcd.uItemState |= CDIS_FOCUS;
    if (hItem == pTree->hHot)
        nmcd.nmcd.uItemState |= CDIS_HOT;
    nmcd.nmcd.lItemlParam = ti.lParam;
    dwRet = CICustomDrawNotify(&pTree->ci, CDDS_ITEMPREPAINT, &nmcd.nmcd);
    if (dwRet & CDRF_SKIPDEFAULT)
        return;
    // The custom-draw handler may have changed the item state/colors.
    fItemFocused = (nmcd.nmcd.uItemState & CDIS_FOCUS);
    if (nmcd.nmcd.uItemState & CDIS_SELECTED)
        ti.state |= TVIS_SELECTED;
    else
    {
        ti.state &= ~TVIS_SELECTED;
    }
    if (nmcd.clrTextBk != clrTextBkTemp)
        SetBkColor(hdc, nmcd.clrTextBk);
    if (nmcd.clrText != clrTextTemp)
        SetTextColor(hdc, nmcd.clrText);
    if (pTree->ci.style & TVS_FULLROWSELECT &&
        !(flags & TVDI_TRANSTEXT))
    {
        FillRectClr(hdc, &nmcd.nmcd.rc, GetBkColor(hdc));
        etoFlags |= ETO_OPAQUE;
        clrBk = CLR_NONE;
    }
    if (!(flags & TVDI_NOTREE))
    {
        if ((pTree->ci.style & (TVS_HASLINES | TVS_HASBUTTONS)) &&
            (pTree->ci.style & TVS_LINESATROOT))
            // Make room for the "plus" at the front of the tree
            x += cxIndent;
    }
    // deal with margin, etc.
    x += (pTree->cxBorder + (nmcd.iLevel * cxIndent));
    y += pTree->cyBorder;
    // draw image
    if ((!(flags & TVDI_NOTREE) && !(dwRet & TVCDRF_NOIMAGES)) || (flags & TVDI_FORCEIMAGE))
    {
        int dx, dy; // to clip the images within the borders.
        COLORREF clrImage = CLR_HILIGHT;
        COLORREF clrBkImage = clrBk;
        if (flags & TVDI_NOBK)
        {
            clrBkImage = CLR_NONE;
        }
        if (pTree->himlState)
        {
            iState = TV_StateIndex(&ti);
            // go figure. in the treeview, 0 for the state image index
            // means draw nothing... the 0th item is unused.
            // the listview is 0 based and uses the 0th item.
            if (iState)
            {
                dx = min(pTree->cxState, pTree->cxMax - pTree->cxBorder - x);
                dy = min(pTree->cyState, pTree->cyItem - (2 * pTree->cyBorder));
                ImageList_DrawEx(pTree->himlState, iState, hdc, x,
                    y + max(pTree->cyItem - pTree->cyState, 0), dx, dy, clrBk, CLR_DEFAULT, ILD_NORMAL | (CCDPIScale(pTree->ci)?ILD_SCALE:0));
                x += pTree->cxState;
            }
        }
        if (pTree->hImageList)
        {
            UINT fStyle = 0;
            int i = (fSelectedIcon) ? ti.iSelectedImage : ti.iImage;
            if (ti.state & TVIS_CUT)
            {
                // Cut items draw blended with the imagelist background.
                fStyle |= ILD_BLEND50;
                clrImage = ImageList_GetBkColor(pTree->hImageList);
            }
            dx = min(pTree->cxImage - MAGIC_INDENT, pTree->cxMax - pTree->cxBorder - x);
            dy = min(pTree->cyImage, pTree->cyItem - (2 * pTree->cyBorder));
            // Draw image stretched (rather than centered) when ImageList allows for scaling
            ImageList_DrawEx(pTree->hImageList, i, hdc,
                x + (dx - pTree->cxNativeImage) / 2, y + (max(pTree->cyItem - pTree->cyImage, 0) / 2) + (dy - pTree->cyNativeImage) / 2,
                pTree->cxNativeImage, pTree->cyNativeImage,
                clrBkImage, clrImage,
                fStyle | (ti.state & TVIS_OVERLAYMASK) | (CCDPIScale(pTree->ci)?ILD_SCALE:0));
        }
    }
    if (pTree->hImageList)
    {
        // even if not drawing image, draw text in right place
        x += pTree->cxImage;
    }
    // draw text
    lpstr = ti.pszText;
    cch = lstrlen(lpstr);
    if (!hItem->iWidth || (hItem->lpstr == LPSTR_TEXTCALLBACK))
    {
        TV_ComputeItemWidth(pTree, hItem, hdc); //$BOLD
    }
    rc.left = x;
    rc.top = y + pTree->cyBorder;
    rc.right = min((x + hItem->iWidth),
                   (pTree->cxMax - pTree->cxBorder));
    rc.bottom-= pTree->cyBorder;
    // Draw the text, unless it's the one we are editing
    if (pTree->htiEdit != hItem || !IsWindow(pTree->hwndEdit) || !IsWindowVisible(pTree->hwndEdit))
    {
        ExtTextOut(hdc, x + g_cxLabelMargin, y + ((pTree->cyItem - pTree->cyText) / 2) + g_cyBorder,
                   etoFlags, &rc, lpstr, cch, NULL);
        // Draw the focus rect, if appropriate.
        if (pTree->fFocus && (fItemFocused) &&
            !(pTree->ci.style & TVS_FULLROWSELECT) &&
            !(flags & (TVDI_TRANSTEXT | TVDI_GRAYCTL))&&
            !(CCGetUIState(&(pTree->ci)) & UISF_HIDEFOCUS))
        {
            DrawFocusRect(hdc, &rc);
        }
    }
    SetBkColor(hdc, rgbOldBack);
    SetTextColor(hdc, rgbOldText);
    // Restore the original font. //$BOLD
    SelectObject(hdc, hFont); //$BOLD
    // Notice that we should have opaque'd the rest of the line above if no tree
    if (!(flags & TVDI_NOTREE))
    {
        int dx, dy;
        // Walk back to the left edge of the glyph column.
        if (pTree->hImageList)
            x -= pTree->cxImage;
        if (iState)
            x -= pTree->cxState;
        if (pTree->ci.style & TVS_HASLINES)
        {
            int i;
            x -= cxIndent;
            if (nmcd.iLevel-- || (pTree->ci.style & TVS_LINESATROOT))
            {
                // Special case the first root
                if (nmcd.iLevel == -1 && hItem == hItem->hParent->hKids)
                {
                    if (hItem->hNext)
                    {
                        i = 2; // "L"
                        if (ti.cChildren && (pTree->ci.style & TVS_HASBUTTONS))
                        {
                            i += 2; // "L+"
                            if ((ti.state & (TVIS_EXPANDED | TVIS_EXPANDPARTIAL)) == TVIS_EXPANDED)
                                i += 2; // "L-"
                        }
                        dx = min((int)cxIndent, pTree->cxMax - pTree->cxBorder - x);
                        dy = pTree->cyItem - (2 * pTree->cyBorder);
                        // Pass TRUE to draw the root ("L" upside down)
                        TV_DrawState2(pTree, i, hdc, x, y, TRUE);
                        i = -1;
                    }
                    else
                    {
                        // first root no siblings
                        // if there's no other item, draw just the button if button mode,
                        if (pTree->ci.style & TVS_HASBUTTONS)
                        {
                            if (ti.cChildren)
                            {
                                // hasbuttons, has lines, lines at root
                                i = ((ti.state & (TVIS_EXPANDED | TVIS_EXPANDPARTIAL)) == TVIS_EXPANDED) ?
                                    9 : 8; // ".--" : ".-+"
                            }
                            else
                            {
                                i = 7; // ".-"
                            }
                        }
                        else
                        {
                            i = 3; // ".-"
                        }
                    }
                }
                else
                {
                    i = (hItem->hNext) ? 1 : 2; // "|-" (rep) : "L"
                    if (ti.cChildren && (pTree->ci.style & TVS_HASBUTTONS))
                    {
                        i += 2; // "|-+" (rep) : "L+"
                        if ((ti.state & (TVIS_EXPANDED | TVIS_EXPANDPARTIAL)) == TVIS_EXPANDED)
                            i += 2; // "|--" (rep) : "L-"
                    }
                }
                if (hItem->iIntegral > 1)
                    TV_DrawKinDescender(pTree, hdc, x, y, hItem, ti.state);
                if (i != -1)
                {
                    dx = min((int)cxIndent, pTree->cxMax - pTree->cxBorder - x);
                    dy = pTree->cyItem - (2 * pTree->cyBorder);
                    if ((dx > 0) && (dy > 0))
                        TV_DrawState(pTree, i, hdc, x, y);
                }
                // Draw the plain "|" connectors for every ancestor level
                // that still has a following sibling.
                while ((--nmcd.iLevel >= 0) || ((pTree->ci.style & TVS_LINESATROOT) && nmcd.iLevel >= -1))
                {
                    hItem = hItem->hParent;
                    x -= cxIndent;
                    if (hItem->hNext)
                    {
                        dx = min((int)cxIndent, (pTree->cxMax - pTree->cxBorder - x));
                        dy = min(pTree->cyItem, pTree->cyWnd - pTree->cyBorder - y);
                        if ((dx > 0) && (dy > 0))
                            TV_DrawState(pTree, 0, hdc, x, y);
                        TV_DrawDescender(pTree, hdc, x, y, hItemSave);
                    }
                }
            }
        }
        else
        { // no lines
            if ((pTree->ci.style & TVS_HASBUTTONS) && (nmcd.iLevel || pTree->ci.style & TVS_LINESATROOT)
                && ti.cChildren)
            {
                int i = ((ti.state & (TVIS_EXPANDED | TVIS_EXPANDPARTIAL)) == TVIS_EXPANDED) ? 1 : 0;
                x -= cxIndent;
                dx = min((int)cxIndent, pTree->cxMax - pTree->cxBorder - x);
                dy = min(pTree->cyItem, pTree->cyWnd - pTree->cyBorder - y);
                if ((dx > 0) && (dy > 0))
                    TV_DrawState(pTree, i, hdc, x, y);
            }
        }
    }
    if (dwRet & CDRF_NOTIFYPOSTPAINT)
    {
        nmcd.nmcd.dwItemSpec = (DWORD_PTR)hItemSave;
        CICustomDrawNotify(&pTree->ci, CDDS_ITEMPOSTPAINT, &nmcd.nmcd);
    }
    SetBkMode(hdc, iOldBkMode);
}
#define INSERTMARKSIZE 6
// Compute the client-coordinate rectangle of the insertion mark for the
// current insert item (pTree->htiInsert).  The mark straddles the top or
// bottom edge of the item (depending on fInsertAfter) and spans nearly the
// whole client width.  Returns FALSE when there is no insert item or it is
// not visible.
BOOL TV_GetInsertMarkRect(PTREE pTree, LPRECT prc)
{
    ASSERT(pTree);
    if(pTree->htiInsert && TV_GetItemRect(pTree, pTree->htiInsert, prc, TRUE))
    {
        // Collapse the rect onto the edge the mark sits on...
        if (pTree->fInsertAfter)
            prc->top = prc->bottom;
        else
            prc->bottom = prc->top;
        // ...then inflate it to the mark's thickness.
        prc->top -= INSERTMARKSIZE/2;
        prc->bottom += INSERTMARKSIZE/2 + 1;
        prc->right = pTree->cxWnd - INSERTMARKSIZE; // should always go all the way to right with pad.
        prc->left -= pTree->cxImage;
        return TRUE;
    }
    return FALSE;
}
// CLR_DEFAULT means "track the system window-text color"; anything else is
// the app-specified insert-mark color.
__inline COLORREF TV_GetInsertMarkColor(PTREE pTree)
{
    return (pTree->clrim == CLR_DEFAULT) ? g_clrWindowText : pTree->clrim;
}
// Paint every visible item that intersects *lprc, plus the insert mark,
// wrapped in the CDDS_PREPAINT/POSTPAINT custom-draw notifications.
// fErase: opaque each item (and the area below the last item) with the
// background brush before drawing.
void TV_DrawTree(PTREE pTree, HDC hdc, BOOL fErase, LPRECT lprc)
{
    int x;
    int iStart, iCnt;
    UINT uFlags;
    RECT rc;
    NMCUSTOMDRAW nmcd;
    if (!pTree->fRedraw)
        return;
    // Checkbox style lazily creates its state imagelist on first paint.
    if (pTree->ci.style & TVS_CHECKBOXES)
        if (!pTree->himlState)
            TV_InitCheckBoxes(pTree);
    // Horizontal scroll position.
    x = -pTree->xPos;
    TV_GetBackgroundBrush(pTree, hdc);
    rc = *lprc;
    // First visible row index intersecting the update rect.
    iStart = lprc->top / pTree->cyItem;
    if (pTree->cItems && pTree->hTop)
    {
        ASSERT(ITEM_VISIBLE(pTree->hTop));
        iCnt = pTree->cShowing - pTree->hTop->iShownIndex;
    }
    else
    {
        iCnt = 0; // Nothing to draw
    }
    nmcd.hdc = hdc;
    /// not implemented yet
    //if (ptb->ci.hwnd == GetFocus())
    //nmcd.uItemState = CDIS_FOCUS;
    //else
    nmcd.uItemState = 0;
    nmcd.lItemlParam = 0;
    nmcd.rc = rc;
    pTree->ci.dwCustom = CICustomDrawNotify(&pTree->ci, CDDS_PREPAINT, &nmcd);
    if (!(pTree->ci.dwCustom & CDRF_SKIPDEFAULT))
    {
        if (iStart <= iCnt)
        {
            HTREEITEM hItem;
            HFONT hOldFont;
            RECT rcT;
            int y = 0;
            // Skip whole items above the update rect, accounting for
            // multi-row (iIntegral) items.
            for (hItem = pTree->hTop; hItem; )
            {
                if (iStart > hItem->iIntegral)
                {
                    iStart -= hItem->iIntegral;
                    y += hItem->iIntegral * pTree->cyItem;
                    hItem = TV_GetNextVisItem(hItem);
                } else
                    break;
            }
            hOldFont = pTree->hFont ? SelectObject(hdc, pTree->hFont) : NULL;
            // TVDI_* for all items
            uFlags = (pTree->ci.style & WS_DISABLED) ? TVDI_GRAYCTL : 0;
            if (fErase)
                uFlags |= TVDI_ERASE;
            // loop from the first visible item until either all visible items are
            // drawn or there are no more items to draw
            for ( ; hItem && y < lprc->bottom; hItem = TV_GetNextVisItem(hItem))
            {
                TV_DrawItem(pTree, hItem, hdc, x, y, uFlags);
                y += pTree->cyItem * hItem->iIntegral;
            }
            //
            // handle drawing the InsertMark next to this item.
            //
            if(TV_GetInsertMarkRect(pTree, &rcT))
                CCDrawInsertMark(hdc, &rcT, FALSE, TV_GetInsertMarkColor(pTree));
            if (hOldFont)
                SelectObject(hdc, hOldFont);
            rc.top = y;
        }
        if (fErase)
            // Opaque out everything we have not drawn explicitly
            FillRect(hdc, &rc, pTree->hbrBk);
        // notify parent afterwards if they want us to
        if (pTree->ci.dwCustom & CDRF_NOTIFYPOSTPAINT) {
            CICustomDrawNotify(&pTree->ci, CDDS_POSTPAINT, &nmcd);
        }
    }
}
// ----------------------------------------------------------------------------
//
// Set up for paint, call DrawTree, and clean up after paint.
//
// ----------------------------------------------------------------------------
// Set up for paint, call TV_DrawTree, and clean up afterwards.
void TV_Paint(PTREE pTree, HDC hdc)
{
    PAINTSTRUCT ps;
    if (!hdc)
    {
        // Normal WM_PAINT path: let USER hand us the DC and invalid rect.
        BeginPaint(pTree->ci.hwnd, &ps);
        TV_DrawTree(pTree, ps.hdc, ps.fErase, &ps.rcPaint);
        EndPaint(pTree->ci.hwnd, &ps);
    }
    else
    {
        // Subclassed paint: caller supplied the DC; repaint the whole client.
        SetRect(&ps.rcPaint, 0, 0, pTree->cxWnd, pTree->cyWnd);
        TV_DrawTree(pTree, hdc, TRUE, &ps.rcPaint);
    }
}
// ----------------------------------------------------------------------------
// Create an imagelist to be used for dragging.
//
// 1) create mask and image bitmap matching the select bounds size
// 2) draw the text to both bitmaps (in black for now)
// 3) create an imagelist with these bitmaps
// 4) make a dithered copy of the image onto the new imagelist
// ----------------------------------------------------------------------------
// Create an imagelist to be used for dragging:
//   1) create mask and image bitmaps matching the item bounds
//   2) draw the item text into both (transparent text)
//   3) build a one-image masked imagelist from them
//   4) dither-copy the item's icon onto the new imagelist
// Returns NULL on failure or when the tree has no imagelist.
HIMAGELIST TV_CreateDragImage(PTREE pTree, HTREEITEM hItem)
{
    HDC hdcMem = NULL;
    HBITMAP hbmImage = NULL;
    HBITMAP hbmMask = NULL;
    HBITMAP hbmOld;
    HIMAGELIST himl = NULL;
    BOOL bMirroredWnd = (pTree->ci.dwExStyle&RTL_MIRRORED_WINDOW);
    int dx, dy;
    int iSrc;
    TVITEMEX ti;
    if (!pTree->hImageList)
        return NULL;
    if (hItem == NULL)
        hItem = pTree->htiDrag;
    if (hItem == NULL)
        return NULL;
    dx = hItem->iWidth + pTree->cxImage;
    dy = pTree->cyItem;
    if (!(hdcMem = CreateCompatibleDC(NULL)))
        goto CDI_Exit;
    if (!(hbmImage = CreateColorBitmap(dx, dy)))
        goto CDI_Exit;
    if (!(hbmMask = CreateMonoBitmap(dx, dy)))
        goto CDI_Exit;
    //
    // Mirror the memory DC so that the transition from
    // mirrored(memDC)->non-mirrored(imagelist DCs)->mirrored(screenDC)
    // is consistent. [samera]
    //
    if (bMirroredWnd) {
        SET_DC_RTL_MIRRORED(hdcMem);
    }
    // prepare for drawing the item
    if (pTree->hFont)
        SelectObject(hdcMem, pTree->hFont);
    SetBkMode(hdcMem, TRANSPARENT);
    /*
    ** draw the text to both bitmaps
    */
    hbmOld = SelectObject(hdcMem, hbmImage);
    // fill image with black for transparency
    PatBlt(hdcMem, 0, 0, dx, dy, BLACKNESS);
    TV_DrawItem(pTree, hItem, hdcMem, 0, 0,
                TVDI_NOIMAGE | TVDI_NOTREE | TVDI_TRANSTEXT);
    //
    // If the header is RTL mirrored, then
    // mirror the Memory DC, so that when copying back
    // we don't get any image-flipping. [samera]
    //
    if (bMirroredWnd)
        MirrorBitmapInDC(hdcMem, hbmImage);
    SelectObject(hdcMem, hbmMask);
    // fill mask with white for transparency
    PatBlt(hdcMem, 0, 0, dx, dy, WHITENESS);
    TV_DrawItem(pTree, hItem, hdcMem, 0, 0,
                TVDI_NOIMAGE | TVDI_NOTREE | TVDI_TRANSTEXT);
    if (bMirroredWnd)
        MirrorBitmapInDC(hdcMem, hbmMask);
    // unselect objects that we used
    SelectObject(hdcMem, hbmOld);
    SelectObject(hdcMem, g_hfontSystem);
    /*
    ** make an image list that for now only has the text
    */
    if (!(himl = ImageList_Create(dx, dy, ILC_MASK, 1, 0)))
        goto CDI_Exit;
    ImageList_SetBkColor(himl, CLR_NONE);
    ImageList_Add(himl, hbmImage, hbmMask);
    /*
    ** make a dithered copy of the image part onto our bitmaps
    ** (need both bitmap and mask to be dithered)
    */
    TV_GetItem(pTree, hItem, TVIF_IMAGE, &ti);
    iSrc = ti.iImage;
    ImageList_CopyDitherImage(himl, 0, 0, (pTree->cyItem - pTree->cyImage) / 2,
        pTree->hImageList, iSrc, ((pTree->ci.dwExStyle & dwExStyleRTLMirrorWnd) ? ILD_MIRROR : 0L) | (hItem->state & TVIS_OVERLAYMASK));
CDI_Exit:
    // BUGFIX: the DC must be freed with DeleteDC; DeleteObject is only for
    // pens/brushes/fonts/bitmaps/regions/palettes and fails on an HDC,
    // which leaked the DC here.
    if (hdcMem)
        DeleteDC(hdcMem);
    if (hbmImage)
        DeleteObject(hbmImage);
    if (hbmMask)
        DeleteObject(hbmMask);
    return himl;
}
#define COLORKEY RGB(0xF4, 0x0, 0x0)
// Build the color-keyed drag bitmap handed back to the shell drag helper.
// Fills pshdi (size, bitmap, color key, cursor offset) and returns 1 on
// success; returns 0 on failure.
LRESULT TV_GenerateDragImage(PTREE pTree, SHDRAGIMAGE* pshdi)
{
    LRESULT lRet = 0;
    HBITMAP hbmpOld = NULL;
    HTREEITEM hItem = pTree->htiDrag;
    RECT rc;
    HDC hdcDragImage;
    if (hItem == NULL)
        return FALSE;
    hdcDragImage = CreateCompatibleDC(NULL);
    if (!hdcDragImage)
        return 0;
    // After this rc contains the bounds of all the items in Client Coordinates.
    //
    // Mirror the DC, if the treeview is mirrored.
    //
    if (pTree->ci.dwExStyle & RTL_MIRRORED_WINDOW)
    {
        SET_DC_RTL_MIRRORED(hdcDragImage);
    }
    TV_GetItemRect(pTree, hItem, &rc, TRUE);
    // Subtract off the image...
    rc.left -= pTree->cxImage;
    pshdi->sizeDragImage.cx = RECTWIDTH(rc);
    pshdi->sizeDragImage.cy = RECTHEIGHT(rc);
    pshdi->hbmpDragImage = CreateBitmap( pshdi->sizeDragImage.cx, pshdi->sizeDragImage.cy,
        GetDeviceCaps(hdcDragImage, PLANES), GetDeviceCaps(hdcDragImage, BITSPIXEL),
        NULL);
    if (pshdi->hbmpDragImage)
    {
        COLORREF clrBkSave;
        RECT rcImage = {0, 0, pshdi->sizeDragImage.cx, pshdi->sizeDragImage.cy};
        hbmpOld = SelectObject(hdcDragImage, pshdi->hbmpDragImage);
        pshdi->crColorKey = COLORKEY;
        FillRectClr(hdcDragImage, &rcImage, pshdi->crColorKey);
        // Calculate the offset... The cursor should be in the bitmap rect.
        if (pTree->ci.dwExStyle & RTL_MIRRORED_WINDOW)
            pshdi->ptOffset.x = rc.right - pTree->ptCapture.x;
        else
            pshdi->ptOffset.x = pTree->ptCapture.x - rc.left;
        pshdi->ptOffset.y = pTree->ptCapture.y - rc.top;
        // Temporarily swap the color key in as the tree background so the
        // item paints over a keyable color.
        clrBkSave = pTree->clrBk;
        pTree->clrBk = COLORKEY;
        TV_DrawItem(pTree, hItem, hdcDragImage, 0, 0,
                    TVDI_NOTREE | TVDI_TRANSTEXT | TVDI_FORCEIMAGE | TVDI_NOBK);
        pTree->clrBk = clrBkSave;
        SelectObject(hdcDragImage, hbmpOld);
        // We're passing back the created HBMP.
        lRet = 1;
    }
    // BUGFIX: previously the DC was only deleted on the success path,
    // leaking it whenever CreateBitmap failed.  Free it on every path.
    DeleteDC(hdcDragImage);
    return lRet;
}
|
spiritl7db/uni-app
|
packages/uni-quickapp-native/lib/manifest/base-parser.js
|
// Manifest attributes copied from the source manifest.json when absent on
// the target (key: target property name, value: source property name).
const ATTRS = {
  'name': 'name',
  'versionName': 'versionName',
  'versionCode': 'versionCode'
}
/**
 * Copy the attributes listed in ATTRS from `from` onto `to`, but only for
 * keys `to` does not already have a truthy value for.
 */
function merge (to, from) {
  for (const name of Object.keys(ATTRS)) {
    if (!to[name]) {
      to[name] = from[name]
    }
  }
}
module.exports = function parseBase(manifest, manifestJson) {
merge(manifest, manifestJson)
manifest.versionCode = parseInt(manifest.versionCode) || 1
if (!manifest.package) {
manifest.package = manifest.name || 'Bundle'
}
if (!manifest.config) {
manifest.config = {}
}
if (!manifest.config.dsl) {
manifest.config.dsl = {}
}
manifest.config.dsl.name = 'vue'
return manifest
}
|
sgenoud/federa
|
framework/signatures.py
|
<filename>framework/signatures.py
import base64
import hashlib
from datetime import datetime, timezone
from urllib.parse import urlparse
from email.utils import format_datetime
import requests
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.exceptions import InvalidSignature
from pyld import jsonld
# cache the downloaded "schemas", otherwise the library is super slow
# (https://github.com/digitalbazaar/pyld/issues/70)
_CACHE = {}
LOADER = jsonld.requests_document_loader()
def _caching_document_loader(url):
    # Memoize remote JSON-LD context documents for the process lifetime;
    # cache entries are never invalidated.
    if url in _CACHE:
        return _CACHE[url]
    resp = LOADER(url)
    _CACHE[url] = resp
    return resp
jsonld.set_document_loader(_caching_document_loader)
# Pseudo-header used by the HTTP Signatures scheme to cover method + path.
TARGET = "(request-target)"
def sign(key, message):
    """Sign ``message`` (bytes) with the RSA private ``key``.

    Uses PKCS#1 v1.5 padding with SHA-256 and returns the signature as
    base64-encoded bytes.
    """
    signature = key.sign(message, padding.PKCS1v15(), hashes.SHA256())
    return base64.b64encode(signature)
def verify(key, message, signature):
    """Check a base64-encoded RSA PKCS#1 v1.5 / SHA-256 signature.

    *message* is a str (encoded as ASCII before checking). Returns a bool
    instead of letting InvalidSignature propagate.
    """
    try:
        key.verify(
            base64.b64decode(signature),
            message.encode("ascii"),
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except InvalidSignature:
        return False
def signed_request(key, key_id, method, url, *args, signing_headers=(), headers=None, **kwargs):
    """Perform an HTTP request carrying an HTTP-Signature header.

    Signs "(request-target)", Host and Date (always), plus any header named
    in *signing_headers* that is present in *headers*. Extra args/kwargs are
    forwarded to requests.request().
    """
    if headers is None:
        headers = {}
    urlinfo = urlparse(url)
    # Date and Host are always included in the signed set.
    headers["Date"] = format_datetime(datetime.now(timezone.utc), True)
    headers["Host"] = urlinfo.hostname
    used_headers = [
        (TARGET, f"{method.lower()} {urlinfo.path}"),
        ("Host", headers["Host"]),
        ("Date", headers["Date"]),
    ] + [(header, headers[header]) for header in signing_headers if header in headers]
    # Signing string: one "name: value" line per header, lowercase names,
    # in declaration order (the receiver rebuilds this exact string).
    to_sign = "\n".join(
        [f"{header.lower()}: {header_value}" for header, header_value in used_headers]
    )
    signature = sign(key, to_sign.encode("ascii"))
    signature_parts = (
        ("keyId", key_id),
        ("algorithm", "rsa-sha256"),
        ("headers", " ".join(h for h, _ in used_headers)),
        ("signature", signature.decode("ascii")),
    )
    headers["Signature"] = ",".join(['{}="{}"'.format(k, v) for k, v in signature_parts])
    return requests.request(method, url, *args, headers=headers, **kwargs)
def _parse_sig_val(chars):
key, _, val = chars.partition("=")
return key, val[1:-1]
def parse_signature_header(header):
return dict(_parse_sig_val(elem.strip()) for elem in header.split(",") if elem)
def signature_is_valid(get_key, headers, path, method="GET"):
    """Verify the HTTP-Signature of an incoming request.

    get_key: callable mapping a keyId to a public key (falsy when unknown).
    headers: mapping of request headers; must contain "Signature".
    path/method: used to rebuild the "(request-target)" pseudo-header.

    Returns True only when a well-formed signature verifies.
    """
    target = f"{method.lower()} {path}"
    signature = parse_signature_header(headers.get("Signature", ""))
    # A signature missing any of these three fields cannot be checked.
    if "headers" not in signature or "signature" not in signature or "keyId" not in signature:
        return False
    # Rebuild the exact string the sender signed, in the order they declared.
    to_verify = "\n".join(
        f"{header.lower()}: {target if header == TARGET else headers.get(header)}"
        for header in signature["headers"].split(" ")
    )
    key = get_key(signature["keyId"])
    if not key:
        return False
    return verify(key, to_verify, signature.get("signature"))
# Copied from https://github.com/tsileo/little-boxes/blob/master/little_boxes/linked_data_sig.py
def _options_hash(doc):
    """SHA-256 hex digest of the canonicalized signature options.

    Drops the fields that are not part of the signed options ("type", "id",
    "signatureValue") and canonicalizes with URDNA2015 before hashing.
    """
    doc = dict(doc["signature"])
    for k in ["type", "id", "signatureValue"]:
        if k in doc:
            del doc[k]
    doc["@context"] = "https://w3id.org/identity/v1"
    normalized = jsonld.normalize(doc, {"algorithm": "URDNA2015", "format": "application/nquads"})
    h = hashlib.new("sha256")
    h.update(normalized.encode("utf-8"))
    return h.hexdigest()
def _doc_hash(doc):
    """SHA-256 hex digest of the canonicalized document, signature excluded."""
    doc = dict(doc)
    if "signature" in doc:
        del doc["signature"]
    normalized = jsonld.normalize(doc, {"algorithm": "URDNA2015", "format": "application/nquads"})
    h = hashlib.new("sha256")
    h.update(normalized.encode("utf-8"))
    return h.hexdigest()
def document_is_valid(get_key, content):
    """Check the JSON-LD (Linked Data) signature embedded in *content*.

    get_key: callable mapping a creator id to a public key (falsy when
    unknown). Returns True when the embedded "signatureValue" verifies
    against the canonicalized document, False otherwise.
    """
    # Validate the structure BEFORE hashing: _options_hash() reads
    # doc["signature"] and would raise KeyError on an unsigned document.
    if (
        "signature" not in content
        or "signatureValue" not in content["signature"]
        or "creator" not in content["signature"]
    ):
        return False
    key = get_key(content["signature"]["creator"])
    if not key:
        # Unknown creator: invalid, mirroring signature_is_valid().
        return False
    to_be_signed = _options_hash(content) + _doc_hash(content)
    signature = content["signature"]["signatureValue"]
    return verify(key, to_be_signed.encode("utf-8"), signature)
def signed_content(key, key_id, content):
    """Return a copy of *content* carrying an embedded RsaSignature2017.

    The signature covers the canonicalized options hash plus document hash,
    mirroring what document_is_valid() recomputes on the receiving side.
    """
    doc = dict(content)
    options = {
        "type": "RsaSignature2017",
        "creator": key_id,
        # Second-resolution UTC timestamp with an explicit "Z" suffix.
        "created": datetime.utcnow().replace(microsecond=0).isoformat() + "Z",
    }
    doc["signature"] = options
    to_be_signed = _options_hash(doc) + _doc_hash(doc)
    options["signatureValue"] = sign(key, to_be_signed.encode("utf-8")).decode("ascii")
    return doc
|
francesco-p/FACIL
|
src/approach/dmc.py
|
<filename>src/approach/dmc.py
import torch
from torch import nn
from copy import deepcopy
from argparse import ArgumentParser
from datasets.data_loader import get_loaders
from .incremental_learning import Inc_Learning_Appr
from datasets.exemplars_dataset import ExemplarsDataset
class Appr(Inc_Learning_Appr):
    """Class implementing the Deep Model Consolidation (DMC) approach
    described in https://arxiv.org/abs/1903.07864
    Original code available at https://github.com/juntingzh/incremental-learning-baselines
    """

    def __init__(self, model, device, nepochs=160, lr=0.1, lr_min=1e-4, lr_factor=10, lr_patience=8, clipgrad=10000,
                 momentum=0, wd=0, multi_softmax=False, wu_nepochs=0, wu_lr_factor=1, fix_bn=False, eval_on_train=False,
                 logger=None, exemplars_dataset=None, aux_dataset='imagenet_32', aux_batch_size=128):
        super(Appr, self).__init__(model, device, nepochs, lr, lr_min, lr_factor, lr_patience, clipgrad, momentum, wd,
                                   multi_softmax, wu_nepochs, wu_lr_factor, fix_bn, eval_on_train, logger,
                                   exemplars_dataset)
        # Snapshots used during consolidation: the previous-tasks model and
        # the freshly trained new-task model.
        self.model_old = None
        self.model_new = None
        self.aux_dataset = aux_dataset
        self.aux_batch_size = aux_batch_size
        # get dataloader for auxiliar dataset
        aux_trn_ldr, _, aux_val_ldr, _ = get_loaders([self.aux_dataset], num_tasks=1, nc_first_task=None, validation=0,
                                                     batch_size=self.aux_batch_size, num_workers=4, pin_memory=False)
        self.aux_trn_loader = aux_trn_ldr[0]
        self.aux_val_loader = aux_val_ldr[0]
        # Since an auxiliary dataset is available, using exemplars could be redundant
        have_exemplars = self.exemplars_dataset.max_num_exemplars + self.exemplars_dataset.max_num_exemplars_per_class
        assert (have_exemplars == 0), 'Warning: DMC does not use exemplars. Comment this line to force it.'

    @staticmethod
    def exemplars_dataset_class():
        # Default exemplars dataset type (DMC itself does not use exemplars).
        return ExemplarsDataset

    @staticmethod
    def extra_parser(args):
        """Returns a parser containing the approach specific parameters"""
        parser = ArgumentParser()
        # Sec. 4.2.1 "We use ImageNet32x32 dataset as the source for auxiliary data in the model consolidation stage."
        parser.add_argument('--aux-dataset', default='imagenet_32_reduced', type=str, required=False,
                            help='Auxiliary dataset (default=%(default)s)')
        parser.add_argument('--aux-batch-size', default=128, type=int, required=False,
                            help='Batch size for auxiliary dataset (default=%(default)s)')
        return parser.parse_known_args(args)

    def _get_optimizer(self):
        """Returns the optimizer"""
        if len(self.exemplars_dataset) == 0 and len(self.model.heads) > 1:
            # if there are no exemplars, previous heads are not modified
            params = list(self.model.model.parameters()) + list(self.model.heads[-1].parameters())
        else:
            params = self.model.parameters()
        return torch.optim.SGD(params, lr=self.lr, weight_decay=self.wd, momentum=self.momentum)

    def pre_train_process(self, t, trn_loader):
        """Runs before training all epochs of the task (before the train session)"""
        if t > 0:
            # Re-initialize model
            for m in self.model.modules():
                if isinstance(m, (nn.Conv2d, nn.BatchNorm2d, nn.Linear)):
                    m.reset_parameters()
            # Get new model
            self.model_new = deepcopy(self.model)
            # Zero and freeze all previous-task heads so the new-task model
            # only learns the current task.
            for h in self.model_new.heads[:-1]:
                with torch.no_grad():
                    h.weight.zero_()
                    h.bias.zero_()
                for p in h.parameters():
                    p.requires_grad = False
        else:
            # First task: no consolidation needed, train the model directly.
            self.model_new = self.model

    def train_loop(self, t, trn_loader, val_loader):
        """Contains the epochs loop"""
        if t > 0:
            # Args for the new data trainer and for the student trainer are the same
            dmc_args = dict(nepochs=self.nepochs, lr=self.lr, lr_min=self.lr_min, lr_factor=self.lr_factor,
                            lr_patience=self.lr_patience, clipgrad=self.clipgrad, momentum=self.momentum,
                            wd=self.wd, multi_softmax=self.multi_softmax, wu_nepochs=self.warmup_epochs,
                            wu_lr_factor=self.warmup_lr, fix_bn=self.fix_bn, logger=self.logger)
            # Train new model in new data
            new_trainer = NewTaskTrainer(self.model_new, self.device, **dmc_args)
            new_trainer.train_loop(t, trn_loader, val_loader)
            self.model_new.eval()
            self.model_new.freeze_all()
            print('=' * 108)
            print("Training of student")
            print('=' * 108)
            # Train student model using both old and new model
            student_trainer = StudentTrainer(self.model, self.model_new, self.model_old, self.device, **dmc_args)
            student_trainer.train_loop(t, self.aux_trn_loader, self.aux_val_loader)
        else:
            # FINETUNING TRAINING -- contains the epochs loop
            super().train_loop(t, trn_loader, val_loader)

    def post_train_process(self, t, trn_loader):
        """Runs after training all the epochs of the task (after the train session)"""
        # Restore best and save model for future tasks
        self.model_old = deepcopy(self.model)
        self.model_old.eval()
        self.model_old.freeze_all()
class NewTaskTrainer(Inc_Learning_Appr):
    """Plain trainer used to fit the new-task model.

    Inherits all training behaviour from Inc_Learning_Appr unchanged; only
    the default momentum/weight-decay values differ from the Appr wrapper.
    """

    def __init__(self, model, device, nepochs=160, lr=0.1, lr_min=1e-4, lr_factor=10, lr_patience=8, clipgrad=10000,
                 momentum=0.9, wd=5e-4, multi_softmax=False, wu_nepochs=0, wu_lr_factor=1, fix_bn=False,
                 eval_on_train=False, logger=None):
        # Python-3 style super(); forwards every argument unchanged.
        super().__init__(model, device, nepochs, lr, lr_min, lr_factor, lr_patience, clipgrad,
                         momentum, wd, multi_softmax, wu_nepochs, wu_lr_factor, fix_bn,
                         eval_on_train, logger)
class StudentTrainer(Inc_Learning_Appr):
    """Trainer that consolidates (distills) the old and new specialist models
    into the student model using the auxiliary dataset."""

    def __init__(self, model, model_new, model_old, device, nepochs=160, lr=0.1, lr_min=1e-4, lr_factor=10,
                 lr_patience=8, clipgrad=10000, momentum=0.9, wd=5e-4, multi_softmax=False, wu_nepochs=0,
                 wu_lr_factor=1, fix_bn=False, eval_on_train=False, logger=None):
        super(StudentTrainer, self).__init__(model, device, nepochs, lr, lr_min, lr_factor, lr_patience, clipgrad,
                                             momentum, wd, multi_softmax, wu_nepochs, wu_lr_factor, fix_bn,
                                             eval_on_train, logger)
        # Frozen teachers: old covers tasks < t, new covers task t.
        self.model_old = model_old
        self.model_new = model_new

    # Runs a single epoch of student's training
    def train_epoch(self, t, trn_loader):
        self.model.train()
        if self.fix_bn and t > 0:
            self.model.freeze_bn()
        for images, targets in trn_loader:
            images, targets = images.cuda(), targets.cuda()
            # Forward old and new model
            targets_old = self.model_old(images)
            targets_new = self.model_new(images)
            # Forward current model
            outputs = self.model(images)
            loss = self.criterion(t, outputs, targets_old, targets_new)
            # Backward
            self.optimizer.zero_grad()
            loss.backward()
            torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.clipgrad)
            self.optimizer.step()

    # Contains the evaluation code for evaluating the student
    def eval(self, t, val_loader):
        with torch.no_grad():
            total_loss, total_acc_taw, total_acc_tag, total_num = 0, 0, 0, 0
            self.model.eval()
            for images, targets in val_loader:
                images = images.cuda()
                # Forward old and new model
                targets_old = self.model_old(images)
                targets_new = self.model_new(images)
                # Forward current model
                outputs = self.model(images)
                loss = self.criterion(t, outputs, targets_old, targets_new)
                # Log
                total_loss += loss.item() * len(targets)
                total_num += len(targets)
            # Accuracy is meaningless for the distillation objective, so -1 is
            # reported for both TAw and TAg accuracy.
            return total_loss / total_num, -1, -1

    # Returns the loss value for the student
    def criterion(self, t, outputs, targets_old, targets_new=None):
        # Eq. 2: Model Consolidation
        with torch.no_grad():
            # Eq. 4: "The regression target of the consolidated model is the concatenation of normalized logits of
            # the two specialist models."
            targets = torch.cat(targets_old[:t] + [targets_new[t]], dim=1)
            # Normalize by subtracting the per-logit batch mean.
            targets -= targets.mean(0)
        # Eq. 3: Double Distillation Loss
        return torch.nn.functional.mse_loss(torch.cat(outputs, dim=1), targets.detach(), reduction='mean')
|
mirreck/my4x
|
my4x-core/src/test/java/net/my4x/tasks/SVGFontTasksTest.java
|
<reponame>mirreck/my4x
package net.my4x.tasks;
import java.io.IOException;
import org.junit.Ignore;
import org.junit.Test;
public class SVGFontTasksTest {

    // Machine-local fixture path; the test is @Ignore'd because it depends on
    // this file existing on the developer's machine.
    private static final String SRC_PATH = "C:\\tmp\\TESTME\\mix.svg";

    @Ignore
    @Test
    public void test() throws IOException {
        // Smoke test: runs glyph extraction on the local SVG font file.
        SVGFontTasks.makeGlyphs(SRC_PATH);
    }
}
|
hyshi-cn/oapi-sdk-java
|
larksuite-oapi/src/main/java/com/larksuite/oapi/service/search/v2/model/DataSourcePatchReqBody.java
|
// Code generated by lark suite oapi sdk gen
package com.larksuite.oapi.service.search.v2.model;
import com.google.gson.annotations.SerializedName;
/**
 * Request body for patching a search data source.
 * Serialized to JSON via Gson using the annotated field names.
 */
public class DataSourcePatchReqBody {

    /** Name of the data source. */
    @SerializedName("name")
    private String name;

    /** State code of the data source; semantics defined by the Search API. */
    @SerializedName("state")
    private Integer state;

    /** Free-form description of the data source. */
    @SerializedName("description")
    private String description;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getState() {
        return state;
    }

    public void setState(Integer state) {
        this.state = state;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
|
xanderflood/math-circle
|
app/controllers/teacher/priorities_controller.rb
|
class Teacher::PrioritiesController < Teacher::BaseController
  before_action :set_threshold
  before_action :set_last_semester

  # Attendance count a student needs to be flagged as priority.
  DEFAULT_THRESHOLD = 4

  def manage
  end

  # Recompute every student's `priority` flag from last semester's attendance.
  def reset
    unless @threshold = ensure_int(@threshold)
      flash[:alert] = "Please choose a threshold that is an integer."
      render :manage
      return
    end
    table = Rollcall.attendance_table(@last_semester)
    # `where.not(id: ...)` handles an empty key list correctly. The previous
    # hand-written fragment `where("id NOT IN (?)", table.keys)` expanded to
    # "id NOT IN (NULL)" when the table was empty, which matches no rows, so
    # no student would ever be de-prioritized in that case.
    Student.where.not(id: table.keys)
           .update_all(priority: false)
    table.each do |id, att|
      begin
        Student.find(id).update!(priority: (att >= @threshold))
      rescue ActiveRecord::RecordNotFound
        # Students listed in last semester's attendance rolls may have been
        # deleted since — that's totally acceptable; just skip them.
      end
    end
    redirect_to teacher_home_path, notice: "Student priorities have been reset."
  end

  private

  # Threshold from params, falling back to the default.
  def set_threshold
    @threshold = params[:threshold] || DEFAULT_THRESHOLD
  end

  # The semester before the current one; shows an alert when there is none.
  def set_last_semester
    @last_semester = Semester.limit(2).order(start: :desc)[1]
    unless @last_semester
      flash[:alert] = "There is only one semester. In order to reset student priorities, you need to have a past semester."
      render :manage
    end
  end

  # Integer(str) raises on non-integer input; convert that into `false`.
  def ensure_int str
    Integer(str) rescue false
  end
end
|
seigodev/emwiki
|
emwiki/symbol/symbol_html_builder.py
|
import glob
import os
from symbol.models import Symbol
from symbol.symbol_maker.processor import Processor
from django.conf import settings
class SymbolHtmlBuilder:
    """Create symbol HTML files from HTMLized MML.
    """
    # Source directory of HTMLized MML (from Django settings).
    from_dir = settings.MML_HTML_DIR
    # Destination directory where the symbol HTML files are written.
    to_dir = Symbol.get_htmlfile_dir()

    def update_files(self):
        """Rebuild to_dir from scratch: clear old output, then regenerate.

        Only regular files are removed — a stray subdirectory in to_dir
        would previously make os.remove() raise and abort the build.
        """
        existing_files = glob.glob(os.path.join(self.to_dir, '*'))
        for file in existing_files:
            if os.path.isfile(file):
                os.remove(file)
        print('Building Files')
        print(f' from {self.from_dir}')
        print(f' to {self.to_dir}')
        processor = Processor()
        processor.execute(self.from_dir, self.to_dir)
|
quantumlaser/code2016
|
LeetCode/Answers/Leetcode-cpp-solution/string/132_Palindrome Partitioning II.cpp
|
<filename>LeetCode/Answers/Leetcode-cpp-solution/string/132_Palindrome Partitioning II.cpp<gh_stars>0
/*
Given a string s, partition s such that every substring of the partition is a palindrome.
Return the minimum cuts needed for a palindrome partitioning of s.
For example, given s = "aab",
Return 1 since the palindrome partitioning ["aa","b"] could be produced using 1 cut.
*/
class Solution {
public:
    // Returns the minimum number of cuts needed to partition s so that every
    // piece is a palindrome.
    //
    // Dynamic programming:
    //   palin[j][i] — true when s[j..i] is a palindrome
    //   pieces[i]   — minimum number of palindromic pieces covering s[0..i]
    // Answer is pieces[len-1] - 1 (cuts = pieces - 1).
    //
    // Fixes vs. the previous version: raw new[] arrays leaked on every call,
    // and an empty string indexed dp[-1] (undefined behavior).
    int minCut(std::string s) {
        const int len = static_cast<int>(s.size());
        if (len == 0) return 0;  // empty string needs no cuts
        std::vector<int> pieces(len);
        std::vector<std::vector<bool>> palin(len, std::vector<bool>(len, false));
        for (int i = 0; i < len; ++i)
            pieces[i] = i + 1;  // worst case: every character its own piece
        for (int i = 1; i < len; ++i) {
            for (int j = i; j >= 0; --j) {
                // s[j..i] is a palindrome iff the ends match and the interior
                // (if any) is a palindrome.
                if (s[j] == s[i] && (i - j < 2 || palin[j + 1][i - 1])) {
                    palin[j][i] = true;
                    if (j == 0)
                        pieces[i] = 1;  // whole prefix is one palindrome
                    else
                        pieces[i] = std::min(pieces[i], pieces[j - 1] + 1);
                }
            }
        }
        return pieces[len - 1] - 1;
    }
};
|
strogiyotec/leetcode-oo
|
src/main/java/leetcode/oo/ll/ListHasCycle.java
|
package leetcode.oo.ll;
import java.util.IdentityHashMap;
import leetcode.oo.ListNode;
//https://leetcode.com/problems/linked-list-cycle/
final class ListHasCycle {

    /**
     * Returns true when the linked list starting at {@code head} contains a
     * cycle.
     *
     * Uses Floyd's tortoise-and-hare: a slow pointer advances one node per
     * step, a fast pointer two; they can only meet if there is a cycle.
     * O(n) time, O(1) space — replaces the previous IdentityHashMap-based
     * approach that stored every visited node.
     *
     * @param head first node of the list, may be null
     */
    boolean hasCycle(ListNode head) {
        ListNode slow = head;
        ListNode fast = head;
        while (fast != null && fast.next != null) {
            slow = slow.next;
            fast = fast.next.next;
            if (slow == fast) {
                return true;
            }
        }
        return false;
    }
}
|
zmlcoder/rpcx
|
rpcx-parent/rpcx-core/src/main/java/com/zmlcoder/rpcx/registry/IRegistry.java
|
package com.zmlcoder.rpcx.registry;
import java.util.List;
import com.zmlcoder.rpcx.common.RpcxUrl;
import com.zmlcoder.rpcx.config.RpcxReferenceConfig;
import com.zmlcoder.rpcx.config.RpcxRegistryConfig;
public interface IRegistry {

    /** Initializes this registry from the given registry configuration. */
    void init(RpcxRegistryConfig config);

    /** Returns the URLs matching the given reference configuration. */
    List<RpcxUrl> lookup(RpcxReferenceConfig config);

    /** Returns the URLs matching the given URL. */
    List<RpcxUrl> lookup(RpcxUrl url);

    /** Registers the given URL with the registry. */
    void register(RpcxUrl url);

    /** Subscribes to updates for the given URL. */
    void subscribe(RpcxUrl url);
}
|
nareshbhusal/repass
|
client/src/repass.js
|
import axios from 'axios';

// API base URL comes from the build environment.
const { URL } = process.env;

// Send cookies on every axios request (mutates the GLOBAL axios default,
// affecting any other axios usage in the app).
axios.defaults.withCredentials = true;

// Preconfigured axios client for the repass API.
// NOTE(review): withCredentials is set three times (global default, create
// option, instance default) — presumably defensive; confirm whether the
// global axios.defaults mutation above is intentional.
const repass= axios.create({
    baseURL: URL,
    withCredentials: true,
    headers: { crossDomain: true, 'Content-Type': 'application/json' },
})
repass.defaults.withCredentials = true

export default repass;
|
daihanqiao/TanglePay-Extension
|
src/panels/user/wallets/index.js
|
<reponame>daihanqiao/TanglePay-Extension<filename>src/panels/user/wallets/index.js
import React from 'react'
import { CopyToClipboard } from 'react-copy-to-clipboard'
import { Base, I18n } from '@tangle-pay/common'
import { useGetNodeWallet } from '@tangle-pay/store/common'
import { Nav, SvgIcon, Toast } from '@/common'
// Wallet-management screen: lists every wallet for the current node and lets
// the user open the edit page for a wallet or copy its address.
export const UserWallets = () => {
    // Second element of the hook result is the list of wallets to render.
    const [, walletsList] = useGetNodeWallet()
    return (
        <div className='page'>
            <Nav title={I18n.t('user.manageWallets')} />
            <div className='page-content ph20 pb20'>
                {walletsList.map((e) => {
                    return (
                        <div
                            className='flex ac jsb row border radius10 ph20 pv15 mt20 press'
                            style={{
                                borderColor: '#000'
                            }}
                            onClick={() => {
                                Base.push('/user/editWallet', { id: e.id })
                            }}
                            key={e.id}>
                            <div>
                                <div className='fz17'>{e.name}</div>
                                <div className='mt20 flex row ae'>
                                    <div className='fz15'>{Base.handleAddress(e.address)}</div>
                                    <CopyToClipboard
                                        text={e.address}
                                        onCopy={() => Toast.success(I18n.t('assets.copied'))}>
                                        <SvgIcon
                                            onClick={(e) => {
                                                // The copy icon sits inside the clickable row:
                                                // swallow the event so tapping "copy" doesn't
                                                // also navigate to the edit page.
                                                e.stopPropagation()
                                                e.preventDefault()
                                            }}
                                            name='copy'
                                            size={20}
                                            className='cB press ml30'
                                        />
                                    </CopyToClipboard>
                                </div>
                            </div>
                            <div>
                                <SvgIcon name='right' size={15} className='cB' />
                            </div>
                        </div>
                    )
                })}
            </div>
        </div>
    )
}
|
mosajjal/vte-290
|
debug.c
|
/*
* Copyright (C) 2002,2003 Red Hat, Inc.
*
* This is free software; you can redistribute it and/or modify it under
* the terms of the GNU Library General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include <config.h>
#include <glib.h>
#include "debug.h"
/* Global bitmask of enabled debug categories; set once by _vte_debug_init(). */
VteDebugFlags _vte_debug_flags;

/* Parse the VTE_DEBUG environment variable into _vte_debug_flags.
 * No-op unless the library was built with VTE_DEBUG defined. */
void
_vte_debug_init(void)
{
#ifdef VTE_DEBUG
	/* Maps each VTE_DEBUG keyword to its flag bit. */
	const GDebugKey keys[] = {
		{ "misc", VTE_DEBUG_MISC },
		{ "io", VTE_DEBUG_IO },
		{ "adj", VTE_DEBUG_ADJ },
		{ "updates", VTE_DEBUG_UPDATES },
		{ "events", VTE_DEBUG_EVENTS },
		{ "parse", VTE_DEBUG_PARSE },
		{ "signals", VTE_DEBUG_SIGNALS },
		{ "selection", VTE_DEBUG_SELECTION },
		{ "substitution", VTE_DEBUG_SUBSTITUTION },
		{ "ring", VTE_DEBUG_RING },
		{ "pty", VTE_DEBUG_PTY },
		{ "cursor", VTE_DEBUG_CURSOR },
		{ "keyboard", VTE_DEBUG_KEYBOARD },
		{ "lifecycle", VTE_DEBUG_LIFECYCLE },
		{ "trie", VTE_DEBUG_TRIE },
		{ "work", VTE_DEBUG_WORK },
		{ "cells", VTE_DEBUG_CELLS },
		{ "timeout", VTE_DEBUG_TIMEOUT },
		{ "draw", VTE_DEBUG_DRAW },
		{ "ally", VTE_DEBUG_ALLY },
		{ "pangocairo", VTE_DEBUG_PANGOCAIRO },
		{ "widget-size", VTE_DEBUG_WIDGET_SIZE },
		{ "bg", VTE_DEBUG_BG }
	};
	/* g_parse_debug_string() handles the keyword list in $VTE_DEBUG,
	 * including the special "all" value. */
	_vte_debug_flags = g_parse_debug_string (g_getenv("VTE_DEBUG"),
			keys, G_N_ELEMENTS (keys));
	/* Mask of all ones so this line prints regardless of which flags are set. */
	_vte_debug_print(0xFFFFFFFF, "VTE debug flags = %x\n", _vte_debug_flags);
#endif /* VTE_DEBUG */
}
|
wenxuejiang610/Loser
|
loser-tests/src/test/java/com/loserico/io/PathSeparatorTest.java
|
package com.loserico.io;
import org.junit.Test;
public class PathSeparatorTest {

    /**
     * Demonstrates String.split behaviour on a slash-separated path:
     * a leading separator yields an empty first element.
     */
    @Test
    public void testSeparator() {
        String path = "/sishuok/master/server1";
        for (String segment : path.split("/")) {
            System.out.println(segment);
        }
    }
}
|
moutainhigh/attemper
|
attemper-common/src/main/java/com/github/attemper/common/param/dispatch/arg/ext/TradeDateArgParam.java
|
package com.github.attemper.common.param.dispatch.arg.ext;
import com.github.attemper.common.param.CommonParam;
import lombok.ToString;
import org.apache.commons.lang.StringUtils;
import java.util.regex.Pattern;
@ToString
public class TradeDateArgParam implements CommonParam {

    /** Name of the trade calendar the expression is evaluated against. */
    protected String calendarName;

    /** Trade-date expression, e.g. "T-1"; see the pattern below for syntax. */
    protected String expression;

    // Period designator T/W/M/S/H/Y, an optional signed numeric offset, and an
    // optional second space-separated signed offset. Anchored with ^...$.
    // (Fixed: a stray second semicolon previously terminated this line.)
    private static Pattern tradeDatePattern = Pattern.compile("^([TWMSHY]{1})((([+-])([0123456789]*))?)(( ((([+-])([0123456789]*))?))?)$");

    /**
     * @return null when the expression is valid, otherwise error code "7040".
     */
    @Override
    public String validate() {
        // Blank and malformed expressions share the same error code, so the
        // two original branches collapse into one condition.
        if (StringUtils.isBlank(expression) || !tradeDatePattern.matcher(expression).find()) {
            return "7040";
        }
        return null;
    }

    public String getCalendarName() {
        return calendarName;
    }

    public TradeDateArgParam setCalendarName(String calendarName) {
        this.calendarName = calendarName;
        return this;
    }

    public String getExpression() {
        return expression;
    }

    public TradeDateArgParam setExpression(String expression) {
        this.expression = expression;
        return this;
    }
}
|
AndreFx/ReceiptOrganizer
|
src/main/webapp/resources/theme1/js/test/app/actions/receipts/receiptsActions.test.js
|
<reponame>AndreFx/ReceiptOrganizer<filename>src/main/webapp/resources/theme1/js/test/app/actions/receipts/receiptsActions.test.js<gh_stars>0
import configureStore from "redux-mock-store";
import thunk from "redux-thunk";
import fetchMock from "fetch-mock";
import * as actions from "../../../../app/actions/receipts/receiptsActions";
import * as activeLabelActions from "../../../../app/actions/receipts/activeLabelsActions";
import * as snackbarActions from "../../../../app/actions/ui/snackbar/snackbarActions";
import * as constants from "../../../../common/constants";
// redux-mock-store configured with thunk so async action creators can be
// dispatched and their emitted actions captured.
const middlewares = [thunk];
const mockStore = configureStore(middlewares);
describe("receiptsActions", function() {
const errorSnackbar = {
msg: constants.SERVER_ERROR,
variant: constants.ERROR_SNACKBAR,
actions: [],
handlers: [],
handlerParams: [],
autohideDuration: constants.SNACKBAR_AUTOHIDE_DURATION_DEFAULT
};
describe("queryReceipts action creators", function() {
it("should create an action to request a receipt query", function() {
const expectedAction = {
type: actions.REQUEST_QUERY_RECEIPTS
};
expect(actions.requestQueryReceipts()).toEqual(expectedAction);
});
it("should create an action to receive a receipt query", function() {
const query = "MyQuery";
const receipts = [
{
title: "MyReceipts"
}
];
const numReceipts = 1;
const numPages = 1;
const expectedAction = {
type: actions.RECEIVE_QUERY_RECEIPTS,
receipts: receipts,
numReceipts: numReceipts,
numPages: numPages,
query: query,
success: true
};
expect(
actions.receiveQueryReceipts(
query,
receipts,
numReceipts,
numPages,
true
)
).toEqual(expectedAction);
});
});
describe("receiptPageLoad action creators", function() {
it("should create an action to request a receipt page load", function() {
const expectedAction = {
type: actions.REQUEST_RECEIPT_PAGE_LOAD
};
expect(actions.requestReceiptPageLoad()).toEqual(expectedAction);
});
it("should create an action to receive a page load", function() {
const receipts = [
{
title: "MyReceipts"
}
];
const pageNum = 1;
const expectedAction = {
type: actions.RECEIVE_RECEIPT_PAGE_LOAD,
receipts: receipts,
pageNum: pageNum,
success: true
};
expect(actions.receiveReceiptPageLoad(pageNum, receipts, true)).toEqual(
expectedAction
);
});
});
describe("addActiveLabel", function() {
afterEach(function() {
fetchMock.restore();
});
it("creates REQUEST and RECEIVE actions for ADD_ACTIVE_LABEL", function() {
const label = { name: "NewActiveLabel" };
const activeLabels = [];
const mockMsg = "Success";
const expectedActions = [
{
type: activeLabelActions.REQUEST_ADD_ACTIVE_LABEL
},
{
type: activeLabelActions.RECEIVE_ADD_ACTIVE_LABEL,
label: label,
receipts: [],
numPages: 0,
numReceipts: 0,
success: true
}
];
fetchMock.getOnce(
constants.HOST_URL +
constants.GET_RECEIPTS_PATH +
"?activeLabelNames=NewActiveLabel",
{
body: {
success: true,
message: mockMsg,
receipts: [],
totalNumReceipts: 0,
numPages: 0
},
status: 200,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: activeLabels
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store.dispatch(actions.addActiveLabel(label)).then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
it("creates REQUEST, RECEIVE and ADD_SNACKBAR actions for an unsuccessful ADD_ACTIVE_LABEL", function() {
const label = { name: "NewActiveLabel" };
const activeLabels = [];
const mockMsg = "Failure";
const expectedActions = [
{
type: activeLabelActions.REQUEST_ADD_ACTIVE_LABEL
},
{
type: activeLabelActions.RECEIVE_ADD_ACTIVE_LABEL,
label: label,
receipts: [],
numPages: 0,
numReceipts: 0,
success: false
},
{
type: snackbarActions.ADD_SNACKBAR,
newSnackbar: errorSnackbar
}
];
fetchMock.getOnce(
constants.HOST_URL +
constants.GET_RECEIPTS_PATH +
"?activeLabelNames=NewActiveLabel",
{
body: {
success: false,
message: mockMsg
},
status: 404,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: activeLabels
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store.dispatch(actions.addActiveLabel(label)).then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
describe("removeActiveLabel", function() {
afterEach(function() {
fetchMock.restore();
});
it("creates REQUEST and RECEIVE actions for REMOVE_ACTIVE_LABEL", function() {
const label = { name: "NewActiveLabel" };
const activeLabels = [label];
const mockMsg = "Success";
const expectedActions = [
{
type: activeLabelActions.REQUEST_REMOVE_ACTIVE_LABEL
},
{
type: activeLabelActions.RECEIVE_REMOVE_ACTIVE_LABEL,
label: label,
receipts: [],
numPages: 0,
numReceipts: 0,
success: true
}
];
fetchMock.getOnce(constants.HOST_URL + constants.GET_RECEIPTS_PATH, {
body: {
success: true,
message: mockMsg,
receipts: [],
totalNumReceipts: 0,
numPages: 0
},
status: 200,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
});
const store = mockStore({
activeLabels: {
items: activeLabels
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store.dispatch(actions.removeActiveLabel(label)).then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
it("creates REQUEST, RECEIVE and ADD_SNACKBAR actions for unsuccessful REMOVE_ACTIVE_LABEL", function() {
const label = { name: "NewActiveLabel" };
const activeLabels = [label];
const mockMsg = "Failure";
const expectedActions = [
{
type: activeLabelActions.REQUEST_REMOVE_ACTIVE_LABEL
},
{
type: activeLabelActions.RECEIVE_REMOVE_ACTIVE_LABEL,
label: label,
receipts: [],
numPages: 0,
numReceipts: 0,
success: false
},
{
type: snackbarActions.ADD_SNACKBAR,
newSnackbar: errorSnackbar
}
];
fetchMock.getOnce(constants.HOST_URL + constants.GET_RECEIPTS_PATH, {
body: {
success: false,
message: mockMsg
},
status: 404,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
});
const store = mockStore({
activeLabels: {
items: activeLabels
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store.dispatch(actions.removeActiveLabel(label)).then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
describe("editActiveLabel", function() {
afterEach(function() {
fetchMock.restore();
});
it("creates REQUEST and RECEIVE actions for EDIT_ACTIVE_LABEL", function() {
const newLabel = { name: "NewActiveLabel" };
const oldLabel = { name: "OldActiveLabel" };
const activeLabels = [oldLabel];
const mockMsg = "Success";
const expectedActions = [
{
type: activeLabelActions.REQUEST_EDIT_ACTIVE_LABEL
},
{
type: activeLabelActions.RECEIVE_EDIT_ACTIVE_LABEL,
newLabel: newLabel,
oldLabel: oldLabel,
success: true
}
];
fetchMock.getOnce(
constants.HOST_URL +
constants.GET_RECEIPTS_PATH +
"?activeLabelNames=NewActiveLabel",
{
body: {
success: true,
message: mockMsg
},
status: 200,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: activeLabels
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store
.dispatch(actions.editActiveLabel(oldLabel, newLabel))
.then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
it("creates REQUEST, RECEIVE, and SNACKBAR actions for unsuccessful EDIT_ACTIVE_LABEL", function() {
const newLabel = { name: "NewActiveLabel" };
const oldLabel = { name: "OldActiveLabel" };
const activeLabels = [oldLabel];
const mockMsg = "Failure";
const expectedActions = [
{
type: activeLabelActions.REQUEST_EDIT_ACTIVE_LABEL
},
{
type: activeLabelActions.RECEIVE_EDIT_ACTIVE_LABEL,
newLabel: newLabel,
oldLabel: oldLabel,
success: false
},
{
type: snackbarActions.ADD_SNACKBAR,
newSnackbar: errorSnackbar
}
];
fetchMock.getOnce(
constants.HOST_URL +
constants.GET_RECEIPTS_PATH +
"?activeLabelNames=NewActiveLabel",
{
body: {
success: false,
message: mockMsg
},
status: 404,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: activeLabels
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store
.dispatch(actions.editActiveLabel(oldLabel, newLabel))
.then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
describe("queryReceipts", function() {
afterEach(function() {
fetchMock.restore();
});
it("creates REQUEST and RECEIVE actions for QUERY_RECEIPTS", function() {
const query = "MyTestQuery";
const mockMsg = "Success";
const receipts = [
{
title: "MyReceipt",
date: JSON.stringify(new Date()),
items: []
}
].map(function(el) {
return actions.formatReceipt(el);
});
const expectedActions = [
{
type: actions.REQUEST_QUERY_RECEIPTS
},
{
type: actions.RECEIVE_QUERY_RECEIPTS,
receipts: receipts,
query: query,
numPages: 3,
numReceipts: 3,
success: true
}
];
fetchMock.getOnce(
constants.HOST_URL + constants.GET_RECEIPTS_PATH + "?query=" + query,
{
body: {
success: true,
receipts: receipts,
totalNumReceipts: 3,
numPages: 3,
message: mockMsg
},
status: 200,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: []
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store.dispatch(actions.queryReceipts(query)).then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
it("creates REQUEST, RECEIVE, and ADD_SNACKBAR actions for unsuccessful QUERY_RECEIPTS", function() {
const query = "MyTestQuery";
const mockMsg = "Failure";
const expectedActions = [
{
type: actions.REQUEST_QUERY_RECEIPTS
},
{
type: actions.RECEIVE_QUERY_RECEIPTS,
receipts: [],
query: query,
numPages: 0,
numReceipts: 0,
success: false
},
{
type: snackbarActions.ADD_SNACKBAR,
newSnackbar: errorSnackbar
}
];
fetchMock.getOnce(
constants.HOST_URL + constants.GET_RECEIPTS_PATH + "?query=" + query,
{
body: {
success: false,
message: mockMsg
},
status: 404,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: []
},
receipts: {
query: "",
currentPage: 0,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store.dispatch(actions.queryReceipts(query)).then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
describe("changeReceiptPage", function() {
afterEach(function() {
fetchMock.restore();
});
it("creates REQUEST and RECEIVE actions for RECEIPT_PAGE_LOAD", function() {
const currentPageNum = 1;
const newPageNum = 2;
const mockMsg = "Success";
const receipts = [
{
title: "MyReceipt",
date: JSON.stringify(new Date()),
items: []
}
].map(function(el) {
return actions.formatReceipt(el);
});
const expectedActions = [
{
type: actions.REQUEST_RECEIPT_PAGE_LOAD
},
{
type: actions.RECEIVE_RECEIPT_PAGE_LOAD,
receipts: receipts,
pageNum: 2,
success: true
}
];
fetchMock.getOnce(
constants.HOST_URL +
constants.GET_RECEIPTS_PATH +
"?pageNum=" +
newPageNum,
{
body: {
success: true,
receipts: receipts,
message: mockMsg
},
status: 200,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: []
},
receipts: {
query: "",
currentPage: currentPageNum,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store
.dispatch(actions.loadReceiptPage(newPageNum))
.then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
it("creates REQUEST, RECEIVE, and ADD_SNACKBAR actions for unsuccessful RECEIPT_PAGE_LOAD", function() {
const currentPageNum = 1;
const mockMsg = "Failure";
const expectedActions = [
{
type: actions.REQUEST_RECEIPT_PAGE_LOAD
},
{
type: actions.RECEIVE_RECEIPT_PAGE_LOAD,
receipts: [],
pageNum: currentPageNum,
success: false
},
{
type: snackbarActions.ADD_SNACKBAR,
newSnackbar: errorSnackbar
}
];
fetchMock.getOnce(
constants.HOST_URL +
constants.GET_RECEIPTS_PATH +
"?pageNum=" +
currentPageNum,
{
body: {
success: false,
message: mockMsg
},
status: 404,
headers: { "Content-Type": constants.CONTENT_TYPE_JSON }
}
);
const store = mockStore({
activeLabels: {
items: []
},
receipts: {
query: "",
currentPage: currentPageNum,
numPages: 0,
totalNumReceipts: 0,
items: [],
isLoading: false
}
});
return store
.dispatch(actions.loadReceiptPage(currentPageNum))
.then(function() {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
});
|
jbcoe/CppSandbox
|
TemplatedFunctionRecursion/TemplatedFunctionRecursion.cpp
|
#include <iostream>
#include <functional>
// Applies a unary function `f` to `t` exactly `depth` times via template
// recursion: unary_recurser<3>::apply(f, t) == f(f(f(t))).
template <int depth>
struct unary_recurser
{
  // f: function applied once per level; t: current value.
  // `f` is taken by const reference to avoid copying the std::function
  // (a potentially allocating copy) at every recursion level.
  template <typename T>
  static T apply(const std::function<T(T)>& f, T t)
  {
    // Guards against negative depths, which would otherwise instantiate
    // templates without end.
    static_assert(depth > 0, "Recursion depth must be greater than 0");
    return unary_recurser<depth - 1>::apply(f, f(t));
  }
};

// Base case: zero applications returns the input unchanged.
template <>
struct unary_recurser<0>
{
  template <typename T>
  static T apply(const std::function<T(T)>& f, T t)
  {
    (void)f; // intentionally unused at depth 0
    return t;
  }
};
// Multiplies `value` by the compile-time constant `multiple`.
// constexpr so it can also be evaluated at compile time; still usable as a
// plain function pointer (e.g. converted to std::function) as before.
template <int multiple>
constexpr double multiplier(double value)
{
  return multiple * value;
}
// Demo: applies multiplier<2> five times to 1.0, printing 2^5 = 32.
// argc/argv were unused, so the parameterless form of main is used; '\n'
// avoids std::endl's needless flush.
int main()
{
  std::cout << unary_recurser<5>::apply<double>(multiplier<2>, 1.) << '\n';
  return 0;
}
|
leweihe/busStop
|
src/main/java/com/linde/web/rest/resourceassembler/impl/BusRouteAssemblerMapperImpl.java
|
package com.linde.web.rest.resourceassembler.impl;
import javax.inject.Inject;
import javax.validation.constraints.NotNull;
import com.linde.web.rest.resourceassembler.mapper.BusRouteResourceMapper;
import org.apache.commons.lang.Validate;
import org.springframework.stereotype.Component;
import com.linde.dto.BusRouteDTO;
import com.linde.web.rest.resource.BusRouteResource;
import com.linde.web.rest.resourceassembler.BusRouteResourceAssembler;
/**
* Created by cn40580 at 2016-10-10 10:00 AM.
*/
@Component
public class BusRouteAssemblerMapperImpl implements BusRouteResourceAssembler {
@Inject
private BusRouteResourceMapper busRouteResourceMapper;
@Override
public BusRouteResource toResource(@NotNull BusRouteDTO entity) {
Validate.notNull(entity);
BusRouteResource res = busRouteResourceMapper.DTOtoResource(entity);
return res;
}
public BusRouteDTO toDto(@NotNull BusRouteResource res) {
Validate.notNull(res);
BusRouteDTO entity = busRouteResourceMapper.resourceToDTO(res);
return entity;
}
}
|
WilliamDeveloper/udemy_cursos
|
nodejs/014_formacao-javascript-mestre-jedi/missao-estelar/segunda-missao/VictorGeruso/js/main.js
|
//6
//Segunda Missão Estelar JS
//7
/*
Aperte os cintos para adentrarmos ao hiper-espaço
rumo a uma nova Missão estelar JS! :)
*/
//8
console.log("++++Item 8++++");
// Item 8: string primitive and its runtime type ("string").
var missao = "Segunda Missão Estelar JS";
console.log("Texto: " + missao + " - Tipo: " + typeof missao);
//9
console.log("++++Item 9++++");
// Item 9: String object wrapper (typeof a String object is "object").
var hiperespaco = new String('Aperte os cintos para adentrarmos ao hiper-espaço rumo a uma nova Missão estelar JS! :)');
// NOTE(review): this logs `typeof missao` ("string"), not `typeof hiperespaco`
// ("object") — possibly a copy-paste slip; confirm the exercise's intent.
console.log("Texto: " + hiperespaco.toString() + " - Tipo: " + typeof missao);
//10
console.log("++++Item 10++++");
// Item 10: client data held in individual module-level variables.
var nomeCliente = "Carlos";
var renda = 900.00;
var dataNascimento = new Date('1996-05-02');
var ativo = true;
// Logs the client variables above; the boolean flag is rendered as
// "sim"/"não" instead of true/false.
function exibirDadosClienteVariáveis() {
    console.log("Nome: " + nomeCliente);
    console.log("Renda: R$" + renda);
    console.log("Data de nascimento: " + dataNascimento);
    var usuarioAtivo = (ativo == true) ? "sim" : "não";
    console.log("Ativo: " + usuarioAtivo);
}
exibirDadosClienteVariáveis();
//11
console.log("++++Item 11++++");
// Item 11: client data as an array of [label, value] pairs.
var clienteArray = [
    [
        'Nome:', '<NAME>'
    ],
    [
        'renda: R$', 1000
    ],
    [
        'Data de nascimento:', new Date('1978-12-06')
    ],
    [
        'Ativo:', true
    ]
];
// Prints each [label, value] pair; the "active" flag is rendered as
// "sim"/"não" instead of a raw boolean.
function exibirDadosClienteArray() {
    for(var i of clienteArray) {
        // BUG FIX: the label stored above is 'Ativo:' (no trailing space);
        // the old comparison against 'Ativo: ' never matched, so the
        // boolean was printed raw instead of as "sim"/"não".
        if(i[0] == 'Ativo:') {
            console.log(i[0], i[1] == true ? 'sim' : 'não');
        } else {
            console.log(i[0], i[1]);
        }
    }
}
exibirDadosClienteArray();
//12
console.log("++++Item 12++++");
// Item 12: client data as a plain object.
var clienteObject = {
    nome: "Andre",
    renda: 4000,
    dataNascimento: new Date('1995-05-30'),
    ativo: true
};
// Walks every property of clienteObject and prints it with a friendly label;
// the "ativo" flag is rendered as "sim"/"não".
function exibirDadosCienteObjeto() {
    for(var propriedade in clienteObject) {
        var valor = clienteObject[propriedade];
        if (propriedade == "nome") {
            console.log("Nome:", valor);
        } else if (propriedade == "renda") {
            console.log("Renda: R$", valor);
        } else if (propriedade == "dataNascimento") {
            console.log("Data de nascimento:", valor);
        } else if (propriedade == "ativo") {
            console.log("Ativo:", valor == true ? 'sim' : 'não');
        } else {
            // Fallback for any property not handled above.
            console.log(propriedade, ": ", valor);
        }
    }
}
exibirDadosCienteObjeto();
//13
console.log("++++Item 13++++");
// Returns today's date formatted as "dd/mm/yyyy".
function retornarDataAtualFormatada() {
    // Left-pads a 1- or 2-digit number to exactly two digits.
    function doisDigitos(n) {
        return ('0' + n).slice(-2);
    }
    var data = new Date();
    var dia = doisDigitos(data.getDate());
    var mes = doisDigitos(data.getMonth() + 1); // getMonth() is 0-based
    var ano = data.getFullYear();
    return dia + '/' + mes + '/' + ano;
}
console.log(retornarDataAtualFormatada());
//14
console.log("++++Item 14++++");
// Item 14: RegExp demo — test() returns a boolean, exec() a match array
// (or null when there is no match).
var hiperespacoRegExp = new RegExp('Estelar');
var textoMissao = "Apertem os cintos para adentrarmos ao hiper-espaço rumo a uma nova Missão Estelar JS! :)";
console.log(hiperespacoRegExp.test(textoMissao));
console.log(hiperespacoRegExp.exec(textoMissao));
//15
console.log("++++Item 15++++");
// Item 15: try/catch/finally demo — counts down from 30 and aborts the
// otherwise endless loop by throwing on iteration index 29.
var num = 30;
try {
    for (let index = 0; index >= 0 ; index++) {
        console.log(num);
        num--;
        if(index == 29) {
            throw new Error("Interação 29");
        }
    }
} catch (error) {
    // Error objects expose name, message and a stack trace.
    console.log(error.name);
    console.log(error.message);
    console.log(error.stack);
} finally {
    console.log("Executou o bloco Finally");
}
//16
console.log("++++Item 16++++");
// Reads the Bootstrap form's elements, normalizes the values into a plain
// object, logs them, and mirrors them into the plain-HTML form.
// objFormElementos: assumed to be the form's elements collection exposing
// nomeBoot, emailBoot, emailPromocionalCheckBoot, formaContatoRadioBoot and
// estadoSelectBoot — confirm against the page's HTML.
function receberDadosFormBoot(objFormElementos) {
    console.log("Tipo do Objeto: " + Object.prototype.toString.call(objFormElementos));
    var objDadosForm = new Object();
    objDadosForm.nome = objFormElementos.nomeBoot.value;
    objDadosForm.email = objFormElementos.emailBoot.value;
    objDadosForm.emailPromo = objFormElementos.emailPromocionalCheckBoot.checked;
    objDadosForm.formaContato = objFormElementos.formaContatoRadioBoot.value;
    objDadosForm.estado = objFormElementos.estadoSelectBoot.value;
    // Normalize the checkbox boolean to the display strings used downstream.
    if(objDadosForm.emailPromo) {
        objDadosForm.emailPromo = 'sim';
    } else {
        objDadosForm.emailPromo = 'não';
    }
    // Radio value "1" means contact by phone; anything else means email.
    if(objDadosForm.formaContato == "1") {
        objDadosForm.formaContato = 'telefone';
    } else {
        objDadosForm.formaContato = 'email';
    }
    console.log(objDadosForm.nome);
    console.log(objDadosForm.email);
    console.log(objDadosForm.emailPromo);
    console.log(objDadosForm.formaContato);
    console.log(objDadosForm.estado);
    console.log("Propriedade do objeto: ", objDadosForm);
    preencherFormHTML(objDadosForm);
}
// Fills the plain-HTML form with the values previously read from the
// Bootstrap form. Expects objDados with: nome, email, emailPromo
// ("sim"/"não"), formaContato ("telefone"/"email") and estado.
function preencherFormHTML(objDados) {
    document.getElementById('nomeHtml').value = objDados.nome;
    document.getElementById('emailHtml').value = objDados.email;
    // Checkbox mirrors the "sim"/"não" flag directly.
    document.getElementById('emailPromocionalCheckHtml').checked =
        (objDados.emailPromo == "sim");
    // Exactly one of the two radio buttons ends up selected.
    var porTelefone = (objDados.formaContato == "telefone");
    document.getElementById('formaContatoTelefoneRadioHtml').checked = porTelefone;
    document.getElementById('formaContatoEmailRadioHtml').checked = !porTelefone;
    document.getElementById('estadoSelectHtml').value = objDados.estado;
}
|
lenaWitterauf/Domain-Guided-Monitoring
|
tests/src/training/models/test_multilabel_metrics.py
|
import unittest
import numpy as np
from numpy.lib.ufunclike import fix
import tensorflow as tf
from src.training.models import metrics
class TestMultilabelMetrics(unittest.TestCase):
    """Tests for metrics.MultilabelNestedMetric wrapping a Keras metric."""

    def test_multilabel(self):
        # Three multilabel samples; the 4/6 expectation presumably reflects
        # per-positive-label top-2 hits — confirm against
        # MultilabelNestedMetric's implementation.
        y_true = self._get_y_true_multilabel()
        y_pred = self._get_y_pred_multilabel()
        fixture = metrics.MultilabelNestedMetric(
            nested_metric=tf.keras.metrics.TopKCategoricalAccuracy(k=2),
        )
        fixture.update_state(y_true, y_pred)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # assertEqual is the canonical spelling.
        self.assertEqual(4.0 / 6.0, fixture.result())

    def _get_y_true_multilabel(self):
        # One row per sample; 1 marks a positive label.
        return tf.constant([[1, 1, 0], [1, 1, 0], [1, 0, 1],])

    def _get_y_pred_multilabel(self):
        # Per-label scores aligned with the rows above.
        return tf.constant([[0.7, 0.3, 0.1], [0.24, 0.8, 0.3], [0.1, 0.6, 0.2],])

    def _get_y_vocab(self):
        # Label-name -> index mapping (unused by the test above).
        return {
            "y0": 0,
            "y1": 1,
            "y2": 2,
        }
|
byteclubfr/eatlas
|
client/src/components/Spinner.js
|
<reponame>byteclubfr/eatlas
import React, { Component } from 'react'
import Icon from './Icon'
// Font Awesome pulse spinner. When `small` is falsy the icon is enlarged
// with the `is-size-2` helper class; when truthy it keeps the default size.
class Spinner extends Component<{ small: boolean }> {
  render() {
    return (
      <Icon icon={`spinner fa-pulse${this.props.small ? '' : ' is-size-2'}`} />
    )
  }
}

export default Spinner
|
528246253/gwnHicc
|
app/src/main/java/com/hicc/cloud/teacher/activity/FeedBackActivity.java
|
package com.hicc.cloud.teacher.activity;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.RadioGroup;
import com.hicc.cloud.R;
import com.hicc.cloud.teacher.utils.ConstantValue;
import com.hicc.cloud.teacher.utils.Logs;
import com.hicc.cloud.teacher.utils.SpUtils;
import com.hicc.cloud.teacher.utils.ToastUtli;
import com.zhy.http.okhttp.OkHttpUtils;
import com.zhy.http.okhttp.callback.StringCallback;
import org.json.JSONException;
import org.json.JSONObject;
import okhttp3.Call;
/**
* Created by Administrator on 2016/11/4/004.
* 反馈 ——崔国钊
*/
/**
 * Feedback screen: lets the user pick a category (UI / function) and submit
 * free-text feedback to the server via an HTTP GET.
 */
public class FeedBackActivity extends AppCompatActivity {

    // Feedback submission endpoint.
    private static final String URL = "http://suguan.hicc.cn/feedback1/suggest.do";
    private ImageView iv_back;
    private Button bt_send;
    private EditText ed_sendtext;
    private RadioGroup rg_root;
    // Selected feedback category: "1" = UI, "2" = function.
    private String mTag = "1";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_feedback);
        initUI();
    }

    // Wires up views and their listeners.
    private void initUI() {
        ed_sendtext = (EditText) findViewById(R.id.sendtext);
        bt_send = (Button) findViewById(R.id.bt_send);
        rg_root = (RadioGroup) findViewById(R.id.rg_root);
        iv_back = (ImageView) findViewById(R.id.iv_back);
        iv_back.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
            }
        });
        // Category selection updates mTag.
        rg_root.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(RadioGroup group, int checkedId) {
                switch (checkedId){
                    case R.id.rb_ui:
                        mTag = "1";
                        break;
                    case R.id.rb_function:
                        mTag = "2";
                        break;
                }
            }
        });
        // Click handler for the send button.
        bt_send.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String massage = ed_sendtext.getText().toString().trim();
                // Only submit when the input is non-empty.
                if(!massage.equals("")){
                    OkHttpUtils
                            .get()
                            .url(URL)
                            .addParams("userId", SpUtils.getStringSp(getApplicationContext(), ConstantValue.USER_NAME, ""))
                            .addParams("appId", "1")
                            .addParams("appVersion", String.valueOf(getVersionCode()))
                            .addParams("tag", mTag)
                            .addParams("content", massage)
                            .build()
                            .execute(new StringCallback() {
                                @Override
                                public void onError(Call call, Exception e, int id) {
                                    Logs.i("发送反馈信息失败:"+e.toString());
                                    ToastUtli.show(getApplicationContext(), "服务器繁忙,请重新发送");
                                }

                                @Override
                                public void onResponse(String response, int id) {
                                    try {
                                        JSONObject jsonObject = new JSONObject(response);
                                        // NOTE(review): the success-flag key really is
                                        // spelled "falg" here — confirm against the
                                        // server API before "fixing" it.
                                        boolean falg = jsonObject.getBoolean("falg");
                                        if(falg){
                                            ToastUtli.show(getApplicationContext(), "发送成功");
                                        } else {
                                            Logs.i("服务器错误,发送失败:"+jsonObject.getString("data"));
                                        }
                                    } catch (JSONException e) {
                                        e.printStackTrace();
                                        Logs.i("解析反馈信息结果失败:"+e.toString());
                                    }
                                }
                            });
                } else {
                    ToastUtli.show(getApplicationContext(),"请输入您的反馈信息");
                }
            }
        });
    }

    // Returns this app's versionCode, or 0 if the package lookup fails.
    private int getVersionCode() {
        // Obtain the package manager.
        PackageManager pm = getPackageManager();
        // Fetch this package's basic info.
        try {
            PackageInfo info = pm.getPackageInfo(getPackageName(), 0);
            // Return the application's version code.
            return info.versionCode;
        } catch (PackageManager.NameNotFoundException e) {
            e.printStackTrace();
        }
        return 0;
    }
}
|
devrelm/storybook
|
examples/polymer-cli/src/stories/core.stories.js
|
import { storiesOf, addParameters } from '@storybook/polymer';
const globalParameter = 'globalParameter';
const chapterParameter = 'chapterParameter';
const storyParameter = 'storyParameter';
addParameters({ globalParameter });
storiesOf('Core|Parameters', module)
.addParameters({ chapterParameter })
.add(
'passed to story',
({ parameters: { fileName, ...parameters } }) =>
`<div>Parameters are ${JSON.stringify(parameters)}</div>`,
{
storyParameter,
}
);
|
RanerL/analyzer
|
tests/juliet/testcases/CWE78_OS_Command_Injection/s04/CWE78_OS_Command_Injection__char_listen_socket_system_84.h
|
<gh_stars>10-100
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE78_OS_Command_Injection__char_listen_socket_system_84.h
Label Definition File: CWE78_OS_Command_Injection.one_string.label.xml
Template File: sources-sink-84.tmpl.h
*/
/*
* @description
* CWE: 78 OS Command Injection
* BadSource: listen_socket Read data using a listen socket (server side)
* GoodSource: Fixed string
* Sinks: system
* BadSink : Execute command in data using system()
* Flow Variant: 84 Data flow: data passed to class constructor and destructor by declaring the class object on the heap and deleting it after use
*
* */
#include "std_testcase.h"
#include <wchar.h>
#ifdef _WIN32
#define FULL_COMMAND "dir "
#else
#include <unistd.h>
#define FULL_COMMAND "ls "
#endif
namespace CWE78_OS_Command_Injection__char_listen_socket_system_84
{

#ifndef OMITBAD

/* Bad variant: per the template description above, data flows from a listen
 * socket (attacker-controlled) through constructor/destructor to system(). */
class CWE78_OS_Command_Injection__char_listen_socket_system_84_bad
{
public:
    CWE78_OS_Command_Injection__char_listen_socket_system_84_bad(char * dataCopy);
    ~CWE78_OS_Command_Injection__char_listen_socket_system_84_bad();

private:
    char * data; /* pointer handed in via the constructor */
};

#endif /* OMITBAD */

#ifndef OMITGOOD

/* GoodG2B variant: same flow, but the source is a fixed, trusted string. */
class CWE78_OS_Command_Injection__char_listen_socket_system_84_goodG2B
{
public:
    CWE78_OS_Command_Injection__char_listen_socket_system_84_goodG2B(char * dataCopy);
    ~CWE78_OS_Command_Injection__char_listen_socket_system_84_goodG2B();

private:
    char * data;
};

#endif /* OMITGOOD */

}
|
Exactpro/jackfish
|
api/src/main/java/com/exactprosystems/jf/api/common/Zip.java
|
/*******************************************************************************
* Copyright 2009-2018 Exactpro (Exactpro Systems Limited)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.exactprosystems.jf.api.common;
import com.exactprosystems.jf.api.common.i18n.R;
import java.io.*;
import java.nio.file.*;
import java.util.*;
import java.util.stream.Collectors;
import java.util.zip.*;
public class Zip
{
private Map<String, byte[]> entries;
private Zip()
{
}
@DescriptionAttribute(text = R.ZIP_CREATE_DESCRIPTION)
public static Zip create()
{
Zip zip = new Zip();
zip.entries = new HashMap<>();
return zip;
}
@DescriptionAttribute(text = R.ZIP_LOAD_DESCRIPTION)
public Zip load(String path) throws Exception
{
File file = new File(path);
if(file.isFile()){
try(ZipFile zf = new ZipFile(file)){
Enumeration<? extends ZipEntry> enumeration = zf.entries();
ZipEntry entry;
while(enumeration.hasMoreElements()){
entry = enumeration.nextElement();
if (!entry.isDirectory()){
InputStream is = zf.getInputStream(entry);
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
int nRead;
byte[] data = new byte[16384];
while ((nRead = is.read(data, 0, data.length)) != -1) {
buffer.write(data, 0, nRead);
}
buffer.flush();
this.entries.put(entry.getName(), compress(buffer.toByteArray()));
}
}
}
} else {
throw new Exception(String.format(R.ZIP_NOT_EXISTS_OR_NOT_FILE.get(), file.getName()));
}
return this;
}
@DescriptionAttribute(text = R.ZIP_SAVE_DESCRIPTION)
public Zip save(String path) throws IOException, DataFormatException
{
Path pathToFile = Paths.get(path).toAbsolutePath();
File file = new File(pathToFile.toString());
try(ZipOutputStream zipOut = new ZipOutputStream(
file.exists() ? new FileOutputStream(file) : new FileOutputStream(path)
)){
zipOut.setLevel(Deflater.DEFAULT_COMPRESSION);
for(Map.Entry<String, byte[]> entry: this.entries.entrySet()){
zipOut.putNextEntry(new ZipEntry(entry.getKey()));
zipOut.write(decompress(entry.getValue()));
zipOut.closeEntry();
}
zipOut.flush();
}
return this;
}
@DescriptionAttribute(text = R.ZIP_ADD_DESCRIPTION)
public Zip add(String path) throws Exception
{
File file = new File(path);
if (file.isFile() && file.exists()){
this.entries.put(file.getName(), compress(getBytesFromFile(file)));
} else {
throw new Exception(String.format(R.ZIP_FILE_NOT_EXIST_OR_ITS_DIR.get(), file.getName()));
}
return this;
}
@DescriptionAttribute(text = R.ZIP_REMOVE_DESCRIPTION)
public Zip remove(String name)
{
this.entries.entrySet().removeIf(e-> e.getKey().equals(name));
return this;
}
@DescriptionAttribute(text = R.ZIP_EXTRACT_DESCRIPTION)
public Zip extract(String name, String path) throws Exception
{
if (this.entries.containsKey(name)) {
Path p = Paths.get(path).toAbsolutePath();
if(new File(p.toString()).isDirectory()){
Path pathToFile = p.resolve(name);
if(!pathToFile.toFile().exists()){
Files.createFile(pathToFile);
}
try(FileOutputStream fos = new FileOutputStream(pathToFile.toFile())){
byte[] preparedBytes = decompress(this.entries.get(name));
fos.write(preparedBytes);
fos.close();
}
} else {
throw new Exception(String.format(R.ZIP_PATH_NOT_EXIST_OR_ITS_FILE.get(), path));
}
} else {
throw new Exception(String.format(R.ZIP_NOT_CONTAINS_FILE.get(), name));
}
return this;
}
public List<String> names()
{
return this.entries.keySet().stream().collect(Collectors.toList());
}
private byte[] getBytesFromFile(File file) throws IOException{
String separator = System.lineSeparator();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try (BufferedReader br = new BufferedReader(new FileReader(file))) {
String line;
while ((line = br.readLine()) != null) {
baos.write(line.getBytes());
baos.write(separator.getBytes());
}
}
return baos.toByteArray();
}
private byte[] compress(byte[] input){
byte[] output = new byte[input.length];
Deflater compresser = new Deflater();
compresser.setInput(input);
compresser.finish();
int compressedDataLength = compresser.deflate(output);
compresser.end();
return output;
}
private byte[] decompress(byte[] input) throws DataFormatException {
Inflater decompresser = new Inflater();
decompresser.setInput(input, 0, input.length);
byte[] result = new byte[input.length];
int resultLength = decompresser.inflate(result);
decompresser.end();
return result;
}
}
|
CNICCSTNET/onoscnic
|
drivers/fujitsu/src/test/java/org/onosproject/drivers/fujitsu/FujitsuDriverHandlerAdapter.java
|
<filename>drivers/fujitsu/src/test/java/org/onosproject/drivers/fujitsu/FujitsuDriverHandlerAdapter.java
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.drivers.fujitsu;
import org.onosproject.net.DeviceId;
import org.onosproject.net.driver.Behaviour;
import org.onosproject.net.driver.DefaultDriverHandler;
import org.onosproject.net.driver.DriverData;
import org.onosproject.mastership.MastershipService;
import org.onosproject.mastership.MastershipServiceAdapter;
import org.onosproject.netconf.NetconfController;
/**
* Mock DefaultDriverHandler.
*/
/**
 * Mock DefaultDriverHandler for tests: claims every behaviour and serves a
 * NETCONF controller plus an always-local-master mastership service.
 */
public class FujitsuDriverHandlerAdapter extends DefaultDriverHandler {

    private NetconfController controller;
    // Stray duplicate semicolon removed from this initializer.
    private final MastershipService mastershipService = new InternalMastershipServiceMock();

    /**
     * Creates new driver handler with the attached driver data.
     *
     * @param data driver data to attach
     */
    public FujitsuDriverHandlerAdapter(DriverData data) {
        super(data);
    }

    @Override
    public boolean hasBehaviour(Class<? extends Behaviour> behaviourClass) {
        return true;
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T get(Class<T> serviceClass) {
        if (serviceClass == NetconfController.class) {
            return (T) this.controller;
        } else if (serviceClass == MastershipService.class) {
            return (T) this.mastershipService;
        }
        // Any other service is unavailable in this mock.
        return null;
    }

    /**
     * Set up initial environment.
     *
     * @param controller NETCONF controller instance
     */
    public void setUp(NetconfController controller) {
        this.controller = controller;
    }

    /**
     * Mock MastershipServiceAdapter that always reports local mastership.
     */
    private class InternalMastershipServiceMock extends MastershipServiceAdapter {
        @Override
        public boolean isLocalMaster(DeviceId deviceId) {
            return true;
        }
    }
}
|
jiandiao/flowable-engine
|
modules/flowable-event-registry/src/main/java/org/flowable/eventregistry/impl/cmd/GetEventDefinitionCmd.java
|
<gh_stars>1000+
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.eventregistry.impl.cmd;
import java.io.Serializable;
import org.flowable.common.engine.impl.interceptor.Command;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.eventregistry.impl.persistence.entity.EventDefinitionEntity;
import org.flowable.eventregistry.impl.util.CommandContextUtil;
/**
* @author <NAME>
*/
/**
 * Command that resolves a deployed event definition by its id through the
 * event registry's deployment manager.
 */
public class GetEventDefinitionCmd implements Command<EventDefinitionEntity>, Serializable {

    private static final long serialVersionUID = 1L;

    // Id of the event definition to look up.
    protected String eventDefinitionId;

    public GetEventDefinitionCmd(String eventDefinitionId) {
        this.eventDefinitionId = eventDefinitionId;
    }

    @Override
    public EventDefinitionEntity execute(CommandContext commandContext) {
        return CommandContextUtil.getEventRegistryConfiguration().getDeploymentManager()
            .findDeployedEventDefinitionById(eventDefinitionId);
    }
}
|
dmccubbing/symbiose
|
usr/lib/gnome-screensaver/webos.js
|
//Load dependencies
Webos.require('/usr/share/css/gnome-screensaver/main.css', function() {
	if (window.GnomeScreenSaver) { //If library is already loaded
		return;
	}

	var GnomeScreenSaver = {};

	// Internal state.
	GnomeScreenSaver._status = false;                   // screensaver currently shown?
	GnomeScreenSaver._locked = false;                   // lock screen currently shown?
	GnomeScreenSaver._$screensaver = $();               // screensaver overlay element
	GnomeScreenSaver._$screenlocker = $();              // lock screen element
	GnomeScreenSaver._screenlockerClockInterval = null; // per-minute clock refresh timer

	/**
	 * Get the screensaver's status.
	 * @returns {Boolean} True if the screensaver is activated, false if not.
	 */
	GnomeScreenSaver.activated = function activated() {
		return GnomeScreenSaver._status;
	};
GnomeScreenSaver.setStatus = function setStatus(value) {
value = (value) ? true : false;
if (!Webos.UserInterface.Booter.current()) { //No interface loaded
return false;
}
if (value == GnomeScreenSaver.activated()) { //If we don't want to change the screensaver's status
return;
}
var booter = Webos.UserInterface.Booter.current();
if (value) { //Activate the screensaver
GnomeScreenSaver._$screensaver = $('<div></div>', { id: 'gnome-screensaver' })
.hide()
.one('mousemove', function() {
GnomeScreenSaver.deactivate();
})
.appendTo(booter.element())
.fadeIn();
GnomeScreenSaver.trigger('activate');
} else { //Desactivate the screensaver
GnomeScreenSaver._$screensaver.stop().fadeOut('fast', function() {
$(this).remove();
});
GnomeScreenSaver.trigger('deactivate');
}
GnomeScreenSaver._status = value;
};
GnomeScreenSaver.activate = function activate() {
GnomeScreenSaver.setStatus(true);
};
GnomeScreenSaver.deactivate = function deactivate() {
GnomeScreenSaver.setStatus(false);
};
GnomeScreenSaver.locked = function locked() {
return GnomeScreenSaver._locked;
};
GnomeScreenSaver.lock = function lock() {
if (!Webos.UserInterface.Booter.current()) { //No interface loaded
return false;
}
if (GnomeScreenSaver.locked()) {
return;
}
Webos.Translation.load(function(t) {
GnomeScreenSaver._$screenlocker = $('<div></div>', { id: 'gnome-screenlocker' });
var $prompt = $('<div></div>', { 'class': 'screenlocker-prompt' }).hide().appendTo(GnomeScreenSaver._$screenlocker),
$overlay = $('<div></div>', { 'class': 'screenlocker-overlay' }).hide().appendTo(GnomeScreenSaver._$screenlocker),
$desktopBar = $('<div></div>', { 'class': 'desktop-bar' }).appendTo(GnomeScreenSaver._$screenlocker);
if (Webos.Theme && Webos.Theme.current()) {
Webos.Theme.current().applyBackgroundOn($overlay);
}
var $clockContainer = $('<div></div>', { 'class': 'clock-container' }).appendTo($overlay),
$clock = $('<div></div>', { 'class': 'clock' }).appendTo($clockContainer),
$time = $('<div></div>', { 'class': 'clock-time' }).appendTo($clock),
$date = $('<div></div>', { 'class': 'clock-date' }).appendTo($clock);
var showTimeFn = function() {
var locale = Webos.Locale.current();
var time = locale.time(new Date()), date = locale.date(new Date());
$time.html(time);
$date.html(date);
};
setTimeout(function() { //Quand la minute actuelle est passee
GnomeScreenSaver._screenlockerClockInterval = setInterval(function() { //On rafraichit l'heure toutes les minutes
showTimeFn();
}, 60000);
showTimeFn();
}, (60 - new Date().getSeconds()) * 1000);
showTimeFn(); //On affiche l'heure
$('<div></div>', { 'class': 'arrow-up' }).appendTo($overlay);
var $barRealname = $('<span></span>').html('User');
$('<ul></ul>', { 'class': 'menu' }).append(
$('<li></li>').append(
$.webos.image(new Webos.Icon('status/lock-symbolic', 16)).addClass('icon'),
$barRealname
)
).appendTo($desktopBar);
//Now insert element in the DOM
if (GnomeScreenSaver.activated()) {
GnomeScreenSaver._$screenlocker.insertBefore(GnomeScreenSaver._$screensaver);
} else {
var booter = Webos.UserInterface.Booter.current();
GnomeScreenSaver._$screenlocker.appendTo(booter.element());
}
var height = GnomeScreenSaver._$screenlocker.height(), width = GnomeScreenSaver._$screenlocker.width();
$overlay.ui_draggable({
containment: [ 0, - height, 0, 0 ],
axis: 'y'
});
$overlay.css('top', - height).show().animate({
top: 0
});
var $formContainer = $('<div></div>', { 'class': 'form-container' }).appendTo($prompt),
$form = $.w.entryContainer().addClass('form').appendTo($formContainer);
$.webos.image(new Webos.Icon('stock/person')).addClass('form-avatar').appendTo($form);
var $realname = $('<div></div>', { 'class': 'form-realname' }).html('User').appendTo($form),
$password = $.w.passwordEntry(t.get('Password :')).addClass('form-passwordentry').appendTo($form),
$error = $('<div></div>', { 'class': 'form-error' }).appendTo($form),
$buttons = $.w.buttonContainer().appendTo($form);
$.w.button(t.get('Cancel')).click(function() {
$overlay.animate({
top: 0
});
}).appendTo($buttons);
var $submitButton = $.w.button(t.get('Unlock'), true).addClass('form-submit').appendTo($buttons);
Webos.User.get([function(user) {
$overlay.bind('dragstop', function(event, ui) {
var ratio = - ui.position.top / height;
if (ratio > 0.2) {
$overlay.animate({
top: - height
}, 'normal', 'linear', function() {
if (!user) {
GnomeScreenSaver._unlock();
} else {
$password.passwordEntry('content').focus();
}
});
} else {
$overlay.animate({
top: 0
});
}
});
$overlay.bind('dragstart', function(event, ui) {
if (user) {
$prompt.show();
}
});
$form.submit(function() {
if ($submitButton.button('option', 'disabled')) {
return;
}
$submitButton.button('option', 'disabled', true);
Webos.User.login(user.get('username'), $password.passwordEntry('value'), [function() {
GnomeScreenSaver._unlock();
}, function(response) {
var err = response.getErrorsChannel();
if (response.getStatusCode() == 401) {
err = t.get('Bad password');
$password.passwordEntry('value', '');
}
$error.html(err);
$password.passwordEntry('content').focus();
$submitButton.button('option', 'disabled', false);
}]);
});
if (user) {
$realname.html(user.get('realname'));
$barRealname.html(user.get('realname'));
Webos.User.logout([function() {}, function() {}]);
} else {
$prompt.hide();
}
}, function() {}]);
}, 'gnome-screensaver');
GnomeScreenSaver._locked = true;
GnomeScreenSaver.trigger('lock');
};
GnomeScreenSaver._unlock = function() {
if (!GnomeScreenSaver.locked()) {
return;
}
GnomeScreenSaver._$screenlocker.fadeOut('fast', function() {
$(this).remove();
});
clearInterval(GnomeScreenSaver._screenlockerClockInterval);
GnomeScreenSaver._locked = false;
GnomeScreenSaver.trigger('unlock');
};
GnomeScreenSaver.autoActivate = function $_GnomeScreenSaver_autoActivate(time, lock, lockTime) {
if (typeof GnomeScreenSaver.autoActivate._timer != 'undefined') {
clearInterval(GnomeScreenSaver.autoActivate._timer);
delete GnomeScreenSaver.autoActivate._timer;
}
if (!(time > 0)) {
return;
}
var mouseMoved = false;
$(document).off('mousemove.gnomescreensaver').on('mousemove.gnomescreensaver', function() {
mouseMoved = true;
});
GnomeScreenSaver.autoActivate._timer = setInterval(function() {
if (!mouseMoved) {
if (lock) {
if (lockTime > 0) {
if (typeof GnomeScreenSaver.autoActivate._lockTimer == 'undefined') {
GnomeScreenSaver.autoActivate._lockTimer = setTimeout(function() {
delete GnomeScreenSaver.autoActivate._lockTimer;
GnomeScreenSaver.lock();
}, lockTime * 1000);
}
GnomeScreenSaver.activate();
} else {
GnomeScreenSaver.lock();
GnomeScreenSaver.activate();
}
} else {
GnomeScreenSaver.activate();
}
} else {
mouseMoved = false;
if (typeof GnomeScreenSaver.autoActivate._lockTimer != 'undefined') {
clearInterval(GnomeScreenSaver.autoActivate._lockTimer);
delete GnomeScreenSaver.autoActivate._lockTimer;
}
}
}, time * 1000);
};
	/**
	 * Load screensaver timings from the user's ~/.config/exiting.xml and apply
	 * them via autoActivate().
	 * @param callback Webos-style callback (success/error).
	 */
	GnomeScreenSaver.loadConfig = function $_GnomeScreenSaver_loadConfig(callback) {
		callback = Webos.Callback.toCallback(callback);

		Webos.ConfigFile.loadUserConfig('~/.config/exiting.xml', null, [function(configFile) {
			var shutdownScreenTime = 0, lockScreen = false, lockTime = 0;

			// Config values are in minutes; autoActivate() expects seconds.
			if (configFile.get('shutdownScreen') > 0) {
				shutdownScreenTime = parseFloat(configFile.get('shutdownScreen')) * 60;

				lockScreen = (configFile.get('lockScreenEnabled') == 1);
				if (lockScreen) {
					lockTime = parseFloat(configFile.get('lockScreenTime')) * 60;
				}
			}

			GnomeScreenSaver.autoActivate(shutdownScreenTime, lockScreen, lockTime);

			callback.success();
		}, callback.error]);
	};

	// Mixes in the observable API (trigger/bind) used throughout this library.
	Webos.Observable.build(GnomeScreenSaver);

	window.GnomeScreenSaver = GnomeScreenSaver; //Export library
});
|
ethansaxenian/RosettaDecode
|
lang/Ruby/hello-world-graphical-1.rb
|
require 'gtk2'

# Minimal GTK2 window that quits the main loop when closed.
window = Gtk::Window.new
window.title = 'Goodbye, World'
# BUG FIX: `:delete-event` is not a valid symbol literal — it parses as
# `:delete - event` and raises NameError. GTK signal names containing a dash
# must be passed as strings (or with an underscore).
window.signal_connect('delete-event') { Gtk.main_quit }
window.show_all
Gtk.main
|
noahbarnette/ugahacks5
|
sponsors/migrations/0006_sponsor_scanned_hackers.py
|
<filename>sponsors/migrations/0006_sponsor_scanned_hackers.py<gh_stars>1-10
# Generated by Django 2.2.10 on 2020-06-02 17:36
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add ``Sponsor.scanned_hackers``: the users this sponsor has scanned."""

    dependencies = [
        # The user model is swappable, so depend on whatever AUTH_USER_MODEL
        # resolves to rather than a concrete app/model pair.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sponsors', '0005_auto_20200523_1556'),
    ]

    operations = [
        migrations.AddField(
            model_name='sponsor',
            name='scanned_hackers',
            field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
        ),
    ]
|
sethsandaru/passing-through-note
|
src/libraries/PathInternalHook.js
|
import {HookProvider} from "@/classes/HookProvider";
// Registry of application-internal hooks, keyed by event name.
const HOOKS = {
    // Hook provider for clicks anywhere on the document body — presumably
    // used to dismiss floating UI; confirm against subscribers.
    GLOBAL_BODY_CLICK: new HookProvider(),
};

export {
    HOOKS
}
|
SusmithaGU/clouddriver
|
clouddriver-cloudrun/src/main/java/com/netflix/spinnaker/clouddriver/cloudrun/cache/Keys.java
|
<reponame>SusmithaGU/clouddriver<filename>clouddriver-cloudrun/src/main/java/com/netflix/spinnaker/clouddriver/cloudrun/cache/Keys.java
/*
* Copyright 2022 OpsMx, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.cloudrun.cache;
import com.google.common.base.CaseFormat;
import com.netflix.frigga.Names;
import com.netflix.spinnaker.clouddriver.cloudrun.CloudrunCloudProvider;
import groovy.util.logging.Slf4j;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
import lombok.Getter;
@Slf4j
public class Keys {
public static final String KEY_DELIMITER = ":";
public enum Namespace {
APPLICATIONS,
PLATFORM_APPLICATIONS,
CLUSTERS,
SERVER_GROUPS,
INSTANCES,
LOAD_BALANCERS,
ON_DEMAND;
public static String provider = CloudrunCloudProvider.ID;
@Getter final String ns;
private Namespace() {
this.ns = CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, name()); // FOO_BAR -> fooBar
}
public String toString() {
return ns;
}
public static Namespace from(String ns) {
return Stream.of(values())
.filter(namespace -> namespace.ns.equals(ns))
.findAny()
.orElseThrow(IllegalArgumentException::new);
}
}
public static Map<String, String> parse(String key) {
String[] parts = key.split(":");
if (parts.length < 2 || parts[0] != CloudrunCloudProvider.ID) {
return null;
}
Map<String, String> result = new HashMap<>();
result.put("provider", parts[0]);
result.put("type", parts[1]);
Namespace namespace = Namespace.from(parts[1]);
switch (namespace) {
case APPLICATIONS:
result.put("application", parts[2]);
break;
case PLATFORM_APPLICATIONS:
result.put("project", parts[2]);
break;
case CLUSTERS:
Names names = Names.parseName(parts[4]);
result.put("account", parts[2]);
result.put("application", parts[3]);
result.put("name", parts[4]);
result.put("cluster", parts[4]);
result.put("stack", names.getStack());
result.put("detail", names.getDetail());
break;
case INSTANCES:
result.put("account", parts[2]);
result.put("name", parts[3]);
result.put("instance", parts[3]);
break;
case LOAD_BALANCERS:
result.put("account", parts[2]);
result.put("name", parts[3]);
result.put("loadBalancer", parts[3]);
break;
case SERVER_GROUPS:
Names names_1 = Names.parseName(parts[5]);
result.put("application", names_1.getApp());
result.put("cluster", parts[2]);
result.put("account", parts[3]);
result.put("region", parts[4]);
result.put("stack", names_1.getStack());
result.put("detail", names_1.getDetail());
result.put("serverGroup", parts[5]);
result.put("name", parts[5]);
result.put("sequence", names_1.getSequence().toString());
break;
default:
break;
}
return result;
}
public static String getApplicationKey(String application) {
return keyFor(Namespace.APPLICATIONS, application);
}
public static String getPlatformApplicationKey(String project) {
return keyFor(Namespace.PLATFORM_APPLICATIONS, project);
}
public static String getClusterKey(String account, String application, String clusterName) {
return keyFor(Namespace.CLUSTERS, account, application, clusterName);
}
public static String getInstanceKey(String account, String instanceName) {
return keyFor(Namespace.INSTANCES, account, instanceName);
}
public static String getLoadBalancerKey(String account, String loadBalancerName) {
return keyFor(Namespace.LOAD_BALANCERS, account, loadBalancerName);
}
public static String getServerGroupKey(String account, String serverGroupName, String region) {
Names names = Names.parseName(serverGroupName);
return keyFor(Namespace.SERVER_GROUPS, names.getCluster(), account, region, names.getGroup());
}
private static String keyFor(Namespace namespace, String... parts) {
StringBuilder builder =
new StringBuilder(CloudrunCloudProvider.ID + KEY_DELIMITER).append(namespace);
for (String part : parts) {
builder.append(KEY_DELIMITER).append(part);
}
return builder.toString();
}
}
|
Teslos/libgeodecomp
|
src/libgeodecomp/loadbalancer/loadbalancer.cpp
|
<gh_stars>0
#include <libgeodecomp/loadbalancer/loadbalancer.h>
namespace LibGeoDecomp {
/**
 * Distributes `items` work units over the ranks in proportion to their
 * measured speeds. Boundaries are rounded down and the last rank absorbs the
 * remainder, so the returned weights always sum to exactly `items`.
 */
std::vector<std::size_t> LoadBalancer::initialWeights(std::size_t items, const std::vector<double>& rankSpeeds)
{
    std::size_t size = rankSpeeds.size();

    // Robustness fix: with zero ranks the original wrote to
    // ret[size - 1] == ret[SIZE_MAX], which is undefined behavior.
    if (size == 0) {
        return std::vector<std::size_t>();
    }

    double totalSum = sum(rankSpeeds);
    std::vector<std::size_t> ret(size);

    std::size_t lastPos = 0;
    double partialSum = 0.0;
    for (std::size_t i = 0; i < size - 1; ++i) {
        partialSum += rankSpeeds[i];
        // Cumulative proportional boundary, truncated to an integer position.
        std::size_t nextPos = items * partialSum / totalSum;
        ret[i] = nextPos - lastPos;
        lastPos = nextPos;
    }
    // Last rank takes whatever is left, guaranteeing the exact total.
    ret[size - 1] = items - lastPos;

    return ret;
}
}
|
whatshow/WifiManager
|
demo/linux-headers-4.2.0-27/arch/arm/include/asm/elf.h
|
<reponame>whatshow/WifiManager
#ifndef __ASMARM_ELF_H
#define __ASMARM_ELF_H
#include <asm/auxvec.h>
#include <asm/hwcap.h>
#include <asm/vdso_datapage.h>
/*
 * ELF register definitions..
 */
#include <asm/ptrace.h>
#include <asm/user.h>
struct task_struct;
/* Core-dump register types: the GP register set mirrors struct pt_regs,
 * the FP set mirrors struct user_fp. */
typedef unsigned long elf_greg_t;
typedef unsigned long elf_freg_t[3];
#define ELF_NGREG (sizeof (struct pt_regs) / sizeof(elf_greg_t))
typedef elf_greg_t elf_gregset_t[ELF_NGREG];
typedef struct user_fp elf_fpregset_t;
/* ELF header e_flags: the EABI version field and per-ABI feature bits.
 * Note some bit values are reused across ABI revisions (see the paired
 * definitions below, e.g. EF_ARM_MAPSYMSFIRST / EF_ARM_APCS_FLOAT). */
#define EF_ARM_EABI_MASK 0xff000000
#define EF_ARM_EABI_UNKNOWN 0x00000000
#define EF_ARM_EABI_VER1 0x01000000
#define EF_ARM_EABI_VER2 0x02000000
#define EF_ARM_EABI_VER3 0x03000000
#define EF_ARM_EABI_VER4 0x04000000
#define EF_ARM_EABI_VER5 0x05000000
#define EF_ARM_BE8 0x00800000 /* ABI 4,5 */
#define EF_ARM_LE8 0x00400000 /* ABI 4,5 */
#define EF_ARM_MAVERICK_FLOAT 0x00000800 /* ABI 0 */
#define EF_ARM_VFP_FLOAT 0x00000400 /* ABI 0 */
#define EF_ARM_SOFT_FLOAT 0x00000200 /* ABI 0 */
#define EF_ARM_OLD_ABI 0x00000100 /* ABI 0 */
#define EF_ARM_NEW_ABI 0x00000080 /* ABI 0 */
#define EF_ARM_ALIGN8 0x00000040 /* ABI 0 */
#define EF_ARM_PIC 0x00000020 /* ABI 0 */
#define EF_ARM_MAPSYMSFIRST 0x00000010 /* ABI 2 */
#define EF_ARM_APCS_FLOAT 0x00000010 /* ABI 0, floats in fp regs */
#define EF_ARM_DYNSYMSUSESEGIDX 0x00000008 /* ABI 2 */
#define EF_ARM_APCS_26 0x00000008 /* ABI 0 */
#define EF_ARM_SYMSARESORTED 0x00000004 /* ABI 1,2 */
#define EF_ARM_INTERWORK 0x00000004 /* ABI 0 */
#define EF_ARM_HASENTRY 0x00000002 /* All */
#define EF_ARM_RELEXEC 0x00000001 /* All */
/* ARM relocation types (subset used by the kernel, e.g. module loading). */
#define R_ARM_NONE 0
#define R_ARM_PC24 1
#define R_ARM_ABS32 2
#define R_ARM_CALL 28
#define R_ARM_JUMP24 29
#define R_ARM_TARGET1 38
#define R_ARM_V4BX 40
#define R_ARM_PREL31 42
#define R_ARM_MOVW_ABS_NC 43
#define R_ARM_MOVT_ABS 44
/* Thumb relocation types. */
#define R_ARM_THM_CALL 10
#define R_ARM_THM_JUMP24 30
#define R_ARM_THM_MOVW_ABS_NC 47
#define R_ARM_THM_MOVT_ABS 48
/*
 * These are used to set parameters in the core dumps.
 */
#define ELF_CLASS ELFCLASS32
#ifdef __ARMEB__
#define ELF_DATA ELFDATA2MSB
#else
#define ELF_DATA ELFDATA2LSB
#endif
#define ELF_ARCH EM_ARM
/*
 * This yields a string that ld.so will use to load implementation
 * specific libraries for optimization. This is more specific in
 * intent than poking at uname or /proc/cpuinfo.
 *
 * For now we just provide a fairly general string that describes the
 * processor family. This could be made more specific later if someone
 * implemented optimisations that require it. 26-bit CPUs give you
 * "v1l" for ARM2 (no SWP) and "v2l" for anything else (ARM1 isn't
 * supported). 32-bit CPUs give you "v3[lb]" for anything based on an
 * ARM6 or ARM7 core and "armv4[lb]" for anything based on a StrongARM-1
 * core.
 */
#define ELF_PLATFORM_SIZE 8
#define ELF_PLATFORM (elf_platform)
extern char elf_platform[];
struct elf32_hdr;
/*
 * This is used to ensure we don't load something for the wrong architecture.
 */
extern int elf_check_arch(const struct elf32_hdr *);
#define elf_check_arch elf_check_arch
#define vmcore_elf64_check_arch(x) (0)
extern int arm_elf_read_implies_exec(const struct elf32_hdr *, int);
#define elf_read_implies_exec(ex,stk) arm_elf_read_implies_exec(&(ex), stk)
struct task_struct;
int dump_task_regs(struct task_struct *t, elf_gregset_t *elfregs);
#define ELF_CORE_COPY_TASK_REGS dump_task_regs
#define CORE_DUMP_USE_REGSET
#define ELF_EXEC_PAGESIZE 4096
/* This is the location that an ET_DYN program is loaded if exec'ed. Typical
use of this is to invoke "./ld.so someprog" to test out a new version of
the loader. We need to make sure that it is out of the way of the program
that it will "exec", and that there is sufficient room for the brk. */
#define ELF_ET_DYN_BASE (TASK_SIZE / 3 * 2)
/* When the program starts, a1 contains a pointer to a function to be
registered with atexit, as per the SVR4 ABI. A value of 0 means we
have no such handler. */
#define ELF_PLAT_INIT(_r, load_addr) (_r)->ARM_r0 = 0
extern void elf_set_personality(const struct elf32_hdr *);
#define SET_PERSONALITY(ex) elf_set_personality(&(ex))
#ifdef CONFIG_MMU
#ifdef CONFIG_VDSO
/* Publish the vDSO base address to userspace via the ELF aux vector. */
#define ARCH_DLINFO \
do { \
NEW_AUX_ENT(AT_SYSINFO_EHDR, \
(elf_addr_t)current->mm->context.vdso); \
} while (0)
#endif
#define ARCH_HAS_SETUP_ADDITIONAL_PAGES 1
struct linux_binprm;
int arch_setup_additional_pages(struct linux_binprm *, int);
#endif
#endif
|
EuPathDB-Infra/WDK
|
Model/src/main/java/org/gusdb/wdk/model/Reference.java
|
package org.gusdb.wdk.model;
/**
* Represents a reference in a wdk model. Has a two-part name: set and element
*
* Created: Tue May 11 15:17:30 EDT 2004
*
* @author <NAME>
* @version $Revision$ $Date$ $Author$
*/
public class Reference extends WdkModelBase {
private String setName;
private String elementName;
private String twoPartName;
private String groupRef;
public Reference() {}
/**
* @param twoPartName Of the form "set.element"
*/
public Reference(String twoPartName) throws WdkModelException{
setRef(twoPartName);
}
public String getSetName(){
return this.setName;
}
public String getElementName(){
return this.elementName;
}
public String getTwoPartName() {
return twoPartName;
}
public static boolean isTwoPartName(String twoPartName) {
try { assertTwoPartName(twoPartName); return true; }
catch (WdkModelException e) { return false; }
}
public static void assertTwoPartName(String twoPartName) throws WdkModelException {
if (twoPartName == null) {
throw new WdkModelException("Error: twoPartName is null");
}
if (!twoPartName.matches("\\S+\\.\\S+")) {
throw new WdkModelException("Error: Reference '" + twoPartName + "' is not in the form 'setName.elementName'");
}
}
/**
* @param twoPartName Of the form "set.element"
*/
public void setRef(String twoPartName) throws WdkModelException {
assertTwoPartName(twoPartName);
String[] parts = twoPartName.split("\\.");
setName = parts[0];
elementName = parts[1];
this.twoPartName = twoPartName;
}
public void setGroupRef(String groupRef) throws WdkModelException {
if (groupRef == null) {
throw new WdkModelException("Error: twoPartName is null");
}
if (!groupRef.matches("\\S+\\.\\S+")) {
throw new WdkModelException("Error: Group Reference '" + groupRef + "' is not in the form 'setName.elementName'");
}
this.groupRef = groupRef;
}
/**
* @return the groupRef
*/
public String getGroupRef() {
return groupRef;
}
@Override
public String toString() {
return "Reference: "+twoPartName;
}
/* (non-Javadoc)
* @see org.gusdb.wdk.model.WdkModelBase#excludeResources(java.lang.String)
*/
@Override
public void excludeResources(String projectId) throws WdkModelException {
// do nothing
}
/* (non-Javadoc)
* @see org.gusdb.wdk.model.WdkModelBase#resolveReferences(org.gusdb.wdk.model.WdkModel)
*/
@Override
public void resolveReferences(WdkModel wodkModel) throws WdkModelException {
// do nothing
}
}
|
viridia/coda
|
libs/coda/backend/cpp/__init__.py
|
<reponame>viridia/coda<filename>libs/coda/backend/cpp/__init__.py
def createGenerators(options):
    """Create the C++ backend's code generators.

    Returns a (header generator, source generator) pair, both constructed
    with the 'cpp' language id and the supplied options. The import is kept
    local so the backend package can be enumerated without loading `gen`.
    """
    from . import gen
    return (gen.CppHeaderGenerator('cpp', options), gen.CppGenerator('cpp', options))
|
wwjiang007/spring-xd
|
spring-xd-dirt/src/main/java/org/springframework/xd/dirt/container/initializer/package-info.java
|
/**
* Package for container context initializer classes.
*/
package org.springframework.xd.dirt.container.initializer;
|
elevation/spree
|
vendor/plugins/state_machine/test/app_root/config/environment.rb
|
<reponame>elevation/spree
require 'config/boot'

# Rails environment for the state_machine plugin's embedded test application.
Rails::Initializer.run do |config|
  # Reload application classes on each request (test/development behaviour).
  config.cache_classes = false
  # Warn when methods are invoked on nil, surfacing silent nil bugs.
  config.whiny_nils = true
  # Register the observer under test so ActiveRecord loads it.
  config.active_record.observers = :switch_observer
end
|
exactpro/clearth
|
clearth-core/src/test/java/com/exactprosystems/clearth/automation/ActionGeneratorTest.java
|
<reponame>exactpro/clearth
/******************************************************************************
* Copyright 2009-2019 Exactpro Systems Limited
* https://www.exactpro.com
* Build Software to Test Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.exactprosystems.clearth.automation;
import org.apache.commons.lang.StringUtils;
import org.junit.Test;
import org.slf4j.Logger;
import java.util.HashMap;
import java.util.Map;
import static java.lang.String.format;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
/**
 * Unit tests for ActionGenerator.parseDefaultInputParams: parsing of
 * comma-separated key=value lists with optional quoting and escaping.
 */
public class ActionGeneratorTest {

    // Mocked logger so tests can verify which warnings/errors parsing reports.
    Logger logger = mock(Logger.class);

    private static final String ACTION_NAME = "ACTION_NAME";
    private static final String INCORRECT_PARSING_RESULT_MSG = "Incorrect parsing result. Expected: %s, Actual: %s";

    /** A single key=value pair parses into a one-entry map with no diagnostics. */
    @Test
    public void parseDefaultInputParams_Simple() {
        Map<String, String> expected = buildMap("param1", "One");
        Map<String, String> actual = ActionGenerator.parseDefaultInputParams("param1=One", ACTION_NAME, logger);
        verify(logger, times(0)).warn(anyString());
        verify(logger, times(0)).error(anyString());
        assertTrue(format(INCORRECT_PARSING_RESULT_MSG, expected, actual), mapIsEquals(expected, actual));
    }

    /** Whitespace around '=' and ',' is ignored for unquoted values. */
    @Test
    public void parseDefaultInputParams_Simple2() {
        Map<String, String> expected = buildMap("param1", "One", "param2", "Two", "param3", "Three");
        Map<String, String> actual = ActionGenerator.parseDefaultInputParams("param1=One, param2 = Two ,param3=Three", ACTION_NAME, logger);
        verify(logger, times(0)).warn(anyString());
        verify(logger, times(0)).error(anyString());
        assertTrue(format(INCORRECT_PARSING_RESULT_MSG, expected, actual), mapIsEquals(expected, actual));
    }

    /** Quotes around a value are stripped. */
    @Test
    public void parseDefaultInputParams_Quoted() {
        String line = "param1=\"One\"";
        Map<String, String> expected = buildMap("param1", "One");
        Map<String, String> actual = ActionGenerator.parseDefaultInputParams(line, ACTION_NAME, logger);
        verify(logger, times(0)).warn(anyString());
        verify(logger, times(0)).error(anyString());
        assertTrue(format(INCORRECT_PARSING_RESULT_MSG, expected, actual), mapIsEquals(expected, actual));
    }

    /** Escaped quotes, delimiters, and backslashes survive inside quoted values. */
    @Test
    public void parseDefaultInputParams_Quoted2() {
        String line = "param1=\"One\", param2=\"T\\\"w\\\"o\" , OtherParam = \",=\\t!@$%^&*[]{}()\\\\\"";
        Map<String, String> expected = buildMap("param1", "One", "param2", "T\"w\"o", "OtherParam", ",=\t!@$%^&*[]{}()\\");
        Map<String, String> actual = ActionGenerator.parseDefaultInputParams(line, ACTION_NAME, logger);
        verify(logger, times(0)).warn(anyString());
        verify(logger, times(0)).error(anyString());
        assertTrue(format(INCORRECT_PARSING_RESULT_MSG, expected, actual), mapIsEquals(expected, actual));
    }

    /** A trailing key without '=' is dropped with a warning, not an error. */
    @Test
    public void parseDefaultInputParams_FailWithoutValue() {
        String line = "param1=One, param2";
        Map<String, String> expected = buildMap("param1", "One");
        Map<String, String> actual = ActionGenerator.parseDefaultInputParams(line, ACTION_NAME, logger);
        verify(logger, times(1)).warn(contains("unexpected end of parameter's list"));
        verify(logger, times(0)).error(anyString());
        assertTrue(format(INCORRECT_PARSING_RESULT_MSG, expected, actual), mapIsEquals(expected, actual));
    }

    /** An unterminated quoted value is likewise dropped with a warning. */
    @Test
    public void parseDefaultInputParams_FailUnclosedQuote() {
        String line = "param1=One, param2=\"Two";
        Map<String, String> expected = buildMap("param1", "One");
        Map<String, String> actual = ActionGenerator.parseDefaultInputParams(line, ACTION_NAME, logger);
        verify(logger, times(1)).warn(contains("unexpected end of parameter's list"));
        verify(logger, times(0)).error(anyString());
        assertTrue(format(INCORRECT_PARSING_RESULT_MSG, expected, actual), mapIsEquals(expected, actual));
    }

    // Null-tolerant, value-aware map equality (Map.equals would also work but
    // this keeps the comparison explicit about null handling).
    private static boolean mapIsEquals(Map<String, String> map1, Map<String, String> map2)
    {
        if (map1 == null && map2 == null)
            return true;
        if (map1 == null || map2 == null)
            return false;
        if (map1.size() != map2.size())
            return false;
        for (Map.Entry<String, String> entry1: map1.entrySet())
        {
            if (!StringUtils.equals(entry1.getValue(), map2.get(entry1.getKey())))
                return false;
        }
        return true;
    }

    // Builds a map from alternating key/value varargs; throws on odd counts.
    private static Map<String, String> buildMap(String... keysAndValues) {
        if (keysAndValues.length % 2 != 0)
            throw new RuntimeException("Please use even number of arguments for buildMap() method");
        Map<String, String> map = new HashMap<String, String>();
        for (int i = 1; i < keysAndValues.length; i+=2)
        {
            map.put(keysAndValues[i-1], keysAndValues[i]);
        }
        return map;
    }
}
|
qinFamily/dangchat-sdk
|
actor-sdk/sdk-core/runtime/runtime-shared/src/main/java/im/actor/runtime/crypto/primitives/curve25519/fe_mul121666.java
|
package im.actor.runtime.crypto.primitives.curve25519;
// Disabling Bounds checks for speeding up calculations
/*-[
#define J2OBJC_DISABLE_ARRAY_BOUND_CHECKS 1
]-*/
/**
 * curve25519 field-element multiplication by the constant 121666.
 *
 * Field elements use the ref10 radix-2^25.5 representation: ten int limbs
 * alternating 26 and 25 bits, i.e. f = f0 + f1*2^26 + f2*2^51 + f3*2^77 + ...
 * Each limb is multiplied by 121666 in 64-bit arithmetic, then carries are
 * propagated to restore the per-limb bounds. The carry out of the top limb
 * re-enters at limb 0 multiplied by 19, since 2^255 ≡ 19 (mod 2^255 - 19).
 *
 * NOTE: the exact ordering of the carry chain below is part of the ref10
 * design (it guarantees the stated output bounds); do not reorder.
 */
public class fe_mul121666 {

//CONVERT #include "fe.h"
//CONVERT #include "long.h"

/*
h = f * 121666
Can overlap h with f.
Preconditions:
|f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
Postconditions:
|h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
*/

public static void fe_mul121666(int[] h, int[] f) {
    int f0 = f[0];
    int f1 = f[1];
    int f2 = f[2];
    int f3 = f[3];
    int f4 = f[4];
    int f5 = f[5];
    int f6 = f[6];
    int f7 = f[7];
    int f8 = f[8];
    int f9 = f[9];

    // Scale every limb; products fit comfortably in 64 bits.
    long h0 = f0 * (long) 121666;
    long h1 = f1 * (long) 121666;
    long h2 = f2 * (long) 121666;
    long h3 = f3 * (long) 121666;
    long h4 = f4 * (long) 121666;
    long h5 = f5 * (long) 121666;
    long h6 = f6 * (long) 121666;
    long h7 = f7 * (long) 121666;
    long h8 = f8 * (long) 121666;
    long h9 = f9 * (long) 121666;

    long carry0;
    long carry1;
    long carry2;
    long carry3;
    long carry4;
    long carry5;
    long carry6;
    long carry7;
    long carry8;
    long carry9;

    // First pass: reduce the 25-bit (odd-index) limbs. The top-limb carry
    // wraps around into h0 scaled by 19 (modular reduction).
    carry9 = (h9 + (long) (1 << 24)) >> 25;
    h0 += carry9 * 19;
    h9 -= carry9 << 25;
    carry1 = (h1 + (long) (1 << 24)) >> 25;
    h2 += carry1;
    h1 -= carry1 << 25;
    carry3 = (h3 + (long) (1 << 24)) >> 25;
    h4 += carry3;
    h3 -= carry3 << 25;
    carry5 = (h5 + (long) (1 << 24)) >> 25;
    h6 += carry5;
    h5 -= carry5 << 25;
    carry7 = (h7 + (long) (1 << 24)) >> 25;
    h8 += carry7;
    h7 -= carry7 << 25;

    // Second pass: reduce the 26-bit (even-index) limbs.
    carry0 = (h0 + (long) (1 << 25)) >> 26;
    h1 += carry0;
    h0 -= carry0 << 26;
    carry2 = (h2 + (long) (1 << 25)) >> 26;
    h3 += carry2;
    h2 -= carry2 << 26;
    carry4 = (h4 + (long) (1 << 25)) >> 26;
    h5 += carry4;
    h4 -= carry4 << 26;
    carry6 = (h6 + (long) (1 << 25)) >> 26;
    h7 += carry6;
    h6 -= carry6 << 26;
    carry8 = (h8 + (long) (1 << 25)) >> 26;
    h9 += carry8;
    h8 -= carry8 << 26;

    // All limbs are now within bounds and fit in 32 bits.
    h[0] = (int) h0;
    h[1] = (int) h1;
    h[2] = (int) h2;
    h[3] = (int) h3;
    h[4] = (int) h4;
    h[5] = (int) h5;
    h[6] = (int) h6;
    h[7] = (int) h7;
    h[8] = (int) h8;
    h[9] = (int) h9;
}
}
|
DepartmentOfHealth-htbhf/htbhf-claimant-service
|
api/src/test/java/uk/gov/dhsc/htbhf/claimant/ReportPaymentIntegrationTest.java
|
<filename>api/src/test/java/uk/gov/dhsc/htbhf/claimant/ReportPaymentIntegrationTest.java
package uk.gov.dhsc.htbhf.claimant;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.zonky.test.db.AutoConfigureEmbeddedDatabase;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import uk.gov.dhsc.htbhf.claimant.entity.Claim;
import uk.gov.dhsc.htbhf.claimant.entity.PaymentCycle;
import uk.gov.dhsc.htbhf.claimant.model.PostcodeDataResponse;
import uk.gov.dhsc.htbhf.claimant.reporting.ReportPaymentMessageSender;
import uk.gov.dhsc.htbhf.claimant.repository.ClaimRepository;
import uk.gov.dhsc.htbhf.claimant.repository.PaymentCycleRepository;
import uk.gov.dhsc.htbhf.claimant.scheduler.MessageProcessorScheduler;
import uk.gov.dhsc.htbhf.claimant.testsupport.RepositoryMediator;
import uk.gov.dhsc.htbhf.claimant.testsupport.WiremockManager;
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
import static uk.gov.dhsc.htbhf.claimant.reporting.PaymentAction.INITIAL_PAYMENT;
import static uk.gov.dhsc.htbhf.claimant.reporting.PaymentAction.SCHEDULED_PAYMENT;
import static uk.gov.dhsc.htbhf.claimant.reporting.PaymentAction.TOP_UP_PAYMENT;
import static uk.gov.dhsc.htbhf.claimant.testsupport.ClaimTestDataFactory.aValidClaim;
import static uk.gov.dhsc.htbhf.claimant.testsupport.PaymentCycleTestDataFactory.aPaymentCycleWithClaim;
import static uk.gov.dhsc.htbhf.claimant.testsupport.PostcodeDataResponseTestFactory.aPostcodeDataResponseObjectForPostcode;
@SpringBootTest(webEnvironment = RANDOM_PORT)
@AutoConfigureEmbeddedDatabase
/**
 * Integration tests verifying that payment events are reported to Google
 * Analytics (with claimant location enriched from postcodes.io), end to end
 * through the message queue and scheduler, against WireMock-stubbed services.
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@AutoConfigureEmbeddedDatabase
public class ReportPaymentIntegrationTest {

    @Autowired
    private ReportPaymentMessageSender reportPaymentMessageSender;
    @Autowired
    private MessageProcessorScheduler messageProcessorScheduler;
    @Autowired
    private ClaimRepository claimRepository;
    @Autowired
    private PaymentCycleRepository paymentCycleRepository;
    @Autowired
    private WiremockManager wiremockManager;
    @Autowired
    private RepositoryMediator repositoryMediator;

    @BeforeEach
    void setup() {
        wiremockManager.startWireMock();
    }

    @AfterEach
    void tearDown() {
        wiremockManager.stopWireMock();
        // Clear persisted claims/cycles so tests remain independent.
        repositoryMediator.deleteAllEntities();
    }

    @Test
    void shouldReportInitialPaymentToGoogleAnalytics() throws JsonProcessingException {
        Claim claim = claimRepository.save(aValidClaim());
        PaymentCycle paymentCycle = paymentCycleRepository.save(aPaymentCycleWithClaim(claim));
        String postcode = claim.getClaimant().getAddress().getPostcode();
        stubPostcodesIoAndGoogleAnalytics(postcode);

        reportPaymentMessageSender.sendReportPaymentMessage(claim, paymentCycle, INITIAL_PAYMENT);
        // Drive the queued report message synchronously through the scheduler.
        messageProcessorScheduler.processReportPaymentMessages();

        wiremockManager.verifyPostcodesIoCalled(postcode);
        wiremockManager.verifyGoogleAnalyticsCalledForPaymentEvent(claim, INITIAL_PAYMENT,
        paymentCycle.getTotalEntitlementAmountInPence(), paymentCycle.getChildrenDob());
    }

    @Test
    void shouldReportScheduledPaymentToGoogleAnalytics() throws JsonProcessingException {
        Claim claim = claimRepository.save(aValidClaim());
        PaymentCycle paymentCycle = paymentCycleRepository.save(aPaymentCycleWithClaim(claim));
        String postcode = claim.getClaimant().getAddress().getPostcode();
        stubPostcodesIoAndGoogleAnalytics(postcode);

        reportPaymentMessageSender.sendReportPaymentMessage(claim, paymentCycle, SCHEDULED_PAYMENT);
        messageProcessorScheduler.processReportPaymentMessages();

        wiremockManager.verifyPostcodesIoCalled(postcode);
        wiremockManager.verifyGoogleAnalyticsCalledForPaymentEvent(claim, SCHEDULED_PAYMENT,
        paymentCycle.getTotalEntitlementAmountInPence(), paymentCycle.getChildrenDob());
    }

    @Test
    void shouldReportTopUpPaymentToGoogleAnalytics() throws JsonProcessingException {
        Claim claim = claimRepository.save(aValidClaim());
        PaymentCycle paymentCycle = paymentCycleRepository.save(aPaymentCycleWithClaim(claim));
        String postcode = claim.getClaimant().getAddress().getPostcode();
        stubPostcodesIoAndGoogleAnalytics(postcode);

        // Top-ups report the explicit top-up amount, not the cycle entitlement.
        int paymentAmount = 100;
        reportPaymentMessageSender.sendReportPregnancyTopUpPaymentMessage(claim, paymentCycle, paymentAmount);
        messageProcessorScheduler.processReportPaymentMessages();

        wiremockManager.verifyPostcodesIoCalled(postcode);
        wiremockManager.verifyGoogleAnalyticsCalledForPaymentEvent(claim, TOP_UP_PAYMENT, paymentAmount, paymentCycle.getChildrenDob());
    }

    // Stubs both outbound dependencies used by the reporting pipeline.
    private void stubPostcodesIoAndGoogleAnalytics(String postcode) throws JsonProcessingException {
        PostcodeDataResponse postcodeDataResponse = aPostcodeDataResponseObjectForPostcode(postcode);
        wiremockManager.stubPostcodeDataLookup(postcodeDataResponse);
        wiremockManager.stubGoogleAnalyticsCall();
    }
}
|
zhaochaohui/MFC
|
cow2/TeeChartAPI/calendarseries.h
|
#if !defined(AFX_CALENDARSERIES_H__E63CB61D_1DE1_4EFF_9BAF_D9C37BA183FA__INCLUDED_)
#define AFX_CALENDARSERIES_H__E63CB61D_1DE1_4EFF_9BAF_D9C37BA183FA__INCLUDED_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
// Machine generated IDispatch wrapper class(es) created by Microsoft Visual C++
// NOTE: Do not modify the contents of this file. If this class is regenerated by
// Microsoft Visual C++, your modifications will be overwritten.
// Dispatch interfaces referenced by this interface
class CCalendarCell;
class CCalendarCellUpper;
/////////////////////////////////////////////////////////////////////////////
// CCalendarSeries wrapper class
// NOTE(review): this wrapper is machine generated (see the notice above);
// regeneration will discard edits, so only comments are added here.
class CCalendarSeries : public COleDispatchDriver
{
public:
	CCalendarSeries() {} // Calls COleDispatchDriver default constructor
	CCalendarSeries(LPDISPATCH pDispatch) : COleDispatchDriver(pDispatch) {}
	CCalendarSeries(const CCalendarSeries& dispatchSrc) : COleDispatchDriver(dispatchSrc) {}

// Attributes
public:

// Operations
public:
	// Date shown by the calendar (OLE DATE as double).
	double GetDate();
	void SetDate(double newValue);
	// Cell-style accessors for the various calendar regions.
	CCalendarCell GetDays();
	CCalendarCellUpper GetMonths();
	CCalendarCell GetTrailing();
	CCalendarCell GetSunday();
	CCalendarCell GetToday();
	CCalendarCellUpper GetWeekdays();
	// Month navigation.
	void NextMonth();
	void PreviousMonth();
	// method 'RectCell' not emitted because of invalid return type or parameter type
	long Rows();
	// method 'SeriesRect' not emitted because of invalid return type or parameter type
	BOOL GetNextButtonVisible();
	void SetNextButtonVisible(BOOL bNewValue);
	BOOL GetPreviousButtonVisible();
	void SetPreviousButtonVisible(BOOL bNewValue);
};
//{{AFX_INSERT_LOCATION}}
// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
#endif // !defined(AFX_CALENDARSERIES_H__E63CB61D_1DE1_4EFF_9BAF_D9C37BA183FA__INCLUDED_)
|
AsahiOS/gate
|
usr/src/lib/libnisdb/yptol/shim_hooks.h
|
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License, Version 1.0 only
* (the "License"). You may not use this file except in compliance
* with the License.
*
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
* or http://www.opensolaris.org/os/licensing.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright 2003 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
#ifndef __SHIM_HOOKS_H
#define __SHIM_HOOKS_H
#pragma ident "%Z%%M% %I% %E% SMI"
#ifdef __cplusplus
extern "C" {
#endif
/*
 * DESCRIPTION: This file implements the hooks between old style DBM calls and
 * the shim version of the same calls. By including this file a
 * C files calls are diverted to the shim versions.
 *
 * Do NOT include this in the shim code itself or you will be
 * unable to make real dbm calls.
 *
 * Do NOT include this in the client side NIS files.
 *
 * One day it may be possible to implement a more elegant version
 * of this based on the linkers 'interposition' mechanism.
 */
/*
 * Extern defs for new calls. Must have identical args to traditional version.
 */
extern void shim_dbm_close(DBM *db);
extern int shim_dbm_delete(DBM *db, datum key);
extern datum shim_dbm_fetch(DBM *db, datum key);
extern datum shim_dbm_fetch_noupdate(DBM *db, datum key);
extern datum shim_dbm_firstkey(DBM *db);
extern datum shim_dbm_nextkey(DBM *db);
extern datum shim_dbm_do_nextkey(DBM *db, datum inkey);
extern DBM *shim_dbm_open(const char *file, int open_flags,
mode_t file_mode);
extern int shim_dbm_store(DBM *db, datum key, datum content,
int store_mode);
void shim_exit(int code);
/*
 * Externs for other function related to maps
 */
extern char *get_map_name(DBM *);
/*
 * Hooks. Alias standard dbm call names to new calls
 */
#define dbm_close shim_dbm_close
#define dbm_delete shim_dbm_delete
#define dbm_fetch shim_dbm_fetch
#define dbm_firstkey shim_dbm_firstkey
#define dbm_nextkey shim_dbm_nextkey
#define dbm_do_nextkey shim_dbm_do_nextkey
#define dbm_open shim_dbm_open
#define dbm_store shim_dbm_store
/* Caution: this diverts EVERY exit() call in translation units that
 * include this header, not just dbm-related ones. */
#define exit shim_exit
#ifdef __cplusplus
}
#endif
#endif /* __SHIM_HOOKS_H */
|
jaybarra/emp3-web
|
src/sdk/core/api/cmapi/cmapi.js
|
<filename>src/sdk/core/api/cmapi/cmapi.js
// Define cmapi namespaces.
// Each path segment is created only if missing, so pre-existing namespace
// objects (and anything attached to them) are preserved.
var cmapi = cmapi || {};

(function (root) {
    var paths = [
        'channel.schema',
        'channel.handler',
        'channel.publisher',
        'channel.support',
        'typeLibrary',
        'map.message.complete.builder',
        'map.message.progress.builder'
    ];
    for (var i = 0; i < paths.length; i++) {
        var node = root;
        var parts = paths[i].split('.');
        for (var j = 0; j < parts.length; j++) {
            node = node[parts[j]] = node[parts[j]] || {};
        }
    }
}(cmapi));
|
SWAT-engineering/bird
|
nest/test/engineering/swat/nest/CommonTestHelper.java
|
package engineering.swat.nest;
import engineering.swat.nest.core.bytes.ParseLogTarget;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Stream;
import engineering.swat.nest.core.bytes.ByteStream;
import engineering.swat.nest.core.bytes.source.ByteSliceBuilder;
import org.checkerframework.checker.nullness.qual.Nullable;
public class CommonTestHelper {
    /**
     * Wraps raw bytes as a ByteStream backed by the synthetic URI
     * {@code tmp:///test}. The URI is a fixed valid literal, so the checked
     * URISyntaxException can only signal a programming error and is rethrown
     * unchecked.
     */
    public static ByteStream wrap(byte... bytes) {
    try {
    return new ByteStream(ByteSliceBuilder.wrap(ByteBuffer.wrap(bytes), new URI("tmp:///test")) );
    } catch (URISyntaxException e) {
    throw new RuntimeException(e);
    }
    }
public static ByteStream wrap(int... bytes) {
byte[] data = new byte[bytes.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte)bytes[i];
}
try {
return new ByteStream(ByteSliceBuilder.wrap(ByteBuffer.wrap(data), new URI("tmp:///test")) );
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
public static Stream<Path> findResources(String extension) {
ClassLoader context = Objects.requireNonNull(CommonTestHelper.class.getClassLoader(), "Unexpected missing classloader");
URL rootDir = context.getResource("test-files/");
if (rootDir == null) {
throw new RuntimeException("Could not find /test-files/ in " + context);
}
List<Path> result = new ArrayList<>();
try {
walkURL(rootDir, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (file.toString().endsWith(extension)) {
result.add(file);
}
return super.visitFile(file, attrs);
}
});
} catch (URISyntaxException | IOException e) {
throw new RuntimeException(e);
}
return result.stream();
}
private static void walkURL(URL rootDir, FileVisitor<Path> visitor) throws URISyntaxException, IOException {
if (rootDir.getProtocol().equals("file")) {
Files.walkFileTree(Paths.get(rootDir.toURI()), visitor);
}
else {
try (FileSystem fs = FileSystems.newFileSystem(rootDir.toURI(), Collections.<String, Object>emptyMap())) {
Files.walkFileTree(fs.getPath("/"), visitor);
}
}
}
public static ParseLogTarget TRACE_FAILURES = new ParseLogTarget() {
@Override
public void fail(String msg) {
System.err.println(msg);
}
private String replaceFirstPlaceHolder(String s, @Nullable Object o) {
return s.replaceFirst("\\{}", Objects.toString(o));
}
@Override
public void fail(String msg, @Nullable Object p0) {
//System.err.println(replaceFirstPlaceHolder(msg, p0));
System.err.println(msg);
}
@Override
public void fail(String msg, @Nullable Object p0, @Nullable Object p1) {
System.err.println(replaceFirstPlaceHolder(replaceFirstPlaceHolder(msg, p0), p1));
}
@Override
public void fail(String msg, @Nullable Object p0, @Nullable Object p1, @Nullable Object p2) {
System.err.println(replaceFirstPlaceHolder(replaceFirstPlaceHolder(replaceFirstPlaceHolder(msg, p0), p1), p2));
}
};
}
|
zx1239856/Indulger
|
app/src/main/java/com/inftyloop/indulger/viewholder/BaseRecyclerViewHolder.java
|
<filename>app/src/main/java/com/inftyloop/indulger/viewholder/BaseRecyclerViewHolder.java
package com.inftyloop.indulger.viewholder;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
/**
 * Base RecyclerView.ViewHolder that inflates its own item view from a layout
 * resource, so concrete holders only need to look up their child views.
 */
public class BaseRecyclerViewHolder extends RecyclerView.ViewHolder {
    /**
     * @param parent      parent ViewGroup — supplies the LayoutInflater context
     *                    and correct LayoutParams (attachToRoot is false)
     * @param layoutResId layout resource id to inflate as the item view
     */
    public BaseRecyclerViewHolder(@NonNull ViewGroup parent, int layoutResId) {
        super(LayoutInflater.from(parent.getContext()).inflate(layoutResId, parent, false));
    }

    /** Looks up a child of the inflated item view by id. */
    public <T extends View> T findViewById(int resId) {
        return itemView.findViewById(resId);
    }

    /** Returns the inflated item view itself. */
    public View getView() {
        return itemView;
    }
}
|
victor141516/core
|
database/mongo/storage_test.go
|
package mongo
import (
"testing"
"time"
"github.com/staticbackendhq/core/internal"
)
// TestFileStorage exercises the file-metadata CRUD round trip against the
// package-level datastore: add a record, fetch it back, delete it, then
// verify that fetching the deleted id fails.
func TestFileStorage(t *testing.T) {
	f := internal.File{
		AccountID: adminAccount.ID,
		Key:       "key",
		URL:       "https://test",
		Size:      123456,
		Uploaded:  time.Now(),
	}
	// Create: returned ids are expected to be at least 10 characters long.
	id, err := datastore.AddFile(confDBName, f)
	if err != nil {
		t.Fatal(err)
	} else if len(id) < 10 {
		t.Fatalf("expected to get id got %s", id)
	}
	// Read back and compare a representative field.
	f2, err := datastore.GetFileByID(confDBName, id)
	if err != nil {
		t.Fatal(err)
	} else if f2.Key != f.Key {
		t.Errorf("expected key to be %s got %s", f.Key, f2.Key)
	}
	// Delete, then confirm the record is gone (lookup must error).
	if err := datastore.DeleteFile(confDBName, id); err != nil {
		t.Fatal(err)
	}
	check, err := datastore.GetFileByID(confDBName, id)
	if err == nil {
		t.Errorf("error should not be nil")
	} else if check.ID == id {
		t.Errorf("deleted file id returned? %v", check)
	}
}
|
ghsecuritylab/embeddedsw
|
XilinxProcessorIPLib/drivers/axiethernet/doc/html/api/xaxiethernet_8c.js
|
// Doxygen-generated search index for xaxiethernet.c: each entry maps a
// documented API symbol name to its anchor in the generated HTML pages.
// Auto-generated — do not edit by hand.
var xaxiethernet_8c =
[
    [ "XAxiEthernet_CfgInitialize", "group__axiethernet__v5__8.html#gabe257efe600df43d8148c772ea3878f9", null ],
    [ "XAxiEthernet_ClearBadFrmRcvOption", "group__axiethernet__v5__8.html#gaab45f6c7a4440bc3f58d4ff971e6be7b", null ],
    [ "XAxiEthernet_ClearOptions", "group__axiethernet__v5__8.html#gad3e7253d626c02a96ac35f981ac11ba2", null ],
    [ "XAxiEthernet_DisableControlFrameLenCheck", "group__axiethernet__v5__8.html#ga41049ca22799d42d1279461be627c72e", null ],
    [ "XAxiEthernet_EnableControlFrameLenCheck", "group__axiethernet__v5__8.html#ga8b71bcdc8be6ae5d6364c59ea0f53aab", null ],
    [ "XAxiEthernet_GetMacAddress", "group__axiethernet__v5__8.html#gaa5b70a456ac18505132f61f596428ab7", null ],
    [ "XAxiEthernet_GetOperatingSpeed", "group__axiethernet__v5__8.html#ga8e8caa9c4ac753744bd08ebf103877c3", null ],
    [ "XAxiEthernet_GetOptions", "group__axiethernet__v5__8.html#gaf48b8f321b842bfb139b473ad7185874", null ],
    [ "XAxiEthernet_Initialize", "group__axiethernet__v5__8.html#ga2eb402aae216f2c43777251397d96cae", null ],
    [ "XAxiEthernet_PhySetMdioDivisor", "group__axiethernet__v5__8.html#ga2b33bc77237bf039c02d4619a811aed6", null ],
    [ "XAxiEthernet_Reset", "group__axiethernet__v5__8.html#ga091a291d11f394e3eaf0b79195666d9f", null ],
    [ "XAxiEthernet_SetBadFrmRcvOption", "group__axiethernet__v5__8.html#ga3db57c4a261afc854985297afed2373b", null ],
    [ "XAxiEthernet_SetMacAddress", "group__axiethernet__v5__8.html#gab2298b2b1af89cf0f0a78d4fda03ed7f", null ],
    [ "XAxiEthernet_SetOperatingSpeed", "group__axiethernet__v5__8.html#gab2d0106f26a1a2e1f2b902dc0a2822c9", null ],
    [ "XAxiEthernet_SetOptions", "group__axiethernet__v5__8.html#gae3265620b9d9b86643abe5ff5e2b7d79", null ],
    [ "XAxiEthernet_Start", "group__axiethernet__v5__8.html#ga2cfa48d42e55238f6f30aff6d658e4fe", null ],
    [ "XAxiEthernet_Stop", "group__axiethernet__v5__8.html#ga4e952a55f342f176ba9ec2b49e2e47cb", null ]
];
|
agriyakhetarpal/dffml
|
tests/test_ci.py
|
"""
Validate that CI and source and docs are in sync
"""
import re
import os
import pathlib
import unittest
import platform
import itertools
import subprocess
from typing import Callable, List, Union
from dffml.plugins import PACKAGE_DIRECTORY_TO_NAME
class IgnoreFile:
    """
    Checks if files should be ignored by reading ignore files such as .gitignore
    and .dockerignore and parsing their rules.

    Examples
    --------
    >>> import pathlib
    >>> root = pathlib.Path(".").resolve()
    >>> _ = root.joinpath(".gitignore").write_text("subdir/**")
    >>> root.joinpath("subdir").mkdir()
    >>> _ = root.joinpath("subdir", ".gitignore").write_text("!sub2/**")
    >>>
    >>> ignorefile = IgnoreFile(root)
    >>> print(ignorefile("subdir/sub2/feedface"))
    False
    >>> print(ignorefile("subdir/other"))
    True
    """

    def __init__(
        self,
        root: pathlib.Path,
        ignore_files: Union[List[str], None] = None,
    ):
        # root: absolute path that lookups are anchored to; must be an
        # ancestor of every path passed to __call__.
        self.root = root
        # A None sentinel avoids the shared-mutable-default-argument pitfall
        # (previously the default was a module-level [".gitignore"] list).
        self.ignore_files = (
            ignore_files if ignore_files is not None else [".gitignore"]
        )
        # Cache: directory -> list of (do_not_ignore, compiled_regex) pairs.
        # NOTE(review): a directory is cached on first hit, so with multiple
        # ignore_files only the first one found per directory is compiled —
        # pre-existing behavior, harmless with the single-file default.
        self.compiled_regexes = {}

    @staticmethod
    def path_to_lines(path: pathlib.Path):
        """Return the non-empty lines of ``path``, normalizing CRLF endings."""
        return list(
            filter(bool, path.read_text().replace("\r\n", "\n").split("\n"))
        )

    @staticmethod
    def compile_regexes(
        contents: List[str],
    ) -> List[Callable[[str], Union[None, bool, re.Match]]]:
        """Yield ``(do_not_ignore, compiled_regex)`` for each ignore-file line."""
        for line in contents:
            # Handle the case where we do not want to ignore files matching
            # this pattern (lines starting with "!")
            do_not_ignore = False
            if line.startswith("!"):
                line = line[1:]
                do_not_ignore = True
            # Substitute periods for literal periods (must run before the *
            # substitution below so the inserted ".*" dots survive)
            line = line.replace(".", r"\.")
            # Substitute * for regex version of *, which is .*
            line = line.replace("*", r".*")
            # Compile the regex
            yield do_not_ignore, re.compile(line)

    def __call__(self, filename: str) -> bool:
        """Return True if ``filename`` matches an ignore rule (and no ``!`` rule)."""
        # Get the absolute file path
        filepath = pathlib.Path(filename).absolute()
        # Read any ignore files and compile their regexes from the file path up
        # to the root of the repo
        for ignore_filename in self.ignore_files:
            for directory in list(filepath.parents)[
                : filepath.parents.index(self.root) + 1
            ]:
                ignore_path = directory / ignore_filename
                if (
                    directory not in self.compiled_regexes
                    and ignore_path.is_file()
                ):
                    self.compiled_regexes[directory] = list(
                        self.compile_regexes(self.path_to_lines(ignore_path))
                    )
        # Get all applicable regexes: any cached directory that is an ancestor
        # of the file being checked
        directories = [
            directory
            for directory in self.compiled_regexes
            if directory.resolve() in filepath.parents
        ]
        # Check if any ignore pattern matches (paths are compared with forward
        # slashes so rules behave the same on Windows)
        ignore = False
        for directory in directories:
            for do_not_ignore, regex in self.compiled_regexes[directory]:
                if not do_not_ignore and regex.match(
                    str(filepath.relative_to(directory)).replace(os.sep, "/")
                ):
                    ignore = True
        # Check if any are supposed to not be ignored even though they match
        # other patterns ("!" rules override ignore rules)
        for directory in directories:
            for do_not_ignore, regex in self.compiled_regexes[directory]:
                if (
                    do_not_ignore
                    and ignore
                    and regex.match(
                        str(filepath.relative_to(directory)).replace(
                            os.sep, "/"
                        )
                    )
                ):
                    ignore = False
        return ignore
class TestGitIgnore(unittest.TestCase):
    """Sanity-checks IgnoreFile against this repository's real .gitignore rules."""

    def test_ignore(self):
        # Anchor at the repo root (this file lives in <root>/tests/).
        ignorefile = IgnoreFile(root=pathlib.Path(__file__).parents[1])
        # Tracked files must not be ignored.
        self.assertFalse(ignorefile("setup.py"))
        self.assertFalse(ignorefile("dffml/skel/common/setup.py"))
        # Skel model output and nested downloaded trees must be ignored.
        self.assertTrue(ignorefile("dffml/skel/model/setup.py"))
        self.assertTrue(
            ignorefile(
                "examples/shouldi/tests/downloads/cri-resource-manager-download/.gopath/pkg/mod/github.com/apache/thrift@v0.12.0/contrib/fb303/py/setup.py"
            )
        )
# Absolute path to the repository root (this file lives in <root>/tests/).
REPO_ROOT = pathlib.Path(__file__).resolve().parents[1]
@unittest.skipUnless(platform.system() == "Linux", "Only runs on Linux")
class TestCI(unittest.TestCase):
    """
    Validates that three sources of truth stay in sync: the plugin setup.py
    files on disk, dffml/plugins.py, and the GitHub Actions testing workflow.
    """

    maxDiff = None
    # setup.py files that are NOT plugin packages (main package, skel
    # templates, docs examples) and must not appear in dffml/plugins.py.
    SKIP_SETUP_PY_FILES = [
        REPO_ROOT / "setup.py",
        REPO_ROOT / "dffml" / "skel" / "common" / "setup.py",
        REPO_ROOT / "build" / "lib" / "dffml" / "skel" / "common" / "setup.py",
        REPO_ROOT / "examples" / "source" / "setup.py",
        REPO_ROOT
        / "examples"
        / "tutorials"
        / "sources"
        / "file"
        / "dffml-source-ini"
        / "setup.py",
    ]

    def test_all_plugins_appear_in_dffml_plugins(self):
        """
        Make sure that any setup.py files associated with a plugin appear in
        dffml/plugins.py
        """
        ignorefile = IgnoreFile(REPO_ROOT)
        # A list of directory tuples, relative to the root of the repo, which
        # contain setup.py files. Directories who have setup.py files listed in
        # SKIP_SETUP_PY_FILES will not be in this list
        setup_py_directories = sorted(
            map(
                lambda path: path.parent.relative_to(REPO_ROOT).parts,
                filter(
                    lambda path: path not in self.SKIP_SETUP_PY_FILES,
                    itertools.filterfalse(
                        ignorefile, REPO_ROOT.rglob("setup.py")
                    ),
                ),
            )
        )
        self.assertListEqual(
            setup_py_directories, sorted(PACKAGE_DIRECTORY_TO_NAME.keys())
        )

    def test_all_plugins_being_tested(self):
        """
        Make sure that plugins are included in the test matrix and therefore
        being tested by the CI.
        """
        # We compare against PACKAGE_DIRECTORY_TO_NAME as the truth because the
        # test_all_plugins_appear_in_dffml_plugins() validates that every
        # directory that has a setup.py appears in PACKAGE_DIRECTORY_TO_NAME.
        # "." is appended because the main package is also in the matrix.
        should_be = sorted(
            list(
                map(
                    lambda directories: "/".join(directories),
                    PACKAGE_DIRECTORY_TO_NAME.keys(),
                )
            )
            + ["."]
        )
        # Load the ci testing workflow avoid requiring the yaml module as that
        # has C dependencies.
        # We read the file, split it by lines., filter by lines mentioning PyPi
        lines = (
            pathlib.Path(REPO_ROOT, ".github", "workflows", "testing.yml",)
            .read_text()
            .split("\n")
        )
        # filter by lines mentioning PyPi
        # tokens, and make a list of tuples which contain the left hand side of
        # the lines '=', split on the '/' character.
        # We skip the line which the default TWINE_PASSWORD environment
        # variable, since that's for the main package (not any of the plugins).
        plugins_tested_by_ci = []
        # Once we see plugins: we start adding the subsequent list of plugins to
        # our list of plugins tested by CI.
        start_adding_plugins = 0
        # Go over each line in the YAML file (hand-rolled scan instead of a
        # YAML parser — see comment above about avoiding the yaml module)
        for line in lines:
            if line.strip() == "plugin:":
                # Start adding when we see the list of plugins
                start_adding_plugins += 1
            elif start_adding_plugins and ":" in line:
                # If we've reached the next YAML object key we're done adding to
                # the list of plugins
                break
            elif start_adding_plugins:
                # Add plugins to list of plugins being tested
                # Line is in the format of: "- plugin/path"
                plugins_tested_by_ci.append(line.strip().split()[-1])
        # Make sure there was only one list
        self.assertTrue(plugins_tested_by_ci, "No plugins found!")
        self.assertEqual(
            start_adding_plugins, 1, "More than one list of plugins found!"
        )
        # Sort them
        plugins_tested_by_ci = sorted(plugins_tested_by_ci)
        # Compare to truth
        self.assertListEqual(should_be, plugins_tested_by_ci)

    def test_all_plugins_have_pypi_tokens(self):
        """
        Make sure every plugin is listed with a PyPi API token to enable
        automatic releases.
        """
        # Load the ci testing workflow avoid requiring the yaml module as that
        # has C dependencies.
        # We read the file, split it by lines, filter by lines mentioning PyPi
        # tokens, and make a list of tuples which contain the left hand side of
        # the lines '=', split on the '/' character.
        # We skip the line which the default TWINE_PASSWORD environment
        # variable, since that's for the main package (not any of the plugins).
        # Example:
        # model/vowpalWabbit=${{ secrets.PYPI_MODEL_VOWPALWABBIT }}
        # This line results in a list entry of: ('model', 'vowpalWabbit')
        plugins_with_pypi_tokens = sorted(
            map(
                lambda i: tuple(i.strip().split("=")[0].split("/")),
                filter(
                    lambda line: "secrets.PYPI_" in line
                    and not "TWINE_PASSWORD" in line,
                    pathlib.Path(
                        REPO_ROOT, ".github", "workflows", "testing.yml"
                    )
                    .read_text()
                    .split("\n"),
                ),
            )
        )
        # We compare list list to the list of packages dffml.plugins knows
        # about, to make sure that every package has a secret so it can be
        # auto-deployed to PyPi.
        self.assertListEqual(
            plugins_with_pypi_tokens, sorted(PACKAGE_DIRECTORY_TO_NAME.keys())
        )
class TestSecurity(unittest.TestCase):
    """
    Tests to keep our codebase secure
    """

    def test_hash_usages(self):
        """
        Make sure we've audited everywhere hashlib is used
        """
        # Grep the repo for hashlib mentions; the test fails whenever a new
        # (un-audited) usage appears, forcing review of the whitelist below.
        output = subprocess.check_output(
            ["git", "grep", "hashlib", "--", "**/*.py"], cwd=str(REPO_ROOT)
        ).decode()
        file_name_to_list_of_lines = {}
        for line in filter(bool, output.split("\n")):
            filename, line = line.split(":", maxsplit=1)
            # Skip this file
            if filename == str(
                pathlib.Path(__file__).resolve().relative_to(REPO_ROOT)
            ):
                continue
            file_name_to_list_of_lines.setdefault(filename, [])
            file_name_to_list_of_lines[filename].append(line)
        self.maxDiff = None
        # Known-audited usages. Any diff from this dict means a hashlib call
        # was added, moved, or removed and must be re-reviewed.
        self.assertDictEqual(
            file_name_to_list_of_lines,
            {
                "dffml/util/crypto.py": [
                    "import hashlib",
                    "SECURE_HASH_ALGORITHM = hashlib.sha384",
                    "INSECURE_HASH_ALGORITHM = hashlib.md5",
                ],
                "dffml/util/file.py": [
                    " >>> import hashlib",
                    " >>> expected_sha384_hash = hashlib.sha384(correct_contents).hexdigest()",
                ],
                "feature/auth/dffml_feature_auth/feature/operations.py": [
                    "import hashlib",
                    " # ---- BEGIN Python hashlib docs ----",
                    " # ---- END Python hashlib docs ----",
                    ' hashed_password = <PASSWORD>("<PASSWORD>", password, salt, 100000)',
                ],
                "operations/deploy/dffml_operations_deploy/operations.py": [
                    "import hashlib",
                    " calculated = hmac.new(key, body, hashlib.sha1).hexdigest()",
                ],
            },
        )
|
CLOSER-Cohorts/archivist
|
react/src/components/Dashboard.js
|
import React, { useEffect } from 'react';
import { useSelector } from 'react-redux'
import { get } from 'lodash'
import clsx from 'clsx';
import { makeStyles } from '@material-ui/core/styles';
import { WhoAmI } from '../actions'
import CssBaseline from '@material-ui/core/CssBaseline';
import Drawer from '@material-ui/core/Drawer';
import Box from '@material-ui/core/Box';
import AppBar from '@material-ui/core/AppBar';
import Toolbar from '@material-ui/core/Toolbar';
import List from '@material-ui/core/List';
import Typography from '@material-ui/core/Typography';
import Divider from '@material-ui/core/Divider';
import IconButton from '@material-ui/core/IconButton';
import Container from '@material-ui/core/Container';
import Grid from '@material-ui/core/Grid';
import Paper from '@material-ui/core/Paper';
import MenuIcon from '@material-ui/icons/Menu';
import ChevronLeftIcon from '@material-ui/icons/ChevronLeft';
import ExitToAppIcon from '@material-ui/icons/ExitToApp';
import ListItem from '@material-ui/core/ListItem';
import ListItemIcon from '@material-ui/core/ListItemIcon';
import ListItemText from '@material-ui/core/ListItemText';
import StorageIcon from '@material-ui/icons/Storage';
import QuestionAnswerIcon from '@material-ui/icons/QuestionAnswer';
import SupervisedUserCircleIcon from '@material-ui/icons/SupervisedUserCircle';
import { Link } from 'react-router-dom';
import { reverse as url } from 'named-urls'
import routes from '../routes'
import Helmet from "react-helmet";
import { useDispatch } from 'react-redux'
import BreadcrumbBar from './BreadcrumbBar'
import Collapse from '@material-ui/core/Collapse';
import ExpandLess from '@material-ui/icons/ExpandLess';
import ExpandMore from '@material-ui/icons/ExpandMore';
// Footer copyright line with the current year.
// NOTE(review): this uses react-router's <Link to="https://material-ui.com/">
// with an external URL — react-router Links route within the SPA and do not
// navigate to external sites; confirm whether a plain <a href> (and a link
// target matching the "Archivist" label) was intended.
function Copyright() {
  return (
    <Typography variant="body2" color="textSecondary" align="center">
      {'Copyright © '}
      <Link color="inherit" to="https://material-ui.com/">
        Archivist
      </Link>{' '}
      {new Date().getFullYear()}
      {'.'}
    </Typography>
  );
}
// Width of the navigation drawer, in px, when fully open.
const drawerWidth = 240;

// JSS styles for the Dashboard shell. The appBar/appBarShift and
// drawerPaper/drawerPaperClose pairs animate the layout between the
// drawer-open and drawer-closed states.
const useStyles = makeStyles((theme) => ({
  root: {
    display: 'flex',
  },
  // Indent for items nested under the collapsible "Admin" section.
  nested: {
    paddingLeft: theme.spacing(4),
  },
  toolbar: {
    paddingRight: 24, // keep right padding when drawer closed
  },
  // Drawer header row holding the logo and the close chevron.
  toolbarIcon: {
    display: 'flex',
    alignItems: 'center',
    justifyContent: 'flex-end',
    padding: '0 8px',
    ...theme.mixins.toolbar,
  },
  // App bar sits above the drawer and animates its width with it.
  appBar: {
    zIndex: theme.zIndex.drawer + 1,
    transition: theme.transitions.create(['width', 'margin'], {
      easing: theme.transitions.easing.sharp,
      duration: theme.transitions.duration.leavingScreen,
    }),
  },
  appBarShift: {
    marginLeft: drawerWidth,
    width: `calc(100% - ${drawerWidth}px)`,
    transition: theme.transitions.create(['width', 'margin'], {
      easing: theme.transitions.easing.sharp,
      duration: theme.transitions.duration.enteringScreen,
    }),
  },
  menuButton: {
    marginRight: 36,
  },
  // Hides the hamburger button while the drawer is open.
  menuButtonHidden: {
    display: 'none',
  },
  title: {
    flexGrow: 1,
  },
  drawerPaper: {
    position: 'relative',
    whiteSpace: 'nowrap',
    width: drawerWidth,
    transition: theme.transitions.create('width', {
      easing: theme.transitions.easing.sharp,
      duration: theme.transitions.duration.enteringScreen,
    }),
  },
  // Collapsed ("mini") drawer: icon-only rail.
  drawerPaperClose: {
    overflowX: 'hidden',
    transition: theme.transitions.create('width', {
      easing: theme.transitions.easing.sharp,
      duration: theme.transitions.duration.leavingScreen,
    }),
    width: theme.spacing(7),
    [theme.breakpoints.up('sm')]: {
      width: theme.spacing(9),
    },
  },
  // Pushes page content below the fixed app bar.
  appBarSpacer: theme.mixins.toolbar,
  content: {
    flexGrow: 1,
    height: '100vh',
    overflow: 'auto',
  },
  container: {
    paddingTop: theme.spacing(4),
    paddingBottom: theme.spacing(4),
  },
  paper: {
    padding: theme.spacing(2),
    display: 'flex',
    flexDirection: 'column',
  },
  fixedHeight: {
    height: 240,
  },
}));
const MainListItems = ({onExpand, user}) => {
const classes = useStyles();
const [open, setOpen] = React.useState(false);
const handleClick = () => {
onExpand()
setOpen(!open);
};
return (
<div>
<ListItem button>
<ListItemIcon>
<Link to={url(routes.instruments.all)}>
<QuestionAnswerIcon />
</Link>
</ListItemIcon>
<Link to={url(routes.instruments.all)}>
<ListItemText primary="Instruments" />
</Link>
</ListItem>
<ListItem button>
<ListItemIcon>
<Link to={url(routes.datasets.all)}>
<StorageIcon />
</Link>
</ListItemIcon>
<Link to={url(routes.datasets.all)} title={'Datasets'}>
<ListItemText primary="Datasets" />
</Link>
</ListItem>
{ user && user.role === 'admin' && (
<>
<ListItem button onClick={handleClick}>
<ListItemIcon>
<SupervisedUserCircleIcon style={{ color: '37b34a' }} />
</ListItemIcon>
<ListItemText primary="Admin" />
{open ? <ExpandLess /> : <ExpandMore />}
</ListItem>
<Collapse in={open} timeout="auto" unmountOnExit>
<List component="div" disablePadding>
<ListItem button className={classes.nested}>
<Link to={url(routes.admin.instruments.all)}>
<ListItemText primary="Instruments" />
</Link>
</ListItem>
<ListItem button className={classes.nested}>
<Link to={url(routes.admin.datasets.all)}>
<ListItemText primary="Datasets" />
</Link>
</ListItem>
<ListItem button className={classes.nested}>
<Link to={url(routes.admin.users.all)}>
<ListItemText primary="Users" />
</Link>
</ListItem>
<ListItem button className={classes.nested}>
<Link to={url(routes.admin.import)}>
<ListItemText primary="Import" />
</Link>
</ListItem>
<ListItem button className={classes.nested}>
<Link to={url(routes.admin.imports.all)}>
<ListItemText primary="DDI Imports" />
</Link>
</ListItem>
<ListItem button className={classes.nested}>
<Link to={url(routes.admin.instruments.exports)}>
<ListItemText primary="Instrument Exports" />
</Link>
</ListItem>
</List>
</Collapse>
</>
)}
</div>
)
}
// Top-level application shell: fixed app bar, collapsible navigation drawer,
// breadcrumb bar, and a Paper surface that hosts the routed page content
// (props.children). Dispatches WhoAmI once on mount to populate the auth
// store; props.title labels the app bar and props.instrumentId feeds the
// breadcrumbs.
export const Dashboard = (props) => {
  const classes = useStyles();
  const [open, setOpen] = React.useState(false);
  const dispatch = useDispatch();
  const { instrumentId } = props;

  const handleDrawerOpen = () => {
    setOpen(true);
  };
  const handleDrawerClose = () => {
    setOpen(false);
  };

  const user = useSelector(state => get(state.auth, 'user'));

  // Fetch the current user once on mount (empty dep list is intentional).
  useEffect(() => {
    dispatch(WhoAmI())
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  return (
    <div className={classes.root}>
      <Helmet>
        <meta charSet="utf-8" />
        <title>Archivist</title>
      </Helmet>
      <CssBaseline />
      <AppBar position="absolute" className={clsx(classes.appBar, open && classes.appBarShift)}>
        <Toolbar className={classes.toolbar}>
          <IconButton
            edge="start"
            color="inherit"
            aria-label="open drawer"
            onClick={handleDrawerOpen}
            className={clsx(classes.menuButton, open && classes.menuButtonHidden)}
          >
            <MenuIcon />
          </IconButton>
          <Typography component="h1" variant="h6" color="inherit" noWrap className={classes.title}>
            {props.title}
          </Typography>
          {/* Logout: the click handler is on the icon, not the button, so
              only clicks on the glyph itself trigger it. */}
          <IconButton color="inherit">
            <ExitToAppIcon onClick={()=>{ dispatch({type:'LOGOUT'}) }}/>
          </IconButton>
        </Toolbar>
      </AppBar>
      <Drawer
        variant="permanent"
        classes={{
          paper: clsx(classes.drawerPaper, !open && classes.drawerPaperClose),
        }}
        open={open}
      >
        <div className={classes.toolbarIcon}>
          <img src="/logo.svg" alt="BigCo Inc. logo" style={{width: "51%"}}/>
          <IconButton onClick={handleDrawerClose}>
            <ChevronLeftIcon />
          </IconButton>
        </div>
        <Divider />
        <MainListItems onExpand={handleDrawerOpen} user={user} />
        <Divider />
      </Drawer>
      <main className={classes.content}>
        <div className={classes.appBarSpacer} />
        {/* NOTE(review): maxHeight is not a Material-UI Container prop and is
            passed through to the DOM — likely removable; confirm. */}
        <Container maxWidth={false} maxHeight={false} className={classes.container}>
          <Grid container spacing={3}>
            <Grid item xs={12}>
              <BreadcrumbBar instrumentId={instrumentId} />
              <Paper className={classes.paper}>
                {props.children}
              </Paper>
            </Grid>
          </Grid>
          <Box pt={4}>
            <Copyright />
          </Box>
        </Container>
      </main>
    </div>
  );
}
|
rmolinamir/algorithms-and-data-structures
|
02. Big O Notation/measurePerformance.js
|
<filename>02. Big O Notation/measurePerformance.js
const { performance } = require('perf_hooks');
/**
* Measures the amount of time that it took to run a synchronous callback function.
* @param {Function} callback - Callback function to be measured.
*/
function measurePerformance(callback) {
const t1 = performance.now();
callback();
const t2 = performance.now();
console.log(`Time Elapsed: ${(t2 - t1) / 1000} seconds.`);
}
module.exports = measurePerformance;
|
jonasmue/adventofcode20
|
day18/common.py
|
def get_input():
    """Read the puzzle input file and return it as a list of lines."""
    with open("input.txt") as f:
        return f.read().splitlines()
def evaluate(rpn, operators):
    """Evaluate a reverse-polish-notation token list.

    ``rpn`` mixes ints and operator tokens; ``operators`` maps each operator
    token to its precedence (only membership is used here — the arithmetic
    itself supports "+" and "*").
    """
    stack = []
    for token in rpn:
        if token not in operators:
            # Operand: just push it.
            stack.append(token)
            continue
        # Operator: pop the right operand and fold it into the left in place.
        right = stack.pop()
        if token == "+":
            stack[-1] += right
        elif token == "*":
            stack[-1] *= right
    return stack.pop()
def rpn(term, operators):
    """Convert an infix expression to reverse polish notation (shunting-yard).

    ``term`` is iterated token-by-token (single characters: digits, operators,
    parentheses); ``operators`` maps operator tokens to precedence. Returns a
    list of ints and operator tokens in postfix order.
    """
    output = []
    pending = []  # operator stack, "(" acts as a barrier
    for token in term:
        if token.isnumeric():
            output.append(int(token))
        elif token in operators:
            # Flush pending operators of equal-or-higher precedence first.
            while pending and pending[-1] != "(" and operators[pending[-1]] >= operators[token]:
                output.append(pending.pop())
            pending.append(token)
        elif token == "(":
            pending.append(token)
        elif token == ")":
            # Pop back to (and discard) the matching opening parenthesis.
            while pending[-1] != "(":
                output.append(pending.pop())
            pending.pop()
    # Drain whatever operators remain, top of stack first.
    while pending:
        output.append(pending.pop())
    return output
|
emanuellucas2/OVPsimProject
|
doc/api/ocl/html/search/variables_7.js
|
<filename>doc/api/ocl/html/search/variables_7.js<gh_stars>0
var searchData=
[
['l',['l',['../structoctiaAddrExpS.html#a47288a36a60fefd9c028e44fa6bc8ad8',1,'octiaAddrExpS']]]
];
|
kimjand/cxf
|
rt/rs/description-swagger/src/main/java/org/apache/cxf/jaxrs/swagger/openapi/SwaggerToOpenApiConversionUtils.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.jaxrs.swagger.openapi;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.common.util.StringUtils;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.helpers.IOUtils;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.apache.cxf.jaxrs.json.basic.JsonMapObject;
import org.apache.cxf.jaxrs.json.basic.JsonMapObjectReaderWriter;
import org.apache.cxf.jaxrs.utils.ExceptionUtils;
import org.apache.cxf.jaxrs.utils.ResourceUtils;
public final class SwaggerToOpenApiConversionUtils {
private static final Logger LOG = LogUtils.getL7dLogger(SwaggerToOpenApiConversionUtils.class);
private static final List<String> SIMPLE_TYPE_RELATED_PROPS =
Arrays.asList("format", "minimum", "maximum", "default");
    /** Utility class — not instantiable. */
    private SwaggerToOpenApiConversionUtils() {
    }
    /** Loads a Swagger 2.0 JSON document from {@code loc} and converts it to OpenAPI 3.0 JSON. */
    public static String getOpenApiFromSwaggerLoc(String loc) {
        return getOpenApiFromSwaggerLoc(loc, null);
    }

    /** As above, with an optional conversion configuration; resolves against the thread-default Bus. */
    public static String getOpenApiFromSwaggerLoc(String loc, OpenApiConfiguration cfg) {
        return getOpenApiFromSwaggerLoc(loc, cfg, BusFactory.getThreadDefaultBus());
    }

    /**
     * Resolves {@code loc} via the given Bus's resource resolution and converts the
     * document. Returns null (after logging a warning) if the resource cannot be
     * found or the conversion fails — callers must handle null.
     */
    public static String getOpenApiFromSwaggerLoc(String loc, OpenApiConfiguration cfg, Bus bus) {
        try {
            InputStream is = ResourceUtils.getResourceStream(loc, bus);
            if (is == null) {
                return null;
            }
            return getOpenApiFromSwaggerStream(is, cfg);
        } catch (Exception ex) {
            LOG.warning("Problem with processing a user model at " + loc + ", exception: "
                + ExceptionUtils.getStackTrace(ex));
        }
        return null;
    }

    /** Reads Swagger 2.0 JSON from the stream and converts it to OpenAPI 3.0 JSON. */
    public static String getOpenApiFromSwaggerStream(InputStream is) throws IOException {
        return getOpenApiFromSwaggerStream(is, null);
    }

    /** Stream variant with an optional conversion configuration. */
    public static String getOpenApiFromSwaggerStream(InputStream is, OpenApiConfiguration cfg) throws IOException {
        return getOpenApiFromSwaggerJson(null, IOUtils.readStringFromStream(is), cfg);
    }

    /** Converts a Swagger 2.0 JSON string to OpenAPI 3.0 JSON with default settings. */
    public static String getOpenApiFromSwaggerJson(String json) throws IOException {
        return getOpenApiFromSwaggerJson(null, json, null);
    }
    /**
     * Core conversion: parses Swagger 2.0 JSON and rebuilds it as an OpenAPI 3.0
     * document. The "info", "tags" and "externalDocs" sections are carried over
     * as-is; "servers", "paths" and "components" are restructured. Finally all
     * "#/definitions/" references are rewritten to "#/components/schemas/".
     *
     * @param ctx  optional message context used when deriving the servers list
     * @param json Swagger 2.0 document as a JSON string
     * @param cfg  optional settings; when {@code isCreateRequestBodies()} is set,
     *             shared request bodies are collected under components
     */
    public static String getOpenApiFromSwaggerJson(
        MessageContext ctx, String json, OpenApiConfiguration cfg) throws IOException {

        JsonMapObjectReaderWriter readerWriter = new JsonMapObjectReaderWriter();
        JsonMapObject sw2 = readerWriter.fromJsonToJsonObject(json);
        JsonMapObject sw3 = new JsonMapObject();

        // "openapi"
        sw3.setProperty("openapi", "3.0.0");

        // "servers"
        setServersProperty(ctx, sw2, sw3);

        // "info"
        JsonMapObject infoObject = sw2.getJsonMapProperty("info");
        if (infoObject != null) {
            sw3.setProperty("info", infoObject);
        }

        // "tags"
        List<Map<String, Object>> tagsObject = sw2.getListMapProperty("tags");
        if (tagsObject != null) {
            sw3.setProperty("tags", tagsObject);
        }

        // paths — requestBodies is non-null only when the configuration asks for
        // shared request bodies to be hoisted into components
        Map<String, JsonMapObject> requestBodies = cfg != null && cfg.isCreateRequestBodies()
            ? new LinkedHashMap<>() : null;
        setPathsProperty(sw2, sw3, requestBodies);

        // components
        setComponentsProperty(sw2, sw3, requestBodies);

        // externalDocs
        Object externalDocsObject = sw2.getProperty("externalDocs");
        if (externalDocsObject != null) {
            sw3.setProperty("externalDocs", externalDocsObject);
        }

        // Swagger 2.0 schema refs live under #/definitions; OpenAPI 3.0 uses
        // #/components/schemas — rewrite them in the serialized output.
        return readerWriter.toJson(sw3).replace("#/definitions/", "#/components/schemas/");
    }
    /**
     * Builds the OpenAPI 3.0 "components" object: collected request bodies,
     * Swagger "definitions" as "schemas", and "securityDefinitions" as
     * "securitySchemes" (mapping the Swagger "basic" type onto the OpenAPI 3.0
     * {type: "http", scheme: "basic"} form).
     */
    private static void setComponentsProperty(JsonMapObject sw2, JsonMapObject sw3,
                                              Map<String, JsonMapObject> requestBodies) {
        JsonMapObject comps = new JsonMapObject();
        JsonMapObject requestBodiesObj = new JsonMapObject();
        if (requestBodies != null) {
            for (Map.Entry<String, JsonMapObject> entry : requestBodies.entrySet()) {
                requestBodiesObj.setProperty(entry.getKey(), entry.getValue());
            }
        }
        comps.setProperty("requestBodies", requestBodiesObj);

        Object s2Defs = sw2.getProperty("definitions");
        if (s2Defs != null) {
            comps.setProperty("schemas", s2Defs);
        }
        JsonMapObject s2SecurityDefs = sw2.getJsonMapProperty("securityDefinitions");
        if (s2SecurityDefs != null) {
            comps.setProperty("securitySchemes", s2SecurityDefs);

            // "basic" is spelled differently in OpenAPI 3.0
            for (String property : s2SecurityDefs.asMap().keySet()) {
                JsonMapObject securityScheme = s2SecurityDefs.getJsonMapProperty(property);
                if ("basic".equals(securityScheme.getStringProperty("type"))) {
                    securityScheme.setProperty("type", "http");
                    securityScheme.setProperty("scheme", "basic");
                }
            }
        }
        sw3.setProperty("components", comps);
    }
    /**
     * Rewrites every operation (path + verb) in place: body/form parameters are
     * converted to OpenAPI 3.0 request bodies and responses get "content"
     * wrappers. The mutated Swagger paths object is then installed into sw3.
     */
    private static void setPathsProperty(JsonMapObject sw2, JsonMapObject sw3,
                                         Map<String, JsonMapObject> requestBodies) {
        JsonMapObject sw2Paths = sw2.getJsonMapProperty("paths");
        for (Map.Entry<String, Object> sw2PathEntries : sw2Paths.asMap().entrySet()) {
            JsonMapObject sw2PathVerbs = new JsonMapObject(CastUtils.cast((Map<?, ?>)sw2PathEntries.getValue()));
            for (Map.Entry<String, Object> sw2PathVerbEntries : sw2PathVerbs.asMap().entrySet()) {
                JsonMapObject sw2PathVerbProps =
                    new JsonMapObject(CastUtils.cast((Map<?, ?>)sw2PathVerbEntries.getValue()));

                prepareRequestBody(sw2PathVerbProps, requestBodies);
                prepareResponses(sw2PathVerbProps);
            }
        }
        sw3.setProperty("paths", sw2Paths);
    }
    /**
     * Converts an operation's Swagger "responses" to OpenAPI 3.0 form. The
     * operation-level "produces" list is removed and folded into a "content"
     * object on the 200 response (OpenAPI 3.0 keys response schemas by media
     * type). Non-200 responses are copied through unchanged.
     */
    private static void prepareResponses(JsonMapObject sw2PathVerbProps) {
        List<String> sw2PathVerbProduces =
            CastUtils.cast((List<?>)sw2PathVerbProps.removeProperty("produces"));

        JsonMapObject sw2PathVerbResps = sw2PathVerbProps.getJsonMapProperty("responses");
        if (sw2PathVerbResps != null) {
            JsonMapObject sw3PathVerbResps = new JsonMapObject();

            // Rebuild the 200 response first so it keeps its position, wrapping
            // any "schema" into a media-type-keyed "content" object.
            if (sw2PathVerbResps.containsProperty("200")) {
                JsonMapObject okResp =
                    new JsonMapObject(CastUtils.cast((Map<?, ?>)sw2PathVerbResps.removeProperty("200")));
                JsonMapObject newOkResp = new JsonMapObject();
                String description = okResp.getStringProperty("description");
                if (description != null) {
                    newOkResp.setProperty("description", description);
                }
                JsonMapObject schema = okResp.getJsonMapProperty("schema");
                if (schema != null) {
                    JsonMapObject content = prepareContentFromSchema(schema, sw2PathVerbProduces, false);
                    if (content != null) {
                        newOkResp.setProperty("content", content);
                    }
                }
                JsonMapObject headers = okResp.getJsonMapProperty("headers");
                if (headers != null) {
                    newOkResp.setProperty("headers", headers);
                }
                sw3PathVerbResps.setProperty("200", newOkResp);
            }
            // Copy the remaining response codes through as-is.
            for (Map.Entry<String, Object> entry : sw2PathVerbResps.asMap().entrySet()) {
                sw3PathVerbResps.setProperty(entry.getKey(), entry.getValue());
            }
            sw2PathVerbProps.setProperty("responses", sw3PathVerbResps);
        }
    }
/**
 * Converts a Swagger 2 verb's parameters to OpenAPI 3: "body" parameters
 * become the "requestBody", "formData" parameters are folded into a form
 * request body, and the remaining parameters get inline "schema" objects.
 *
 * @param sw2PathVerbProps the verb-level properties, mutated in place
 * @param requestBodies when not null, converted request bodies are stored
 *        here by model name and referenced via "$ref" instead of inlined
 */
private static void prepareRequestBody(JsonMapObject sw2PathVerbProps,
Map<String, JsonMapObject> requestBodies) {
// "consumes" does not exist in OpenAPI 3; keep it only to key "content".
List<String> sw2PathVerbConsumes =
CastUtils.cast((List<?>)sw2PathVerbProps.removeProperty("consumes"));
JsonMapObject sw3RequestBody = null;
List<JsonMapObject> sw3formBody = null;
List<Map<String, Object>> sw2PathVerbParamsList = sw2PathVerbProps.getListMapProperty("parameters");
if (sw2PathVerbParamsList != null) {
for (Iterator<Map<String, Object>> it = sw2PathVerbParamsList.iterator(); it.hasNext();) {
JsonMapObject sw2PathVerbParamMap = new JsonMapObject(it.next());
if ("body".equals(sw2PathVerbParamMap.getStringProperty("in"))) {
// A "body" parameter becomes the OpenAPI 3 requestBody.
it.remove();
sw3RequestBody = new JsonMapObject();
String description = sw2PathVerbParamMap.getStringProperty("description");
if (description != null) {
sw3RequestBody.setProperty("description", description);
}
Boolean required = sw2PathVerbParamMap.getBooleanProperty("required");
if (required != null) {
sw3RequestBody.setProperty("required", required);
}
JsonMapObject schema = sw2PathVerbParamMap.getJsonMapProperty("schema");
if (schema != null) {
JsonMapObject content = prepareContentFromSchema(schema, sw2PathVerbConsumes,
requestBodies != null);
if (content != null) {
sw3RequestBody.setProperty("content", content);
}
}
} else if ("formData".equals(sw2PathVerbParamMap.getStringProperty("in"))) {
// formData parameters are collected and merged into one form body.
it.remove();
if (sw3formBody == null) {
sw3formBody = new LinkedList<>();
sw3RequestBody = new JsonMapObject();
}
sw2PathVerbParamMap.removeProperty("in");
sw2PathVerbParamMap.removeProperty("required");
sw3formBody.add(sw2PathVerbParamMap);
} else if ("array".equals(sw2PathVerbParamMap.getStringProperty("type"))) {
// Array parameters: type/items move under a "schema" object in OA3.
sw2PathVerbParamMap.removeProperty("type");
sw2PathVerbParamMap.removeProperty("collectionFormat");
sw2PathVerbParamMap.setProperty("explode", true);
JsonMapObject items = sw2PathVerbParamMap.getJsonMapProperty("items");
sw2PathVerbParamMap.removeProperty("items");
JsonMapObject schema = new JsonMapObject();
schema.setProperty("type", "array");
schema.setProperty("items", items);
sw2PathVerbParamMap.setProperty("schema", schema);
} else {
// Simple parameters: matrix style maps to path + style=matrix,
// and the flat type/enum/etc. properties move under "schema".
if ("matrix".equals(sw2PathVerbParamMap.getStringProperty("in"))) {
sw2PathVerbParamMap.removeProperty("in");
sw2PathVerbParamMap.setProperty("in", "path");
sw2PathVerbParamMap.setProperty("style", "matrix");
}
String type = (String)sw2PathVerbParamMap.removeProperty("type");
Object enumK = sw2PathVerbParamMap.removeProperty("enum");
if (type != null) {
JsonMapObject schema = new JsonMapObject();
schema.setProperty("type", type);
if (enumK != null) {
schema.setProperty("enum", enumK);
}
for (String prop : SIMPLE_TYPE_RELATED_PROPS) {
Object value = sw2PathVerbParamMap.removeProperty(prop);
if (value != null) {
schema.setProperty(prop, value);
}
}
if ("password".equals(sw2PathVerbParamMap.getProperty("name"))) {
schema.setProperty("format", "password");
}
sw2PathVerbParamMap.setProperty("schema", schema);
}
}
}
}
// "parameters" is optional; the list is null when absent, so the emptiness
// check must stay behind a null guard (previously threw an NPE here).
if (sw2PathVerbParamsList != null && sw2PathVerbParamsList.isEmpty()) {
sw2PathVerbProps.removeProperty("parameters");
}
if (sw3formBody != null) {
sw3RequestBody.setProperty("content", prepareFormContent(sw3formBody, sw2PathVerbConsumes));
}
if (sw3RequestBody != null) {
if (requestBodies == null || sw3formBody != null) {
sw2PathVerbProps.setProperty("requestBody", sw3RequestBody);
} else {
JsonMapObject content = sw3RequestBody.getJsonMapProperty("content");
if (content != null) {
// prepareContentFromSchema stashes the model name on the content
// object; pop it and register the body under that name.
String requestBodyName = (String)content.removeProperty("requestBodyName");
if (requestBodyName != null) {
requestBodies.put(requestBodyName, sw3RequestBody);
// JSON pointers require the leading "#/" (was "#components/...").
String ref = "#/components/requestBodies/" + requestBodyName;
sw2PathVerbProps.setProperty("requestBody",
Collections.singletonMap("$ref", ref));
}
}
}
}
}
/**
 * Builds an OpenAPI 3 "content" object from converted formData parameters:
 * an object schema whose properties are the form fields, keyed by the first
 * "consumes" media type (form-urlencoded when none was declared).
 */
private static JsonMapObject prepareFormContent(List<JsonMapObject> formList, List<String> mediaTypes) {
String mediaType;
if (StringUtils.isEmpty(mediaTypes)) {
mediaType = "application/x-www-form-urlencoded";
} else {
mediaType = mediaTypes.get(0);
}
JsonMapObject properties = new JsonMapObject();
for (JsonMapObject field : formList) {
// The field's "name" becomes its property key in the object schema.
String fieldName = (String)field.removeProperty("name");
properties.setProperty(fieldName, field);
// Swagger 2 "file" fields are represented as binary strings in OA3.
if ("file".equals(field.getProperty("type"))) {
field.setProperty("type", "string");
if (!field.containsProperty("format")) {
field.setProperty("format", "binary");
}
}
}
JsonMapObject objectSchema = new JsonMapObject();
objectSchema.setProperty("type", "object");
objectSchema.setProperty("properties", properties);
JsonMapObject mediaTypeObject = new JsonMapObject();
mediaTypeObject.setProperty("schema", objectSchema);
JsonMapObject content = new JsonMapObject();
content.setProperty(mediaType, mediaTypeObject);
return content;
}
/**
 * Builds an OpenAPI 3 "content" object (media type -> schema) from a
 * Swagger 2 schema, rewriting model "$ref"s to "#/components/schemas/...".
 *
 * @param schema the Swagger 2 schema, mutated in place
 * @param mediaTypes content keys; defaults to application/json when null
 * @param storeModelName when true, the referenced model name is passed back
 *        to the caller via a transient "requestBodyName" content property
 * @return the content object, never null
 */
private static JsonMapObject prepareContentFromSchema(JsonMapObject schema,
List<String> mediaTypes,
boolean storeModelName) {
String type = schema.getStringProperty("type");
String modelName = null;
boolean isArray = false;
// Only non-object, non-string schemas can carry a model $ref to rewrite.
// (The original "||" condition was a tautology — always true — and has
// been corrected to "&&".)
if (!"object".equals(type) && !"string".equals(type)) {
final String ref;
JsonMapObject items = null;
if ("array".equals(type)) {
isArray = true;
items = schema.getJsonMapProperty("items");
// Guard against array schemas that omit "items".
ref = items == null ? null : (String)items.getProperty("$ref");
} else {
ref = schema.getStringProperty("$ref");
}
if (ref != null) {
int index = ref.lastIndexOf('/');
modelName = ref.substring(index + 1);
// JSON pointers require the leading "#/" (was "#components/...").
if (items == null) {
schema.setProperty("$ref", "#/components/schemas/" + modelName);
} else {
items.setProperty("$ref", "#/components/schemas/" + modelName);
}
}
}
JsonMapObject content = new JsonMapObject();
List<String> mediaTypesList = mediaTypes == null
? Collections.singletonList("application/json") : mediaTypes;
for (String mediaType : mediaTypesList) {
content.setProperty(mediaType,
Collections.singletonMap("schema", schema));
}
// Pass the model name to the caller via the content object; the caller
// removes this transient property before serialization.
if (modelName != null && storeModelName) {
content.setProperty("requestBodyName", isArray ? modelName + "Array" : modelName);
}
return content;
}
/**
 * Builds the OpenAPI 3 "servers" array from the Swagger 2 scheme/host/
 * basePath triple, falling back to the current request URI for any part the
 * Swagger 2 document does not declare.
 */
private static void setServersProperty(MessageContext ctx, JsonMapObject sw2, JsonMapObject sw3) {
URI requestURI = ctx == null ? null : URI.create(ctx.getHttpServletRequest().getRequestURL().toString());
List<String> sw2Schemes = sw2.getListStringProperty("schemes");
String sw2Scheme;
if (StringUtils.isEmpty(sw2Schemes)) {
sw2Scheme = requestURI == null ? "https" : requestURI.getScheme();
} else {
sw2Scheme = sw2Schemes.get(0);
}
String sw2Host = sw2.getStringProperty("host");
if (sw2Host == null && requestURI != null) {
// URI.getPort() returns -1 when no explicit port is present; omit the
// port in that case instead of emitting "host:-1".
int port = requestURI.getPort();
sw2Host = port == -1 ? requestURI.getHost() : requestURI.getHost() + ':' + port;
}
String sw2BasePath = sw2.getStringProperty("basePath");
// "basePath" is optional in Swagger 2; avoid appending the literal "null".
if (sw2BasePath == null) {
sw2BasePath = "";
}
String sw3ServerUrl = sw2Scheme + "://" + sw2Host + sw2BasePath;
sw3.setProperty("servers", Collections.singletonList(Collections.singletonMap("url", sw3ServerUrl)));
}
}
|
BSFrance/BSFrance
|
stm32/libraries/uCGUI391/src/Pack/pack7.c
|
/*
 * pack7.c — uCGUI "unity build" pack: compiles the library's font tables,
 * the anti-aliased character renderer and the memory-device driver by
 * #including their .c sources into this single translation unit.
 * NOTE(review): presumably done so the Arduino/STM32 build system only has
 * to compile the pack files — confirm before splitting these out.
 */
#include "../../uCGUI391/Font/F08_1.c"
#include "../../uCGUI391/Font/F08_ASCII.c"
#include "../../uCGUI391/Font/F10S_1.c"
#include "../../uCGUI391/Font/F10S_ASCII.c"
#include "../../uCGUI391/Font/F10_1.c"
#include "../../uCGUI391/Font/F10_ASCII.c"
#include "../../uCGUI391/Font/F13B_1.c"
#include "../../uCGUI391/Font/F13B_ASCII.c"
#include "../../uCGUI391/Font/F13HB_1.c"
#include "../../uCGUI391/Font/F13HB_ASCII.c"
#include "../../uCGUI391/Font/F13H_1.c"
#include "../../uCGUI391/Font/F13H_ASCII.c"
#include "../../uCGUI391/Font/F13_1.c"
#include "../../uCGUI391/Font/F13_ASCII.c"
#include "../../uCGUI391/Font/F16B_1.c"
#include "../../uCGUI391/Font/F16B_ASCII.c"
#include "../../uCGUI391/Font/F16_1.c"
#include "../../uCGUI391/Font/F16_1HK.c"
#include "../../uCGUI391/Font/F16_ASCII.c"
#include "../../uCGUI391/Font/F16_HK.c"
#include "../../uCGUI391/Font/F24B_1.c"
#include "../../uCGUI391/Font/F24B_ASCII.c"
#include "../../uCGUI391/Font/F24_1.c"
#include "../../uCGUI391/Font/F24_ASCII.c"
#include "../../uCGUI391/Font/F32B_1.c"
#include "../../uCGUI391/Font/F32B_ASCII.c"
#include "../../uCGUI391/Font/F32_1.c"
#include "../../uCGUI391/Font/F32_ASCII.c"
#include "../../uCGUI391/Font/F4x6.c"
#include "../../uCGUI391/Font/F6x8.c"
#include "../../uCGUI391/Font/F8x10_ASCII.c"
#include "../../uCGUI391/Font/F8x12_ASCII.c"
#include "../../uCGUI391/Font/F8x13_1.c"
#include "../../uCGUI391/Font/F8x13_ASCII.c"
#include "../../uCGUI391/Font/F8x15B_1.c"
#include "../../uCGUI391/Font/F8x15B_ASCII.c"
#include "../../uCGUI391/Font/F8x16.c"
#include "../../uCGUI391/Font/F8x8.c"
#include "../../uCGUI391/Font/FComic18B_1.c"
#include "../../uCGUI391/Font/FComic18B_ASCII.c"
#include "../../uCGUI391/Font/FComic24B_1.c"
#include "../../uCGUI391/Font/FComic24B_ASCII.c"
#include "../../uCGUI391/Font/FD24x32.c"
#include "../../uCGUI391/Font/FD32.c"
#include "../../uCGUI391/Font/FD36x48.c"
#include "../../uCGUI391/Font/FD48.c"
#include "../../uCGUI391/Font/FD48x64.c"
#include "../../uCGUI391/Font/FD60x80.c"
#include "../../uCGUI391/Font/FD64.c"
#include "../../uCGUI391/Font/FD80.c"
#include "../../uCGUI391/AntiAlias/GUIAAChar4.c"
#include "../../uCGUI391/MemDev/GUIDEV_1.c"
|
ScalablyTyped/SlinkyTyped
|
b/babel__types/src/main/scala/typingsSlinky/babelTypes/anon/BuildUndefinedNode.scala
|
package typingsSlinky.babelTypes.anon
import typingsSlinky.babelTypes.indexTs37Mod.Node
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
// ScalablyTyped-generated facade for an anonymous babel-types object that
// exposes `buildUndefinedNode` and `push`. Generated code — do not hand-edit.
@js.native
trait BuildUndefinedNode extends StObject {
def buildUndefinedNode(): Node = js.native
def push(value: Id): Unit = js.native
}
// Generated companion: literal-based factory plus fluent per-field setters
// (the standard ScalablyTyped "MutableBuilder" pattern).
object BuildUndefinedNode {
@scala.inline
def apply(buildUndefinedNode: () => Node, push: Id => Unit): BuildUndefinedNode = {
val __obj = js.Dynamic.literal(buildUndefinedNode = js.Any.fromFunction0(buildUndefinedNode), push = js.Any.fromFunction1(push))
__obj.asInstanceOf[BuildUndefinedNode]
}
@scala.inline
implicit class BuildUndefinedNodeMutableBuilder[Self <: BuildUndefinedNode] (val x: Self) extends AnyVal {
// Replace the `buildUndefinedNode` member on the underlying JS object.
@scala.inline
def setBuildUndefinedNode(value: () => Node): Self = StObject.set(x, "buildUndefinedNode", js.Any.fromFunction0(value))
// Replace the `push` member on the underlying JS object.
@scala.inline
def setPush(value: Id => Unit): Self = StObject.set(x, "push", js.Any.fromFunction1(value))
}
}
|
jerryleooo/rpc-benchmark-1
|
netty-server/src/main/java/benchmark/rpc/netty/serializer/FastestSerializer.java
|
<filename>netty-server/src/main/java/benchmark/rpc/netty/serializer/FastestSerializer.java<gh_stars>100-1000
package benchmark.rpc.netty.serializer;
import java.io.IOException;
import benchmark.rpc.protocol.Request;
import benchmark.rpc.protocol.Response;
import io.netty.buffer.ByteBuf;
/**
 * Length-prefixed (de)serialization helpers for RPC requests and responses.
 * Each frame is a 4-byte length field followed by the payload; the length
 * counts only the payload bytes, not the length field itself.
 */
public class FastestSerializer {

// Size in bytes of the length prefix written before each payload
// (was a duplicated magic number 4).
private static final int LENGTH_FIELD_BYTES = 4;

private static final RequestSerializer requestSerializer = new RequestSerializer();
private static final ResponseSerializer responseSerializer = new ResponseSerializer();

/**
 * Writes {@code request} into {@code byteBuf} framed by a length prefix.
 * A zero placeholder is written first and patched in once the payload
 * size is known.
 */
public static void writeRequest(ByteBuf byteBuf, Request request) throws IOException {
int beginWriterIndex = byteBuf.writerIndex();
byteBuf.writeInt(0); // length placeholder, patched below
requestSerializer.write(byteBuf, request);
int finishWriterIndex = byteBuf.writerIndex();
int length = finishWriterIndex - beginWriterIndex - LENGTH_FIELD_BYTES;
byteBuf.setInt(beginWriterIndex, length);
}

/** Reads one request payload from {@code byteBuf} (prefix already consumed by the decoder). */
public static Request readRequest(ByteBuf byteBuf) throws IOException {
return requestSerializer.read(byteBuf);
}

/**
 * Writes {@code response} into {@code byteBuf} framed by a length prefix,
 * using the same placeholder-then-patch scheme as {@link #writeRequest}.
 */
public static void writeResponse(ByteBuf byteBuf, Response response) throws IOException {
int beginWriterIndex = byteBuf.writerIndex();
byteBuf.writeInt(0); // length placeholder, patched below
responseSerializer.write(byteBuf, response);
int finishWriterIndex = byteBuf.writerIndex();
int length = finishWriterIndex - beginWriterIndex - LENGTH_FIELD_BYTES;
byteBuf.setInt(beginWriterIndex, length);
}

/** Reads one response payload from {@code byteBuf} (prefix already consumed by the decoder). */
public static Response readResponse(ByteBuf byteBuf) throws IOException {
return responseSerializer.read(byteBuf);
}
}
|
jamesanto/scala
|
test/files/jvm/t11321.scala
|
<filename>test/files/jvm/t11321.scala<gh_stars>1000+
// Fixtures for scala/bug#11321: value classes whose companions return Option.
// The `null` bodies are irrelevant — only the reflective signatures matter.
package t11321 {
final class V(val x: Int) extends AnyVal
object V { def get: Option[V] = null }
final class U(val y: String) extends AnyVal
object U { def get: Option[U] = null }
final class W[T](val z: T) extends AnyVal
object W { def get: Option[W[Int => String]] = null }
}
object Test extends App {
// Loads each value class's companion object class reflectively and checks
// that `get` erases to Option (not to the value class itself) while the
// generic return type is printed for the checkfile comparison.
def check[T](implicit tt: reflect.ClassTag[T]): Unit = {
val companion = tt.runtimeClass.getClassLoader.loadClass(tt.runtimeClass.getName + '$')
val get = companion.getMethod("get")
assert(get.getReturnType == classOf[Option[_]])
println(s"${tt.runtimeClass.getName}: ${get.getGenericReturnType}")
}
import t11321._
check[V]
check[U]
check[W[_]]
}
|
Ramzawulf/Bebocho
|
Bebocho/Assets/Plugins/iOS/RobotKit.framework/Headers/RKAbortMacroCommand.h
|
//
// RKAbortMacroCommand.h
// RobotKit
//
// Created by <NAME> on 8/30/11.
// Copyright 2011 Orbotix Inc. All rights reserved.
//
/*! @file */
#import <RobotKit/RKDeviceCommand.h>
/*!
* @brief Class to encapsulate a save macro command.
*
* This class is used to send a macro to abort the current running command.
*
* @sa RKAbortMacroResponse
*/
// Marker command class: declares no members of its own, inheriting all
// behavior from RKDeviceCommand (see the file comment above for usage).
@interface RKAbortMacroCommand : RKDeviceCommand
@end
|
ceekay1991/AliPayForDebug
|
AliPayForDebug/AliPayForDebug/AlipayWallet_Headers/AUFileManager.h
|
<reponame>ceekay1991/AliPayForDebug
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <objc/NSObject.h>
// Theme-file helper recovered by class-dump; signatures only, no semantics
// are available from the binary header.
@interface AUFileManager : NSObject
{
}
// Resolves the path of a theme file inside the given bundle — presumably
// arg1 is a relative file name; confirm against callers.
+ (id)themeFilePath:(id)arg1 inBundle:(id)arg2;
// Reads the named remote theme definitions from the bundle — behavior
// inferred from the selector; NOTE(review): verify against the implementation.
+ (id)readRemoteThemesInBundle:(id)arg1 themeName:(id)arg2;
@end
|
lechium/iPhoneOS_12.1.1_Headers
|
System/Library/PrivateFrameworks/LinkPresentation.framework/LPiTunesMediaAsset.h
|
/*
* This header is generated by classdump-dyld 1.0
* on Saturday, June 1, 2019 at 6:48:22 PM Mountain Standard Time
* Operating System: Version 12.1.1 (Build 16C5050a)
* Image Source: /System/Library/PrivateFrameworks/LinkPresentation.framework/LinkPresentation
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
@class NSURL, NSDictionary, NSString;
// Immutable value object describing an iTunes media asset for link previews:
// a URL or a string/name pair, optional color metadata, and an optional
// custom resolver class (see the three initializers below).
@interface LPiTunesMediaAsset : NSObject {
NSURL* _URL;
NSDictionary* _colors;
NSString* _name;
NSString* _string;
Class _customResolverClass;
}
@property (nonatomic,copy,readonly) NSURL * URL; //@synthesize URL=_URL - In the implementation block
@property (nonatomic,copy,readonly) NSDictionary * colors; //@synthesize colors=_colors - In the implementation block
@property (nonatomic,copy,readonly) NSString * name; //@synthesize name=_name - In the implementation block
@property (nonatomic,copy,readonly) NSString * string; //@synthesize string=_string - In the implementation block
@property (nonatomic,retain,readonly) Class customResolverClass; //@synthesize customResolverClass=_customResolverClass - In the implementation block
-(Class)customResolverClass;
-(id)initWithURL:(id)arg1 colors:(id)arg2 name:(id)arg3 ;
-(id)initWithString:(id)arg1 forName:(id)arg2 ;
-(id)initWithName:(id)arg1 customStringResolverClass:(Class)arg2 ;
-(NSString *)string;
-(NSString *)name;
-(NSURL *)URL;
// NOTE(review): return shape unknown from this header — confirm in binary.
-(id)metadata;
-(NSDictionary *)colors;
@end
|
tharindusathis/sourcecodes-of-CodeReadingTheOpenSourcePerspective
|
ace/tao/tao_idl/be_include/be_union_branch.h
|
<reponame>tharindusathis/sourcecodes-of-CodeReadingTheOpenSourcePerspective
// be_union_branch.h,v 1.16 2000/09/20 21:09:16 parsons Exp
// ============================================================================
//
// = LIBRARY
// TAO IDL
//
// = FILENAME
// be_union_branch.h
//
// = DESCRIPTION
// Extension of class AST_UnionBranch that provides additional means for C++
// mapping.
//
// = AUTHOR
// Copyright 1994-1995 by Sun Microsystems, Inc.
// and
// <NAME>
//
// ============================================================================
#ifndef BE_UNION_BRANCH_H
#define BE_UNION_BRANCH_H
#include "be_decl.h"
#include "ast_union_branch.h"
class UTL_LabelList;
class UTL_Type;
class UTL_StrList;
class be_visitor;
// Back-end extension of AST_UnionBranch: adds the C++ code generation hooks
// (label emission, visitor acceptance, narrowing) used by the TAO IDL compiler.
class be_union_branch : public virtual AST_UnionBranch,
public virtual be_decl
{
public:
be_union_branch (void);
// default constructor.
be_union_branch (UTL_LabelList *ll,
AST_Type *ft,
UTL_ScopedName *n,
UTL_StrList *p);
// Constructor taking the case labels, field type, scoped name and pragmas.
int gen_label_value (TAO_OutStream *os,
unsigned long index = 0);
// Generate the label value (as in a switch/case statement).
int gen_default_label_value (TAO_OutStream *os,
be_union *bu);
// Generate the default label value (as in a switch/case statement).
// Visiting.
virtual int accept (be_visitor *visitor);
// Narrowing.
// Standard TAO narrowing macros providing checked down-casts to this type.
DEF_NARROW_METHODS2 (be_union_branch, AST_UnionBranch, be_decl);
DEF_NARROW_FROM_DECL (be_union_branch);
protected:
virtual int compute_size_type (void);
// Compute the size type if it is unknown.
};
#endif
|
Si-elegans/Web-based_GUI_Tools
|
static-src/js/ember.js/controllers/subtypeControllers/pointSourceLight.js
|
<reponame>Si-elegans/Web-based_GUI_Tools<gh_stars>1-10
// Ember controller for the point-source light stimulus panel. Each observer
// pushes a model property's latest value into its (bootstrap-style) slider
// widget, keeping widget state in sync when the model changes elsewhere.
App.PointSourceLightController = Ember.ObjectController.extend({
needs: ['phototaxisExact','interval'],
// Sync the wavelength slider whenever model.waveLength changes.
updateWavelength: function(){
var slider = this.get('model.pointLightWavelengthSlider');
if(slider){
slider.setValue(parseFloat(this.get('model.waveLength')));
}
}.observes('model.waveLength'),
// Sync the intensity slider whenever model.intensity changes.
updateIntensity: function(){
var slider = this.get('model.pointLightIntensitySlider');
if(slider){
slider.setValue(parseFloat(this.get('model.intensity')));
}
}.observes('model.intensity'),
// Sync the lighting-distance slider whenever the distance changes.
updateDistance: function(){
var slider = this.get('model.pointLightLightingDistanceSlider');
if(slider){
slider.setValue(parseFloat(this.get('model.lightingPointDistance')));
}
}.observes('model.lightingPointDistance'),
// Sync the beam-radius slider whenever model.lightBeamRadius changes.
updateBeam: function(){
var slider = this.get('model.pointLightBeamSlider');
if(slider){
slider.setValue(parseFloat(this.get('model.lightBeamRadius')));
}
}.observes('model.lightBeamRadius'),
});
|
Eanes-dev/Ecommerce
|
Server/controllers/assessment.js
|
<filename>Server/controllers/assessment.js
const assessmentModel = require('../models/assessment')
const userModel = require('../models/user')
const clotherController = require('../controllers/clother')
module.exports = class AssessmentController {
static async create( assessment, userId, clotherId ) {
this.#checkParam( assessment, 'object', 'create' )
this.#checkParam( userId, 'number', 'create' )
this.#checkParam( clotherId, 'number', 'create' )
try {
const getAssessments = ( acu, elem ) => elem.assessment + acu
const assessmentOfClother = await clotherController.getAllAssessments( clotherId ),
sumTotalOfAssessment = assessmentOfClother.reduce( getAssessments, 0 ),
assessmentLength = assessmentOfClother.length,
avaregeRatings = sumTotalOfAssessment / assessmentLength <= 5 ? assessmentLength : 5
await assessmentModel.create( { ...assessment, userId, clotherId } )
clotherController.update({ assessmentAverege: avaregeRatings }, clotherId )
} catch (err) {
throw err
}
}
static delete( assessmentId ) {
this.#checkParam( assessmentId, 'number', 'delete' )
try {
const option = { where: { id: assessmentId } }
assessmentModel.destroy( option )
} catch (err) {
throw err
}
}
static update( newData, assessmentId ) {
this.#checkParam( newData, 'object', 'update' )
this.#checkParam( assessmentId, 'number', 'update' )
try {
const option = { where: { id: assessmentId }}
assessmentModel.update( newData, option )
} catch (err) {
throw err
}
}
static async getAssessmentById( assessmentId ) {
this.#checkParam( assessmentId, 'number', 'getAssessmentById' )
try {
const option = { where: { id: assessmentId }, raw: true, nest: true }
const assessment = await assessmentModel.findOne( option )
return assessment
} catch (err) {
throw err
}
}
static async getAssessmentByWhere( where ) {
this.#checkParam( where, 'object', 'getAssessmentByWhere' )
try {
const option = { where, raw: true, nest: true }
const assessment = await assessmentModel.findOne( option )
return assessment
} catch (err) {
throw err
}
}
static async getAllAssessmentByUserId( userId ) {
this.#checkParam( userId, 'number', 'getAllAssessmentByUserId' )
try {
const option = { where: { userId }, raw: true, nest: true }
const assessment = await assessmentModel.findAll( option )
return assessment
} catch (err) {
throw err
}
}
static async getAllAssessmentByClotherId( clotherId ) {
this.#checkParam( clotherId, 'number', 'getAllAssessmentByClotherId' )
try {
const option = { where: { clotherId }, raw: true, nest: true }
const assessment = await assessmentModel.findAll( option )
return assessment
} catch (err) {
throw err
}
}
static async getAllAssessment() {
try {
const option = { include: assessmentModel }
const clother = (await assessmentModel.findAll( option )).dataValues,
assessment = clother.assessment
return assessment
} catch (err) {
throw err
}
}
static async getUser( assessmentId ) {
this.#checkParam( assessmentId, 'number', 'getUser' )
try {
const optionAssessment = { where: { id: assessmentId }, raw: true, nest: true }
const userId = (await assessmentModel.findOne( optionAssessment )).userId,
optionUser= { where: { id: userId } },
user = ( await userModel.findOne( optionUser ) ).dataValues
return user
} catch (err) {
throw err
}
}
static #checkParam( param, type, message ) {
if( typeof param === type ) return true
throw new Error(`Param is inválid!. the error occurred at: ${ message }`)
}
}
|
kiro112/impok
|
src/interfaces/http/joblevel/JobLevelSerializer.js
|
'use strict';
module.exports = {
serialize({ id, level }) {
return {
id,
level
};
}
};
|
Maycoooonm/test
|
api/object/ICoverArt.java
|
package org.xbmc.api.object;
import java.io.Serializable;
/**
 * A media item that can be rendered with cover art. Serializable so items
 * can be passed between components.
 */
public interface ICoverArt extends Serializable {
// CRC identifying the cached cover-art file — presumably matches the
// thumbnail cache naming scheme; confirm against callers.
long getCrc();
// Alternative CRC tried when the primary CRC yields no cached art.
int getFallbackCrc();
// Database identifier of the item.
int getId();
// Media-type discriminator; NOTE(review): constants defined elsewhere.
int getMediaType();
String getName();
String getPath();
String getThumbUrl();
}
|
notaphplover/catan-heroes
|
src/io/github/notaphplover/catan/core/game/trade/ITradeDiscard.java
|
<gh_stars>0
package io.github.notaphplover.catan.core.game.trade;
/** A discard action performed in the context of a trade. */
public interface ITradeDiscard extends IReference {
// The trade this discard belongs to.
IReference getTrade();
}
|
bobheadlabs/sourcegraph
|
internal/database/migration/definition/read.go
|
package definition
import (
"fmt"
"io"
"io/fs"
"net/http"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/keegancsmith/sqlf"
"gopkg.in/yaml.v2"
"github.com/sourcegraph/sourcegraph/internal/lazyregexp"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
// ReadDefinitions reads all migration definitions under the given filesystem,
// topologically reorders them so parents precede dependents, and wraps the
// result in a *Definitions.
func ReadDefinitions(fs fs.FS, schemaBasePath string) (*Definitions, error) {
	defs, err := readDefinitions(fs, schemaBasePath)
	if err != nil {
		return nil, err
	}
	if err = reorderDefinitions(defs); err != nil {
		return nil, err
	}
	return newDefinitions(defs), nil
}
// instructionalError is an error that, beyond a classification and a
// description, carries remediation instructions telling the developer how
// to fix a malformed migration definition.
type instructionalError struct {
class string
description string
instructions string
}

// Error renders as "class: description" with the instructions appended as
// their own paragraph.
func (e instructionalError) Error() string {
return fmt.Sprintf("%s: %s\n\n%s\n", e.class, e.description, e.instructions)
}
// readDefinitions lists the top-level directories of the filesystem, treats
// every directory with a purely numeric name as a migration version, and
// reads each version's definition in ascending numeric order.
func readDefinitions(fs fs.FS, schemaBasePath string) ([]Definition, error) {
// http.FS adapts the fs.FS so we can use Readdir to enumerate entries.
root, err := http.FS(fs).Open("/")
if err != nil {
return nil, err
}
defer func() { _ = root.Close() }()
migrations, err := root.Readdir(0)
if err != nil {
return nil, err
}
versions := make([]int, 0, len(migrations))
for _, file := range migrations {
// Non-numeric entries (e.g. stray files) are silently skipped.
if version, err := strconv.Atoi(file.Name()); err == nil {
versions = append(versions, version)
}
}
sort.Ints(versions)
definitions := make([]Definition, 0, len(versions))
for _, version := range versions {
definition, err := readDefinition(fs, schemaBasePath, version)
if err != nil {
// schemaBasePath is only used for error/reporting paths here.
return nil, errors.Wrapf(err, "malformed migration definition at '%s'", filepath.Join(schemaBasePath, strconv.Itoa(version)))
}
definitions = append(definitions, definition)
}
return definitions, nil
}
// readDefinition loads the up/down SQL files and the metadata for a single
// migration version directory and assembles them into a Definition.
func readDefinition(fs fs.FS, schemaBasePath string, version int) (Definition, error) {
	dir := strconv.Itoa(version)
	upQuery, err := readQueryFromFile(fs, dir+"/up.sql")
	if err != nil {
		return Definition{}, err
	}
	downQuery, err := readQueryFromFile(fs, dir+"/down.sql")
	if err != nil {
		return Definition{}, err
	}
	partial := Definition{
		ID:        version,
		UpQuery:   upQuery,
		DownQuery: downQuery,
	}
	// Metadata (name, parents, flags) is layered onto the partial definition.
	return hydrateMetadataFromFile(fs, schemaBasePath, dir+"/metadata.yaml", partial)
}
// hydrateMetadataFromFile populates the given definition with metadata parsed
// from the given YAML file and validates the concurrent-index and privileged
// flags against the up/down query text. The mutated definition is returned.
// Validation failures return instructionalError values telling the developer
// exactly how to repair the metadata file.
func hydrateMetadataFromFile(fs fs.FS, schemaBasePath, metadataFilename string, definition Definition) (_ Definition, _ error) {
file, err := fs.Open(metadataFilename)
if err != nil {
return Definition{}, err
}
defer file.Close()
contents, err := io.ReadAll(file)
if err != nil {
return Definition{}, err
}
// Both the legacy singular "parent" and the newer "parents" are accepted.
var payload struct {
Name string `yaml:"name"`
Parent int `yaml:"parent"`
Parents []int `yaml:"parents"`
CreateIndexConcurrently bool `yaml:"createIndexConcurrently"`
Privileged bool `yaml:"privileged"`
NonIdempotent bool `yaml:"nonIdempotent"`
}
if err := yaml.Unmarshal(contents, &payload); err != nil {
return Definition{}, err
}
definition.Name = payload.Name
definition.Privileged = payload.Privileged
definition.NonIdempotent = payload.NonIdempotent
// Merge "parent" into "parents" (0 means "not set") and normalize order.
parents := payload.Parents
if payload.Parent != 0 {
parents = append(parents, payload.Parent)
}
sort.Ints(parents)
definition.Parents = parents
// Paths used purely for human-readable error messages below.
schemaPath := filepath.Join(schemaBasePath, strconv.Itoa(definition.ID))
metadataPath := filepath.Join(schemaBasePath, metadataFilename)
// Down migrations must never create indexes concurrently.
if _, ok := parseIndexMetadata(definition.DownQuery.Query(sqlf.PostgresBindVar)); ok {
return Definition{}, instructionalError{
class: "malformed concurrent index creation",
description: fmt.Sprintf("did not expect down query of migration at '%s' to contain concurrent creation of an index", schemaPath),
instructions: strings.Join([]string{
"Remove `CONCURRENTLY` when re-creating an old index in down migrations.",
"Downgrades indicate an instance stability error which generally requires a maintenance window.",
}, " "),
}
}
// The createIndexConcurrently flag and the up query must agree in both
// directions: flag set <=> query contains CREATE INDEX CONCURRENTLY.
if indexMetadata, ok := parseIndexMetadata(definition.UpQuery.Query(sqlf.PostgresBindVar)); ok {
if !payload.CreateIndexConcurrently {
return Definition{}, instructionalError{
class: "malformed concurrent index creation",
description: fmt.Sprintf("did not expect up query of migration at '%s' to contain concurrent creation of an index", schemaPath),
instructions: strings.Join([]string{
fmt.Sprintf("Add `createIndexConcurrently: true` to the metadata file '%s'.", metadataPath),
}, " "),
}
}
definition.IsCreateIndexConcurrently = true
definition.IndexMetadata = indexMetadata
} else if payload.CreateIndexConcurrently {
return Definition{}, instructionalError{
class: "malformed concurrent index creation",
description: fmt.Sprintf("expected up query of migration at '%s' to contain concurrent creation of an index", schemaPath),
instructions: strings.Join([]string{
fmt.Sprintf("Remove `createIndexConcurrently: true` from the metadata file '%s'.", metadataPath),
}, " "),
}
}
// Queries that touch Postgres extensions must be marked privileged.
if isPrivileged(definition.UpQuery.Query(sqlf.PostgresBindVar)) || isPrivileged(definition.DownQuery.Query(sqlf.PostgresBindVar)) {
if !payload.Privileged {
return Definition{}, instructionalError{
class: "malformed Postgres extension modification",
description: fmt.Sprintf("did not expect queries of migration at '%s' to require elevated permissions", schemaPath),
instructions: strings.Join([]string{
fmt.Sprintf("Add `privileged: true` to the metadata file '%s'.", metadataPath),
}, " "),
}
}
}
return definition, nil
}
// readQueryFromFile reads a SQL file from the filesystem and wraps its
// contents in a *sqlf.Query.
func readQueryFromFile(fs fs.FS, filepath string) (*sqlf.Query, error) {
	f, err := fs.Open(filepath)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	raw, err := io.ReadAll(f)
	if err != nil {
		return nil, err
	}
	// Escape every '%' so sqlf.Sprintf treats the file contents as literal
	// SQL rather than as a format string with placeholders to interpolate.
	escaped := strings.ReplaceAll(string(raw), "%", "%%")
	return sqlf.Sprintf(escaped), nil
}
// Matches "CREATE INDEX CONCURRENTLY [IF NOT EXISTS] <index> ON <table>",
// capturing the index name (group 1) and table name (group 2).
var createIndexConcurrentlyPattern = lazyregexp.New(`CREATE\s+INDEX\s+CONCURRENTLY\s+(?:IF\s+NOT\s+EXISTS\s+)?([A-Za-z0-9_]+)\s+ON\s+([A-Za-z0-9_]+)`)

// parseIndexMetadata extracts the table and index names from the first
// concurrent index creation in the query text; the boolean reports whether
// such a statement was found.
func parseIndexMetadata(queryText string) (*IndexMetadata, bool) {
matches := createIndexConcurrentlyPattern.FindStringSubmatch(queryText)
if len(matches) == 0 {
return nil, false
}
return &IndexMetadata{
TableName: matches[2],
IndexName: matches[1],
}, true
}
// Matches statements that create, comment on, or drop a Postgres extension.
var alterExtensionPattern = lazyregexp.New(`(CREATE|COMMENT ON|DROP)\s+EXTENSION`)

// isPrivileged reports whether the query modifies a Postgres extension —
// an operation that requires elevated (superuser) privileges.
func isPrivileged(queryText string) bool {
	// MatchString avoids allocating the submatch slice that
	// FindStringSubmatch would build just to test for a match.
	return alterExtensionPattern.MatchString(queryText)
}
// reorderDefinitions will re-order the given migration definitions in-place so that
// migrations occur before their dependents in the slice. An error is returned if the
// given migration definitions do not form a single-root directed acyclic graph, or
// if any declared parent does not exist in the set.
func reorderDefinitions(migrationDefinitions []Definition) error {
if len(migrationDefinitions) == 0 {
return nil
}
// Stash migration definitions by identifier
migrationDefinitionMap := make(map[int]Definition, len(migrationDefinitions))
for _, migrationDefinition := range migrationDefinitions {
migrationDefinitionMap[migrationDefinition.ID] = migrationDefinition
}
// Reject references to parents that are not part of this definition set
// before attempting a topological sort.
for _, migrationDefinition := range migrationDefinitions {
for _, parent := range migrationDefinition.Parents {
if _, ok := migrationDefinitionMap[parent]; !ok {
return unknownMigrationError(parent, &migrationDefinition.ID)
}
}
}
// Find topological order of migrations
order, err := findDefinitionOrder(migrationDefinitions)
if err != nil {
return err
}
for i, id := range order {
// Re-order migration definitions slice to be in topological order. The order
// returned by findDefinitionOrder is reversed; we want parents _before_ their
// dependencies, so we fill this slice in backwards.
migrationDefinitions[len(migrationDefinitions)-1-i] = migrationDefinitionMap[id]
}
return nil
}
// findDefinitionOrder returns an order of migration definition identifiers such that
// migrations occur only after their dependencies (parents). This assumes that the set
// of definitions provided form a single-root directed acyclic graph and fails with an
// error if this is not the case. The returned slice is in reverse topological order
// (the caller fills its output backwards; see reorderDefinitions).
func findDefinitionOrder(migrationDefinitions []Definition) ([]int, error) {
root, err := root(migrationDefinitions)
if err != nil {
return nil, err
}
// Use depth-first-search to topologically sort the migration definition sets as a
// graph. At this point we know we have a single root; this means that the given set
// of definitions either (a) form a connected acyclic graph, or (b) form a disconnected
// set of graphs containing at least one cycle (by construction). In either case, we'll
// return an error indicating that a cycle exists and that the set of definitions are
// not well-formed.
//
// See the following Wikipedia article for additional intuition and description of the
// `marks` array to detect cycles.
// https://en.wikipedia.org/wiki/Topological_sorting#Depth-first_search
type MarkType uint
const (
MarkTypeUnvisited MarkType = iota
MarkTypeVisiting
MarkTypeVisited
)
var (
order = make([]int, 0, len(migrationDefinitions))
marks = make(map[int]MarkType, len(migrationDefinitions))
childMap = children(migrationDefinitions)
dfs func(id int, parents []int) error
)
for _, children := range childMap {
// Reverse-order each child slice. This will end up giving the output slice the
// property that migrations not related via ancestry will be ordered by their
// version number. This gives a nice, determinstic, and intuitive order in which
// migrations will be applied.
sort.Sort(sort.Reverse(sort.IntSlice(children)))
}
// dfs visits id and its descendants; `parents` is the path from the root to
// (but excluding) id, used only to reconstruct a cycle for error reporting.
dfs = func(id int, parents []int) error {
if marks[id] == MarkTypeVisiting {
// We're currently processing the descendants of this node, so we have a paths in
// both directions between these two nodes.
// Peel off the head of the parent list until we reach the target node. This leaves
// us with a slice starting with the target node, followed by the path back to itself.
// We'll use this instance of a cycle in the error description.
for len(parents) > 0 && parents[0] != id {
parents = parents[1:]
}
if len(parents) == 0 || parents[0] != id {
panic("unreachable")
}
cycle := append(parents, id)
return instructionalError{
class: "migration dependency cycle",
description: fmt.Sprintf("migrations %d and %d declare each other as dependencies", parents[len(parents)-1], id),
instructions: strings.Join([]string{
fmt.Sprintf("Break one of the links in the following cycle:\n%s", strings.Join(intsToStrings(cycle), " -> ")),
}, " "),
}
}
if marks[id] == MarkTypeVisited {
// already visited
return nil
}
marks[id] = MarkTypeVisiting
defer func() { marks[id] = MarkTypeVisited }()
for _, child := range childMap[id] {
// Each recursive call gets its own copy of the path to avoid aliasing.
if err := dfs(child, append(append([]int(nil), parents...), id)); err != nil {
return err
}
}
// Add self _after_ adding all children recursively
order = append(order, id)
return nil
}
// Perform a depth-first traversal from the single root we found above
if err := dfs(root, nil); err != nil {
return nil, err
}
if len(order) < len(migrationDefinitions) {
// We didn't visit every node, but we also do not have more than one root. There necessarily
// exists a cycle that we didn't enter in the traversal from our root. Continue the traversal
// starting from each unvisited node until we return a cycle.
for _, migrationDefinition := range migrationDefinitions {
if _, ok := marks[migrationDefinition.ID]; !ok {
if err := dfs(migrationDefinition.ID, nil); err != nil {
return nil, err
}
}
}
panic("unreachable")
}
return order, nil
}
// root returns the unique migration definition with no parent or an error of no such migration exists.
func root(migrationDefinitions []Definition) (int, error) {
	roots := make([]int, 0, 1)
	for _, migrationDefinition := range migrationDefinitions {
		if len(migrationDefinition.Parents) == 0 {
			roots = append(roots, migrationDefinition.ID)
		}
	}
	if len(roots) == 0 {
		return 0, instructionalError{
			class:       "no roots",
			description: "every migration declares a parent",
			instructions: strings.Join([]string{
				`There is no migration defined in this schema that does not declare a parent.`,
				`This indicates either a migration dependency cycle or a reference to a parent migration that no longer exists.`,
			}, " "),
		}
	}
	if len(roots) > 1 {
		// Sort the offending identifiers for a deterministic message. (Previously this
		// sorted slice was computed but never included in the error output.)
		strRoots := intsToStrings(roots)
		sort.Strings(strRoots)
		return 0, instructionalError{
			class:       "multiple roots",
			description: fmt.Sprintf("expected exactly one migration to have no parent but found %d", len(roots)),
			instructions: strings.Join([]string{
				`There are multiple migrations defined in this schema that do not declare a parent.`,
				`This indicates a new migration that did not correctly attach itself to an existing migration.`,
				`This may also indicate the presence of a duplicate squashed migration.`,
				fmt.Sprintf(`The migrations without a parent are: %s.`, strings.Join(strRoots, ", ")),
			}, " "),
		}
	}
	return roots[0], nil
}
// children builds an adjacency map from each parent migration identifier to the
// identifiers of the migrations that declare it as a parent.
func children(migrationDefinitions []Definition) map[int][]int {
	childMap := make(map[int][]int, len(migrationDefinitions))
	for _, def := range migrationDefinitions {
		for _, parentID := range def.Parents {
			childMap[parentID] = append(childMap[parentID], def.ID)
		}
	}
	return childMap
}
// intsToStrings converts each integer to its base-10 string representation,
// preserving order.
func intsToStrings(ints []int) []string {
	strs := make([]string, len(ints))
	for i, value := range ints {
		strs[i] = strconv.Itoa(value)
	}
	return strs
}
|
darizagrl/internet-service-provider-servlet
|
src/main/java/com/provider/model/service/TariffService.java
|
<reponame>darizagrl/internet-service-provider-servlet
package com.provider.model.service;
import com.provider.model.dao.DaoFactory;
import com.provider.model.dao.TariffDao;
import com.provider.model.dao.exception.DAOException;
import com.provider.model.entity.Tariff;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.sql.SQLException;
import java.util.List;
/**
 * Service layer for {@link Tariff} entities. Every operation borrows a
 * {@link TariffDao} from the {@link DaoFactory} inside try-with-resources so
 * the underlying resource is always released, and rethrows any checked
 * failure as an unchecked {@link DAOException}.
 */
public class TariffService {
    // Package-private on purpose: preserves existing same-package access.
    DaoFactory daoFactory = DaoFactory.getInstance();
    private final Logger logger = LogManager.getLogger(TariffService.class);

    /**
     * Returns one page of tariffs belonging to a service, sorted as requested.
     *
     * @param serviceId      id of the owning service
     * @param sortField      column to sort by
     * @param sortOrder      sort direction (passed through to the DAO)
     * @param currentPageNum 1-based page number
     * @param recordsPerPage page size
     * @return the requested page of tariffs
     * @throws DAOException if the DAO cannot be created or the query fails
     */
    public List<Tariff> findPaginatedAndSorted(int serviceId, String sortField, String sortOrder, Integer currentPageNum, Integer recordsPerPage) {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            return dao.findPaginatedAndSorted(serviceId, sortField, sortOrder, currentPageNum, recordsPerPage);
        } catch (SQLException | ClassNotFoundException e) {
            // Pass the throwable as well so the full stack trace is logged,
            // not just the (possibly null) message.
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }

    /**
     * Returns all tariffs belonging to the given service.
     *
     * @throws DAOException if the DAO cannot be created or the query fails
     */
    public List<Tariff> findAllByServiceId(int serviceId) {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            logger.info("Finding tariffs with service id {}:", serviceId);
            return dao.findAllByServiceId(serviceId);
        } catch (SQLException | ClassNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }

    /**
     * Returns every tariff.
     *
     * @throws DAOException if the DAO cannot be created or the query fails
     */
    public List<Tariff> findAll() {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            return dao.findAll();
        } catch (SQLException | ClassNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }

    /**
     * Persists a new tariff.
     *
     * @throws DAOException if the DAO cannot be created or the insert fails
     */
    public void create(Tariff tariff) {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            logger.info("Creating tariff with name: {}", tariff.getName());
            dao.create(tariff);
        } catch (SQLException | ClassNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }

    /**
     * Deletes the tariff with the given id.
     *
     * @throws DAOException if the DAO cannot be created or the delete fails
     */
    public void delete(int tariffId) {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            logger.info("Deleting tariff with id: {}", tariffId);
            dao.delete(tariffId);
        } catch (SQLException | ClassNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }

    /**
     * Updates an existing tariff.
     *
     * @throws DAOException if the DAO cannot be created or the update fails
     */
    public void update(Tariff tariff) {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            logger.info("Updating tariff with name: {}", tariff.getName());
            dao.update(tariff);
        } catch (SQLException | ClassNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }

    /**
     * Looks up a single tariff by id.
     *
     * @throws DAOException if the DAO cannot be created or the query fails
     */
    public Tariff findById(int id) {
        try (TariffDao dao = daoFactory.getTariffDao()) {
            logger.info("Searching for tariff with id: {}", id);
            return dao.findById(id);
        } catch (SQLException | ClassNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new DAOException(e);
        }
    }
}
|
Paradem/material-ui
|
src/styles/transformers/index.js
|
// Barrel module: re-exports the individual style transformers so callers can
// import them from a single path.
import autoprefixer from './autoprefixer';
import callOnce from './callOnce';
import rtl from './rtl';
export {
  autoprefixer,
  callOnce,
  rtl,
};
|
GameModsBR/VoiceChat
|
src/main/java/ovr/paulscode/sound/libraries/ChannelLWJGLOpenAL.java
|
<filename>src/main/java/ovr/paulscode/sound/libraries/ChannelLWJGLOpenAL.java
package ovr.paulscode.sound.libraries;
import org.lwjgl.BufferUtils;
import org.lwjgl.openal.AL10;
import paulscode.sound.Channel;
import javax.sound.sampled.AudioFormat;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.LinkedList;
/**
 * LWJGL/OpenAL-backed playback channel (appears to be decompiled Paulscode
 * SoundSystem code — hence the numeric OpenAL constants; hex equivalents from
 * the OpenAL 1.1 spec are noted inline). Supports both "normal" sources
 * (a single attached buffer) and streaming sources (a queue of buffers).
 */
public class ChannelLWJGLOpenAL extends Channel {
    // Single-element buffer holding the OpenAL source handle for this channel.
    public IntBuffer ALSource;
    // OpenAL sample format (AL_FORMAT_MONO8/MONO16/STEREO8/STEREO16).
    public int ALformat;
    // Sample rate in Hz of the audio fed to this channel.
    public int sampleRate;
    // Milliseconds of audio already played in buffers that have been unqueued;
    // added to the current-buffer offset in millisecondsPlayed().
    public float millisPreviouslyPlayed = 0.0F;

    /**
     * @param type channel type (0 = normal, 1 = streaming — see usages below)
     * @param src  single-element buffer containing the OpenAL source handle
     */
    public ChannelLWJGLOpenAL(int type, IntBuffer src) {
        super(type);
        this.libraryType = LibraryLWJGLOpenAL.class;
        this.ALSource = src;
    }

    /**
     * Attaches a sound buffer to a normal (non-streaming) source and, if an
     * audio format is known, applies it. Returns true on OpenAL error.
     */
    public boolean attachBuffer(IntBuffer buf) {
        if (this.errorCheck(this.channelType != 0, "Sound buffers may only be attached to normal sources.")) {
            return false;
        } else {
            // 4105 == 0x1009 == AL_BUFFER.
            AL10.alSourcei(this.ALSource.get(0), 4105, buf.get(0));
            if (this.attachedSource != null && this.attachedSource.soundBuffer != null && this.attachedSource.soundBuffer.audioFormat != null) {
                this.setAudioFormat(this.attachedSource.soundBuffer.audioFormat);
            }
            return this.checkALError();
        }
    }

    /**
     * Number of queued buffers a streaming source has finished playing
     * (0 for non-streaming channels or on error).
     */
    @Override
    public int buffersProcessed() {
        if (this.channelType != 1) {
            return 0;
        } else {
            // 4118 == 0x1016 == AL_BUFFERS_PROCESSED.
            int processed = AL10.alGetSourcei(this.ALSource.get(0), 4118);
            return this.checkALError() ? 0 : processed;
        }
    }

    /**
     * Polls alGetError; logs a message and returns true if an error occurred.
     * Char literals encode the AL error codes: '\ua001' == 0xA001 etc.
     */
    private boolean checkALError() {
        switch (AL10.alGetError()) {
            case 0:
                // AL_NO_ERROR.
                return false;
            case '\ua001':
                // AL_INVALID_NAME.
                this.errorMessage("Invalid name parameter.");
                return true;
            case '\ua002':
                // AL_INVALID_ENUM.
                this.errorMessage("Invalid parameter.");
                return true;
            case '\ua003':
                // AL_INVALID_VALUE is silently treated as success here.
                // NOTE(review): looks intentional in the original, but confirm —
                // other error codes are reported.
                return false;
            case '\ua004':
                // AL_INVALID_OPERATION.
                this.errorMessage("Illegal call.");
                return true;
            case '\ua005':
                // AL_OUT_OF_MEMORY.
                this.errorMessage("Unable to allocate memory.");
                return true;
            default:
                this.errorMessage("An unrecognized error occurred.");
                return true;
        }
    }

    /** Stops and deletes the OpenAL source, swallowing any native failures. */
    @Override
    public void cleanup() {
        if (this.ALSource != null) {
            try {
                // IntBuffer overload: stops every source handle in the buffer.
                AL10.alSourceStop(this.ALSource);
                // Clear any pending error state.
                AL10.alGetError();
            } catch (Exception var3) {
            }
            try {
                AL10.alDeleteSources(this.ALSource);
                AL10.alGetError();
            } catch (Exception var2) {
            }
            this.ALSource.clear();
        }
        this.ALSource = null;
        super.cleanup();
    }

    /** Stops playback; for streaming channels also drops all queued buffers. */
    @Override
    public void close() {
        try {
            AL10.alSourceStop(this.ALSource.get(0));
            AL10.alGetError();
        } catch (Exception var2) {
        }
        if (this.channelType == 1) {
            this.flush();
        }
    }

    /**
     * Feeds raw PCM data to a streaming source: reclaims processed buffers
     * (accumulating their duration into millisPreviouslyPlayed), queues a new
     * buffer with the given data, and restarts playback if the attached source
     * is active but stalled.
     *
     * @return number of buffers that had been processed, or -1 on error
     */
    @Override
    public int feedRawAudioData(byte[] buffer) {
        if (this.errorCheck(this.channelType != 1, "Raw audio data can only be fed to streaming sources.")) {
            return -1;
        } else {
            ByteBuffer byteBuffer = (ByteBuffer) BufferUtils.createByteBuffer(buffer.length).put(buffer).flip();
            // 4118 == AL_BUFFERS_PROCESSED.
            int processed = AL10.alGetSourcei(this.ALSource.get(0), 4118);
            IntBuffer intBuffer;
            if (processed > 0) {
                intBuffer = BufferUtils.createIntBuffer(processed);
                AL10.alGenBuffers(intBuffer);
                if (this.errorCheck(this.checkALError(), "Error clearing stream buffers in method \'feedRawAudioData\'")) {
                    return -1;
                }
                AL10.alSourceUnqueueBuffers(this.ALSource.get(0), intBuffer);
                if (this.errorCheck(this.checkALError(), "Error unqueuing stream buffers in method \'feedRawAudioData\'")) {
                    return -1;
                }
                intBuffer.rewind();
                // Track how much audio the unqueued buffers represented.
                for (; intBuffer.hasRemaining(); this.checkALError()) {
                    int i = intBuffer.get();
                    if (AL10.alIsBuffer(i)) {
                        this.millisPreviouslyPlayed += this.millisInBuffer(i);
                    }
                }
                AL10.alDeleteBuffers(intBuffer);
                this.checkALError();
            }
            intBuffer = BufferUtils.createIntBuffer(1);
            AL10.alGenBuffers(intBuffer);
            if (this.errorCheck(this.checkALError(), "Error generating stream buffers in method \'preLoadBuffers\'")) {
                return -1;
            } else {
                AL10.alBufferData(intBuffer.get(0), this.ALformat, byteBuffer, this.sampleRate);
                if (this.checkALError()) {
                    return -1;
                } else {
                    AL10.alSourceQueueBuffers(this.ALSource.get(0), intBuffer);
                    if (this.checkALError()) {
                        return -1;
                    } else {
                        // Restart playback if the source should be playing but ran dry.
                        if (this.attachedSource != null && this.attachedSource.channel == this && this.attachedSource.active() && !this.playing()) {
                            AL10.alSourcePlay(this.ALSource.get(0));
                            this.checkALError();
                        }
                        return processed;
                    }
                }
            }
        }
    }

    /** Unqueues every queued buffer and resets the played-time accumulator. */
    @Override
    public void flush() {
        // 4117 == 0x1015 == AL_BUFFERS_QUEUED.
        int queued = AL10.alGetSourcei(this.ALSource.get(0), 4117);
        for (IntBuffer intBuffer = BufferUtils.createIntBuffer(1); queued > 0; --queued) {
            try {
                AL10.alSourceUnqueueBuffers(this.ALSource.get(0), intBuffer);
            } catch (Exception var4) {
                return;
            }
            if (this.checkALError()) {
                return;
            }
        }
        this.millisPreviouslyPlayed = 0.0F;
    }

    /**
     * Milliseconds of audio played so far: byte offset into the current buffer
     * converted via format/sample-rate, plus (for streaming) the time already
     * accumulated from unqueued buffers.
     */
    @Override
    public float millisecondsPlayed() {
        // 4134 == 0x1026 == AL_BYTE_OFFSET.
        float offset = (float) AL10.alGetSourcei(this.ALSource.get(0), 4134);
        float bytesPerFrame = 1.0F;
        switch (this.ALformat) {
            case 4352:
                // AL_FORMAT_MONO8.
                bytesPerFrame = 1.0F;
                break;
            case 4353:
                // AL_FORMAT_MONO16.
                bytesPerFrame = 2.0F;
                break;
            case 4354:
                // AL_FORMAT_STEREO8.
                bytesPerFrame = 2.0F;
                break;
            case 4355:
                // AL_FORMAT_STEREO16.
                bytesPerFrame = 4.0F;
        }
        offset = offset / bytesPerFrame / (float) this.sampleRate * 1000.0F;
        if (this.channelType == 1) {
            offset += this.millisPreviouslyPlayed;
        }
        return offset;
    }

    /**
     * Duration in milliseconds of the given AL buffer:
     * size / channels / (bits/8) / sampleRate * 1000.
     * 8196 == AL_SIZE, 8195 == AL_CHANNELS, 8194 == AL_BITS.
     */
    public float millisInBuffer(int alBufferi) {
        return (float) AL10.alGetBufferi(alBufferi, 8196) / (float) AL10.alGetBufferi(alBufferi, 8195) / ((float) AL10.alGetBufferi(alBufferi, 8194) / 8.0F) / (float) this.sampleRate * 1000.0F;
    }

    /** Pauses playback on the source. */
    @Override
    public void pause() {
        AL10.alSourcePause(this.ALSource.get(0));
        this.checkALError();
    }

    /** Starts (or resumes) playback on the source. */
    @Override
    public void play() {
        AL10.alSourcePlay(this.ALSource.get(0));
        this.checkALError();
    }

    /** True if the source state is AL_PLAYING (4112 == AL_SOURCE_STATE, 4114 == AL_PLAYING). */
    @Override
    public boolean playing() {
        int state = AL10.alGetSourcei(this.ALSource.get(0), 4112);
        return !this.checkALError() && state == 4114;
    }

    /**
     * Replaces any processed buffers on a streaming source with the given list
     * of raw PCM byte arrays, queues them, and starts playback.
     *
     * @param bufferList list of byte[] chunks in playback order
     * @return false on any OpenAL or argument error
     */
    @Override
    public boolean preLoadBuffers(LinkedList bufferList) {
        if (this.errorCheck(this.channelType != 1, "Buffers may only be queued for streaming sources.")) {
            return false;
        } else if (this.errorCheck(bufferList == null, "Buffer List null in method \'preLoadBuffers\'")) {
            return false;
        } else {
            // Remember playback state; the source must be stopped to unqueue safely.
            boolean playing = this.playing();
            if (playing) {
                AL10.alSourceStop(this.ALSource.get(0));
                this.checkALError();
            }
            int processed = AL10.alGetSourcei(this.ALSource.get(0), 4118);
            IntBuffer streamBuffers;
            if (processed > 0) {
                streamBuffers = BufferUtils.createIntBuffer(processed);
                AL10.alGenBuffers(streamBuffers);
                if (this.errorCheck(this.checkALError(), "Error clearing stream buffers in method \'preLoadBuffers\'")) {
                    return false;
                }
                AL10.alSourceUnqueueBuffers(this.ALSource.get(0), streamBuffers);
                if (this.errorCheck(this.checkALError(), "Error unqueuing stream buffers in method \'preLoadBuffers\'")) {
                    return false;
                }
            }
            if (playing) {
                AL10.alSourcePlay(this.ALSource.get(0));
                this.checkALError();
            }
            streamBuffers = BufferUtils.createIntBuffer(bufferList.size());
            AL10.alGenBuffers(streamBuffers);
            if (this.errorCheck(this.checkALError(), "Error generating stream buffers in method \'preLoadBuffers\'")) {
                return false;
            } else {
                ByteBuffer byteBuffer = null;
                for (int e = 0; e < bufferList.size(); ++e) {
                    byteBuffer = (ByteBuffer) BufferUtils.createByteBuffer(((byte[]) bufferList.get(e)).length).put((byte[]) bufferList.get(e)).flip();
                    try {
                        AL10.alBufferData(streamBuffers.get(e), this.ALformat, byteBuffer, this.sampleRate);
                    } catch (Exception var9) {
                        this.errorMessage("Error creating buffers in method \'preLoadBuffers\'");
                        this.printStackTrace(var9);
                        return false;
                    }
                    if (this.errorCheck(this.checkALError(), "Error creating buffers in method \'preLoadBuffers\'")) {
                        return false;
                    }
                }
                try {
                    AL10.alSourceQueueBuffers(this.ALSource.get(0), streamBuffers);
                } catch (Exception var8) {
                    this.errorMessage("Error queuing buffers in method \'preLoadBuffers\'");
                    this.printStackTrace(var8);
                    return false;
                }
                if (this.errorCheck(this.checkALError(), "Error queuing buffers in method \'preLoadBuffers\'")) {
                    return false;
                } else {
                    AL10.alSourcePlay(this.ALSource.get(0));
                    return !this.errorCheck(this.checkALError(), "Error playing source in method \'preLoadBuffers\'");
                }
            }
        }
    }

    /**
     * Recycles one processed buffer on a streaming source: unqueues it, adds
     * its duration to the played-time accumulator, refills it with the given
     * data, and re-queues it.
     */
    @Override
    public boolean queueBuffer(byte[] buffer) {
        if (this.errorCheck(this.channelType != 1, "Buffers may only be queued for streaming sources.")) {
            return false;
        } else {
            ByteBuffer byteBuffer = (ByteBuffer) BufferUtils.createByteBuffer(buffer.length).put(buffer).flip();
            IntBuffer intBuffer = BufferUtils.createIntBuffer(1);
            AL10.alSourceUnqueueBuffers(this.ALSource.get(0), intBuffer);
            if (this.checkALError()) {
                return false;
            } else {
                if (AL10.alIsBuffer(intBuffer.get(0))) {
                    this.millisPreviouslyPlayed += this.millisInBuffer(intBuffer.get(0));
                }
                this.checkALError();
                AL10.alBufferData(intBuffer.get(0), this.ALformat, byteBuffer, this.sampleRate);
                if (this.checkALError()) {
                    return false;
                } else {
                    AL10.alSourceQueueBuffers(this.ALSource.get(0), intBuffer);
                    return !this.checkALError();
                }
            }
        }
    }

    /** Rewinds a non-streaming source to the start and resets the played-time accumulator. */
    @Override
    public void rewind() {
        if (this.channelType != 1) {
            AL10.alSourceRewind(this.ALSource.get(0));
            if (!this.checkALError()) {
                this.millisPreviouslyPlayed = 0.0F;
            }
        }
    }

    /**
     * Derives the OpenAL format constant and sample rate from a JavaSound
     * AudioFormat. Only mono/stereo with 8- or 16-bit samples is supported.
     */
    @Override
    public void setAudioFormat(AudioFormat audioFormat) {
        // Unused; likely a decompiler artifact.
        boolean soundFormat = false;
        short soundFormat1;
        if (audioFormat.getChannels() == 1) {
            if (audioFormat.getSampleSizeInBits() == 8) {
                // AL_FORMAT_MONO8.
                soundFormat1 = 4352;
            } else {
                if (audioFormat.getSampleSizeInBits() != 16) {
                    this.errorMessage("Illegal sample size in method \'setAudioFormat\'");
                    return;
                }
                // AL_FORMAT_MONO16.
                soundFormat1 = 4353;
            }
        } else {
            if (audioFormat.getChannels() != 2) {
                this.errorMessage("Audio data neither mono nor stereo in method \'setAudioFormat\'");
                return;
            }
            if (audioFormat.getSampleSizeInBits() == 8) {
                // AL_FORMAT_STEREO8.
                soundFormat1 = 4354;
            } else {
                if (audioFormat.getSampleSizeInBits() != 16) {
                    this.errorMessage("Illegal sample size in method \'setAudioFormat\'");
                    return;
                }
                // AL_FORMAT_STEREO16.
                soundFormat1 = 4355;
            }
        }
        this.ALformat = soundFormat1;
        this.sampleRate = (int) audioFormat.getSampleRate();
    }

    /** Directly sets the OpenAL format constant and sample rate. */
    public void setFormat(int format, int rate) {
        this.ALformat = format;
        this.sampleRate = rate;
    }

    /** Stops playback and resets the played-time accumulator. */
    @Override
    public void stop() {
        AL10.alSourceStop(this.ALSource.get(0));
        if (!this.checkALError()) {
            this.millisPreviouslyPlayed = 0.0F;
        }
    }
}
|
yuanfayang/wms-parent
|
wms-bill-manage/wms-bill-manage/src/main/java/com/deer/wms/bill/manage/web/MtAlonePrintModelController.java
|
package com.deer.wms.bill.manage.web;
import com.deer.wms.project.seed.core.result.Result;
import com.deer.wms.project.seed.core.result.ResultGenerator;
import com.deer.wms.bill.manage.model.MtAlonePrintModel;
import com.deer.wms.bill.manage.model.MtAlonePrintModelCriteria;
import com.deer.wms.bill.manage.service.MtAlonePrintModelService;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import com.deer.wms.bill.manage.model.MtAlonePrintModel;
/**
 * REST controller exposing CRUD and paged-listing endpoints for
 * {@link MtAlonePrintModel} records under {@code /mt/alone/print/models}.
 *
 * Created by on 2019/02/21.
 */
@RestController
@RequestMapping("/mt/alone/print/models")
public class MtAlonePrintModelController {
    @Autowired
    private MtAlonePrintModelService mtAlonePrintModelService;

    /** Creates a new print model from the request body. */
    @PostMapping("/add")
    public Result add(@RequestBody MtAlonePrintModel mtAlonePrintModel) {
        mtAlonePrintModelService.save(mtAlonePrintModel);
        return ResultGenerator.genSuccessResult();
    }

    /** Deletes the print model with the given id. */
    @GetMapping("/delete")
    public Result delete(Integer mtAlonePrintModelId) {
        mtAlonePrintModelService.deleteById(mtAlonePrintModelId);
        return ResultGenerator.genSuccessResult();
    }

    /** Updates an existing print model from the request body. */
    @PostMapping("/update")
    public Result update(@RequestBody MtAlonePrintModel mtAlonePrintModel) {
        mtAlonePrintModelService.update(mtAlonePrintModel);
        return ResultGenerator.genSuccessResult();
    }

    /** Returns a single print model by path id. */
    @GetMapping("/{id}")
    public Result detail(@PathVariable Integer id) {
        MtAlonePrintModel mtAlonePrintModel = mtAlonePrintModelService.findById(id);
        return ResultGenerator.genSuccessResult(mtAlonePrintModel);
    }

    /** Returns one page of print models matching the criteria. */
    @GetMapping("/list")
    public Result list(MtAlonePrintModelCriteria criteria) {
        PageHelper.startPage(criteria.getPageNum(), criteria.getPageSize());
        List<MtAlonePrintModel> list = mtAlonePrintModelService.findList(criteria);
        // Parameterize PageInfo instead of using the raw type (same runtime
        // behavior, removes the unchecked-conversion warning).
        PageInfo<MtAlonePrintModel> pageInfo = new PageInfo<>(list);
        return ResultGenerator.genSuccessResult(pageInfo);
    }
}
|
woshiluo/oi
|
cf/old/1307/A.cpp
|
#include <cstdio>
// Return the smaller of two ints.
inline int Min( int a, int b ) { if ( b < a ) return b; return a; }
// Maximum number of piles per test case (problem constraint plus slack).
const int N = 110;
// T: number of test cases; n: pile count; d: day budget for moves.
int T, n, d;
// a[i]: haybales in pile i (1-indexed).
int a[N];
// Codeforces 1307A-style greedy: maximize a[1] given that moving one haybale
// from pile i to pile 1 costs (i-1) days and at most d days are available.
int main() {
#ifdef woshiluo
	freopen( "A.in", "r", stdin );
	freopen( "A.out", "w", stdout );
#endif
	scanf( "%d", &T );
	while( T -- ) {
		scanf( "%d%d", &n, &d );
		for( int i = 1; i <= n; i ++ ) {
			scanf( "%d", &a[i] );
			if( i != 1 && a[i] != 0 ) {
				// Spend as much budget as possible on pile i: capped by both the
				// remaining days d and the cost of moving the whole pile.
				int tmp = Min( d, a[i] * ( i - 1 ) );
				// Round down to a whole number of (i-1)-day moves.
				tmp -= ( tmp % ( i - 1 ) );
				d -= tmp;
				a[1] += tmp / ( i - 1 );
			}
		}
		printf( "%d\n", a[1] );
	}
}
|
ericchill/Bonsai
|
runtime/include/postProcessModules.h
|
#pragma once
#include <cassert>
#ifdef USE_MPI
#include <mpi.h>
#endif
#include <vector>
#include <sys/time.h>
// Post-processing module: accumulates 2D density/velocity histograms (top and
// front projections) plus an R-Phi surface-density map for star particles,
// reduces them across MPI ranks, and writes the combined result to disk.
// All work happens in the constructor.
struct DENSITY
{
private:
#define G_CONST 6.672e-8
#define M_SUN 1.989e33
#define PARSEC 3.08567802e18
#define ONE_YEAR 3.1558149984e7
// #define DMSTARTID 200000000
// #define BULGESTARTID 100000000
#define N_MESH 1024
#define N_MESH_R 20
#define N_MESH_PHI 128
#define MIN_D 1.0
#define MAX_D 10000.0
    // Per-rank and reduced grids; the 4*N_MESH columns pack four planes:
    // [0..N_MESH) top-view mass, [N_MESH..2N) top-view v^2,
    // [2N..3N) front-view mass, [3N..4N) front-view v^2.
    float perProcRes [N_MESH][4*N_MESH];
    float combinedRes[N_MESH][4*N_MESH];
    float perProcResRPhi [N_MESH_PHI][2*N_MESH_R]; //First half is np, second half is mass
    float combinedResRPhi[N_MESH_PHI][2*N_MESH_R]; //First half is np, second half is mass
    // NOTE(review): MPI_Comm is referenced even when USE_MPI is not defined —
    // presumably a stub typedef exists elsewhere; confirm.
    const MPI_Comm &mpiCommWorld;
    const int procId, nProcs, nParticles;
    const double xscale, mscale, xmax;
    //For R-Phi computation
    const double Rmin;
    const double Rmax;
    const double pmin;
    const double pmax;
    double dp;
    double dR;
private:
    // Wall-clock time in seconds (gettimeofday resolution).
    double get_time2()
    {
        struct timeval Tvalue;
        struct timezone dummy;
        gettimeofday(&Tvalue,&dummy);
        return ((double) Tvalue.tv_sec +1.e-6*((double) Tvalue.tv_usec));
    }
    // Sum the per-rank grids onto rank 0 (or copy them when MPI is disabled),
    // then normalize the R-Phi map to a relative surface density on rank 0.
    // dscale is currently only used by the commented-out log-density scaling.
    void reduceAndScale(float dataIn [N_MESH][4*N_MESH],
                        float dataOut[N_MESH][4*N_MESH],
                        float dataInRPhi [N_MESH_PHI][2*N_MESH_R],
                        float dataOutRPhi[N_MESH_PHI][2*N_MESH_R],
                        const float dscale)
    {
        //Sum over all processes
        double t0 = get_time2();
#ifdef USE_MPI
        MPI_Reduce(dataIn, dataOut, 4*(N_MESH*N_MESH), MPI_FLOAT, MPI_SUM, 0, mpiCommWorld);
        MPI_Reduce(dataInRPhi, dataOutRPhi, 2*(N_MESH_PHI*N_MESH_R), MPI_FLOAT, MPI_SUM, 0, mpiCommWorld);
#else
        memcpy(dataOut, dataIn, 4*(N_MESH*N_MESH)*sizeof(float));
        memcpy(dataOutRPhi, dataInRPhi, 2*(N_MESH_PHI*N_MESH_R)*sizeof(float));
#endif
        // t0/t1 only feed the commented-out timing fprintf below.
        double t1 = get_time2();
        if(procId == 0)
        {
            //Scale results
            // float bg = log10(MIN_D);
            //Density
            // for(int i=0;i<N_MESH;i++)
            // {
            // for(int j=0;j<N_MESH;j++)
            // {
            // //Normalize top view
            // if(dataOut[i][j]>0.0){
            // dataOut[i][j] = log10(dataOut[i][j]*dscale);
            // }else{
            // dataOut[i][j] = bg;
            // }
            // //Normalize front view
            // if(dataOut[i][N_MESH+j]>0.0){
            // dataOut[i][N_MESH+j] = log10(dataOut[i][N_MESH+j]*dscale);
            // }else{
            // dataOut[i][N_MESH+j] = bg;
            // }
            // }//for j
            // }//for i
            //R-Phi scaling: express each cell's surface density relative to the
            //azimuthally averaged surface density SigR at that radius.
            for(int j=0;j<N_MESH_R;j++)
            {
                float R = Rmin + (j+0.5)*dR;
                float SigR = 0.0;
                int nR = 0;
                for(int i=0;i<N_MESH_PHI;i++){
                    SigR += dataOutRPhi[i][N_MESH_R+j];
                    nR += dataOutRPhi[i][ j];
                }
                SigR /= (2.*M_PI*R*dR);
                // ds: area of one azimuthal cell of the annulus.
                float ds = 2.*M_PI*R*dR/(float)N_MESH_PHI;
                for(int i=0;i<N_MESH_PHI;i++)
                {
                    dataOutRPhi[i][j] = dataOutRPhi[i][N_MESH_R+j]/ds;
                    dataOutRPhi[i][j]/= SigR;
                }
            }//for j
        }//if procId == 0
        //if(procId == 0) fprintf(stderr,"MPI Reduce: %lg Scale took: %lg \n", t1-t0, get_time2()-t1);
    }//reduceAndScale
    // Write the combined grids to a binary file (header + interleaved planes).
    // The R-Phi map is embedded in the same record stream, padded with -1
    // sentinels outside its smaller [N_MESH_PHI x N_MESH_R] extent.
    void writeData(const char *fileName,
                   float data [N_MESH] [4*N_MESH],
                   float dataRPhi[N_MESH_PHI][2*N_MESH_R],
                   const double tSim)
    {
#if 1
        std::ofstream out;
        out.open(fileName);
        if(out.is_open())
        {
            //Write the header for
            int flag = -1;
            double tmpD = 0.0;
            float tmpF = -1.f;
            out.write((char*)&flag, sizeof(int));
            int n = N_MESH;
            out.write((char*)&n, sizeof(int));
            out.write((char*)&n, sizeof(int));
            int Nz = 1;
            out.write((char*)&Nz, sizeof(int));
            out.write((char*)&tSim, sizeof(double));
            out.write((char*)&tmpD, sizeof(double));
            out.write((char*)&tmpD, sizeof(double));
            out.write((char*)&tmpD, sizeof(double));
            out.write((char*)&tmpD, sizeof(double));
            out.write((char*)&tmpD, sizeof(double));
            //The actual data: per cell, the four projection planes followed by
            //the R-Phi value (or the -1 sentinel outside its extent).
            for(int i=0;i<N_MESH;i++){
                for(int j=0;j<N_MESH;j++){
                    //Density
                    out.write((char*)&data[i][N_MESH * 0 + j], sizeof(float));
                    out.write((char*)&data[i][N_MESH * 1 + j], sizeof(float));
                    out.write((char*)&data[i][N_MESH * 2 + j], sizeof(float));
                    out.write((char*)&data[i][N_MESH * 3 + j], sizeof(float));
                    //R-phi
                    if(i < N_MESH_PHI && j < N_MESH_R)
                    {
                        out.write((char*)&dataRPhi[i][j], sizeof(float));
                    }
                    else
                    {
                        out.write((char*)&tmpF, sizeof(float));
                    }
                }//for j
            }//for i
            out.close();
        }//out.is_open
#else
        //Old ASCII format, does not contain velocities
        char filename2[256];
        sprintf(filename2, "TestJB.txt");
        FILE *dump = NULL;
        dump = fopen(filename2, "w");
        if(dump)
        {
            //Write some info
            fprintf(dump, "#Tsim= %f\n", tSim);
            // NOTE(review): extra tSim argument below has no matching format
            // specifier (harmless but dead code — this branch is compiled out).
            fprintf(dump, "#X Y TOP FRONT R-Phi\n", tSim);
            for(int i=0;i<N_MESH;i++){
                for(int j=0;j<N_MESH;j++){
                    if(i < N_MESH_PHI && j < N_MESH_R)
                    {
                        fprintf(dump, "%d\t%d\t%f\t%f\t%f\n", i, j, data[i][j],data[i][j+2*N_MESH], dataRPhi[i][j]);
                    }
                    else
                    {
                        fprintf(dump, "%d\t%d\t%f\t%f\t-\n", i, j, data[i][j],data[i][j+2*N_MESH]);
                    }
                }//j
            }//i
            fclose(dump);
        }//if dump
#endif
    }//writeData
public:
    //Create two density plots and write results to file.
    //All computation happens here: histogram star particles, MPI-reduce, write.
    DENSITY(const MPI_Comm &comm,
            const int _procId, const int _nProc, const int _n,
            const float4 *positions,
            const float4 *velocities,
            const unsigned long long *IDs,
            double _xscale, double _mscale, double _xmax,
            const char *baseFilename,
            const double time) :
        mpiCommWorld(comm),
        procId(_procId), nProcs(_nProc), nParticles(_n),
        xscale(_xscale), mscale(_mscale), xmax(_xmax),
        Rmin(0.0), Rmax(20.0), pmin(-180), pmax(180)
    {
        //Reset the buffers
        for(uint i=0; i < N_MESH; i++)
        {
            for(uint j=0; j < N_MESH; j++)
            {
                perProcRes [i][N_MESH*0 + j] = 0.0;
                perProcRes [i][N_MESH*1 + j] = 0.0;
                perProcRes [i][N_MESH*2 + j] = 0.0;
                perProcRes [i][N_MESH*3 + j] = 0.0;
                // The R-Phi grid is smaller; clear only its extent.
                if(i < N_MESH_PHI&& j < 2*N_MESH_R)
                {
                    perProcResRPhi [i][j] = 0.0;
                }
            }//j
        }//i
        dp = (pmax - pmin)/(double)N_MESH_PHI;
        dR = (Rmax - Rmin)/(double)N_MESH_R;
        double xmin = -xmax;
        double ymin = -xmax;
        double ymax = xmax;
        double dx = (xmax - xmin)/N_MESH;
        double dy = (ymax - ymin)/N_MESH;
        // Derive the time scale (Myr per simulation unit) from the length and
        // mass scales via t = sqrt(x^3 / (G M)).
        // NOTE(review): this `x` is shadowed by the loop-local `int x` below.
        double x = xscale*1e3*PARSEC;
        double tmp = x*x*x/(G_CONST*mscale*M_SUN);
        double tscale = sqrt(tmp)*1e-6/ONE_YEAR;
        tmp = 1.e3*dx*xscale;
        double dscale = 1./(tmp*tmp);
        double t1 = get_time2();
        //Walk through the particles and sum the densities
        for(uint i = 0; i < nParticles; i++)
        {
            //Only use star particles
            // NOTE(review): DARKMATTERID is #defined further down this header
            // (inside the DISKSTATS section) — relies on textual ordering or an
            // earlier include; confirm it is visible here.
            if(IDs[i] < DARKMATTERID)
            {
                //Top view
                int x = (int)floor((positions[i].x*xscale-xmin)/dx);
                int y = (int)floor((positions[i].y*xscale-ymin)/dy); //Topview
                int z = (int)floor((positions[i].z*xscale-ymin)/dy); //Front view
                //Top view: accumulate mass and in-plane v^2.
                if(x<N_MESH && y<N_MESH && x>0 && y>0)
                {
                    perProcRes[x][y+0*N_MESH] += positions[i].w*mscale;
                    perProcRes[x][y+1*N_MESH] += velocities[i].x*velocities[i].x+velocities[i].y*velocities[i].y;
                }
                //Front view: accumulate mass and x-z plane v^2.
                if(x<N_MESH && z<N_MESH && x>0 && z>0)
                {
                    perProcRes[x][z+2*N_MESH] += positions[i].w*mscale;
                    perProcRes[x][z+3*N_MESH] += velocities[i].x*velocities[i].x+velocities[i].z*velocities[i].z;
                }
                //R-Phi projection: bin by radius and azimuth (degrees in [-180,180]).
                double r2 = positions[i].x*positions[i].x + positions[i].y*positions[i].y;
                double r = sqrt(r2);
                double sinp = positions[i].y/r;
                double cosp = positions[i].x/r;
                double phi;
                if(positions[i].y>0.0){
                    phi = acos(cosp)*180/M_PI;
                }else{
                    phi = -acos(cosp)*180/M_PI;
                }
                x = (int)floor((phi-pmin)/dp);
                y = (int)floor((r-Rmin)/dR);
                if(x<N_MESH_PHI && y<N_MESH_R && x>=0 && y>=0)
                {
                    perProcResRPhi[x][ y] += 1;
                    perProcResRPhi[x][N_MESH_R+y] += positions[i].w;
                }
            }//if ID < DMSTARTID
        }//for i < nParticles
        double t2 = get_time2();
        //Combine the results for all processes, top view
        reduceAndScale(perProcRes, combinedRes, perProcResRPhi, combinedResRPhi, dscale);
        double t3 = get_time2();
        //Dump top view results
        char fileName[256];
        sprintf(fileName,"%s-TopFront-%f", baseFilename, time);
        if(procId == 0) writeData(fileName, combinedRes, combinedResRPhi, tscale*time);
        double t3b = get_time2();
        //if(procId == 0) fprintf(stderr, "Compute took: %lg Write took: %lg \n", t2-t1, t3b-t3);
    }//Function
}; //Struct
struct DISKSTATS
{
private:
//Some constants
#define ONE_YEAR 3.1558149984e7
#define VELOCITY_KMS_CGS (1.e+5) // [km/s] -> [cm/s]
#define KPC_CGS (PC_CGS*1.e+3) // [cm]
#define PC_CGS (3.08568025e+18) // [cm]
#define MSUN_CGS (1.98892e+33) // [g]
#define GRAVITY_CONSTANT_CGS 6.6725985e-8 // [dyne m^2/kg^2] = [cm^3/g/s^2]
#define PI (3.1415926535897932384626433832795)
#define SQ(x) ((x)*(x))
#define CUBE(x) ((x)*(x)*(x))
#define MAX(x,y) (((x)>(y))?(x):(y))
#define DARKMATTERID 3000000000000000000
#define DISKID 0
#define BULGEID 2000000000000000000
const MPI_Comm &mpiCommWorld;
const int procId, nProcs, nParticles;
const double xscale, mscale;
#define iMax 600
//nItems should match the items in the enum below
#define nItems 9
//Enum contains variables Ns, Sigs, Vrs, Vas, Vzs, Drs, Das, Dzs, zrms
enum {NS = 0, SIGS,
VRS, VAS, VZS, // mean speed
DRS, DAS, DZS, // dispersion
ZRMS};
float perProcRes[nItems][3*iMax] ;
float RrotEnd;
float RrotMin;
float dR;
double UnitVelocity, UnitLength, UnitTime, GravConst, SDUnit, UnitMass;
private:
double get_time()
{
struct timeval Tvalue;
struct timezone dummy;
gettimeofday(&Tvalue,&dummy);
return ((double) Tvalue.tv_sec +1.e-6*((double) Tvalue.tv_usec));
}
void Analysis(const char *fileNameOut, const int procId, const double treal, const double tsim)
{
float comProcRes[nItems][iMax*3] ;
float Rs[iMax*3];
float Qs[iMax*3], Gam[iMax*3], mX[iMax*3];
float Omgs[iMax*3], kapps[iMax*3];
float m[iMax*3], Mass[iMax*3];
float VelUnit = UnitVelocity/VELOCITY_KMS_CGS;
//Init
for(int i=0; i<iMax; i++){
//Rs[i] = RrotMin + (i+0.5)*dR;
Qs[i] = Gam[i] = mX[i] = 0.0;
Omgs[i] = kapps[i] = 0.0;
Mass[i] = 0.0;
}
for(int j=0; j < 3; j++){
for(int i=0; i<iMax; i++){
Rs[i+j*iMax] = RrotMin + (i+0.5)*dR;
}}
for(int j=0; j<nItems; j++)
for(int i=0; i<iMax*3; i++)
comProcRes[j][i] = 0.0;
double t0 = get_time();
#ifdef USE_MPI
//MPI Reduce: Sum results over all processes store in procId == 0
MPI_Reduce(perProcRes, comProcRes, nItems*iMax*3, MPI_FLOAT, MPI_SUM, 0, mpiCommWorld);
#else
memcpy(comProcRes, perProcRes, nItems*iMax*3*sizeof(float));
#endif
double t1 = get_time();
if(procId == 0)
{
for(int j=0; j < 3; j++)
{
const int offset = j*iMax;
// average
Mass[offset+0] = comProcRes[SIGS][offset+0];
for(int i=0; i<iMax; i++)
{
if(i>0){
Mass[offset+i] = Mass[offset+i-1] + comProcRes[SIGS][offset+i];
}
if(comProcRes[NS][offset+i] != 0)
{
comProcRes[SIGS][offset+i] /= (2.0*PI*Rs[offset+i]*dR);
comProcRes[VRS] [offset+i] /= (float)comProcRes[NS][offset+i];
comProcRes[VAS] [offset+i] /= (float)comProcRes[NS][offset+i];
comProcRes[VZS] [offset+i] /= (float)comProcRes[NS][offset+i];
comProcRes[DRS] [offset+i] = sqrt(MAX(comProcRes[DRS][offset+i]/(float)comProcRes[NS][offset+i] - SQ(comProcRes[VRS][offset+i]),0.0));
comProcRes[DAS] [offset+i] = sqrt(MAX(comProcRes[DAS][offset+i]/(float)comProcRes[NS][offset+i] - SQ(comProcRes[VAS][offset+i]),0.0));
comProcRes[DZS] [offset+i] = sqrt(MAX(comProcRes[DZS][offset+i]/(float)comProcRes[NS][offset+i] - SQ(comProcRes[VZS][offset+i]),0.0));
Omgs [offset+i] = comProcRes[VAS][offset+i]/Rs[offset+i];
comProcRes[ZRMS][offset+i] = sqrt(comProcRes[ZRMS][offset+i]/(float)comProcRes[NS][offset+i]);
}
}//for i
for(int i=1; i<iMax-1; i++)
{
kapps[offset+i] = sqrt(MAX(0.5*Rs[offset+i]*((SQ(Omgs[offset+i+1])-SQ(Omgs[offset+i-1]))/dR) + 4.0*SQ(Omgs[offset+i]),0.0));
Gam[offset+i] = -(Rs[offset+i]/Omgs[offset+i])*0.5*(Omgs[offset+i+1]-Omgs[offset+i-1])/dR;
}
kapps[offset+0] = 2.0*Omgs[offset+0];
kapps[offset+iMax-1] = kapps[offset+iMax-2];
Gam[offset+0] = Gam[offset+1];
Gam[offset+iMax-1] = Gam[offset+iMax-2];
for(int i=0; i<iMax; i++)
{
m[offset+i] = kapps[offset+i]*kapps[offset+i]/(GravConst*comProcRes[SIGS][offset+i]);
Qs[offset+i] = comProcRes[DRS][offset+i]*kapps[offset+i]/(3.36*GravConst*comProcRes[SIGS][offset+i]);
mX[offset+i] = SQ(kapps[offset+i])*Rs[offset+i]/(2.0*PI*GravConst*comProcRes[SIGS][offset+i])/4.0;
comProcRes[VRS][offset+i] *= VelUnit;
comProcRes[VAS][offset+i] *= VelUnit;
comProcRes[VZS][offset+i] *= VelUnit;
comProcRes[DRS][offset+i] *= VelUnit;
comProcRes[DAS][offset+i] *= VelUnit;
comProcRes[DZS][offset+i] *= VelUnit;
kapps[offset+i] *= VelUnit;
Omgs[offset+i] /= UnitTime;
comProcRes[SIGS][offset+i]*= SDUnit;
Mass[offset+i] *= 2.33e9;//UnitMass;
}
}//For j
double t2 = get_time();
char fileNameOut2[512];
sprintf(fileNameOut2,"%s-%f", fileNameOut, tsim);
std::ofstream out(fileNameOut2);
if(out.is_open())
{
out.setf(std::ios::scientific);
out.precision(6);
out << "# T = " << treal << " (Gyr) \n";
out << "#RS(D+B) Vas Drs Das Dzs Omg Kapp Q Gam mX Sigs Mass m Zrms RS(B) Vas Drs Das Dzs Sigs Mass RS(D) Vas Drs Das Dzs Sigs Mass\n";
for(int i=0; i<iMax; i++)
{
//Disk + Bulge results
out << Rs[i] << " " << comProcRes[VAS][i] << " " // 1,2
<< comProcRes[DRS][i] << " " << comProcRes[DAS][i] << " " << comProcRes[DZS][i] << " "// 3,4,5
<< Omgs[i] << " " << kapps[i] << " " << Qs[i] << " " // 6,7,8
<< Gam[i] << " " << mX[i] << " " << comProcRes[SIGS][i] << " " //9,10,11
<< Mass[i] << " " << m[i] << " " << comProcRes[ZRMS][i] << " " << comProcRes[NS][i] << " " // 12,13,14
//Bulge only
<< Rs[i+iMax] << " " << comProcRes[VAS][i+iMax] << " " // 15,16
<< comProcRes[DRS][i+iMax] << " " << comProcRes[DAS][i+iMax] << " " << comProcRes[DZS][i+iMax] << " "// 17,18,19
<< comProcRes[SIGS][i+iMax] << " " << Mass[i+iMax] << " " // 20,21
//Disk only
<< Rs[i+2*iMax] << " " << comProcRes[VAS][i+2*iMax] << " " // 22,23
<< comProcRes[DRS][i+2*iMax] << " " << comProcRes[DAS][i+2*iMax] << " " << comProcRes[DZS][i+2*iMax] << " "// 24,25,26
<< comProcRes[SIGS][i+2*iMax] << " " << Mass[i+2*iMax] << std::endl; // 27,28
}
out.close();
}
else
{
fprintf(stderr,"Failed to open output file for disk-stats: %s \n", fileNameOut);
}
double t3 = get_time();
//fprintf(stderr,"Timing: Reduce: %lg\tComp: %lg\tWrite: %lg\n",t1-t0,t2-t1,t3-t2);
} //if procId == 0
}//end analysis
public:
//Create two density plots and write results to file
// Constructor: bins disk/bulge particles into radial shells, accumulates
// per-process velocity/dispersion/surface-density sums, then calls Analysis()
// to MPI-reduce and write the disk statistics file.
// Note: units are set up from xscale [kpc] and mscale [Msun]; tsim is in
// simulation units and converted to Gyr for the output header.
DISKSTATS(const MPI_Comm &comm,
          const int _procId, const int _nProc, const int _n,
          const float4 *positions,
          const float4 *velocities,
          const unsigned long long *IDs,
          double _xscale, double _mscale,
          const char *baseFilename,
          const double tsim) :
    mpiCommWorld(comm),
    procId(_procId), nProcs(_nProc), nParticles(_n),
    xscale(_xscale), mscale(_mscale)
{
    // Radial binning range [RrotMin, RrotEnd) split into iMax shells.
    RrotEnd = 30.0;
    RrotMin = 0.0;
    dR = (RrotEnd - RrotMin)/iMax;
    // Clear the per-process accumulators: 3 segments of iMax bins each
    // (disk+bulge, bulge-only, disk-only).
    for(int j=0; j<nItems; j++)
        for(int i=0; i<3*iMax; i++)
            perProcRes[j][i] = 0.0;
    // Unit system derived from the length/mass scales (G = 1 internally).
    GravConst = 1.0;
    UnitLength = xscale*KPC_CGS; //[kpc]->[cm]
    UnitMass = mscale*MSUN_CGS;
    SDUnit = 2.3e+9/1.e6;        // surface-density conversion factor
    double tmp = CUBE(UnitLength)/(GRAVITY_CONSTANT_CGS*UnitMass);
    UnitTime = sqrt(tmp); //[s]
    UnitVelocity = UnitLength/UnitTime; //[cm/s]
    double treal = 1e-9*tsim*UnitTime/ONE_YEAR;  // simulation time in Gyr
    //Process the particles
    for(int j=0; j < nParticles; j++)
    {
        // Baryonic (disk+bulge) particles are identified by ID range.
        // NOTE(review): IDs is unsigned, so `IDs[j] >= 0` is always true —
        // presumably kept for symmetry with the signed range checks; confirm.
        if(IDs[j] >= 0 && IDs[j] < DARKMATTERID)
        {
            //Bluge+disk particles
            // Cylindrical decomposition: radius R, height z, and the
            // radial (vr), azimuthal (va) and vertical (vz) velocities.
            double R = sqrt(SQ(positions[j].x) + SQ(positions[j].y));
            double z = positions[j].z;
            double vr = velocities[j].x*positions[j].x/R + velocities[j].y*positions[j].y/R;
            double va = -velocities[j].x*positions[j].y/R + velocities[j].y*positions[j].x/R;
            double vz = velocities[j].z;
            if( R <= RrotMin || R >= RrotEnd ) continue;
            int i = (int)((R-RrotMin)/dR);
            //Store the different properties
            //First disk+bulge data
            int offset = 0;
            perProcRes[NS  ][offset+i] += 1;
            perProcRes[SIGS][offset+i] += (float)positions[j].w;   // particle mass (w component)
            perProcRes[VRS ][offset+i] += (float)vr;
            perProcRes[VAS ][offset+i] += (float)va;
            perProcRes[VZS ][offset+i] += (float)vz;
            perProcRes[DRS ][offset+i] += SQ((float)vr);           // second moments for dispersions
            perProcRes[DAS ][offset+i] += SQ((float)va);
            perProcRes[DZS ][offset+i] += SQ((float)vz);
            perProcRes[ZRMS][offset+i] += SQ(z);
            offset = iMax;
            //Bulge only
            if(IDs[j] >= BULGEID && IDs[j] < DARKMATTERID)
            {
                perProcRes[NS  ][offset+i] += 1;
                perProcRes[SIGS][offset+i] += (float)positions[j].w;
                perProcRes[VRS ][offset+i] += (float)vr;
                perProcRes[VAS ][offset+i] += (float)va;
                perProcRes[VZS ][offset+i] += (float)vz;
                perProcRes[DRS ][offset+i] += SQ((float)vr);
                perProcRes[DAS ][offset+i] += SQ((float)va);
                perProcRes[DZS ][offset+i] += SQ((float)vz);
                perProcRes[ZRMS][offset+i] += SQ(z);
            }//Bulge
            offset = 2*iMax;
            //Disk only
            if(IDs[j] >= 0 && IDs[j] < BULGEID)
            {
                perProcRes[NS  ][offset+i] += 1;
                perProcRes[SIGS][offset+i] += (float)positions[j].w;
                perProcRes[VRS ][offset+i] += (float)vr;
                perProcRes[VAS ][offset+i] += (float)va;
                perProcRes[VZS ][offset+i] += (float)vz;
                perProcRes[DRS ][offset+i] += SQ((float)vr);
                perProcRes[DAS ][offset+i] += SQ((float)va);
                perProcRes[DZS ][offset+i] += SQ((float)vz);
                perProcRes[ZRMS][offset+i] += SQ(z);
            }//Disk
        }//if(IDs[j] >= 0 && IDs[j] < DMSTARTID)
    }//for nParticles
    // Reduce across ranks and write the output file (rank 0 only).
    Analysis(baseFilename, procId, treal, tsim);
}//DISKSTATS func
}; //DISKSTATS struct
|
haadiraja/Candy-Machine-V2
|
js/node_modules/@magic-sdk/provider/dist/es/modules/base-module.js
|
import { __read, __spread } from "tslib";
import { MagicOutgoingWindowMessage, MagicIncomingWindowMessage } from '@magic-sdk/types';
import { createMalformedResponseError, MagicRPCError } from '../core/sdk-exceptions';
import { standardizeJsonRpcRequestPayload } from '../core/json-rpc';
import { createPromiEvent } from '../util/promise-tools';
// Base class for Magic SDK feature modules (transpiled ES5 output).
// Each module holds a reference to the owning SDK instance and proxies
// JSON-RPC requests to the Magic <iframe> through the SDK's transport.
var BaseModule = /** @class */ (function () {
    function BaseModule(sdk) {
        this.sdk = sdk;
    }
    Object.defineProperty(BaseModule.prototype, "transport", {
        /**
         * The `PayloadTransport` for the SDK instance registered to this module.
         */
        get: function () {
            return this.sdk.transport;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(BaseModule.prototype, "overlay", {
        /**
         * The `ViewController` for the SDK instance registered to this module.
         */
        get: function () {
            return this.sdk.overlay;
        },
        enumerable: true,
        configurable: true
    });
    /**
     * Emits promisified requests to the Magic `<iframe>` context.
     *
     * Returns a PromiEvent: it resolves/rejects with the JSON-RPC response
     * and additionally re-emits intermediate events the iframe sends for the
     * same payload id while the request is in flight.
     */
    BaseModule.prototype.request = function (payload) {
        var responsePromise = this.transport.post(this.overlay, MagicOutgoingWindowMessage.MAGIC_HANDLE_REQUEST, standardizeJsonRpcRequestPayload(payload));
        // PromiEvent-ify the response.
        var promiEvent = createPromiEvent(function (resolve, reject) {
            responsePromise
                .then(function (res) {
                // Stop listening for iframe events once a final response arrives.
                cleanupEvents();
                if (res.hasError)
                    reject(new MagicRPCError(res.payload.error));
                else if (res.hasResult)
                    resolve(res.payload.result);
                else
                    throw createMalformedResponseError();
            })
                .catch(function (err) {
                cleanupEvents();
                reject(err);
            });
        });
        // Listen for events from the `<iframe>` associated with the current payload
        // and emit those to `PromiEvent` subscribers.
        var cleanupEvents = this.transport.on(MagicIncomingWindowMessage.MAGIC_HANDLE_EVENT, function (evt) {
            var _a;
            var response = evt.data.response;
            // Only forward events that match this request's payload id and
            // carry an `event` name in their result.
            if (response.id === payload.id && ((_a = response.result) === null || _a === void 0 ? void 0 : _a.event)) {
                var _b = response.result, event_1 = _b.event, _c = _b.params, params = _c === void 0 ? [] : _c;
                promiEvent.emit.apply(promiEvent, __spread([event_1], params));
            }
        });
        return promiEvent;
    };
    return BaseModule;
}());
export { BaseModule };
//# sourceMappingURL=base-module.js.map
|
Havoc-OS/androidprebuilts_go_linux-x86
|
test/fixedbugs/issue12944.go
|
<filename>test/fixedbugs/issue12944.go
// errorcheck

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Verifies that indexing a zero-length array inside a constant expression
// is rejected at compile time. The ERROR comment below is a directive for
// the errorcheck harness and must stay on the offending line.
package main

import "unsafe"

const (
	_ = unsafe.Sizeof([0]byte{}[0]) // ERROR "out of bounds"
)
|
NavpreetDevpuri/Python
|
web_programming/get_top_hn_posts.py
|
from __future__ import annotations
import requests
def get_hackernews_story(story_id: str) -> dict:
    """Fetch a single HackerNews item by id from the Firebase API.

    :param story_id: numeric item id (as a string) of the story to fetch
    :return: the decoded JSON payload describing the item
    """
    endpoint = f"https://hacker-news.firebaseio.com/v0/item/{story_id}.json?print=pretty"
    response = requests.get(endpoint)
    return response.json()
def hackernews_top_stories(max_stories: int = 10) -> list[dict]:
    """
    Get the top max_stories posts from HackerNews - https://news.ycombinator.com/
    """
    url = "https://hacker-news.firebaseio.com/v0/topstories.json?print=pretty"
    top_ids = requests.get(url).json()
    stories = []
    for story_id in top_ids[:max_stories]:
        stories.append(get_hackernews_story(story_id))
    return stories
def hackernews_top_stories_as_markdown(max_stories: int = 10) -> str:
    """Render the top HackerNews stories as a markdown bullet list.

    Fix: the previous ``"...".format(**story)`` raised ``KeyError`` for items
    that have no ``url`` field (Ask HN / self posts per the HN Firebase API).
    Such stories now link to their HackerNews discussion page instead.

    :param max_stories: number of top stories to include
    :return: newline-joined markdown lines, one ``* [title](url)`` per story
    """
    lines = []
    for story in hackernews_top_stories(max_stories):
        title = story.get("title", "untitled")
        # Self posts carry no "url"; fall back to the HN item page.
        url = story.get("url") or f"https://news.ycombinator.com/item?id={story.get('id', '')}"
        lines.append(f"* [{title}]({url})")
    return "\n".join(lines)
# CLI entry point: print the top stories as a markdown list.
if __name__ == "__main__":
    print(hackernews_top_stories_as_markdown())
|
mdamyanova/C-Sharp-Web-Development
|
06.JavaScript Core/07.01.JS Fundamentals/12.Exercises-objects, associative arrays, sets, maps/01.HeroicInventory.js
|
<gh_stars>1-10
function heroicInventory(input) {
    // Each input line looks like "name / level" or "name / level / item, item".
    // Build one object per hero and serialize the whole collection as JSON.
    const heroes = input.map((entry) => {
        const parts = entry.split(" / ");
        // heroes without a third segment own no items
        const items = parts.length > 2 ? parts[2].split(", ") : [];
        return {
            name: parts[0].trim(),
            level: Number(parts[1].trim()),
            items: items
        };
    });
    return JSON.stringify(heroes);
}
|
cameronhunter/alexa
|
packages/alexa-constants/src/index.js
|
<gh_stars>10-100
// Barrel module: re-exports every Alexa constant group both as named exports
// and as a single default object.
// NOTE(review): the bare `export X from '...'` form is the stage-1
// "export extensions" proposal, not standard ES modules — presumably this
// package is built with a Babel plugin that supports it; confirm before
// changing the toolchain.
import AudioPlayer from './AudioPlayer';
import CardType from './CardType';
import ConnectedHome from './ConnectedHome';
import Dialog from './Dialog';
import Intent from './Intent';
import PlaybackController from './PlaybackController';
import Request from './Request';
import SpeechType from './SpeechType';

export AudioPlayer from './AudioPlayer';
export CardType from './CardType';
export ConnectedHome from './ConnectedHome';
export Dialog from './Dialog';
export Intent from './Intent';
export PlaybackController from './PlaybackController';
export Request from './Request';
export SpeechType from './SpeechType';

export default {
  AudioPlayer,
  CardType,
  ConnectedHome,
  Dialog,
  Intent,
  PlaybackController,
  Request,
  SpeechType
};
|
ox-it/cucm-http-api
|
src/main/java/com/cisco/axl/api/_8/XParameterMember.java
|
<reponame>ox-it/cucm-http-api<gh_stars>1-10
package com.cisco.axl.api._8;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for XParameterMember complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="XParameterMember">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence minOccurs="0">
* <element name="parameter" type="{http://www.cisco.com/AXL/API/8.0}XDialParameter" minOccurs="0"/>
* <element name="value" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="selectionOrder" type="{http://www.cisco.com/AXL/API/8.0}XInteger"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated binding class for the AXL 8.0 "XParameterMember" complex
// type (see the schema fragment in the class javadoc above). Generated code:
// avoid hand-editing logic here.
// NOTE(review): `parameter` is bound as String although the schema declares
// XDialParameter — presumably a generator mapping choice; confirm against the
// generator configuration before changing.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "XParameterMember", propOrder = {
    "parameter",
    "value",
    "selectionOrder"
})
public class XParameterMember {

    // Optional schema elements are mapped to plain fields (null when absent).
    protected String parameter;
    protected String value;
    // selectionOrder is wrapped in JAXBElement so presence/nil can be represented.
    @XmlElementRef(name = "selectionOrder", type = JAXBElement.class)
    protected JAXBElement<String> selectionOrder;

    /**
     * Gets the value of the parameter property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getParameter() {
        return parameter;
    }

    /**
     * Sets the value of the parameter property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setParameter(String value) {
        this.parameter = value;
    }

    /**
     * Gets the value of the value property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getValue() {
        return value;
    }

    /**
     * Sets the value of the value property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Gets the value of the selectionOrder property.
     *
     * @return
     *     possible object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public JAXBElement<String> getSelectionOrder() {
        return selectionOrder;
    }

    /**
     * Sets the value of the selectionOrder property.
     *
     * @param value
     *     allowed object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public void setSelectionOrder(JAXBElement<String> value) {
        this.selectionOrder = ((JAXBElement<String> ) value);
    }

}
|
adelbennaceur/carla
|
Util/OSM2ODR/src/netbuild/NBPTStop.cpp
|
<filename>Util/OSM2ODR/src/netbuild/NBPTStop.cpp<gh_stars>1-10
/****************************************************************************/
// Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo
// Copyright (C) 2001-2020 German Aerospace Center (DLR) and others.
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0/
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License 2.0 are satisfied: GNU General Public License, version 2
// or later which is available at
// https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
/****************************************************************************/
/// @file NBPTStop.cpp
/// @author <NAME>
/// @date Tue, 20 Mar 2017
///
// The representation of a single pt stop
/****************************************************************************/
#include <config.h>
#include <utils/iodevices/OutputDevice.h>
#include <utils/common/StringUtils.h>
#include "NBPTStop.h"
#include "NBEdge.h"
#include "NBEdgeCont.h"
// ===========================================================================
// method definitions
// ===========================================================================
// Builds a public-transport stop. A stop constructed with an empty
// `origEdgeId` is "loose" (myIsLoose): it has not been matched to a network
// edge yet and will be processed later.
NBPTStop::NBPTStop(std::string ptStopId, Position position, std::string edgeId, std::string origEdgeId, double length,
                   std::string name, SVCPermissions svcPermissions, double parkingLength) :
    myPTStopId(ptStopId),
    myPosition(position),
    myEdgeId(edgeId),
    myOrigEdgeId(origEdgeId),
    myPTStopLength(length),
    myName(name),
    myParkingLength(parkingLength),
    myPermissions(svcPermissions),
    myBidiStop(nullptr),
    myIsLoose(origEdgeId == ""),
    myIsMultipleStopPositions(false) {
}
// --- trivial accessors for the stop's identity and location ---

/// @return the stop id
std::string
NBPTStop::getID() const {
    return myPTStopId;
}

/// @return the id of the OSM way the stop was originally mapped to
const std::string
NBPTStop::getOrigEdgeId() const {
    return myOrigEdgeId;
}

/// @return the id of the network edge the stop is currently assigned to
const std::string
NBPTStop::getEdgeId() const {
    return myEdgeId;
}

/// @return the human-readable stop name
const std::string
NBPTStop::getName() const {
    return myName;
}

/// @return the stop's geometric position
const Position&
NBPTStop::getPosition() const {
    return myPosition;
}
// Mirror the stop position along the x-axis (y -> -y).
void
NBPTStop::mirrorX() {
    myPosition.mul(1, -1);
}

// Center the stop interval of length myPTStopLength on `center`, clamping
// the resulting [myStartPos, myEndPos] to [0, edgeLength].
void
NBPTStop::computeExtent(double center, double edgeLength) {
    myStartPos = MAX2(0.0, center - myPTStopLength / 2.);
    myEndPos = MIN2(center + myPTStopLength / 2., edgeLength);
}
// Register a transit line name for this stop, XML-escaped; duplicates are
// ignored so each line appears at most once.
void
NBPTStop::addLine(const std::string& line) {
    const std::string escaped = StringUtils::escapeXML(line);
    for (const std::string& known : myLines) {
        if (known == escaped) {
            return;  // already registered
        }
    }
    myLines.push_back(escaped);
}
// Serialize the stop as a <busStop> element (plus nested <access> children)
// to the given output device. Attribute order matters for stable output.
void
NBPTStop::write(OutputDevice& device) {
    device.openTag(SUMO_TAG_BUS_STOP);
    device.writeAttr(SUMO_ATTR_ID, myPTStopId);
    if (!myName.empty()) {
        device.writeAttr(SUMO_ATTR_NAME, StringUtils::escapeXML(myName));
    }
    device.writeAttr(SUMO_ATTR_LANE, myLaneId);
    device.writeAttr(SUMO_ATTR_STARTPOS, myStartPos);
    device.writeAttr(SUMO_ATTR_ENDPOS, myEndPos);
    // friendlyPos lets the simulation accept slightly out-of-range positions
    device.writeAttr(SUMO_ATTR_FRIENDLY_POS, "true");
    if (myLines.size() > 0) {
        device.writeAttr(SUMO_ATTR_LINES, toString(myLines));
    }
    if (myParkingLength > 0) {
        device.writeAttr(SUMO_ATTR_PARKING_LENGTH, myParkingLength);
    }
    if (!myAccesses.empty()) {
        // deterministic ordering of access lanes
        std::sort(myAccesses.begin(), myAccesses.end());
        for (auto tuple : myAccesses) {
            device.openTag(SUMO_TAG_ACCESS);
            device.writeAttr(SUMO_ATTR_LANE, std::get<0>(tuple));
            device.writeAttr(SUMO_ATTR_POSITION, std::get<1>(tuple));
            device.writeAttr(SUMO_ATTR_LENGTH, std::get<2>(tuple));
            device.writeAttr(SUMO_ATTR_FRIENDLY_POS, true);
            device.closeTag();
        }
    }
    device.closeTag();
}
// Translate the stop (and all of its platform candidates) by the given offset.
void
NBPTStop::reshiftPosition(const double offsetX, const double offsetY) {
    myPosition.add(offsetX, offsetY, 0);
    for (NBPTPlatform& platformCand : myPlatformCands) {
        platformCand.reshiftPosition(offsetX, offsetY);
    }
}

/// @return the vehicle classes that may use this stop
SVCPermissions
NBPTStop::getPermissions() const {
    return myPermissions;
}
// --- platform candidates and multi-position flag ---

// Record a platform geometry that may belong to this stop.
void
NBPTStop::addPlatformCand(NBPTPlatform platform) {
    myPlatformCands.push_back(platform);
}

const std::vector<NBPTPlatform>&
NBPTStop::getPlatformCands() {
    return myPlatformCands;
}

/// @return whether this stop represents several distinct stop positions
bool
NBPTStop::getIsMultipleStopPositions() const {
    return myIsMultipleStopPositions;
}

void
NBPTStop::setIsMultipleStopPositions(bool multipleStopPositions) {
    myIsMultipleStopPositions = multipleStopPositions;
}

/// @return the stop length along its edge
double
NBPTStop::getLength() const {
    return myPTStopLength;
}
// Reassign the stop to another edge and recompute its lane/extent.
// @return true when a suitable lane was found (or the stop is loose)
bool
NBPTStop::setEdgeId(std::string edgeId, const NBEdgeCont& ec) {
    myEdgeId = edgeId;
    return findLaneAndComputeBusStopExtent(ec);
}

// Remember an alternative edge built from the given OSM way.
void
NBPTStop::registerAdditionalEdge(std::string wayId, std::string edgeId) {
    myAdditionalEdgeCandidates[wayId] = edgeId;
}

const std::map<std::string, std::string>&
NBPTStop::getMyAdditionalEdgeCandidates() const {
    return myAdditionalEdgeCandidates;
}

void
NBPTStop::setMyOrigEdgeId(const std::string& myOrigEdgeId) {
    NBPTStop::myOrigEdgeId = myOrigEdgeId;
}

void
NBPTStop::setMyPTStopLength(double myPTStopLength) {
    NBPTStop::myPTStopLength = myPTStopLength;
}
// Look up the stop's edge by id and delegate to the NBEdge* overload.
bool
NBPTStop::findLaneAndComputeBusStopExtent(const NBEdgeCont& ec) {
    NBEdge* edge = ec.getByID(myEdgeId);
    return findLaneAndComputeBusStopExtent(edge);
}

// Pick a lane on `edge` that grants this stop's permissions, then project the
// stop position onto that lane to compute [myStartPos, myEndPos].
// @return true if a lane was assigned; for a null edge, true only when the
//         stop is loose (empty edge id) so it can be retried later.
bool
NBPTStop::findLaneAndComputeBusStopExtent(const NBEdge* edge) {
    if (edge != nullptr) {
        myEdgeId = edge->getID();
        int laneNr = -1;
        // NOTE(review): this loop yields the index of the first lane whose
        // permissions include the stop's; but when NO lane matches, laneNr
        // ends at laneCount-1, silently falling back to the last lane —
        // confirm this fallback is intended rather than returning false.
        for (const auto& it : edge->getLanes()) {
            if ((it.permissions & getPermissions()) == getPermissions()) {
                ++laneNr;
                break;
            }
            laneNr++;
        }
        if (laneNr != -1) {
            myLaneId = edge->getLaneID(laneNr);
            const PositionVector& shape = edge->getLaneShape(laneNr);
            // project onto the lane, then rescale from geometric to loaded length
            double offset = shape.nearest_offset_to_point2D(getPosition(), false);
            offset = offset * edge->getLoadedLength() / edge->getLength();
            computeExtent(offset, edge->getLoadedLength());
            return true;
        }
    }
    return myEdgeId == ""; // loose stop. Try later when processing lines
}
// Overwrite the stop id (used when renaming/merging stops).
void
NBPTStop::setMyPTStopId(std::string id) {
    myPTStopId = id;
}

// Drop all registered access lanes.
void
NBPTStop::clearAccess() {
    myAccesses.clear();
}
// Register a pedestrian access point for this stop. At most one access per
// network edge is kept: any previous access whose lane lies on the same edge
// is removed before the new one is appended.
void
NBPTStop::addAccess(std::string laneID, double offset, double length) {
    const std::string newEdgeID = SUMOXMLDefinitions::getEdgeIDFromLane(laneID);
    // drop any previously registered access on the same edge
    const auto onSameEdge = [&newEdgeID](const auto& access) {
        return SUMOXMLDefinitions::getEdgeIDFromLane(std::get<0>(access)) == newEdgeID;
    };
    myAccesses.erase(std::remove_if(myAccesses.begin(), myAccesses.end(), onSameEdge),
                     myAccesses.end());
    myAccesses.emplace_back(laneID, offset, length);
}
/****************************************************************************/
|
cognitom/processwarp
|
src/core/builtin_gui.hpp
|
#pragma once

#include <string>
#include <vector>

#include "process.hpp"
#include "type.hpp"
#include "vmachine.hpp"

namespace processwarp {
/**
 * Delegate interface: receivers of GUI commands emitted by the built-in
 * GUI functions implement this to forward commands to the target node.
 */
class BuiltinGuiDelegate {
 public:
  virtual ~BuiltinGuiDelegate();
  /// Send a GUI command originating from `proc` to the node `dst_nid`.
  virtual void builtin_gui_send_command(Process& proc, const nid_t& dst_nid, Module::Type module,
                                        const std::string& command, picojson::object& param) = 0;
};

/**
 * Built-in GUI functions exposed to guest programs running in the VM.
 * Each static method implements one VM builtin; regist() wires them into
 * a VMachine together with the delegate that performs the actual I/O.
 */
class BuiltinGui {
 public:
  static BuiltinPostProc::Type create(Process& proc, Thread& thread, BuiltinFuncParam p,
                                      vaddr_t dst, std::vector<uint8_t>& src);
  static BuiltinPostProc::Type flush(Process& proc, Thread& thread, BuiltinFuncParam p,
                                     vaddr_t dst, std::vector<uint8_t>& src);
  static BuiltinPostProc::Type script(Process& proc, Thread& thread, BuiltinFuncParam p,
                                      vaddr_t dst, std::vector<uint8_t>& src);
  /// Register all GUI builtins on the given virtual machine.
  static void regist(VMachine& vm, BuiltinGuiDelegate& delegate);
};
}  // namespace processwarp
|
MicheleBarcelos/Chocolateria
|
src/DAO/VendedorDAO.java
|
<reponame>MicheleBarcelos/Chocolateria
/*The MIT License (MIT)
Copyright (c) 2016 MicheleBarcelos
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.*/
package DAO;
import java.util.List;
import model.Vendedor;
import model.HibernateUtil;
import org.hibernate.Session;
import org.hibernate.Transaction;
/**
* @author <NAME>
*/
/**
 * Hibernate-backed DAO for {@link Vendedor} entities.
 *
 * <p>Fix: {@code listarVendedor}'s catch block previously called
 * {@code session.beginTransaction().rollback()}, which starts a brand-new
 * transaction and rolls THAT back, leaving the failed transaction open.
 * It now rolls back the active transaction, matching the other methods.
 *
 * <p>NOTE(review): {@code inserir} closes the session, so later calls on the
 * same DAO instance will fail — confirm whether callers always create a fresh
 * DAO per insert.
 */
public class VendedorDAO {

    Vendedor vendedor = new Vendedor();
    private final Session session;
    private Transaction transaction;

    /** Opens a dedicated session for this DAO instance. */
    public VendedorDAO() {
        session = HibernateUtil.getSessionFactory().openSession();
    }

    /** Persists a new seller; rolls back on failure and closes the session. */
    public void inserir(Vendedor vendedor) {
        transaction = session.beginTransaction();
        try {
            session.save(vendedor);
            session.flush();
            session.getTransaction().commit();
        } catch (Exception e) {
            transaction.rollback();
        }
        session.close();
    }

    /** @return all sellers (no transaction; read-only query) */
    public List<Vendedor> findAll() {
        List<Vendedor> vendedors = (List<Vendedor>) session.createQuery("from Vendedor").list();
        return vendedors;
    }

    /** @return the seller with the given id, or null if absent */
    public Vendedor findById(int id) {
        Vendedor vendedors = (Vendedor) session.get(Vendedor.class, id);
        return vendedors;
    }

    /** Updates an existing seller; rolls back on failure. */
    public void atualizar(Vendedor vendedor) {
        transaction = session.beginTransaction();
        try {
            session.update(vendedor);
            session.getTransaction().commit();
        } catch (Exception e) {
            transaction.rollback();
        }
    }

    /** Deletes the seller with the given id; rolls back on failure. */
    public void deleteById(int id) {
        transaction = session.beginTransaction();
        try {
            session.delete(findById(id));
            session.getTransaction().commit();
        } catch (Exception e) {
            transaction.rollback();
        }
    }

    /** @return sellers whose name contains {@code nome} (LIKE %nome%) */
    public List<Vendedor> findByNome(String nome) {
        String sql = "from Vendedor c where c.nome LIKE :nome";
        List result = session.createQuery(sql).setParameter("nome", "%" + nome + "%").list();
        return (List<Vendedor>) result;
    }

    /** @return all sellers inside an explicit transaction, or null on failure */
    public List<Vendedor> listarVendedor() {
        String sql = "from Vendedor";
        List<Vendedor> result = null;
        try {
            transaction = session.beginTransaction();
            result = session.createQuery(sql).list();
            session.getTransaction().commit();
        } catch (Exception e) {
            // Roll back the transaction that actually failed (the old code
            // started a NEW transaction here and rolled that one back).
            if (transaction != null) {
                transaction.rollback();
            }
        }
        return (List<Vendedor>) result;
    }
}
|
israelmessias/Atividades_Java
|
Atividades/src/JDBC/DAOTeste.java
|
<gh_stars>0
package src.JDBC;
import java.sql.SQLException;
/**
 * Manual smoke test for {@link DAO}: inserts one row into the `pessoa` table.
 * Requires a reachable database configured by the DAO class.
 */
public class DAOTeste {
    public static void main(String[] args) throws SQLException {
        DAO dao = new DAO();
        // Parameterized insert: (nome, id) bound by DAO.input
        String sql = "INSERT INTO pessoa (nome, id) VALUES(?,?)";
        dao.input(sql, "<NAME>", 10);
    }
}
|
eggman87/localr-android-mvp
|
app/src/test/java/com/eggman/localr/UnitTestRxScheduler.java
|
package com.eggman.localr;
import com.eggman.localr.utils.RxScheduler;
import rx.Observable;
/**
* Created by mharris on 7/30/16.
* eggmanapps.
*/
/**
 * RxScheduler for unit tests: a no-op transformer that leaves the observable
 * on the caller's thread, so tests run synchronously instead of hopping to
 * the schedulers used in production.
 *
 * Created by mharris on 7/30/16.
 * eggmanapps.
 */
public class UnitTestRxScheduler implements RxScheduler {
    @Override
    public <T> Observable.Transformer<T, T> applySchedulers() {
        // identity transform: no subscribeOn/observeOn applied
        return observable -> observable;
    }
}
|
msleprosy/cloud-pipeline
|
client/src/components/search/SearchDialog.js
|
/*
* Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from 'react';
import PropTypes from 'prop-types';
import {inject, observer} from 'mobx-react';
import {observable} from 'mobx';
import {
Icon,
Input,
message,
Tooltip,
Row
} from 'antd';
import Preview from './preview';
import {PreviewIcons} from './preview/previewIcons';
import {SearchItemTypes} from '../../models/search';
import {SearchGroupTypes} from './searchGroupTypes';
import localization from '../../utils/localization';
import styles from './search.css';
import getStyle from '../../utils/browserDependentStyle';
import MetadataEntityLoad from '../../models/folderMetadata/MetadataEntityLoad';
import {
getPipelineFileInfo,
PipelineFileTypes
} from './utilities/getPipelineFileInfo';
import '../../staticStyles/Search.css';
const PAGE_SIZE = 50;
const INSTANT_SEARCH_DELAY = 1000;
const PREVIEW_AVAILABLE_DELAY = 500;
@localization.localizedComponent
@inject('searchEngine', 'preferences', 'pipelines')
@observer
export default class SearchDialog extends localization.LocalizedReactComponent {
  static propTypes = {
    onInitialized: PropTypes.func,       // called once the dialog is ready
    onVisibilityChanged: PropTypes.func, // notified when dialog opens/closes
    blockInput: PropTypes.bool
  };

  // Local UI state of the search dialog.
  state = {
    visible: false,
    searchString: null,        // current query text
    searching: false,          // request in flight
    searchResults: [],
    searchResultsFor: null,    // query the current results belong to
    page: 0,
    hoveredIndex: null,        // index of the hovered result row
    previewAvailable: false,   // whether the preview pane is shown
    selectedGroupTypes: [],    // active type-group filters
    aggregates: null,          // per-group hit counts
    totalPages: 0
  };
  // Ref to the underlying antd Input (set by initializeInputControl).
  inputControl;
  // Pending setTimeout handle for the debounced instant search.
  @observable delayedSearch;

  // All search group descriptors as an array, each tagged with its key.
  get searchTypesArray () {
    const result = [];
    for (let key in SearchGroupTypes) {
      if (SearchGroupTypes.hasOwnProperty(key)) {
        result.push({...SearchGroupTypes[key], key});
      }
    }
    return result;
  }
initializeInputControl = (input) => {
if (input && input.input) {
this.inputControl = input.input;
}
};
becomeVisible = () => {
if (this.inputControl && this.inputControl.focus) {
setTimeout(() => { this.inputControl.focus(); }, 125);
}
};
onSearchChanged = (event) => {
this.setState({
searchString: event.target.value
}, event.target.value ? this.performSearchDelayed : this.performSearch);
};
onPerformSearch = (text) => {
this.setState({
searchString: text
}, this.performSearch);
};
performSearchDelayed = () => {
if (this.delayedSearch) {
clearTimeout(this.delayedSearch);
}
this.delayedSearch = setTimeout(this.performSearch, INSTANT_SEARCH_DELAY);
};
navigate = (itemIndex) => async (e) => {
e.preventDefault();
e.stopPropagation();
if (!this.props.router) {
return false;
}
if (this.state.searchResults.length > itemIndex) {
const item = this.state.searchResults[itemIndex];
switch (item.type) {
case SearchItemTypes.azFile:
case SearchItemTypes.s3File:
case SearchItemTypes.NFSFile:
case SearchItemTypes.gsFile:
if (item.parentId) {
const path = item.id;
const parentFolder = path.split('/').slice(0, path.split('/').length - 1).join('/');
if (parentFolder) {
this.props.router.push(`/storage/${item.parentId}?path=${parentFolder}`);
} else {
this.props.router.push(`/storage/${item.parentId}`);
}
this.closeDialog();
}
break;
case SearchItemTypes.azStorage:
case SearchItemTypes.s3Bucket:
case SearchItemTypes.NFSBucket:
case SearchItemTypes.gsStorage:
this.props.router.push(`/storage/${item.id}`);
this.closeDialog();
break;
case SearchItemTypes.run:
this.props.router.push(`/run/${item.id}`);
this.closeDialog();
break;
case SearchItemTypes.pipeline:
this.props.router.push(`/${item.id}`);
this.closeDialog();
break;
case SearchItemTypes.tool:
this.props.router.push(`/tool/${item.id}`);
this.closeDialog();
break;
case SearchItemTypes.folder:
this.props.router.push(`/folder/${item.id}`);
this.closeDialog();
break;
case SearchItemTypes.configuration:
const [id, configName] = item.id.split('-');
this.props.router.push(`/configuration/${id}/${configName}`);
this.closeDialog();
break;
case SearchItemTypes.metadataEntity:
if (item.parentId) {
const request = new MetadataEntityLoad(item.id);
await request.fetch();
if (request.loaded && request.value.classEntity && request.value.classEntity.name) {
this.props.router.push(`/metadata/${item.parentId}/${request.value.classEntity.name}`);
this.closeDialog();
} else {
this.props.router.push(`/metadataFolder/${item.parentId}/`);
this.closeDialog();
}
}
break;
case SearchItemTypes.issue:
if (item.entity) {
const {entityClass, entityId} = item.entity;
switch (entityClass.toLowerCase()) {
case 'folder':
this.props.router.push(`/folder/${entityId}/`);
this.closeDialog();
break;
case 'pipeline':
this.props.router.push(`/${entityId}/`);
this.closeDialog();
break;
case 'tool':
this.props.router.push(`/tool/${entityId}/`);
this.closeDialog();
break;
}
}
break;
case SearchItemTypes.pipelineCode:
if (item.parentId && item.description && item.id) {
const versions = this.props.pipelines.versionsForPipeline(item.parentId);
await versions.fetch();
let version = item.description;
if (versions.loaded) {
let [v] = (versions.value || []).filter(v => v.name === item.description);
if (!v && versions.value.length > 0) {
const [draft] = versions.value.filter(v => v.draft);
if (draft) {
version = draft.name;
} else {
version = versions.value[0].name;
}
}
}
const hide = message.loading('Navigating...', 0);
const fileInfo = await getPipelineFileInfo(item.parentId, version, item.id);
let url = `/${item.parentId}/${version}`;
if (fileInfo) {
switch (fileInfo.type) {
case PipelineFileTypes.document:
url = `/${item.parentId}/${version}/documents`;
break;
case PipelineFileTypes.source:
if (fileInfo.path) {
url = `/${item.parentId}/${version}/code&path=${fileInfo.path}`;
} else {
url = `/${item.parentId}/${version}/code`;
}
break;
}
}
hide();
this.props.router.push(url);
this.closeDialog();
} else if (item.parentId && item.description) {
this.props.router.push(`/${item.parentId}/${item.description}`);
this.closeDialog();
} else if (item.parentId) {
this.props.router.push(`/${item.parentId}`);
this.closeDialog();
} else {
message.error('Cannot navigate to item', 3);
}
break;
}
}
return false;
};
generateSearchTypes = () => {
const result = [];
for (let i = 0; i < this.state.selectedGroupTypes.length; i++) {
const group = this.state.selectedGroupTypes[i];
result.push(...SearchGroupTypes[group].types);
}
return result;
};
processAggregates = () => {
const aggregates = {};
if (this.props.searchEngine.loaded && this.props.searchEngine.value.aggregates) {
const resultAggregates = this.props.searchEngine.value.aggregates;
for (let key in resultAggregates) {
if (resultAggregates.hasOwnProperty(key)) {
for (let i = 0; i < this.searchTypesArray.length; i++) {
if (this.searchTypesArray[i].types.indexOf(key) >= 0) {
aggregates[this.searchTypesArray[i].key] = (aggregates[this.searchTypesArray[i].key] || 0) +
resultAggregates[key];
}
}
}
}
}
return aggregates;
};
performSearch = (force = false) => {
this.delayedSearch && clearTimeout(this.delayedSearch);
if (!force &&
this.state.searchString &&
this.state.searchResultsFor &&
this.state.searchResultsFor.toLowerCase() === this.state.searchString.toLowerCase()) {
return;
}
if (!this.state.searchString) {
this.setState({
searchResults: [],
searchResultsFor: this.state.searchString,
hoveredIndex: null,
previewAvailable: false,
aggregates: null,
selectedGroupTypes: [],
totalPages: 0,
page: 0
});
return;
}
const searchCriteria = this.state.searchString;
this.setState({
searching: true
}, async () => {
await this.props.searchEngine.send(searchCriteria, this.state.page, PAGE_SIZE, this.generateSearchTypes());
if (this.state.searchString === searchCriteria) {
if (this.props.searchEngine.loaded) {
const totalPages = Math.ceil(this.props.searchEngine.value.totalHits / PAGE_SIZE);
const results = (this.props.searchEngine.value.documents || []).map(d => d);
const aggregates = this.processAggregates();
this.setState({
searching: false,
searchResultsFor: searchCriteria,
searchResults: results,
hoveredIndex: null,
previewAvailable: false,
aggregates,
totalPages
});
} else if (this.props.searchEngine.error) {
message.error(this.props.searchEngine.error, 5);
this.setState({
searching: false
});
}
} else {
this.setState({
searching: false
});
}
});
};
specifySearchGroups = () => {
if (!this.state.searchString) {
return;
}
const searchCriteria = this.state.searchString;
this.setState({
searching: true
}, async () => {
await this.props.searchEngine.send(searchCriteria, this.state.page, PAGE_SIZE, this.generateSearchTypes());
if (this.state.searchString === searchCriteria) {
if (this.props.searchEngine.loaded) {
const totalPages = Math.ceil(this.props.searchEngine.value.totalHits / PAGE_SIZE);
const results = (this.props.searchEngine.value.documents || []).map(d => d);
const aggregates = this.processAggregates();
this.setState({
aggregates,
searching: false,
searchResultsFor: searchCriteria,
searchResults: results,
hoveredIndex: null,
previewAvailable: false,
totalPages
});
} else if (this.props.searchEngine.error) {
message.error(this.props.searchEngine.error, 5);
this.setState({
searching: false
});
}
} else {
this.setState({
searching: false
});
}
});
};
renderIcon = (resultItem) => {
if (PreviewIcons[resultItem.type]) {
return (
<Icon
className={styles.searchResultItemIcon}
type={PreviewIcons[resultItem.type]} />
);
}
return null;
};
previewAvailableDelay;
previewAvailableTransition = false;
makePreviewAvailable = () => {
if (this.previewAvailableDelay) {
clearTimeout(this.previewAvailableDelay);
}
if (this.state.previewAvailable) {
return;
}
this.previewAvailableDelay = setTimeout(() => {
this.previewAvailableDelay = null;
this.previewAvailableTransition = true;
if (this.state.hoveredIndex !== null) {
this.setState({
previewAvailable: true
});
}
}, PREVIEW_AVAILABLE_DELAY);
};
makePreviewUnAvailable = () => {
if (this.previewAvailableDelay) {
clearTimeout(this.previewAvailableDelay);
}
if (!this.state.previewAvailable) {
return;
}
this.previewAvailableDelay = setTimeout(() => {
this.previewAvailableDelay = null;
this.previewAvailableTransition = true;
this.setState({
previewAvailable: false
});
}, PREVIEW_AVAILABLE_DELAY);
};
blockMouseEvents;
onHover = (index) => (e) => {
if (e && this.blockMouseEvents) {
return;
} else if (!e) {
if (this.blockMouseEvents) {
clearTimeout(this.blockMouseEvents);
}
this.blockMouseEvents = setTimeout(() => {
this.blockMouseEvents = null;
}, 500);
}
this.previewAvailableTransition = false;
this.setState({
hoveredIndex: index
}, this.makePreviewAvailable);
};
onUnHover = (index) => () => {
if (this.previewAvailableTransition || this.state.previewAvailable) {
return;
}
if (this.state.hoveredIndex === index) {
this.setState({
hoveredIndex: null
}, this.makePreviewUnAvailable);
}
};
  // Render one search result row: type icon + display name, highlighted
  // when hovered, navigating on click.
  renderSearchResultItem = (resultItem, index) => {
    const classNames = [styles.searchResultItem];
    // IE lacks the css used for row backgrounds — apply an inline fallback
    let additionalStyle = getStyle({
      ie: {backgroundColor: 'rgba(255, 255, 255, 0.75)'}
    });
    if (index === this.state.hoveredIndex) {
      classNames.push(styles.hovered);
      additionalStyle = getStyle({
        ie: {backgroundColor: 'white'}
      });
    }
    // Runs display "<name> - <last path segment of description>"; everything
    // else displays its plain name.
    const renderName = () => {
      switch (resultItem.type) {
        case SearchItemTypes.run: {
          if (resultItem.description) {
            const parts = resultItem.description.split('/');
            if (parts.length > 1) {
              return `${resultItem.name} - ${parts.pop()}`;
            }
            return `${resultItem.name} - ${resultItem.description}`;
          }
          return resultItem.name;
        }
        default: return resultItem.name;
      }
    };
    return (
      <div
        id={`search-result-item-${index}`}
        key={index}
        style={additionalStyle}
        className={`${classNames.join(' ')}`}
        onMouseOver={this.onHover(index)}
        onMouseEnter={this.onHover(index)}
        onMouseLeave={this.onUnHover(index)}
        onClick={this.navigate(index)}>
        <Row
          type="flex"
          align="middle"
          onMouseEnter={this.onHover(index)}
        >
          <div style={{display: 'inline-block'}}>
            {this.renderIcon(resultItem)}
          </div>
          <span className={styles.title}>
            {renderName()}
          </span>
        </Row>
      </div>
    );
  };
// Keyboard handler for the search input.
// Escape (27) with non-empty text clears the current search. Enter is
// already handled by Input.Search's `onSearch` callback, so the previously
// present empty `keyCode === 13` branch was dead code and has been removed.
onKeyEnter = (event) => {
  if (event.keyCode === 27 && this.state.searchString) {
    event.preventDefault();
    event.stopPropagation();
    this.onPerformSearch('');
    return false;
  }
};
renderPreview = () => {
if (this.state.hoveredIndex !== null &&
this.state.searchResults.length > this.state.hoveredIndex) {
return (
<Preview
item={this.state.searchResults[this.state.hoveredIndex]} />
);
}
return null;
};
// Returns a click handler that toggles a search-type group on/off, resets
// paging and refocuses the input, then re-runs the search via
// specifySearchGroups. Disabled groups (no hits in aggregates) are inert.
enableDisableSearchGroup = (group, isDisabled) => (e) => {
  if (e) {
    e.stopPropagation();
    e.preventDefault();
  }
  if (isDisabled) {
    return;
  }
  // Work on a copy: the original code spliced/pushed into the array held in
  // this.state, and React state must never be mutated in place.
  const types = this.state.selectedGroupTypes.slice();
  const index = types.indexOf(group);
  if (index >= 0) {
    types.splice(index, 1);
  } else {
    types.push(group);
  }
  if (this.inputControl && this.inputControl.focus) {
    this.inputControl.focus();
  }
  this.setState({
    page: 0,
    selectedGroupTypes: types
  }, this.specifySearchGroups);
};
static wait = (seconds) => new Promise(resolve => setTimeout(resolve, seconds * 1000));
// Scroll handler on the results list: when scrolled to the very bottom and
// more pages remain (and no request is in flight), fetches the next page and
// appends it to the current results.
loadMore = (e) => {
const obj = e.target;
// exact-bottom check; only fire once per page while not already searching
if (obj && obj.scrollTop === (obj.scrollHeight - obj.offsetHeight) &&
this.state.page < this.state.totalPages &&
!this.state.searching) {
this.setState({
page: this.state.page + 1
}, async () => {
if (!this.state.searchString) {
return;
}
// capture the query so late responses for stale queries are dropped
const searchCriteria = this.state.searchString;
this.setState({
searching: true
}, async () => {
// small debounce before hitting the search engine
await SearchDialog.wait(1);
await this.props.searchEngine.send(searchCriteria, this.state.page, PAGE_SIZE, this.generateSearchTypes());
// ignore the response if the query changed while the request ran
if (this.state.searchString === searchCriteria) {
if (this.props.searchEngine.loaded) {
const totalPages = Math.ceil(this.props.searchEngine.value.totalHits / PAGE_SIZE);
const results = (this.props.searchEngine.value.documents || []).map(d => d);
this.setState({
searching: false,
// append the new page to what is already shown
searchResults: [...this.state.searchResults, ...results],
totalPages
});
} else if (this.props.searchEngine.error) {
message.error(this.props.searchEngine.error, 5);
this.setState({
searching: false
});
}
} else {
this.setState({
searching: false
});
}
});
});
}
};
// Renders the static "query syntax" help shown in the hints tooltip
// (simple-query-string operators: + | - " * ( ) ~N).
renderHints = () => {
return (
<div className={styles.hints}>
<Row
style={{
borderBottom: '1px solid rgba(255, 255, 255, 0.1)',
paddingBottom: 10,
marginBottom: 10
}}
>
<span style={{fontSize: 'large'}}>
The query string supports the following special characters:
</span>
</Row>
<Row>
<code>+</code> signifies AND operation
</Row>
<Row>
<code>|</code> signifies OR operation
</Row>
<Row>
<code>-</code> negates a single token
</Row>
<Row>
<code>"</code> wraps a number of tokens to signify a phrase for searching
</Row>
<Row>
<code>*</code> at the end of a term signifies a prefix query
</Row>
<Row>
<code>(</code> and <code>)</code> signify precedence
</Row>
<Row>
<code>~N</code> after a word signifies edit distance (fuzziness)
</Row>
<Row>
<code>~N</code> after a phrase signifies slop amount
</Row>
<Row
style={{
borderTop: '1px solid rgba(255, 255, 255, 0.1)',
paddingTop: 10,
marginTop: 10
}}>
<span style={{fontSize: 'larger'}}>
In order to search for any of these special characters, they will need to be escaped with <code>\</code>
</span>
</Row>
</div>
);
};
render () {
const searchFormClassNames = [styles.searchForm];
if (this.state.searchResults.length) {
searchFormClassNames.push(styles.resultsAvailable);
}
if (this.state.previewAvailable) {
searchFormClassNames.push(styles.previewAvailable);
}
const typesFormClassNames = [styles.typesForm];
if (this.state.searchResults.length) {
typesFormClassNames.push(styles.resultsAvailable);
}
const hintContainerClassNames = [styles.hintContainer];
if (this.state.searchResults.length) {
hintContainerClassNames.push(styles.resultsAvailable);
}
if (this.state.previewAvailable) {
hintContainerClassNames.push(styles.previewAvailable);
}
let hintsTooltipPlacement;
if (this.state.previewAvailable && this.state.searchResults.length) {
hintsTooltipPlacement = 'bottomRight';
} else if (this.state.searchResults.length) {
hintsTooltipPlacement = 'bottom';
}
const previewClassNames = [styles.preview];
if (!this.state.previewAvailable) {
previewClassNames.push(styles.notAvailable);
}
return (
<div className={`${styles.searchContainer} ${this.state.visible ? styles.visible : ''}`}>
<div
className={`${styles.searchBackground} ${this.state.visible ? styles.visible : ''}`}
style={
this.state.visible
? getStyle({ie: {opacity: 0.75}})
: {}
}
onClick={this.closeDialog}>
{'\u00A0'}
</div>
<div
className={`${previewClassNames.join(' ')}`}
onClick={this.state.previewAvailable ? undefined : this.closeDialog}>
{this.renderPreview()}
</div>
<div className={`${hintContainerClassNames.join(' ')}`}>
<Tooltip
overlayClassName="search-hints-overlay"
placement={hintsTooltipPlacement}
title={this.renderHints()}>
<div className={styles.hintIconContainer}>
<Icon type="question" />
</div>
</Tooltip>
</div>
<div className={`${typesFormClassNames.join(' ')}`} onClick={this.closeDialog}>
<div style={{display: 'flex', overflowX: 'auto'}}>
{
this.searchTypesArray.map((type, index) => {
const disabled = this.state.aggregates && !this.state.aggregates[type.key];
const active = !disabled && this.state.selectedGroupTypes.indexOf(type.key) >= 0;
return (
<div
className={`${styles.typeButton} ${disabled ? styles.disabled : ''} ${active ? styles.active : ''}`}
onClick={this.enableDisableSearchGroup(type.key, disabled)}
key={index}>
<Icon type={type.icon} />
<span className={styles.typeTitle}>
{type.title(this.localizedString)(this.state.aggregates && this.state.aggregates[type.key])}
</span>
</div>
);
})
}
</div>
</div>
<Row type="flex" className={`${searchFormClassNames.join(' ')}`} align="middle">
<Input.Search
className={styles.searchInput}
placeholder={this.props.preferences.loaded
? `${this.props.preferences.deploymentName} search`
: 'Cloud Platform search'
}
ref={this.initializeInputControl}
value={this.state.searchString}
onChange={this.onSearchChanged}
onSearch={this.onPerformSearch}
onKeyDown={this.onKeyEnter}
style={{width: '100%'}} />
{
!this.state.searching && this.state.searchResultsFor && !this.state.searchResults.length &&
<Row type="flex" className={styles.searchingInProgressContainer} align="middle" justify="center">
<span>Nothing found</span>
</Row>
}
{
this.state.searchResults.length &&
<div
onScroll={this.loadMore}
id="search-results"
className={styles.searchResults}
onClick={this.closeDialog}>
{
this.state.searchResults.map(this.renderSearchResultItem)
}
</div>
}
{
this.state.searching &&
<Row type="flex" className={styles.searchingInProgressContainer} align="middle" justify="center">
<Icon type="loading" />
</Row>
}
</Row>
</div>
);
}
// Shows the dialog, then runs the become-visible hook and notifies the owner.
openDialog = () => {
  this.setState({visible: true}, () => {
    this.becomeVisible();
    if (this.props.onVisibilityChanged) {
      this.props.onVisibilityChanged(this.state.visible);
    }
  });
};
// Hides the dialog and notifies the owner of the visibility change.
closeDialog = () => {
  this.setState({visible: false}, () => {
    if (this.props.onVisibilityChanged) {
      this.props.onVisibilityChanged(this.state.visible);
    }
  });
};
// Global keydown handler (attached to window):
//   F3 / Ctrl|Cmd+F  - open the dialog
//   Escape           - close the dialog
//   Up / Down        - move the highlighted result (wraps around)
// Inert while props.blockInput is set or an antd modal mask is on screen.
handleKeyPress = (e) => {
if (this.props.blockInput) {
return;
}
// don't hijack keys while a modal dialog is open
const modals = Array.from(document.getElementsByClassName('ant-modal-mask'));
if (modals && modals.filter(m => m.className === 'ant-modal-mask').length) {
return;
}
// 114 = F3, 70 = 'F' (with Ctrl/Cmd)
if (e.keyCode === 114 || ((e.ctrlKey || e.metaKey) && e.keyCode === 70)) {
e.preventDefault();
if (!this.state.visible) {
this.openDialog();
}
return;
} else if (e.keyCode === 27) {
if (this.state.visible) {
this.closeDialog();
}
return;
}
let move = 0;
let initial = 0;
if (e.keyCode === 38) {
// 'Up' key
move = -1;
} else if (e.keyCode === 40) {
// 'Down' key
move = 1;
initial = -1;
}
if (move && this.state.searchResults && this.state.searchResults.length) {
// wrap-around index arithmetic; `initial` makes the first Down land on 0
const currentIndex = (
(this.state.hoveredIndex === null ? initial : this.state.hoveredIndex) +
this.state.searchResults.length +
move
) % (this.state.searchResults.length);
this.onHover(currentIndex)();
e.preventDefault();
e.stopPropagation();
// keep the highlighted row scrolled into view
const item = document.getElementById(`search-result-item-${currentIndex}`);
if (item) {
item.scrollIntoView({behavior: 'smooth'});
}
return false;
}
};
componentDidMount () {
window.addEventListener('keydown', this.handleKeyPress);
this.props.onInitialized && this.props.onInitialized(this);
}
componentWillUnmount () {
window.removeEventListener('keydown', this.handleKeyPress);
this.props.onInitialized && this.props.onInitialized(null);
}
}
|
ILLKO/scalawiki
|
scalawiki-core/src/main/scala/org/scalawiki/dto/cmd/query/prop/Info.scala
|
<gh_stars>10-100
package org.scalawiki.dto.cmd.query.prop
import org.scalawiki.dto.cmd._
import org.scalawiki.dto.cmd.query.Module
/**
 * Represents the `?action=query&prop=info` module, which returns basic
 * page information. Additional properties are requested via [[InProp]].
 */
case class Info(override val params:InfoParam*) extends Module[PropArg]("in", "info", "Get basic page information.") with PropArg with ArgWithParams[InfoParam, PropArg]
/**
 * Marker trait for parameters used with prop=info.
 */
trait InfoParam extends Parameter[AnyRef]
/**
 * Represents the `inprop=` parameter of `?action=query&prop=info`;
 * holds the set of additional properties ([[InPropArg]]) to fetch.
 */
case class InProp(override val args: InPropArg*) extends EnumParameter[InPropArg]("inprop", "Which additional properties to get:") with InfoParam
/**
 * Marker trait for `inprop=` argument values; ties each argument back to
 * its owning [[InProp]] parameter.
 */
trait InPropArg extends EnumArg[InPropArg] { val param = InProp }
/**
 * `inprop=` argument values, one object per property the MediaWiki API
 * can attach to prop=info results.
 * Fix: `Watchers` previously carried a copy-pasted description from
 * `TalkId` ("The page ID of the talk page..."); per the MediaWiki API it
 * is the number of watchers.
 */
object Protection extends EnumArgument[InPropArg]("protection", "List the protection level of each page.") with InPropArg
object TalkId extends EnumArgument[InPropArg]("talkid", "The page ID of the talk page for each non-talk page.") with InPropArg
object Watched extends EnumArgument[InPropArg]("watched", "List the watched status of each page.") with InPropArg
object Watchers extends EnumArgument[InPropArg]("watchers", "The number of watchers, if allowed.") with InPropArg
object NotificationTimestamp extends EnumArgument[InPropArg]("notificationtimestamp", "The watchlist notification timestamp of each page.") with InPropArg
object SubjectId extends EnumArgument[InPropArg]("subjectid", "The page ID of the parent page for each talk page.") with InPropArg
object Url extends EnumArgument[InPropArg]("url", "Gives a full URL, an edit URL, and the canonical URL for each page.") with InPropArg
object Readable extends EnumArgument[InPropArg]("readable", "Whether the user can read this page.") with InPropArg
object Preload extends EnumArgument[InPropArg]("preload", "Gives the text returned by EditFormPreloadText.") with InPropArg
object DisplayTitle extends EnumArgument[InPropArg]("displaytitle", "Gives the way the page title is actually displayed.") with InPropArg
|
businesscode/BCD-UI
|
Server/src/main/java/de/businesscode/bcdui/web/cacheControl/HttpServletResponseInvocationHandler.java
|
/*
Copyright 2010-2017 BusinessCode GmbH, Germany
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.businesscode.bcdui.web.cacheControl;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import javax.servlet.http.HttpServletResponse;
/**
* This class provides the proxy implementation of the HttpServletResponse class.
*/
public class HttpServletResponseInvocationHandler implements InvocationHandler {
    // wrapped response all calls are forwarded to
    private HttpServletResponse target;
    // cache key under which the response body is stored
    private String key;
    // filter providing the caching stream/writer wrappers
    private ServerCachingFilter serverCacheControl;
    /**
     * Constructor
     * @param response the real response to delegate to
     * @param key cache key identifying this response's cached body
     * @param serverCacheControl filter used to wrap the output stream / writer
     */
    public HttpServletResponseInvocationHandler(HttpServletResponse response, String key, ServerCachingFilter serverCacheControl) {
        this.target = response;
        this.key = key;
        this.serverCacheControl = serverCacheControl;
    }
    /**
     * Intercepts getOutputStream()/getWriter() to return caching wrappers that
     * tee the body into the server cache; every other method is forwarded
     * unchanged to the wrapped response.
     *
     * @see java.lang.reflect.InvocationHandler#invoke(java.lang.Object, java.lang.reflect.Method, java.lang.Object[])
     */
    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        if ("getOutputStream".equals(method.getName())) {
            return serverCacheControl.wrap(key, target.getOutputStream());
        } else if ("getWriter".equals(method.getName())) {
            return serverCacheControl.wrap(key, target.getWriter());
        }
        return method.invoke(target, args);
    }
}
|
pekim/gobbi
|
internal/generate/functions.go
|
package generate
import (
"github.com/pekim/jennifer/jen"
)
// Functions is a collection of generated Function wrappers.
type Functions []*Function

// init initialises every function in the collection with its namespace and
// the generated-name prefix; no receiver (nil) is passed because these are
// free functions, not methods.
func (ff Functions) init(ns *Namespace, namePrefix string) {
for _, function := range ff {
function.init(ns, nil, namePrefix)
}
}
// versionList collects the declared version of every function that has one.
func (ff Functions) versionList() Versions {
	var versions Versions
	for _, fn := range ff {
		if fn.Version == "" {
			continue
		}
		versions = append(versions, VersionNew(fn.Version))
	}
	return versions
}
// entities returns the functions as Generatable values.
func (ff Functions) entities() []Generatable {
	// preserve the original nil result for an empty collection
	if len(ff) == 0 {
		return nil
	}
	// every function contributes exactly one entity, so pre-size the slice
	// to avoid repeated append growth
	generatables := make([]Generatable, 0, len(ff))
	for _, function := range ff {
		generatables = append(generatables, function)
	}
	return generatables
}
// forCIdentifier returns the function whose C identifier matches, or nil
// when the collection contains no such function.
func (ff Functions) forCIdentifier(cidentifier string) *Function {
	for _, fn := range ff {
		if fn.CIdentifier != cidentifier {
			continue
		}
		return fn
	}
	return nil
}
// mergeAddenda merges hand-written addenda into matching functions (matched
// by C identifier); addenda for unknown identifiers are silently ignored.
func (ff Functions) mergeAddenda(addenda Functions) {
for _, addendaFunction := range addenda {
if function := ff.forCIdentifier(addendaFunction.CIdentifier); function != nil {
function.mergeAddenda(addendaFunction)
}
}
}

// generate emits the Go source for every function at the given version.
func (ff Functions) generate(g *jen.Group, version *Version) {
for _, fn := range ff {
fn.generate(g, version)
}
}

// generateDocs is a no-op: free functions emit no type-level documentation.
func (ff Functions) generateDocs(ns *Namespace, typeName string) {}
|
swax/Quakespasm-Rift
|
Quake/cmd.c
|
/*
Copyright (C) 1996-2001 Id Software, Inc.
Copyright (C) 2002-2009 <NAME> and others
Copyright (C) 2007-2008 <NAME>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// cmd.c -- Quake script command processing module
#include "quakedef.h"
void Cmd_ForwardToServer (void);

#define MAX_ALIAS_NAME 32
#define CMDLINE_LENGTH 256 //johnfitz -- mirrored in common.c

// single-linked list node for a console alias (name -> command string)
typedef struct cmdalias_s
{
struct cmdalias_s *next;
char name[MAX_ALIAS_NAME];
char *value; // Z_Malloc'ed command string, ends with '\n'
} cmdalias_t;

// head of the alias list
cmdalias_t *cmd_alias;

// set by Cmd_Wait_f; makes Cbuf_Execute defer the rest of the buffer
qboolean cmd_wait;
//=============================================================================
/*
============
Cmd_Wait_f
Causes execution of the remainder of the command buffer to be delayed until
next frame. This allows commands like:
bind g "impulse 5 ; +attack ; wait ; -attack ; impulse 2"
============
*/
void Cmd_Wait_f (void)
{
// Cbuf_Execute checks this flag and leaves the remaining buffered text
// for the next frame
cmd_wait = true;
}
/*
=============================================================================
COMMAND BUFFER
=============================================================================
*/
sizebuf_t cmd_text;
/*
============
Cbuf_Init
============
*/
void Cbuf_Init (void)
{
// one-time allocation of the global command text buffer
SZ_Alloc (&cmd_text, 8192); // space for commands and script files
}
/*
============
Cbuf_AddText
Adds command text at the end of the buffer
============
*/
// Appends command text at the end of the command buffer. Text that would
// overflow the buffer is dropped with a console warning.
// Fix: Q_strlen(text) was computed twice; the cached length is now reused
// in the SZ_Write call.
void Cbuf_AddText (const char *text)
{
	int l;

	l = Q_strlen (text);

	if (cmd_text.cursize + l >= cmd_text.maxsize)
	{
		Con_Printf ("Cbuf_AddText: overflow\n");
		return;
	}

	SZ_Write (&cmd_text, text, l);
}
/*
============
Cbuf_InsertText
Adds command text immediately after the current command
Adds a \n to the text
FIXME: actually change the command buffer to do less copying
============
*/
// Inserts command text (plus a trailing newline) immediately after the
// current command, ahead of anything already buffered; used by exec/alias so
// their expansions run before the rest of the buffer.
void Cbuf_InsertText (const char *text)
{
char *temp;
int templen;

// copy off any commands still remaining in the exec buffer
templen = cmd_text.cursize;
if (templen)
{
temp = (char *) Z_Malloc (templen);
Q_memcpy (temp, cmd_text.data, templen);
SZ_Clear (&cmd_text);
}
else
temp = NULL; // shut up compiler

// add the entire text of the file
Cbuf_AddText (text);
SZ_Write (&cmd_text, "\n", 1);

// add the copied off data
if (templen)
{
SZ_Write (&cmd_text, temp, templen);
Z_Free (temp);
}
}
/*
============
Cbuf_Execute
============
*/
// Executes the buffered command text one line at a time. Lines end at '\n'
// or at an unquoted ';'. Stops early when a command set cmd_wait, leaving the
// rest of the buffer for the next frame.
void Cbuf_Execute (void)
{
int i;
char *text;
char line[1024];
int quotes;

while (cmd_text.cursize)
{
// find a \n or ; line break
text = (char *)cmd_text.data;

quotes = 0;
for (i=0 ; i< cmd_text.cursize ; i++)
{
if (text[i] == '"')
quotes++;
if ( !(quotes&1) && text[i] == ';')
break; // don't break if inside a quoted string
if (text[i] == '\n')
break;
}

// NOTE(review): lines longer than sizeof(line)-1 would overflow here;
// in practice commands are far shorter -- confirm upstream limits
memcpy (line, text, i);
line[i] = 0;

// delete the text from the command buffer and move remaining commands down
// this is necessary because commands (exec, alias) can insert data at the
// beginning of the text buffer

if (i == cmd_text.cursize)
cmd_text.cursize = 0;
else
{
i++;
cmd_text.cursize -= i;
memmove (text, text + i, cmd_text.cursize);
}

// execute the command line
Cmd_ExecuteString (line, src_command);

if (cmd_wait)
{ // skip out while text still remains in buffer, leaving it
// for next frame
cmd_wait = false;
break;
}
}
}
/*
==============================================================================
SCRIPT COMMANDS
==============================================================================
*/
/*
===============
Cmd_StuffCmds_f -- johnfitz -- rewritten to read the "cmdline" cvar, for use with dynamic mod loading
Adds command line parameters as script statements
Commands lead with a +, and continue until a - or another +
quake +prog jctest.qp +cmd amlev1
quake -nosound +cmd amlev1
===============
*/
// Adds command line parameters as script statements: segments starting with
// '+' become ';'-separated commands, a leading ' -' ends a segment.
// Fix: the write index j had no bounds check against cmds[CMDLINE_LENGTH];
// a cmdline cvar longer than the buffer would smash the stack. Extraction
// now stops when the buffer is (almost) full -- two bytes are reserved
// because a '+' can emit two characters plus the final terminator.
void Cmd_StuffCmds_f (void)
{
	extern cvar_t cmdline;
	char	cmds[CMDLINE_LENGTH];
	int		i, j, plus;

	plus = false;	// On Unix, argv[0] is command name

	for (i = 0, j = 0; cmdline.string[i]; i++)
	{
		if (j >= CMDLINE_LENGTH - 2)
			break;	// buffer full -- drop the rest rather than overflow
		if (cmdline.string[i] == '+')
		{
			plus = true;
			if (j > 0)
			{
				cmds[j-1] = ';';
				cmds[j++] = ' ';
			}
		}
		else if (cmdline.string[i] == '-' &&
			(i==0 || cmdline.string[i-1] == ' ')) //johnfitz -- allow hypenated map names with +map
			plus = false;
		else if (plus)
			cmds[j++] = cmdline.string[i];
	}
	cmds[j] = 0;

	Cbuf_InsertText (cmds);
}
/*
===============
Cmd_Exec_f
===============
*/
// Console command: exec <filename> -- loads a script file from the search
// path and inserts its contents at the front of the command buffer. The hunk
// mark/free pair releases the file data once it has been copied into the
// buffer.
void Cmd_Exec_f (void)
{
char *f;
int mark;

if (Cmd_Argc () != 2)
{
Con_Printf ("exec <filename> : execute a script file\n");
return;
}

mark = Hunk_LowMark ();
f = (char *)COM_LoadHunkFile (Cmd_Argv(1), NULL);
if (!f)
{
Con_Printf ("couldn't exec %s\n",Cmd_Argv(1));
return;
}
Con_Printf ("execing %s\n",Cmd_Argv(1));

Cbuf_InsertText (f);
Hunk_FreeToLowMark (mark);
}
/*
===============
Cmd_Echo_f
Just prints the rest of the line to the console
===============
*/
// Console command: echo -- prints its arguments, space-separated, followed
// by a newline.
void Cmd_Echo_f (void)
{
	int	argn;

	for (argn = 1; argn < Cmd_Argc(); argn++)
	{
		Con_Printf ("%s ", Cmd_Argv(argn));
	}
	Con_Printf ("\n");
}
/*
===============
Cmd_Alias_f -- johnfitz -- rewritten
Creates a new command that executes a command string (possibly ; seperated)
===============
*/
// Console command: alias. With no arguments lists all aliases; with one
// argument prints that alias; otherwise (re)defines an alias whose value is
// the rest of the command line with a '\n' appended.
void Cmd_Alias_f (void)
{
cmdalias_t *a;
char cmd[1024];
int i, c;
const char *s;

switch (Cmd_Argc())
{
case 1: //list all aliases
for (a = cmd_alias, i = 0; a; a=a->next, i++)
Con_SafePrintf (" %s: %s", a->name, a->value);
if (i)
Con_SafePrintf ("%i alias command(s)\n", i);
else
Con_SafePrintf ("no alias commands found\n");
break;
case 2: //output current alias string
for (a = cmd_alias ; a ; a=a->next)
if (!strcmp(Cmd_Argv(1), a->name))
Con_Printf (" %s: %s", a->name, a->value);
break;
default: //set alias string
s = Cmd_Argv(1);
if (strlen(s) >= MAX_ALIAS_NAME)
{
Con_Printf ("Alias name is too long\n");
return;
}

// if the alias allready exists, reuse it
for (a = cmd_alias ; a ; a=a->next)
{
if (!strcmp(s, a->name))
{
// free the old value; the node itself is reused below
Z_Free (a->value);
break;
}
}

if (!a)
{
// not found: allocate a new node and push it on the list head
a = (cmdalias_t *) Z_Malloc (sizeof(cmdalias_t));
a->next = cmd_alias;
cmd_alias = a;
}
strcpy (a->name, s);

// copy the rest of the command line
cmd[0] = 0; // start out with a null string
c = Cmd_Argc();
for (i = 2; i < c; i++)
{
q_strlcat (cmd, Cmd_Argv(i), sizeof(cmd));
if (i != c - 1)
q_strlcat (cmd, " ", sizeof(cmd));
}
// append the trailing newline; truncation replaces the alias with a no-op
if (q_strlcat(cmd, "\n", sizeof(cmd)) >= sizeof(cmd))
{
Con_Printf("alias value too long!\n");
cmd[0] = '\n'; // nullify the string
cmd[1] = 0;
}

a->value = Z_Strdup (cmd);
break;
}
}
/*
===============
Cmd_Unalias_f -- johnfitz
===============
*/
// Console command: unalias <name> -- unlinks and frees the named alias,
// printing a message when no such alias exists.
void Cmd_Unalias_f (void)
{
cmdalias_t *a, *prev;

switch (Cmd_Argc())
{
default:
case 1:
Con_Printf("unalias <name> : delete alias\n");
break;
case 2:
prev = NULL;
for (a = cmd_alias; a; a = a->next)
{
if (!strcmp(Cmd_Argv(1), a->name))
{
// unlink from the singly-linked list, then free value and node
if (prev)
prev->next = a->next;
else
cmd_alias = a->next;
Z_Free (a->value);
Z_Free (a);
return;
}
prev = a;
}
Con_Printf ("No alias named %s\n", Cmd_Argv(1));
break;
}
}
/*
===============
Cmd_Unaliasall_f -- johnfitz
===============
*/
void Cmd_Unaliasall_f (void)
{
cmdalias_t *blah;
while (cmd_alias)
{
blah = cmd_alias->next;
Z_Free(cmd_alias->value);
Z_Free(cmd_alias);
cmd_alias = blah;
}
}
/*
=============================================================================
COMMAND EXECUTION
=============================================================================
*/
// registered console command: name plus handler, kept in a sorted list
typedef struct cmd_function_s
{
struct cmd_function_s *next;
const char *name;
xcommand_t function;
} cmd_function_t;

#define MAX_ARGS 80

// tokenized form of the command line currently being executed
static int cmd_argc;
static char *cmd_argv[MAX_ARGS];
static char cmd_null_string[] = "";
static const char *cmd_args = NULL; // everything after argv[0], untokenized

cmd_source_t cmd_source;

//johnfitz -- better tab completion
//static cmd_function_t *cmd_functions; // possible commands to execute
cmd_function_t *cmd_functions; // possible commands to execute
//johnfitz
/*
============
Cmd_List_f -- johnfitz
============
*/
// Console command: cmdlist [prefix] -- prints all registered commands,
// optionally only those whose name starts with the given prefix.
void Cmd_List_f (void)
{
cmd_function_t *cmd;
const char *partial;
int len, count;

if (Cmd_Argc() > 1)
{
partial = Cmd_Argv (1);
len = Q_strlen(partial);
}
else
{
partial = NULL;
len = 0;
}

count=0;
for (cmd=cmd_functions ; cmd ; cmd=cmd->next)
{
// prefix filter: skip names that don't start with `partial`
if (partial && Q_strncmp (partial,cmd->name, len))
{
continue;
}
Con_SafePrintf (" %s\n", cmd->name);
count++;
}

Con_SafePrintf ("%i commands", count);
if (partial)
{
Con_SafePrintf (" beginning with \"%s\"", partial);
}
Con_SafePrintf ("\n");
}
/*
============
Cmd_Init
============
*/
// Registers the built-in command-system console commands; called once at
// startup (before host_initialized, as Cmd_AddCommand requires).
void Cmd_Init (void)
{
Cmd_AddCommand ("cmdlist", Cmd_List_f); //johnfitz
Cmd_AddCommand ("unalias", Cmd_Unalias_f); //johnfitz
Cmd_AddCommand ("unaliasall", Cmd_Unaliasall_f); //johnfitz

Cmd_AddCommand ("stuffcmds",Cmd_StuffCmds_f);
Cmd_AddCommand ("exec",Cmd_Exec_f);
Cmd_AddCommand ("echo",Cmd_Echo_f);
Cmd_AddCommand ("alias",Cmd_Alias_f);
Cmd_AddCommand ("cmd", Cmd_ForwardToServer);
Cmd_AddCommand ("wait", Cmd_Wait_f);
}
/*
============
Cmd_Argc
============
*/
// Returns the number of tokens in the currently tokenized command line.
int Cmd_Argc (void)
{
return cmd_argc;
}
/*
============
Cmd_Argv
============
*/
// Returns token `arg` of the current command line, or an empty string for
// an out-of-range index (so callers never receive NULL).
const char *Cmd_Argv (int arg)
{
	if (arg >= 0 && arg < cmd_argc)
		return cmd_argv[arg];
	return cmd_null_string;
}
/*
============
Cmd_Args
============
*/
// Returns the raw, untokenized text after argv[0] of the current command
// line (NULL when the command had no arguments).
const char *Cmd_Args (void)
{
return cmd_args;
}
/*
============
Cmd_TokenizeString
Parses the given string into command line tokens.
============
*/
// Parses the given string into the cmd_argv[] tokens (freeing the previous
// set), stopping at the first newline. cmd_args is pointed at the raw text
// after the first token. Tokens beyond MAX_ARGS are silently dropped.
void Cmd_TokenizeString (const char *text)
{
int i;

// clear the args from the last string
for (i=0 ; i<cmd_argc ; i++)
Z_Free (cmd_argv[i]);

cmd_argc = 0;
cmd_args = NULL;

while (1)
{
// skip whitespace up to a /n
while (*text && *text <= ' ' && *text != '\n')
{
text++;
}

if (*text == '\n')
{ // a newline seperates commands in the buffer
text++;
break;
}

if (!*text)
return;

// remember where the untokenized argument text starts
if (cmd_argc == 1)
cmd_args = text;

text = COM_Parse (text);
if (!text)
return;

if (cmd_argc < MAX_ARGS)
{
// COM_Parse leaves the token in the global com_token
cmd_argv[cmd_argc] = Z_Strdup (com_token);
cmd_argc++;
}
}
}
/*
============
Cmd_AddCommand
============
*/
// Registers a console command, keeping the command list sorted by name (for
// tab completion). Must be called before host initialization; refuses names
// already taken by a cvar or another command. The name string is stored by
// pointer, so it must outlive the program (typically a string literal).
void Cmd_AddCommand (const char *cmd_name, xcommand_t function)
{
cmd_function_t *cmd;
cmd_function_t *cursor,*prev; //johnfitz -- sorted list insert

if (host_initialized) // because hunk allocation would get stomped
Sys_Error ("Cmd_AddCommand after host_initialized");

// fail if the command is a variable name
if (Cvar_VariableString(cmd_name)[0])
{
Con_Printf ("Cmd_AddCommand: %s already defined as a var\n", cmd_name);
return;
}

// fail if the command already exists
for (cmd=cmd_functions ; cmd ; cmd=cmd->next)
{
if (!Q_strcmp (cmd_name, cmd->name))
{
Con_Printf ("Cmd_AddCommand: %s already defined\n", cmd_name);
return;
}
}

cmd = (cmd_function_t *) Hunk_Alloc (sizeof(cmd_function_t));
cmd->name = cmd_name;
cmd->function = function;

//johnfitz -- insert each entry in alphabetical order
if (cmd_functions == NULL || strcmp(cmd->name, cmd_functions->name) < 0) //insert at front
{
cmd->next = cmd_functions;
cmd_functions = cmd;
}
else //insert later
{
prev = cmd_functions;
cursor = cmd_functions->next;
while ((cursor != NULL) && (strcmp(cmd->name, cursor->name) > 0))
{
prev = cursor;
cursor = cursor->next;
}
cmd->next = prev->next;
prev->next = cmd;
}
//johnfitz
}
/*
============
Cmd_Exists
============
*/
qboolean Cmd_Exists (const char *cmd_name)
{
cmd_function_t *cmd;
for (cmd=cmd_functions ; cmd ; cmd=cmd->next)
{
if (!Q_strcmp (cmd_name,cmd->name))
return true;
}
return false;
}
/*
============
Cmd_CompleteCommand
============
*/
// Returns the first registered command whose name starts with `partial`
// (the list is kept sorted, so this is the alphabetically first match),
// or NULL when `partial` is empty or nothing matches.
const char *Cmd_CompleteCommand (const char *partial)
{
	cmd_function_t	*cmd;
	int				len;

	len = Q_strlen (partial);
	if (len == 0)
		return NULL;

	// check functions
	for (cmd = cmd_functions; cmd != NULL; cmd = cmd->next)
	{
		if (Q_strncmp (partial, cmd->name, len) == 0)
			return cmd->name;
	}
	return NULL;
}
/*
============
Cmd_ExecuteString
A complete command line has been parsed, so try to execute it
FIXME: lookupnoadd the token to speed search?
============
*/
// Executes one parsed command line: tokenizes it, then dispatches (in order)
// to a registered command, an alias (whose value is re-inserted into the
// buffer), or a cvar get/set; unknown names are reported on the console.
void Cmd_ExecuteString (const char *text, cmd_source_t src)
{
cmd_function_t *cmd;
cmdalias_t *a;

cmd_source = src;
Cmd_TokenizeString (text);

// execute the command line
if (!Cmd_Argc())
return; // no tokens

// check functions
for (cmd=cmd_functions ; cmd ; cmd=cmd->next)
{
if (!Q_strcasecmp (cmd_argv[0],cmd->name))
{
cmd->function ();
return;
}
}

// check alias
for (a=cmd_alias ; a ; a=a->next)
{
if (!Q_strcasecmp (cmd_argv[0], a->name))
{
Cbuf_InsertText (a->value);
return;
}
}

// check cvars
if (!Cvar_Command ())
Con_Printf ("Unknown command \"%s\"\n", Cmd_Argv(0));
}
/*
===================
Cmd_ForwardToServer
Sends the entire command line over to the server
===================
*/
// Sends the entire current command line to the server as a stringcmd.
// When invoked via the "cmd" command, the literal "cmd" prefix is stripped
// so only its arguments are forwarded. No-op during demo playback.
void Cmd_ForwardToServer (void)
{
if (cls.state != ca_connected)
{
Con_Printf ("Can't \"%s\", not connected\n", Cmd_Argv(0));
return;
}

if (cls.demoplayback)
return; // not really connected

MSG_WriteByte (&cls.message, clc_stringcmd);
if (Q_strcasecmp(Cmd_Argv(0), "cmd") != 0)
{
SZ_Print (&cls.message, Cmd_Argv(0));
SZ_Print (&cls.message, " ");
}
if (Cmd_Argc() > 1)
SZ_Print (&cls.message, Cmd_Args());
else
SZ_Print (&cls.message, "\n");
}
/*
================
Cmd_CheckParm
Returns the position (1 to argc-1) in the command's argument list
where the given parameter apears, or 0 if not present
================
*/
int Cmd_CheckParm (const char *parm)
{
int i;
if (!parm)
Sys_Error ("Cmd_CheckParm: NULL");
for (i = 1; i < Cmd_Argc (); i++)
if (! Q_strcasecmp (parm, Cmd_Argv (i)))
return i;
return 0;
}
|
vanvught/GD32F107RC-DMX512-RDM
|
lib-rdmnet/include/llrponlylightset.h
|
<gh_stars>100-1000
/**
* @file llrponlylightset.h
*
*/
/* Copyright (C) 2019-2020 by <NAME> mailto:<EMAIL>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef LLRPONLYLIGHTSET_H_
#define LLRPONLYLIGHTSET_H_
#include <cstdint>
#include <assert.h>
#include "lightset.h"
// LightSet stub for LLRP-only devices: the DMX-output entry points must
// never be invoked (they assert), and the optional RDM accessors report
// "not supported" values.
class LLRPOnlyLightSet: public LightSet {
// NOTE(review): constructor/destructor sit in the class's default private
// section, which prevents direct instantiation -- confirm whether a friend/
// factory is intended or whether these should be public.
LLRPOnlyLightSet(void);
~LLRPOnlyLightSet(void);
// DMX output must never start on an LLRP-only device
void Start(uint8_t nPort) {
assert(0);
}
// DMX output must never stop (it never starts)
void Stop(uint8_t nPort) {
assert(0);
}
// no DMX data path exists on an LLRP-only device
void SetData(uint8_t nPort, const uint8_t *pData, uint32_t nLength) {
assert(0);
}
// nothing to report
void Print(void) {
}

public: // RDM Optional
// DMX start address is not configurable
bool SetDmxStartAddress(uint16_t nDmxStartAddress) {
return false;
}
// no DMX address is in use
uint16_t GetDmxStartAddress(void) {
return DMX_ADDRESS_INVALID;
}
// occupies no DMX slots
uint16_t GetDmxFootprint(void) {
return 0;
}
// no slot information is available
bool GetSlotInfo(uint16_t nSlotOffset, struct TLightSetSlotInfo &tSlotInfo) {
return false;
}
};
#endif /* LLRPONLYLIGHTSET_H_ */
|
mimelator/apv
|
APV/src/main/java/com/arranger/apv/menu/CommandBasedMenu.java
|
<reponame>mimelator/apv
package com.arranger.apv.menu;
import com.arranger.apv.APVPlugin;
import com.arranger.apv.Main;
import com.arranger.apv.cmd.Command;
public abstract class CommandBasedMenu extends BaseMenu {

	/**
	 * Adapts an arbitrary callback into an APVPlugin menu entry: the entry's
	 * display name is the given text, and "toggling" the entry runs the
	 * callback.
	 */
	protected static class MenuAdapterCallback extends APVPlugin {

		@FunctionalInterface
		public interface MenuCommand {
			void onCommand();
		}

		// text shown for this menu entry
		private String text;
		// action executed when the entry is selected
		private MenuCommand menuCommand;

		public MenuAdapterCallback(Main parent, String text, MenuCommand menuCommand) {
			super(parent);
			this.text = text;
			this.menuCommand = menuCommand;
		}

		// selecting the entry runs the callback instead of toggling state
		@Override
		public void toggleEnabled() {
			menuCommand.onCommand();
		}

		@Override
		public String getDisplayName() {
			return text;
		}
	}

	public CommandBasedMenu(Main parent) {
		super(parent);
		showDetails = false;
	}

	// Fires the given command through the parent's command system, using
	// this menu's display name as the source and no modifier flags.
	protected void fireCommand(Command cmd) {
		parent.getCommandSystem().invokeCommand(cmd, getDisplayName(), 0);
	}
}
|
dingfeng/galaxykube
|
pkg/operator/v1/polardbx/controllers/polardbxmonitor_controller.go
|
<reponame>dingfeng/galaxykube
/*
Copyright 2022 Alibaba Group Holding Limited.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controllers
import (
"context"
polardbxv1polardbx "github.com/alibaba/polardbx-operator/api/v1/polardbx"
"github.com/alibaba/polardbx-operator/pkg/k8s/control"
"github.com/alibaba/polardbx-operator/pkg/operator/hint"
"github.com/alibaba/polardbx-operator/pkg/operator/v1/config"
"github.com/alibaba/polardbx-operator/pkg/operator/v1/polardbx/helper"
polardbxreconcile "github.com/alibaba/polardbx-operator/pkg/operator/v1/polardbx/reconcile"
"k8s.io/apimachinery/pkg/types"
"time"
"github.com/go-logr/logr"
"golang.org/x/time/rate"
"k8s.io/client-go/util/workqueue"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/controller"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
polardbxv1 "github.com/alibaba/polardbx-operator/api/v1"
monitorsteps "github.com/alibaba/polardbx-operator/pkg/operator/v1/polardbx/steps/monitor"
)
// PolarDBXMonitorReconciler reconciles PolarDBXMonitor objects: it drives
// the monitor status state machine and keeps the backing ServiceMonitor
// resources in sync with the spec.
type PolarDBXMonitorReconciler struct {
	// BaseRc carries the shared reconcile plumbing (client, scheme, etc.).
	BaseRc *control.BaseReconcileContext
	Client client.Client
	Logger logr.Logger
	// LoaderFactory supplies per-reconcile config loaders.
	config.LoaderFactory
	// MaxConcurrency bounds concurrent reconciles for this controller.
	MaxConcurrency int
}
// Reconcile handles one reconcile request for a PolarDBXMonitor: it loads
// the monitor, resolves the PolarDBX cluster it points at, and runs the
// status-driven task pipeline. Returns an error (for requeue) when the
// monitor cannot be fetched.
func (r *PolarDBXMonitorReconciler) Reconcile(ctx context.Context, request reconcile.Request) (reconcile.Result, error) {
	logger := r.Logger.WithValues("namespace", request.Namespace, "polardbxmonitor", request.Name)

	if hint.IsNamespacePaused(request.Namespace) {
		logger.Info("Monitor reconciling is paused, skip")
		return reconcile.Result{}, nil
	}

	rc := polardbxreconcile.NewContext(
		control.NewBaseReconcileContextFrom(r.BaseRc, ctx, request),
		r.LoaderFactory(),
	)
	// BUG FIX: register Close immediately after the context is created so it
	// runs on every exit path. Previously the defer sat after the monitor
	// lookup, so an error return below leaked the context.
	defer rc.Close()

	rc.SetPolardbxMonitorKey(request.NamespacedName)

	monitor, err := rc.GetPolarDBXMonitor()
	if err != nil {
		logger.Error(err, "Unable to get polardbxmonitor: "+request.NamespacedName.String())
		return reconcile.Result{}, err
	}

	// Point the context at the PolarDBX cluster this monitor targets.
	rc.SetPolarDBXKey(types.NamespacedName{
		Namespace: monitor.Namespace,
		Name:      monitor.Spec.ClusterName,
	})

	return r.reconcile(rc, monitor, logger)
}
// newReconcileTask builds the step pipeline for the monitor's current
// status. Pending -> Creating -> Monitoring, with Monitoring -> Updating
// when the spec diverges from the recorded status.
//
// NOTE: the defer below is deliberate — it runs after the switch has
// populated the task but before this function returns, so the persist
// step is always appended LAST, after whatever steps the current status
// contributed.
func (r *PolarDBXMonitorReconciler) newReconcileTask(rc *polardbxreconcile.Context,
	monitor *polardbxv1.PolarDBXMonitor,
	log logr.Logger) *control.Task {
	task := control.NewTask()

	// Always persist the (possibly updated) monitor status as the final step.
	defer monitorsteps.PersistPolarDBXMonitor(task, true)

	switch monitor.Status.MonitorStatus {
	case polardbxv1polardbx.MonitorStatusPending:
		// Fresh object: move straight into Creating (requeue immediately).
		monitorsteps.TransferMonitorStatusTo(polardbxv1polardbx.MonitorStatusCreating, true)(task)
	case polardbxv1polardbx.MonitorStatusCreating:
		// Ensure the ServiceMonitor exists, record the applied spec, then
		// settle into Monitoring.
		monitorsteps.CheckServiceMonitorExists(task)
		monitorsteps.CreateServiceMonitorIfNeeded(task)
		monitorsteps.SyncPolarDBXMonitorSpecToStatus(task)
		monitorsteps.TransferMonitorStatusTo(polardbxv1polardbx.MonitorStatusMonitoring, false)(task)
	case polardbxv1polardbx.MonitorStatusMonitoring:
		// Steady state: only react when the monitor config has changed.
		control.When(helper.IsMonitorConfigChanged(monitor),
			monitorsteps.TransferMonitorStatusTo(polardbxv1polardbx.MonitorStatusUpdating, true))(task)
	case polardbxv1polardbx.MonitorStatusUpdating:
		// Push the new config to the ServiceMonitor and return to Monitoring.
		monitorsteps.UpdateServiceMonitorIfNeeded(task)
		monitorsteps.SyncPolarDBXMonitorSpecToStatus(task)
		monitorsteps.TransferMonitorStatusTo(polardbxv1polardbx.MonitorStatusMonitoring, false)(task)
	}
	return task
}
// reconcile builds the task pipeline for the monitor's current status and
// executes it, logging under a status-tagged logger.
func (r *PolarDBXMonitorReconciler) reconcile(rc *polardbxreconcile.Context,
	polardbxmonitor *polardbxv1.PolarDBXMonitor,
	log logr.Logger) (reconcile.Result, error) {
	statusLog := log.WithValues("status", polardbxmonitor.Status.MonitorStatus)
	task := r.newReconcileTask(rc, polardbxmonitor, statusLog)
	executor := control.NewExecutor(statusLog)
	return executor.Execute(rc, task)
}
// SetupWithManager registers this reconciler with the manager, watching
// PolarDBXMonitor objects with bounded concurrency and a combined
// per-item-backoff + overall-throughput rate limiter.
func (r *PolarDBXMonitorReconciler) SetupWithManager(mgr ctrl.Manager) error {
	// Per-item exponential backoff: 5ms initial, capped at 300s.
	perItem := workqueue.NewItemExponentialFailureRateLimiter(5*time.Millisecond, 300*time.Second)
	// 10 qps, 100 bucket size. This is only for retry speed. It's only the overall factor (not per item).
	overall := &workqueue.BucketRateLimiter{Limiter: rate.NewLimiter(rate.Limit(10), 100)}

	opts := controller.Options{
		MaxConcurrentReconciles: r.MaxConcurrency,
		RateLimiter:             workqueue.NewMaxOfRateLimiter(perItem, overall),
	}

	return ctrl.NewControllerManagedBy(mgr).
		WithOptions(opts).
		For(&polardbxv1.PolarDBXMonitor{}).
		Complete(r)
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.