text stringlengths 1 1.05M |
|---|
#include <stdio.h>
#include <string.h>

/*
 * Reverses src into dst.
 * dst must have room for at least strlen(src) + 1 bytes.
 */
static void reverse_into(const char *src, char *dst)
{
    size_t length = strlen(src);
    for (size_t i = 0; i < length; i++)
    {
        dst[i] = src[length - i - 1];
    }
    dst[length] = '\0';
}

int main(void)
{
    char input_string[100];
    /* Field width 99 prevents scanf from overflowing the 100-byte buffer
     * (the original "%s" had no bound). Also bail out cleanly on EOF so we
     * never reverse an uninitialized buffer. */
    if (scanf("%99s", input_string) != 1)
        return 1;
    char reverse_string[100];
    reverse_into(input_string, reverse_string);
    printf("Reversed string is : %s\n", reverse_string);
    return 0;
}
import React, {useRef, useEffect, useState} from 'react'
import {useInView} from 'react-intersection-observer'
import './styles/Container.css'
function Container({children, assets, getOpacity}){
const [size, setSize] = useState(130)
const [opacity, setOpacity] = useState(0)
const [ref, inView, entry] = useInView({
threshold:0.5
})
useEffect(() => {
setSize(window.innerHeight/window.innerHeight * 130)
},[])
useEffect(() => {
let startPos = window.pageYOffset
const addScroll = () => {
if(inView){
let inViewLength = window.pageYOffset - startPos
setSize(() => {
// window.alert(window.outerHeight)
return window.innerHeight/window.innerHeight * 130 + (inViewLength/(window.innerHeight/8))
})
}
}
if(inView){
window.addEventListener('scroll', addScroll)
setOpacity(() => {
return 1
})
}
else if(!inView){
window.removeEventListener('scroll', addScroll)
setOpacity(() => {
return 0
})
}
return () => {
window.removeEventListener('scroll', addScroll)
}
}, [inView])
const childrenWithData = React.Children.map(children, child => {
return React.cloneElement(child, {
opacity
})
})
return(
<section
ref={ref}
className='container'
style={{
backgroundImage:`url(${assets.background})`,
backgroundPosition:`${assets.backgroundY} ${assets.backgroundX}`,
backgroundSize:`auto ${size}%`
}}>
{childrenWithData}
</section>
)
}
export default Container |
#!/bin/sh
set -e
# Propagate an outbound HTTP proxy to apt. (This image is Debian/Ubuntu based;
# the original appended to /etc/yum.conf, which apt never reads.)
if test -n "${HTTP_PROXY}"; then
echo "Acquire::http::Proxy \"${HTTP_PROXY}\";" >> /etc/apt/apt.conf.d/99proxy
fi
echo "proxy=${HTTP_PROXY}"
apt-get update
dpkg --add-architecture arm64
# arm64 package sources (stray comma in "[arch=,arm64]" removed on the
# deb-src xenial-updates/-security/-backports lines).
echo 'deb-src [arch=arm64] http://ports.ubuntu.com/ xenial main restricted universe multiverse' > /etc/apt/sources.list.d/build.list
echo 'deb-src [arch=arm64] http://ports.ubuntu.com/ xenial-updates main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
echo 'deb-src [arch=arm64] http://ports.ubuntu.com/ xenial-security main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
echo 'deb-src [arch=arm64] http://ports.ubuntu.com/ xenial-backports main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
echo 'deb [arch=arm64] http://ports.ubuntu.com/ xenial main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
echo 'deb [arch=arm64] http://ports.ubuntu.com/ xenial-updates main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
echo 'deb [arch=arm64] http://ports.ubuntu.com/ xenial-security main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
echo 'deb [arch=arm64] http://ports.ubuntu.com/ xenial-backports main restricted universe multiverse' >> /etc/apt/sources.list.d/build.list
cat /etc/apt/sources.list.d/build.list
# Native (i386/amd64) package sources.
echo 'deb [arch=i386,amd64] http://archive.ubuntu.com/ubuntu/ xenial main restricted universe multiverse' > /etc/apt/sources.list
echo 'deb [arch=i386,amd64] http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe multiverse' >> /etc/apt/sources.list
echo 'deb [arch=i386,amd64] http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse' >> /etc/apt/sources.list
echo 'deb-src [arch=i386,amd64] http://archive.ubuntu.com/ubuntu/ xenial main restricted universe multiverse' >> /etc/apt/sources.list
echo 'deb-src [arch=i386,amd64] http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe multiverse' >> /etc/apt/sources.list
echo 'deb-src [arch=i386,amd64] http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse' >> /etc/apt/sources.list
cat /etc/apt/sources.list
apt-get update
#apt-get purge -y libssl-dev
apt-get install -y libssl-dev:arm64
apt-get install -y tar curl bzip2 git cmake automake autoconf \
python-argparse python python-lockfile \
apt-utils apt-transport-https \
debhelper snapcraft libldap2-dev:arm64
# apt-get install -y mc nano
apt-get install -y g++-aarch64-linux-gnu
apt-get install -y libstdc++6:arm64
# enable this container to run arm binaries:
apt-get install -y qemu binfmt-support qemu-user-static
# for dpkg-shlibdebs we need this:
dpkg -r gcc g++ build-essential
cd /usr/bin
ln -s aarch64-linux-gnu-gcc gcc
useradd jenkins -u 1000
echo 'PATH=/opt/arangodb/bin/:${PATH}' >> /etc/bashrc
echo 'PATH=/opt/arangodb/bin/:${PATH}' >> /etc/profile
|
<gh_stars>10-100
//======================================================================
//
// HIGH DYNAMIC RANGE RENDERING DEMONSTRATION
// Written by <NAME>, October 2005
//
//======================================================================
// Operating System Includes
#include "DXUT.h"
#include "DXUTcamera.h"
#include "SDKmisc.h"
// Project Includes
#include "HDRScene.h"
#include "Enumeration.h"
// As defined in the appropriate header, this translation unit is
// wrapped up in it's own C++ namespace:
namespace HDRScene
{
//--------------------------------------------------------------------------------------
// Data Structure Definitions
//--------------------------------------------------------------------------------------

// Vertex with a model-space position and a packed diffuse colour.
// NOTE(review): LitVertex / FVF_LITVERTEX are not referenced anywhere in this
// translation unit's visible code — confirm they are still needed.
struct LitVertex
{
    D3DXVECTOR3 p;  // Position
    DWORD c;        // Packed ARGB diffuse colour
};
static const DWORD FVF_LITVERTEX = D3DFVF_XYZ | D3DFVF_DIFFUSE;

// Pre-transformed (screen-space) vertex used for the GUI quads drawn by
// DrawToScreen().
struct TLVertex
{
    D3DXVECTOR4 p;  // Screen-space position (XYZRHW)
    D3DXVECTOR2 t;  // Texture coordinate
};
static const DWORD FVF_TLVERTEX = D3DFVF_XYZRHW | D3DFVF_TEX1;

//--------------------------------------------------------------------------------------
// Namespace-level variables
//--------------------------------------------------------------------------------------
LPD3DXMESH g_pCubeMesh = NULL;                          // Mesh representing the HDR source in the middle of the scene
LPDIRECT3DPIXELSHADER9 g_pCubePS = NULL;                // The pixel shader for the cube
LPD3DXCONSTANTTABLE g_pCubePSConsts = NULL;             // Interface for setting parameters/constants for the above PS
LPDIRECT3DVERTEXSHADER9 g_pCubeVS = NULL;               // The vertex shader for the cube
LPDIRECT3DVERTEXDECLARATION9 g_pCubeVSDecl = NULL;      // The mapping from VB to VS
LPD3DXCONSTANTTABLE g_pCubeVSConsts = NULL;             // Interface for setting params for the cube rendering
D3DXMATRIXA16 g_mCubeMatrix;                            // The computed world*view*proj transform for the inner cube
LPDIRECT3DTEXTURE9 g_pTexScene = NULL;                  // The main, floating point, render target
D3DFORMAT g_fmtHDR = D3DFMT_UNKNOWN;                    // Enumerated and either set to 128 or 64 bit
LPD3DXMESH g_pOcclusionMesh = NULL;                     // The occlusion mesh surrounding the HDR cube
LPDIRECT3DVERTEXDECLARATION9 g_pOcclusionVSDecl = NULL; // The mapping for the ID3DXMesh
LPDIRECT3DVERTEXSHADER9 g_pOcclusionVS = NULL;          // The shader for drawing the occlusion mesh
LPD3DXCONSTANTTABLE g_pOcclusionVSConsts = NULL;        // Entry point for configuring above shader
D3DXMATRIXA16 g_mOcclusionMatrix;                       // The world*view*proj transform for transforming the POSITIONS
D3DXMATRIXA16 g_mOcclusionNormals;                      // The transpose(inverse(world)) matrix for transforming NORMALS
//--------------------------------------------------------------------------------------
// Function Prototypes
//--------------------------------------------------------------------------------------
HRESULT LoadMesh( WCHAR* strFileName, LPD3DXMESH* ppMesh );
//--------------------------------------------------------------------------------------
// CreateResources( )
//
// DESC:
// This function creates all the necessary resources to render the HDR scene
// to a render target for later use. When this function completes successfully
// rendering can commence. A call to 'DestroyResources()' should be made when
// the application closes.
//
// PARAMS:
// pDevice : The current device that resources should be created with/from
// pDisplayDesc : Describes the back-buffer currently in use, can be useful when
// creating GUI based resources.
//
// NOTES:
// n/a
//--------------------------------------------------------------------------------------
HRESULT CreateResources( IDirect3DDevice9* pDevice, const D3DSURFACE_DESC* pDisplayDesc )
{
    //[ 0 ] DECLARATIONS
    //------------------
    HRESULT hr = S_OK;
    LPD3DXBUFFER pCode; // Container for the compiled HLSL code

    //[ 1 ] DETERMINE FP TEXTURE SUPPORT
    //----------------------------------
    // NOTE: V() is the DXUT macro that stores the call's HRESULT in 'hr',
    // which is why each V() below is followed by an explicit FAILED(hr) check.
    V( HDREnumeration::FindBestHDRFormat( &HDRScene::g_fmtHDR ) );
    if( FAILED( hr ) )
    {
        OutputDebugString( L"HDRScene::CreateResources() - Current hardware does not support HDR rendering!\n" );
        return hr;
    }

    //[ 2 ] CREATE HDR RENDER TARGET
    //------------------------------
    // Single-level (no mips) render target matching the back buffer size,
    // using the floating-point format chosen above.
    V( pDevice->CreateTexture(
       pDisplayDesc->Width, pDisplayDesc->Height,
       1, D3DUSAGE_RENDERTARGET, g_fmtHDR,
       D3DPOOL_DEFAULT, &HDRScene::g_pTexScene, NULL
       ) );
    if( FAILED( hr ) )
    {
        // We couldn't create the texture - lots of possible reasons for this. Best
        // check the D3D debug output for more details.
        OutputDebugString(
            L"HDRScene::CreateResources() - Could not create floating point render target. Examine D3D Debug Output for details.\n" );
        return hr;
    }

    //[ 3 ] CREATE HDR CUBE'S GEOMETRY
    //--------------------------------
    V( LoadMesh( L"misc\\Cube.x", &HDRScene::g_pCubeMesh ) );
    if( FAILED( hr ) )
    {
        // Couldn't load the mesh, could be a file system error...
        OutputDebugString( L"HDRScene::CreateResources() - Could not load 'Cube.x'.\n" );
        return hr;
    }

    //[ 4 ] CREATE HDR CUBE'S PIXEL SHADER
    //------------------------------------
    WCHAR str[MAX_PATH];
    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"Shader Code\\HDRSource.psh" ) );
    V( D3DXCompileShaderFromFile(
       str,
       NULL, NULL,
       "main", // Entry Point found in 'HDRSource.psh'
       "ps_2_0", // Profile to target
       0,
       &pCode,
       NULL,
       &HDRScene::g_pCubePSConsts
       ) );
    if( FAILED( hr ) )
    {
        // Couldn't compile the shader, use the 'compile_shaders.bat' script
        // in the 'Shader Code' folder to get a proper compile breakdown.
        OutputDebugString( L"HDRScene::CreateResources() - Compiling of 'HDRSource.psh' failed!\n" );
        return hr;
    }
    V( pDevice->CreatePixelShader( reinterpret_cast< DWORD* >( pCode->GetBufferPointer() ), &HDRScene::g_pCubePS ) );
    if( FAILED( hr ) )
    {
        // Couldn't turn the compiled shader into an actual, usable, pixel shader!
        OutputDebugString(
            L"HDRScene::CreateResources() : Couldn't create a pixel shader object from 'HDRSource.psh'.\n" );
        pCode->Release();
        return hr;
    }
    pCode->Release();

    // [ 5 ] CREATE THE CUBE'S VERTEX DECL
    //------------------------------------
    D3DVERTEXELEMENT9 cubeVertElems[MAX_FVF_DECL_SIZE];
    HDRScene::g_pCubeMesh->GetDeclaration( cubeVertElems );
    V( pDevice->CreateVertexDeclaration( cubeVertElems, &HDRScene::g_pCubeVSDecl ) );
    if( FAILED( hr ) )
    {
        // Couldn't create the declaration for the loaded mesh..
        OutputDebugString(
            L"HDRScene::CreateResources() - Couldn't create a vertex declaration for the HDR-Cube mesh.\n" );
        return hr;
    }

    // [ 6 ] CREATE THE CUBE'S VERTEX SHADER
    //--------------------------------------
    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"Shader Code\\HDRSource.vsh" ) );
    V( D3DXCompileShaderFromFile(
       str,
       NULL, NULL,
       "main",
       "vs_2_0",
       0,
       &pCode,
       NULL,
       &g_pCubeVSConsts
       ) );
    if( FAILED( hr ) )
    {
        // Couldn't compile the shader, use the 'compile_shaders.bat' script
        // in the 'Shader Code' folder to get a proper compile breakdown.
        OutputDebugString( L"HDRScene::CreateResources() - Compilation of 'HDRSource.vsh' Failed!\n" );
        return hr;
    }
    V( pDevice->CreateVertexShader( reinterpret_cast< DWORD* >( pCode->GetBufferPointer() ), &HDRScene::g_pCubeVS ) );
    if( FAILED( hr ) )
    {
        // Couldn't turn the compiled shader into an actual, usable, vertex shader!
        OutputDebugString(
            L"HDRScene::CreateResources() - Could not create a VS object from the compiled 'HDRSource.vsh' code.\n" );
        pCode->Release();
        return hr;
    }
    pCode->Release();

    //[ 7 ] LOAD THE OCCLUSION MESH
    //-----------------------------
    // (Steps renumbered: the original comments reused [5]/[6] twice.)
    V( LoadMesh( L"misc\\OcclusionBox.x", &HDRScene::g_pOcclusionMesh ) );
    if( FAILED( hr ) )
    {
        // Couldn't load the mesh, could be a file system error...
        OutputDebugString( L"HDRScene::CreateResources() - Could not load 'OcclusionBox.x'.\n" );
        return hr;
    }

    //[ 8 ] CREATE THE MESH VERTEX DECLARATION
    //----------------------------------------
    D3DVERTEXELEMENT9 vertElems[MAX_FVF_DECL_SIZE];
    HDRScene::g_pOcclusionMesh->GetDeclaration( vertElems );
    V( pDevice->CreateVertexDeclaration( vertElems, &HDRScene::g_pOcclusionVSDecl ) );
    if( FAILED( hr ) )
    {
        // Couldn't create the declaration for the loaded mesh..
        OutputDebugString(
            L"HDRScene::CreateResources() - Couldn't create a vertex declaration for the occlusion mesh.\n" );
        return hr;
    }

    //[ 9 ] CREATE THE OCCLUSION VERTEX SHADER
    //----------------------------------------
    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"Shader Code\\OcclusionMesh.vsh" ) );
    V( D3DXCompileShaderFromFile(
       str,
       NULL, NULL,
       "main",
       "vs_2_0",
       0,
       &pCode,
       NULL,
       &HDRScene::g_pOcclusionVSConsts
       ) );
    if( FAILED( hr ) )
    {
        // Couldn't compile the shader, use the 'compile_shaders.bat' script
        // in the 'Shader Code' folder to get a proper compile breakdown.
        OutputDebugString( L"HDRScene::CreateResources() - Compilation of 'OcclusionMesh.vsh' Failed!\n" );
        return hr;
    }
    V( pDevice->CreateVertexShader( reinterpret_cast< DWORD* >( pCode->GetBufferPointer() ),
                                    &HDRScene::g_pOcclusionVS ) );
    if( FAILED( hr ) )
    {
        // Couldn't turn the compiled shader into an actual, usable, vertex shader!
        OutputDebugString(
            L"HDRScene::CreateResources() - Could not create a VS object from the compiled 'OcclusionMesh.vsh' code.\n"
            );
        pCode->Release();
        return hr;
    }
    pCode->Release();

    //[ 10 ] RETURN SUCCESS IF WE GOT THIS FAR
    //----------------------------------------
    return hr;
}
//--------------------------------------------------------------------------------------
// DestroyResources( )
//
// DESC:
// Makes sure that the resources acquired in CreateResources() are cleanly
// destroyed to avoid any errors and/or memory leaks.
//
//--------------------------------------------------------------------------------------
HRESULT DestroyResources()
{
    // Release everything acquired by CreateResources(), grouped by purpose
    // and in reverse order of acquisition. SAFE_RELEASE() tolerates NULLs,
    // so this is safe even after a partial CreateResources() failure.

    // Occlusion-mesh resources
    SAFE_RELEASE( HDRScene::g_pOcclusionVSConsts );
    SAFE_RELEASE( HDRScene::g_pOcclusionVS );
    SAFE_RELEASE( HDRScene::g_pOcclusionVSDecl );
    SAFE_RELEASE( HDRScene::g_pOcclusionMesh );

    // Cube shader resources
    SAFE_RELEASE( HDRScene::g_pCubeVSDecl );
    SAFE_RELEASE( HDRScene::g_pCubeVSConsts );
    SAFE_RELEASE( HDRScene::g_pCubeVS );
    SAFE_RELEASE( HDRScene::g_pCubePSConsts );
    SAFE_RELEASE( HDRScene::g_pCubePS );

    // Render target and cube geometry
    SAFE_RELEASE( HDRScene::g_pTexScene );
    SAFE_RELEASE( HDRScene::g_pCubeMesh );

    return S_OK;
}
//--------------------------------------------------------------------------------------
// CalculateResourceUsage( )
//
// DESC:
// Based on the known resources this function attempts to make an accurate
// measurement of how much VRAM is being used by this part of the application.
//
// NOTES:
// Whilst the return value should be pretty accurate, it shouldn't be relied
// on due to the way drivers/hardware can allocate memory.
//
// Only the first level of the render target is checked as there should, by
// definition, be no mip levels.
//
//--------------------------------------------------------------------------------------
DWORD CalculateResourceUsage()
{
    // [ 0 ] DECLARATIONS
    //-------------------
    DWORD usage = 0;

    // [ 1 ] RENDER TARGET SIZE
    //-------------------------
    // 8 bytes/pixel for the 64-bit A16B16G16R16F format, otherwise 16
    // bytes/pixel (the 128-bit float format).
    // NOTE(review): assumes CreateResources() succeeded — g_pTexScene is not
    // NULL-checked before being dereferenced here.
    D3DSURFACE_DESC texDesc;
    HDRScene::g_pTexScene->GetLevelDesc( 0, &texDesc );
    usage += ( ( texDesc.Width * texDesc.Height ) * ( HDRScene::g_fmtHDR == D3DFMT_A16B16G16R16F ? 8 : 16 ) );

    // [ 2 ] OCCLUSION MESH SIZE
    //--------------------------
    // Vertex buffer bytes plus index buffer bytes (3 indices per face;
    // 4 bytes each when the mesh was created with 32-bit indices, else 2).
    usage += ( HDRScene::g_pOcclusionMesh->GetNumBytesPerVertex() * HDRScene::g_pOcclusionMesh->GetNumVertices() );
    int index_size = ( ( HDRScene::g_pOcclusionMesh->GetOptions() & D3DXMESH_32BIT ) != 0 ) ? 4 : 2;
    usage += ( index_size * 3 * HDRScene::g_pOcclusionMesh->GetNumFaces() );
    return usage;
}
//--------------------------------------------------------------------------------------
// RenderScene( )
//
// DESC:
// This is the core function for this unit - when it succesfully completes the
// render target (obtainable via GetOutputTexture) will be a completed scene
// that, crucially, contains values outside the LDR (0..1) range ready to be
// fed into the various stages of the HDR post-processing pipeline.
//
// PARAMS:
// pDevice : The device that is currently being used for rendering
//
// NOTES:
// For future modifications, this is the entry point that should be used if
// you require a different image/scene to be displayed on the screen.
//
// This function assumes that the device is already in a ready-to-render
// state (e.g. BeginScene() has been called).
//
//--------------------------------------------------------------------------------------
HRESULT RenderScene( IDirect3DDevice9* pDevice )
{
    // [ 0 ] CONFIGURE GEOMETRY INPUTS
    //--------------------------------
    pDevice->SetVertexShader( HDRScene::g_pCubeVS );
    pDevice->SetVertexDeclaration( HDRScene::g_pCubeVSDecl );
    HDRScene::g_pCubeVSConsts->SetMatrix( pDevice, "matWorldViewProj", &HDRScene::g_mCubeMatrix );

    // [ 1 ] PIXEL SHADER ( + PARAMS )
    //--------------------------------
    pDevice->SetPixelShader( HDRScene::g_pCubePS );
    HDRScene::g_pCubePSConsts->SetFloat( pDevice, "HDRScalar", 5.0f );
    pDevice->SetTexture( 0, NULL );

    // [ 2 ] GET PREVIOUS RENDER TARGET
    //---------------------------------
    // GetRenderTarget() AddRef()s the surface, so every exit path below must
    // release pPrevSurf (the original leaked it on the two failure paths).
    LPDIRECT3DSURFACE9 pPrevSurf = NULL;
    if( FAILED( pDevice->GetRenderTarget( 0, &pPrevSurf ) ) )
    {
        // Couldn't retrieve the current render target (for restoration later on)
        OutputDebugString( L"HDRScene::RenderScene() - Could not retrieve a reference to the previous render target.\n"
                           );
        return E_FAIL;
    }

    // [ 3 ] SET NEW RENDER TARGET
    //----------------------------
    LPDIRECT3DSURFACE9 pRTSurf = NULL;
    if( FAILED( HDRScene::g_pTexScene->GetSurfaceLevel( 0, &pRTSurf ) ) )
    {
        // Bad news! couldn't get a reference to the HDR render target. Most
        // Likely due to a failed/corrupt resource creation stage.
        OutputDebugString( L"HDRScene::RenderScene() - Could not get the top level surface for the HDR render target\n"
                           );
        SAFE_RELEASE( pPrevSurf );  // BUGFIX: was leaked on this path
        return E_FAIL;
    }
    if( FAILED( pDevice->SetRenderTarget( 0, pRTSurf ) ) )
    {
        // For whatever reason we can't set the HDR texture as the
        // the render target...
        OutputDebugString( L"HDRScene::RenderScene() - Could not set the HDR texture as a new render target.\n" );
        SAFE_RELEASE( pRTSurf );    // BUGFIX: was leaked on this path
        SAFE_RELEASE( pPrevSurf );  // BUGFIX: was leaked on this path
        return E_FAIL;
    }

    // It is worth noting that the colour used to clear the render target will
    // be considered for the luminance measurement stage.
    pDevice->Clear( 0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_ARGB( 0, 64, 64, 192 ), 1.0f, 0 );

    // [ 4 ] RENDER THE HDR CUBE
    //--------------------------
    HDRScene::g_pCubeMesh->DrawSubset( 0 );

    // [ 5 ] DRAW THE OCCLUSION CUBE
    //------------------------------
    pDevice->SetPixelShader( NULL );
    pDevice->SetVertexDeclaration( HDRScene::g_pOcclusionVSDecl );
    pDevice->SetVertexShader( HDRScene::g_pOcclusionVS );
    HDRScene::g_pOcclusionVSConsts->SetMatrix( pDevice, "matWorldViewProj", &HDRScene::g_mOcclusionMatrix );
    HDRScene::g_pOcclusionVSConsts->SetMatrix( pDevice, "matInvTPoseWorld", &HDRScene::g_mOcclusionNormals );

    // Due to the way that the mesh was authored, there is
    // only (and always) going to be 1 subset/group to render.
    HDRScene::g_pOcclusionMesh->DrawSubset( 0 );

    // [ 6 ] RESTORE PREVIOUS RENDER TARGET
    //-------------------------------------
    pDevice->SetRenderTarget( 0, pPrevSurf );

    // [ 7 ] RELEASE TEMPORARY REFERENCES
    //-----------------------------------
    SAFE_RELEASE( pRTSurf );
    SAFE_RELEASE( pPrevSurf );
    return S_OK;
}
//--------------------------------------------------------------------------------------
// UpdateScene( )
//
// DESC:
// An entry point for updating various parameters and internal data on a
// per-frame basis.
//
// PARAMS:
// pDevice : The currently active device
// fTime : The number of milliseconds elapsed since the start of execution
// pCamera : The arcball based camera that the end-user controls
//
// NOTES:
// n/a
//
//--------------------------------------------------------------------------------------
HRESULT UpdateScene( IDirect3DDevice9* pDevice, float fTime, CModelViewerCamera* pCamera )
{
    // NOTE(review): pDevice and fTime are unused in this implementation;
    // presumably kept for signature symmetry with other per-frame entry
    // points — confirm before changing.

    // The HDR cube in the middle of the scene never changes position in world
    // space, but must respond to view changes.
    HDRScene::g_mCubeMatrix = ( *pCamera->GetViewMatrix() ) * ( *pCamera->GetProjMatrix() );

    D3DXMATRIXA16 matWorld;
    D3DXMATRIXA16 matTemp;

    // The occlusion cube must be slightly larger than the inner HDR cube, so
    // a scaling constant is applied to the world matrix.
    D3DXMatrixIdentity( &matTemp );
    D3DXMatrixScaling( &matTemp, 2.5f, 2.5f, 2.5f );
    D3DXMatrixMultiply( &matWorld, &matTemp, pCamera->GetWorldMatrix() ); //&matWorld );

    // The occlusion cube contains lighting normals, so for the shader to operate
    // on them correctly, the inverse transpose of the world matrix is needed.
    D3DXMatrixIdentity( &matTemp );
    D3DXMatrixInverse( &matTemp, NULL, &matWorld );
    D3DXMatrixTranspose( &HDRScene::g_mOcclusionNormals, &matTemp );

    // Full position transform for the occlusion cube: world * view * proj.
    HDRScene::g_mOcclusionMatrix = matWorld * ( *pCamera->GetViewMatrix() ) * ( *pCamera->GetProjMatrix() );
    return S_OK;
}
//--------------------------------------------------------------------------------------
// GetOutputTexture( )
//
// DESC:
// The results of this modules rendering are used as inputs to several
// other parts of the rendering pipeline. As such it is necessary to obtain
// a reference to the internally managed texture.
//
// PARAMS:
// pTexture : Should be NULL on entry, will be a valid reference on exit
//
// NOTES:
// The code that requests the reference is responsible for releasing their
// copy of the texture as soon as they are finished using it.
//
//--------------------------------------------------------------------------------------
//
// Hands out an AddRef()'d reference to the internal HDR render target.
// The caller owns the returned reference and must Release() it.
//
HRESULT GetOutputTexture( IDirect3DTexture9** pTexture )
{
    // BUGFIX: guard against a NULL out-parameter (would have crashed in
    // SAFE_RELEASE / AddRef below).
    if( pTexture == NULL )
        return E_INVALIDARG;

    // [ 0 ] ERASE ANY DATA IN THE INPUT
    //----------------------------------
    SAFE_RELEASE( *pTexture );

    // BUGFIX: if CreateResources() has not run (or failed), there is no
    // texture to hand out — previously this called AddRef() on NULL.
    if( HDRScene::g_pTexScene == NULL )
        return E_FAIL;

    // [ 1 ] COPY THE PRIVATE REFERENCE
    //---------------------------------
    *pTexture = HDRScene::g_pTexScene;

    // [ 2 ] INCREMENT THE REFERENCE COUNT..
    //--------------------------------------
    ( *pTexture )->AddRef();
    return S_OK;
}
//--------------------------------------------------------------------------------------
// DrawToScreen( )
//
// DESC:
// Part of the GUI in this application displays the "raw" HDR data as part
// of the process. This function places the texture, created by this
// module, in the correct place on the screen.
//
// PARAMS:
// pDevice : The device to be drawn to.
// pFont : The font to use when annotating the display
// pTextSprite : Used with the font for more efficient rendering
// pArrowTex : Stores the 4 (up/down/left/right) icons used in the GUI
//
// NOTES:
// n/a
//
//--------------------------------------------------------------------------------------
HRESULT DrawToScreen( IDirect3DDevice9* pDevice, ID3DXFont* pFont, ID3DXSprite* pTextSprite,
                      IDirect3DTexture9* pArrowTex )
{
    // [ 0 ] GATHER NECESSARY INFORMATION
    //-----------------------------------
    // Query the current render target only to learn the output dimensions.
    LPDIRECT3DSURFACE9 pSurf = NULL;
    D3DSURFACE_DESC d;
    if( FAILED( pDevice->GetRenderTarget( 0, &pSurf ) ) )
    {
        // Couldn't get the current render target!
        OutputDebugString( L"HDRScene::DrawToScreen() - Could not get current render target to extract dimensions.\n"
                           );
        return E_FAIL;
    }
    pSurf->GetDesc( &d );
    SAFE_RELEASE( pSurf );

    // Cache the dimensions as floats for later use.
    // NOTE(review): the 48/36 margins and divide-by-4 presumably describe the
    // GUI's fixed 4-column cell layout shared with the other pipeline stages —
    // confirm against the sibling modules before changing.
    float fCellWidth = ( static_cast< float >( d.Width ) - 48.0f ) / 4.0f;
    float fCellHeight = ( static_cast< float >( d.Height ) - 36.0f ) / 4.0f;
    CDXUTTextHelper txtHelper( pFont, pTextSprite, 12 );
    txtHelper.SetForegroundColor( D3DXCOLOR( 1.0f, 0.5f, 0.0f, 1.0f ) );

    // [ 1 ] CREATE TILE GEOMETRY
    //---------------------------
    // Pre-transformed (XYZRHW) quad, drawn as a 2-triangle strip, showing the
    // HDR render target in this module's cell of the GUI.
    HDRScene::TLVertex v[4];
    v[0].p = D3DXVECTOR4( 0.0f, fCellHeight + 16.0f, 0.0f, 1.0f );
    v[1].p = D3DXVECTOR4( fCellWidth, fCellHeight + 16.0f, 0.0f, 1.0f );
    v[2].p = D3DXVECTOR4( 0.0f, ( 2.0f * fCellHeight ) + 16.0f, 0.0f, 1.0f );
    v[3].p = D3DXVECTOR4( fCellWidth, ( 2.0f * fCellHeight ) + 16.0f, 0.0f, 1.0f );
    v[0].t = D3DXVECTOR2( 0.0f, 0.0f );
    v[1].t = D3DXVECTOR2( 1.0f, 0.0f );
    v[2].t = D3DXVECTOR2( 0.0f, 1.0f );
    v[3].t = D3DXVECTOR2( 1.0f, 1.0f );

    // [ 2 ] DISPLAY TILE ON SCREEN
    //-----------------------------
    pDevice->SetVertexShader( NULL );
    pDevice->SetFVF( HDRScene::FVF_TLVERTEX );
    pDevice->SetTexture( 0, HDRScene::g_pTexScene );
    pDevice->DrawPrimitiveUP( D3DPT_TRIANGLESTRIP, 2, v, sizeof( HDRScene::TLVertex ) );

    // [ 3 ] RENDER CONNECTING ARROWS
    //-------------------------------
    // pArrowTex packs the 4 (up/down/left/right) icons side by side, hence
    // the 0.25-wide texture-coordinate windows selecting one icon per quad.
    pDevice->SetTexture( 0, pArrowTex );
    v[0].p = D3DXVECTOR4( ( fCellWidth / 2.0f ) - 8.0f, fCellHeight, 0.0f, 1.0f );
    v[1].p = D3DXVECTOR4( ( fCellWidth / 2.0f ) + 8.0f, fCellHeight, 0.0f, 1.0f );
    v[2].p = D3DXVECTOR4( ( fCellWidth / 2.0f ) - 8.0f, fCellHeight + 16.0f, 0.0f, 1.0f );
    v[3].p = D3DXVECTOR4( ( fCellWidth / 2.0f ) + 8.0f, fCellHeight + 16.0f, 0.0f, 1.0f );
    v[0].t = D3DXVECTOR2( 0.0f, 0.0f );
    v[1].t = D3DXVECTOR2( 0.25f, 0.0f );
    v[2].t = D3DXVECTOR2( 0.0f, 1.0f );
    v[3].t = D3DXVECTOR2( 0.25f, 1.0f );
    pDevice->DrawPrimitiveUP( D3DPT_TRIANGLESTRIP, 2, v, sizeof( HDRScene::TLVertex ) );
    v[0].p = D3DXVECTOR4( fCellWidth, fCellHeight + 8.0f + ( fCellHeight / 2.0f ), 0.0f, 1.0f );
    v[1].p = D3DXVECTOR4( fCellWidth + 16.0f, fCellHeight + 8.0f + ( fCellHeight / 2.0f ), 0.0f, 1.0f );
    v[2].p = D3DXVECTOR4( fCellWidth, fCellHeight + 24.0f + ( fCellHeight / 2.0f ), 0.0f, 1.0f );
    v[3].p = D3DXVECTOR4( fCellWidth + 16.0f, fCellHeight + 24.0f + ( fCellHeight / 2.0f ), 0.0f, 1.0f );
    v[0].t = D3DXVECTOR2( 0.25f, 0.0f );
    v[1].t = D3DXVECTOR2( 0.50f, 0.0f );
    v[2].t = D3DXVECTOR2( 0.25f, 1.0f );
    v[3].t = D3DXVECTOR2( 0.50f, 1.0f );
    pDevice->DrawPrimitiveUP( D3DPT_TRIANGLESTRIP, 2, v, sizeof( HDRScene::TLVertex ) );
    float fLumCellSize = ( ( static_cast< float >( d.Height ) - ( ( 2.0f * fCellHeight ) + 32.0f ) ) - 32.0f ) / 3.0f;
    float fLumStartX = ( fCellWidth + 16.0f ) - ( ( 2.0f * fLumCellSize ) + 32.0f );
    v[0].p = D3DXVECTOR4( fLumStartX + ( fLumCellSize / 2.0f ) - 8.0f, ( 2.0f * fCellHeight ) + 16.0f, 0.0f, 1.0f );
    v[1].p = D3DXVECTOR4( fLumStartX + ( fLumCellSize / 2.0f ) + 8.0f, ( 2.0f * fCellHeight ) + 16.0f, 0.0f, 1.0f );
    v[2].p = D3DXVECTOR4( fLumStartX + ( fLumCellSize / 2.0f ) - 8.0f, ( 2.0f * fCellHeight ) + 32.0f, 0.0f, 1.0f );
    v[3].p = D3DXVECTOR4( fLumStartX + ( fLumCellSize / 2.0f ) + 8.0f, ( 2.0f * fCellHeight ) + 32.0f, 0.0f, 1.0f );
    v[0].t = D3DXVECTOR2( 0.50f, 0.0f );
    v[1].t = D3DXVECTOR2( 0.75f, 0.0f );
    v[2].t = D3DXVECTOR2( 0.50f, 1.0f );
    v[3].t = D3DXVECTOR2( 0.75f, 1.0f );
    pDevice->DrawPrimitiveUP( D3DPT_TRIANGLESTRIP, 2, v, sizeof( HDRScene::TLVertex ) );

    // [ 4 ] ANNOTATE CELL
    //--------------------
    // Label the tile and report the render target's actual dimensions.
    txtHelper.Begin();
    {
        txtHelper.SetInsertionPos( 5, static_cast< int >( ( 2.0f * fCellHeight ) + 16.0f - 25.0f ) );
        txtHelper.DrawTextLine( L"Source HDR Frame" );
        D3DSURFACE_DESC d2;
        HDRScene::g_pTexScene->GetLevelDesc( 0, &d2 );
        WCHAR str[100];
        swprintf_s( str, 100, L"%dx%d", d2.Width, d2.Height );
        txtHelper.DrawTextLine( str );
    }
    txtHelper.End();
    return S_OK;
}
//--------------------------------------------------------------------------------------
// LoadMesh( )
//
// DESC:
// A utility method borrowed from the DXSDK samples. Loads a .X mesh into
// an ID3DXMesh object for rendering.
//
//--------------------------------------------------------------------------------------
//
// Loads a .X mesh, ensures it has normals (cloning + computing them if
// necessary), and optimizes it for vertex-cache locality. On success the
// caller owns *ppMesh and must Release() it.
//
HRESULT LoadMesh( WCHAR* strFileName, LPD3DXMESH* ppMesh )
{
    LPD3DXMESH pMesh = NULL;
    WCHAR str[MAX_PATH];
    HRESULT hr = S_OK;

    if( ppMesh == NULL )
        return E_INVALIDARG;

    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, strFileName ) );
    hr = D3DXLoadMeshFromX( str, D3DXMESH_MANAGED,
                            DXUTGetD3D9Device(), NULL, NULL, NULL, NULL, &pMesh );
    if( FAILED( hr ) || ( pMesh == NULL ) )
        return hr;

    DWORD* rgdwAdjacency = NULL;

    // Make sure there are normals which are required for lighting
    if( !( pMesh->GetFVF() & D3DFVF_NORMAL ) )
    {
        LPD3DXMESH pTempMesh;
        hr = pMesh->CloneMeshFVF( pMesh->GetOptions(),
                                  pMesh->GetFVF() | D3DFVF_NORMAL,
                                  DXUTGetD3D9Device(), &pTempMesh );
        if( FAILED( hr ) )
        {
            SAFE_RELEASE( pMesh );  // BUGFIX: the loaded mesh leaked on this path
            return hr;
        }
        D3DXComputeNormals( pTempMesh, NULL );
        SAFE_RELEASE( pMesh );
        pMesh = pTempMesh;
    }

    // Optimize the mesh to make it fast for the user's graphics card
    rgdwAdjacency = new DWORD[pMesh->GetNumFaces() * 3];
    if( rgdwAdjacency == NULL )
    {
        // NOTE: operator new normally throws rather than returning NULL;
        // this check only matters for toolchains using non-throwing new.
        SAFE_RELEASE( pMesh );  // BUGFIX: the mesh leaked on this path
        return E_OUTOFMEMORY;
    }
    V( pMesh->GenerateAdjacency( 1e-6f, rgdwAdjacency ) );
    pMesh->OptimizeInplace( D3DXMESHOPT_VERTEXCACHE, rgdwAdjacency, NULL, NULL, NULL );
    delete []rgdwAdjacency;

    *ppMesh = pMesh;
    return hr;
}
}
;
|
<gh_stars>100-1000
# MicroPython-only test: uctypes is not part of CPython's standard library,
# so the test is skipped wherever the import fails.
try:
    import uctypes
except ImportError:
    print("SKIP")
    raise SystemExit

# Both fields start at byte offset 0, overlaying the same 8-byte buffer.
desc = {
    "f32": uctypes.FLOAT32 | 0,
    "f64": uctypes.FLOAT64 | 0,
}
data = bytearray(8)
S = uctypes.struct(uctypes.addressof(data), desc, uctypes.NATIVE)

# Round-trip a value through the 32-bit field; printed at reduced precision
# because float32 cannot represent 12.34 exactly.
S.f32 = 12.34
print('%.4f' % S.f32)
# Round-trip the same value through the 64-bit field.
S.f64 = 12.34
print('%.4f' % S.f64)
|
import utils
import torch
from torch.utils.data import (
Dataset, DataLoader as DataLoaderBase
)
from librosa.core import load
from natsort import natsorted
import numpy as np
from os import listdir
from os.path import join
def sin_wave_data(batch_size, timesteps):
    """Generate a batch of amplitude-modulated sine waves with light noise.

    Each row is a slow sine (random frequency factor in [1, 2)) multiplied by
    a fast sine (factor in [20, 40)), plus uniform noise in [-0.001, 0.001).

    Returns a float32 array of shape (batch_size, timesteps).
    """
    slow_factor = np.random.uniform(size=(batch_size,))
    fast_factor = np.random.uniform(size=(batch_size,))
    t = np.arange(0, np.pi, np.pi/timesteps)
    slow_phase = (slow_factor + 1)[:, None]*t[None, :]
    fast_phase = 20.*(fast_factor + 1)[:, None]*t[None, :]
    jitter = np.random.uniform(low=-0.001, high=0.001, size=fast_phase.shape)
    waves = np.sin(fast_phase)*np.sin(slow_phase) + jitter
    return waves.astype('float32')
class FolderDataset(Dataset):
    """Dataset yielding one whole audio waveform per index.

    Either reads audio files from ``path`` (naturally sorted, then sliced to
    the ``[ratio_min, ratio_max)`` fraction of the file list, e.g. for
    train/validation splits) or, when ``toy_sin_wave`` is set, synthesises
    amplitude-modulated sine waves via ``sin_wave_data``.
    """

    # Number of synthetic samples reported by __len__ in toy mode.
    toy_data_count = 1024
    # Length of each synthetic sample: 8 seconds at a 16 kHz sample rate.
    toy_seq_len = 16000 * 8

    def __init__(self, path=None, overlap_len=64, q_levels=0,
                 ratio_min=0, ratio_max=1, toy_sin_wave = False):
        super().__init__()
        self.overlap_len = overlap_len
        # NOTE(review): q_levels is stored but never read in this class —
        # presumably consumed by callers; confirm before removing.
        self.q_levels = q_levels
        self.toy_sin_wave = toy_sin_wave
        if(toy_sin_wave == False):
            # Natural sort keeps e.g. "2.wav" before "10.wav".
            file_names = natsorted(
                [join(path, file_name) for file_name in listdir(path)]
            )
            self.file_names = file_names[
                int(ratio_min * len(file_names)) : int(ratio_max * len(file_names))
            ]

    def __getitem__(self, index):
        if(self.toy_sin_wave == False):
            # sr=None keeps the file's native sample rate; assumes load()
            # returns a float mono numpy array — TODO confirm against the
            # librosa version in use.
            (seq, _) = load(self.file_names[index], sr=None, mono=True)
            # print(self.file_names[index])
            # Prepend overlap_len zeros so the very first sample has history.
            return torch.cat([
                torch.zeros(self.overlap_len),
                torch.from_numpy(seq)
            ])
        else:
            # One synthetic waveform, flattened to 1-D.
            return torch.from_numpy(
                sin_wave_data(1, self.toy_seq_len + self.overlap_len).reshape(-1,)
            )

    def __len__(self):
        if(self.toy_sin_wave == True):
            return self.toy_data_count
        else:
            return len(self.file_names)
class DataLoader(DataLoaderBase):
    """DataLoader that slices each batch of whole waveforms into overlapping
    training chunks of ``seq_len`` samples plus ``overlap_len`` of history."""

    def __init__(self, dataset, batch_size, seq_len, overlap_len,
                 *args, **kwargs):
        super().__init__(dataset, batch_size, *args, **kwargs)
        self.seq_len = seq_len
        self.overlap_len = overlap_len

    def __iter__(self):
        for batch in super().__iter__():
            (batch_size, n_samples) = batch.size()
            # True only for the first chunk of each waveform, so consumers can
            # reset any recurrent state at waveform boundaries.
            reset = True
            for seq_begin in range(self.overlap_len, n_samples, self.seq_len):
                from_index = seq_begin - self.overlap_len
                to_index = seq_begin + self.seq_len
                sequences = batch[:, from_index : to_index]
                # NOTE(review): input_sequences is computed but unused — the
                # full `sequences` (including the overlap) is yielded instead,
                # per the commented-out yield below. Confirm which variant the
                # training loop expects.
                input_sequences = sequences[:, : -1]
                target_sequences = sequences[:, self.overlap_len :].contiguous()
                # yield (input_sequences, reset, target_sequences)
                yield (sequences, reset, target_sequences)
                reset = False

    def __len__(self):
        # The number of chunks depends on per-item waveform lengths, which
        # are not known up front; deliberately unsupported.
        raise NotImplementedError()
|
<gh_stars>1-10
package net.community.chest.util.logging.format;
import net.community.chest.util.logging.LogLevelWrapper;
/**
* Copyright 2007 as per GPLv2
* @author <NAME>.
* @since Jun 27, 2007 1:30:44 PM
*/
public class MessageTextFormatter extends LogMsgComponentFormatter<String> {
    public MessageTextFormatter ()
    {
        super(MESSAGE);
    }
    /*
     * Flattens the message onto a single line: trims surrounding whitespace
     * and replaces CR/LF characters with spaces. Returns null for a null,
     * empty, or all-whitespace input.
     * @see net.community.chest.util.logging.format.LogMsgComponentFormatter#formatValue(java.lang.Object)
     */
    @Override
    public String formatValue (final String value)
    {
        if ((value == null) || (value.length() <= 0))
            return null;

        final String trimmed = value.trim();
        if (trimmed.length() <= 0)
            return null;

        // Collapse line breaks so the message occupies exactly one log line.
        return trimmed.replace('\r', ' ').replace('\n', ' ');
    }
    /*
     * @see net.community.chest.util.logging.format.LogMsgComponentFormatter#format(java.lang.Thread, long, java.lang.Class, java.util.logging.Level, java.lang.Object, java.lang.String, java.lang.Throwable)
     */
    @Override
    public String format (Thread th, long logTime, Class<?> logClass, LogLevelWrapper l, Object ctx, String msg, Throwable t)
    {
        return formatValue(msg);
    }
}
|
# Prompt the user for two strings and print them joined together.
prompts = ["Enter the first string:", "Enter the second string:"]
first, second = prompts.map do |prompt|
  puts prompt
  gets.chomp
end
puts "The concatenated string is: #{first}#{second}"
#!/bin/bash
# this script was created by rshmelev@gmail.com
#
# you should have gox and gom installed before running this:
# #not important for go1.5 - go get github.com/mitchellh/gox
# go get github.com/mattn/gom
# ... i'm not sure about gom now... govendor is what should be in place
# go get github.com/kardianos/govendor
# go get github.com/alecthomas/gometalinter && gometalinter --install --update

# --- user-editable configuration --------------------------------------------
# edit this
# sample: DEFAULT_OSARCH="windows/386 windows/amd64 linux/amd64 darwin/amd64"
# sample: FILENAME_SUFFIX="-{{.OS}}-{{.Arch}}"
# sample: APPNAME="default"
AUTHOR_LONG="_________AUTHORLONG__________"
AUTHOR_SHORT="_________AUTHORSHORT__________"
SITEURL="_________SITE__________"
DEFAULT_OSARCH="linux/amd64"
APPNAME="default"      # "default" means: use the current directory name
BUILD_IN_PARALLEL=true
TRIM_GO_PREFIX=true    # strip a leading "go-" from the app/file name
AUTHOR_PREFIX="${AUTHOR_SHORT}-"
FILENAME_SUFFIX="-{{.OS}}"
# Main build pipeline: derive names, gather git metadata, generate the main
# file, lint/format, refresh Gomfile, compute dependency info and build.
work() {
    [ -z "${AUTHOR_SHORT}" ] && AUTHOR_PREFIX=
    [ "$APPNAME" == "default" ] && APPNAME="$(basename $(pwd))"
    BINDIR="./bin"
    # first CLI argument overrides the configured os/arch list
    BUILDOSARCH=${1-${DEFAULT_OSARCH}}
    # NOTE(review): FILENAME is trimmed here before it is ever assigned (it is
    # only set a few lines below) -- the FILENAME trim looks like a no-op.
    [ "$TRIM_GO_PREFIX" == "true" ] && FILENAME=${FILENAME#go-*} && APPNAME=${APPNAME#go-*}
    APPNAME="${AUTHOR_PREFIX}${APPNAME}"
    FILENAME="${APPNAME}${FILENAME_SUFFIX}"
    [ ! -z "${BUILD_OPTION}" ] && FILENAME="debug-${FILENAME}"
    mkdir -p "$BINDIR"
    # calculating
    v=`go version`
    goversion=${v#*go version }
    buildtime=`date -u "+%Y-%m-%d %H:%M:%S"`
    # collect revision/branch/tag and locally modified sources from git
    if [ -d .git ] ; then
        rev=`git rev-parse HEAD 2>/dev/null`
        if [ ! "$rev" == "HEAD" ]; then
            rev=`git rev-parse HEAD`
            branch=`git rev-parse --abbrev-ref HEAD`
            gittag=`git describe --tags --exact-match 2>/dev/null`
            modifiedfiles="$(git --no-pager diff --name-only --ignore-all-space)"
            modifiedfiles="$(echo "${modifiedfiles}" | grep -v main-autogenerated.go )"
            modifiedfiles="$(echo "${modifiedfiles}" | grep -v Gomfile )"
            modsrc="$(process_modified_files "${modifiedfiles}")"
        fi
    fi
    create_main_go
    # gotype is not working well for some reason :(
    echoo "running metalinter"
    gometalinter ./code/... | grep -v gotype
    echoo "running go fmt ./code/..."
    go fmt ./code/...
    echoo "updating Gomfile"
    [ -f Gomfile ] && rm Gomfile
    gom gen gomfile
    # drop the project's own package from its dependency manifest
    gomfile="$(cat Gomfile | grep -v `get_current_package`)"
    echo "$gomfile" > Gomfile
    echoo "calculating dependencies"
    calc_dependencies
    echoo "building"
    start=`date +%s`
    build_all
    end=`date +%s`
    echo "...finished in $((end-start)) seconds"
}
#-------------------------------------------------------
# Print a progress message preceded by a blank line.
echoo() {
    echo -e "\n... $@"
}
# Collect every non-stdlib Go dependency of the current package and record its
# git state as "<import-path>=<dirty|clean>/<branch>/<rev>/<commit-date>"
# (or "<import-path>=unknown" when not under version control). The result is
# stored in the global ``projdeps`` as a ';'-separated list.
calc_dependencies() {
    d="$(go list -f '{{join .Deps "\n"}}' | xargs go list -f '{{if not .Standard}}{{.ImportPath}}{{end}}')"
    d="$(echo "$d" | grep -v `get_current_package`/code )"
    # flatten newlines to ';' so the list can be walked with ${var%%\;*}
    d="$(echo "${d}" | tr '\n\r' ';')"
    thisdir=`pwd`
    var=${d}
    res=""
    while [ "$var" ] ;do
        iter=${var%%\;*}
        libpath="${GOPATH}/src/$iter"
        # look for a .git dir at the package or up to two levels above it
        if [ -d "$libpath/.git" ] || [ -d "$libpath/../.git" ] || [ -d "$libpath/../../.git" ] ; then
            cd "$libpath"
            librev=`git rev-parse HEAD`
            libbranch=`git rev-parse --abbrev-ref HEAD`
            libmod=`git --no-pager show -s --format=%cI ${librev}`
            dirty="clean"
            [ "$(evil_git_dirty)" == "*" ] && dirty="dirty"
            [ ! "$(evil_git_num_untracked_files)" == "0" ] && dirty="dirty"
            if [ ! "$dirty" == "clean" ] ; then
                echo "WARNING: library '$iter' is dirty"
                git status -s
            fi
            res="${res};${iter}=${dirty}/${libbranch}/${librev}/${libmod}"
        else
            [[ ${iter} == *"/vendor"* ]] || echo "WARNING: library '${iter}' is probably not under version control"
            res="${res};${iter}=unknown"
        fi
        [ "$var" = "$iter" ] && var='' || var="${var#*;}"
    done
    cd ${thisdir}
    projdeps="${res}"
}
# Returns "*" if the current git branch is dirty (has unstaged changes).
function evil_git_dirty {
    [[ $(git diff --shortstat 2> /dev/null | tail -n1) != "" ]] && echo "*"
}
# Returns the number of untracked files
function evil_git_num_untracked_files {
    # expr normalises wc's whitespace-padded count to a bare number
    expr `git status --porcelain 2>/dev/null| grep "^??" | wc -l`
}
# Turn a newline-separated list of modified files into the ';'-separated
# "<file>=<mtime>" string embedded into the binary as ModifiedSources.
process_modified_files() {
    modifiedfiles="$1"
    modsrc="$(echo "${modifiedfiles}" | tr '\n\r' ';')"
    var=${modsrc}
    res=""
    while [ "$var" ] ;do
        iter=${var%%\;*}
        if [ ! -z "$iter" ] ; then
            mod=`portable_get_modification_time "${iter}"`
            res="${res};${iter}=${mod}"
        fi
        [ "$var" = "$iter" ] && var='' || var="${var#*;}"
    done
    echo "$res"
}
# accepts file name as param
# Prints "<epoch>/<human-readable UTC date>" for the file's mtime, or
# "deleted" when the file no longer exists. Handles both GNU (Linux) and
# BSD/macOS flavours of stat/date.
portable_get_modification_time() {
    file=$1
    if [ -f "${file}" ]; then
        unamestr=`uname`
        if [ "$unamestr" == "Linux" ]; then
            res=`stat -c '%Y' "${file}"`
            x=`date -u -d @${res}`
        else
            # BSD stat/date take different flags
            res=`stat -f "%m" "${file}"`
            x=`date -u -r ${res}`
        fi
        echo "$res/$x"
    else
        echo "deleted"
    fi
}
# Bake build metadata (deps, revision, go version, build time, branch, tag,
# locally modified sources) into the binary via -ldflags -X, then build with
# gox when available, falling back to plain ``go build`` per target.
build_all() {
    PACK="github.com/rshmelev/go-project-bootstrap-kit"
    LDFLAGS="-X '${PACK}.Deps=$projdeps' -X '${PACK}.CodeRev=$rev' -X '${PACK}.GoVersion=$goversion' -X '${PACK}.BuildTime=$buildtime'"
    LDFLAGS="${LDFLAGS} -X '${PACK}.BuildOption=${BUILD_OPTION}'"
    LDFLAGS="${LDFLAGS} -X '${PACK}.Tag=${gittag}' -X '${PACK}.Branch=${branch}' -X '${PACK}.ModifiedSources=$modsrc'"
    gover=`go version`
    # Go <= 1.4 used "-X name value" instead of "-X name=value".
    # BUGFIX: the original tested [[ "$gover" =~ *go1.3* ]]; with =~ the right
    # side is a regex and a leading '*' is invalid ERE, so the branch could
    # never match. Glob matching with == is what was intended.
    if [[ "$gover" == *go1.3* ]] || [[ "$gover" == *go1.4* ]] || [[ "$gover" == *go1.2* ]] || [[ "$gover" == *go1.1* ]] ; then
        LDFLAGS=${LDFLAGS//=/ }
    fi
    if type "gox" >/dev/null 2>&1 ; then
        gox -ldflags="$LDFLAGS" -osarch="$BUILDOSARCH" -output="$BINDIR/$FILENAME"
    else
        gox_sh "$LDFLAGS"
    fi
    ls -la ${BINDIR} | grep "${APPNAME}"
}
# Fallback for when gox is not installed: cross-compile every "os/arch" pair
# in BUILDOSARCH with plain ``go build``, optionally in parallel.
gox_sh() {
    ldflags="$1"
    var=${BUILDOSARCH}
    while [ "$var" ] ;do
        iter=${var%% *}
        if [ "$BUILD_IN_PARALLEL" = "true" ]; then
            go_build ${iter} "${ldflags}" &
        else
            go_build ${iter} "${ldflags}"
        fi
        [ "$var" = "$iter" ] && var='' || var="${var#* }"
    done
    # wait for all started
    # NOTE(review): one ``wait`` per target; a single ``wait`` already blocks
    # for all background jobs -- confirm before simplifying.
    var=${BUILDOSARCH}
    while [ "$var" ] ;do
        iter=${var%% *}
        wait
        [ "$var" = "$iter" ] && var='' || var="${var#* }"
    done
}
# Build a single target. $1 = "os/arch", $2 = ldflags string.
go_build() {
    BUILDOS=${1%/*}
    BUILDARCH=${1#*/}
    LDFLAGS=$2
    # substitute the gox-style {{.OS}} / {{.Arch}} placeholders in the name
    EXE="${FILENAME//\{\{.OS\}\}/$BUILDOS}"
    EXE="${EXE//\{\{.Arch\}\}/$BUILDARCH}"
    [ "$BUILDOS" == "windows" ] && EXE=${EXE}.exe
    echo "building for $BUILDOS $BUILDARCH: ${EXE}"
    env GOOS=${BUILDOS} GOARCH=${BUILDARCH} \
        go build -i -ldflags "$LDFLAGS" -o "${BINDIR}/${EXE}"
    echo "finished building ${EXE}"
}
# Derive the Go import path by stripping everything up to and including the
# first ".../src/" from the current working directory.
get_current_package() {
    echo ${PWD#*src/}
}
# Generate main-autogenerated.go: a thin entry point injecting the configured
# author/app metadata and delegating to code.Main via the bootstrap kit.
# The heredoc content below is emitted verbatim into the generated file.
create_main_go() {
    package=`get_current_package`
    cat <<EOT > main-autogenerated.go
package main
// autogenerated by build.sh
import (
	"${package}/code"
	kit "github.com/rshmelev/go-any-project-bootstrap/launcher"
)
var v = code.BuildVars
func main() {
	v["Author"] = "${AUTHOR_LONG}"
	v["AuthorShort"] = "${AUTHOR_SHORT}"
	v["AppName"] = "${APPNAME}"
	v["Site"] = "${SITEURL}"
	kit.Bootstrap(code.Main, v)
}
EOT
}
# Install a SIGINT handler that kills all background build jobs, so Ctrl-C
# cancels a parallel build cleanly.
be_able_to_cancel() {
    function handle_sigint() {
        for proc in `jobs -p` ; do
            kill ${proc}
        done
    }
    trap handle_sigint SIGINT
}
#----------------------------------------------------
# Entry point. When BUILD_OPTION=just-gather-project-details the script is
# only loaded for its helper functions and nothing gets built.
if [ ! "${BUILD_OPTION}" == "just-gather-project-details" ] ; then
    be_able_to_cancel
    work "$@"
fi
|
<reponame>zkan/odds-news<filename>odds_news/news/views.py
import json
from django.http import HttpResponse
from django.shortcuts import render
from django.views import View
from rest_framework import viewsets
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import News
from .serializers import NewsSerializer
class NewsViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows news items to be viewed or edited.
    """
    # Full CRUD is provided by ModelViewSet; only queryset/serializer needed.
    queryset = News.objects.all()
    serializer_class = NewsSerializer
class NewsAPISimpleView(APIView):
    """DRF view returning every news item as a plain list of dicts."""

    def get(self, request):
        payload = [
            {
                'title': item.title,
                'content': item.content,
                'category': item.category.name,
            }
            for item in News.objects.all()
        ]
        return Response(payload)
class NewsAPIView(View):
    """Plain Django view serialising all news items to JSON by hand."""

    def get(self, request):
        payload = [
            {
                'title': item.title,
                'content': item.content,
                'category': item.category.name,
            }
            for item in News.objects.all()
        ]
        return HttpResponse(
            json.dumps(payload),
            content_type='application/json'
        )
class NewsView(View):
    """Class-based view rendering all news items with the news.html template."""

    def get(self, request):
        news = News.objects.all()
        return render(
            request,
            'news.html',
            {
                'news': news
            }
        )
def news_view(request):
    """Function-based view rendering all news as a hand-built HTML fragment.

    Only GET is handled; other methods fall through and return ``None``,
    matching the original behaviour.
    """
    if request.method != 'GET':
        return None
    # NOTE(review): title/content are interpolated without HTML-escaping --
    # an XSS risk if news records can contain user-supplied markup.
    items = ''.join(
        f'<div><h1>{item.title}</h1><p>{item.content}</p></div>'
        for item in News.objects.all()
    )
    return HttpResponse(f'<div>{items}</div>')
package io.smallrye.reactive.converters.reactor;
import io.smallrye.reactive.converters.ReactiveTypeConverter;
import io.smallrye.reactive.converters.Registry;
import io.smallrye.reactive.converters.tck.FromRSPublisherTCK;
import org.junit.Before;
import reactor.core.publisher.Flux;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
public class FluxFromRSPublisherTest extends FromRSPublisherTCK<Flux> {

    // Converter under test, resolved from the registry before each test.
    private ReactiveTypeConverter<Flux> converter;

    @Before
    public void lookup() {
        converter = Registry.lookup(Flux.class)
                .orElseThrow(() -> new AssertionError("Flux converter should be found"));
    }

    @Override
    protected ReactiveTypeConverter<Flux> converter() {
        return converter;
    }

    // Blocks until the first element is emitted and returns it.
    @Override
    protected String getOne(Flux instance) {
        return (String) instance.blockFirst();
    }

    @SuppressWarnings("unchecked")
    @Override
    protected Exception getFailure(Flux instance) {
        // Drain the flux; a failed stream surfaces its error as an exception
        // thrown by the blocking iterable, which is captured and returned
        // (null when the stream completes normally).
        AtomicReference<Exception> reference = new AtomicReference<>();
        try {
            instance.toIterable().forEach(x -> {
            });
        } catch (Exception e) {
            reference.set(e);
        }
        return reference.get();
    }

    @SuppressWarnings("unchecked")
    @Override
    protected List<String> getAll(Flux instance) {
        return (List<String>) instance.collectList().block();
    }

    @Override
    protected void consume(Flux instance) {
        // Block until the stream terminates, discarding all but the last item.
        instance.last().block();
    }
}
|
#!/bin/sh
# Release pipeline for Otoroshi: builds docs/UI/server/CLIs, tags the release
# in git, publishes artifacts and docker images.
# NOTE(review): there is no ``set -e`` -- a failed build step does not abort
# the release; confirm this is intentional before relying on it.
LOCATION=`pwd`
VERSION="$1"        # version being released
NEXT_VERSION="$2"   # declared but unused below; kept for the manual bump step
echo "Releasing Otoroshi version $VERSION ..."
echo " "
#ack '(1\.1\.1|1\.1\.2|1\.2\.0-dev|1\.2\.0)' --ignore-dir=node_modules --ignore-dir=docs --ignore-dir=target --ignore-dir=bundle --ignore-file=is:yarn.lock --ignore-file=is:Cargo.lock --ignore-dir=.idea --ignore-dir=otoroshi/.idea --ignore-file=is:swagger-ui-bundle.js --ignore-dir=otoroshi/project --ignore-dir=manual/project --ignore-dir=docker/dev
mkdir -p "release-$VERSION"
# format code
sh ./scripts/fmt.sh
# clean
sh ./scripts/build.sh clean
# build doc with schemas
sh ./scripts/doc.sh all
# build ui
sh ./scripts/build.sh ui
# build server
sh ./scripts/build.sh server
cp -v "./otoroshi/target/scala-2.12/otoroshi.jar" "$LOCATION/release-$VERSION"
cp -v "./otoroshi/target/universal/otoroshi-$VERSION.zip" "$LOCATION/release-$VERSION"
# build cli for mac
sh ./scripts/build.sh cli
cp -v "./clients/cli/target/release/otoroshicli" "$LOCATION/release-$VERSION"
mv "$LOCATION/release-$VERSION/otoroshicli" "$LOCATION/release-$VERSION/mac-otoroshicli"
# build cli for linux
sh ./scripts/cli-linux-build.sh
cp -v "./clients/cli/target/release/otoroshicli" "$LOCATION/release-$VERSION"
mv "$LOCATION/release-$VERSION/otoroshicli" "$LOCATION/release-$VERSION/linux-otoroshicli"
# TODO : build cli for windows
# tag github
git commit -am "Prepare the release of Otoroshi version $VERSION"
git push origin master
git tag -am "Release Otoroshi version $VERSION" "v$VERSION"
git push --tags
cd $LOCATION/otoroshi
sbt publish
cd $LOCATION
# Create a draft GitHub release via the REST API and print its numeric id.
create_release () {
curl -X POST -H 'Accept: application/json' -H 'Content-Type: application/json' -H "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/MAIF/otoroshi/releases" -d "
{
  \"tag_name\": \"v$VERSION\",
  \"name\": \"$VERSION\",
  \"body\": \"Otoroshi version $VERSION\",
  \"draft\": true,
  \"prerelease\": false
}" | jqn 'property("id")' --color=false
}
# Create github release
#ID=`create_release`
#echo "Release ID is $ID"
# push otoroshi.jar on github
# curl -T "$LOCATION/release-$VERSION/otoroshi.jar" -H "Content-Type: application/octet-stream" -H "Authorization: token $GITHUB_TOKEN" "https://uploads.github.com/repos/MAIF/otoroshi/releases/$ID/assets\?name\=otoroshi.jar"
# push otoroshi-dist on github
#curl -T "$LOCATION/release-$VERSION/otoroshi-$VERSION.zip" -H "Content-Type: application/zip" -H "Authorization: token $GITHUB_TOKEN" "https://uploads.github.com/repos/MAIF/otoroshi/releases/$ID/assets\?name\=otoroshi-dist.zip"
# push mac-otoroshicli on github
#curl -T "$LOCATION/release-$VERSION/mac-otoroshicli" -H "Content-Type: application/octet-stream" -H "Authorization: token $GITHUB_TOKEN" "https://uploads.github.com/repos/MAIF/otoroshi/releases/$ID/assets\?name\=mac-otoroshicli"
# push linux-otoroshicli on github
#curl -T "$LOCATION/release-$VERSION/linux-otoroshicli" -H "Content-Type: application/octet-stream" -H "Authorization: token $GITHUB_TOKEN" "https://uploads.github.com/repos/MAIF/otoroshi/releases/$ID/assets\?name\=linux-otoroshicli"
# push win-otoroshicli.exe on github
# curl -T "$LOCATION/release-$VERSION/win-otoroshicli.exe" -H "Content-Type: application/octet-stream" -H "Authorization: token $GITHUB_TOKEN" "https://uploads.github.com/repos/MAIF/otoroshi/releases/$ID/assets\?name\=otoroshicli.exe",
# Build and push the docker images using the freshly built distribution.
cd $LOCATION/docker/build
cp ../../otoroshi/target/universal/otoroshi-$VERSION.zip ./otoroshi-dist.zip
sh ./build.sh push-all $VERSION
cd $LOCATION
# update version number and commit / push
echo "Please change version in the following files and commit / push"
echo " "
echo " * clients/cli/Cargo.lock "
echo " * clients/cli/Cargo.toml "
echo " * clients/cli/src/main.rs "
echo " * docker/build/Dockerfile "
echo " * docker/otoroshicli/Dockerfile "
echo " * otoroshi/app/controllers/SwaggerController.scala "
echo " * otoroshi/app/env/Env.scala "
echo " * otoroshi/build.sbt "
echo " * otoroshi/javascript/package.json "
echo " "
# remove release folder
# rm -rf "$LOCATION/release-$VERSION"
const { logTransaction } = require("./helpers/logger.js");
const Proxy = artifacts.require("TransparentUpgradeableProxy");
const ContractsRegistry = artifacts.require("ContractsRegistry");
const ClaimVoting = artifacts.require("ClaimVoting");
const ReputationSystem = artifacts.require("ReputationSystem");
const ReinsurancePool = artifacts.require("ReinsurancePool");
const VBMI = artifacts.require("VBMI");
module.exports = async (deployer) => {
  // The ContractsRegistry lives behind the already-deployed transparent proxy
  // (set up by an earlier migration step).
  const contractsRegistry = await ContractsRegistry.at((await Proxy.deployed()).address);

  // Deploy each implementation sequentially; migration ordering fixes nonces.
  await deployer.deploy(ClaimVoting);
  const claimVoting = await ClaimVoting.deployed();

  await deployer.deploy(ReputationSystem);
  const reputationSystem = await ReputationSystem.deployed();

  await deployer.deploy(ReinsurancePool);
  const reinsurancePool = await ReinsurancePool.deployed();

  await deployer.deploy(VBMI);
  const vBMIToken = await VBMI.deployed();

  // Register every implementation behind its own upgradeable proxy, keyed by
  // the registry's canonical contract names.
  logTransaction(
    await contractsRegistry.addProxyContract(await contractsRegistry.CLAIM_VOTING_NAME(), claimVoting.address),
    "AddProxy ClaimVoting"
  );
  logTransaction(
    await contractsRegistry.addProxyContract(
      await contractsRegistry.REPUTATION_SYSTEM_NAME(),
      reputationSystem.address
    ),
    "AddProxy ReputationSystem"
  );
  logTransaction(
    await contractsRegistry.addProxyContract(await contractsRegistry.REINSURANCE_POOL_NAME(), reinsurancePool.address),
    "AddProxy ReinsurancePool"
  );
  logTransaction(
    await contractsRegistry.addProxyContract(await contractsRegistry.VBMI_NAME(), vBMIToken.address),
    "AddProxy VBMI"
  );
};
|
<reponame>kanomdook/ecommerce<gh_stars>1-10
import { Component } from '@angular/core';
import { ListingPage } from '../listing/listing';
import { ProfilePage } from '../profile/profile';
import { NotificationsPage } from '../notifications/notifications';
import { SettingsPage } from "../settings/settings";
import { List1Page } from "../list-1/list-1";
import { CartPage } from "../cart/cart";
@Component({
  selector: 'tabs-navigation',
  templateUrl: 'tabs-navigation.html'
})
export class TabsNavigationPage {
  // Root page component for each tab.
  tab1Root: any;
  tab2Root: any;
  tab3Root: any;
  tab4Root: any;
  tab5Root: any;
  cartData: any;

  constructor() {
    this.tab1Root = ListingPage;
    this.tab2Root = List1Page;
    this.tab3Root = NotificationsPage;
    this.tab4Root = SettingsPage;
    this.tab5Root = CartPage;
  }

  /**
   * Total quantity of products in the locally stored cart, rendered as the
   * cart tab's badge. Returns '' (no badge) when the cart is empty, missing
   * or unreadable.
   */
  countBadgeCart() {
    let cart = null;
    try {
      // getItem() returns null when the key is absent; guard explicitly
      // instead of relying on JSON.parse(null) coercion, and never let
      // corrupt storage break tab rendering.
      const raw = window.localStorage.getItem('cart');
      cart = raw ? JSON.parse(raw) : null;
    } catch (e) {
      cart = null;
    }
    let total = 0;
    if (cart && cart.products) {
      for (const product of cart.products) {
        // A missing/non-numeric qty previously propagated NaN into the badge.
        total += Number(product.qty) || 0;
      }
    }
    return total > 0 ? total.toString() : '';
  }
}
|
<filename>presentation/spectacle-code-slide/getComputedCodeStyle.js<gh_stars>0
// Lazily-created cache: a live CSSStyleDeclaration for a hidden <code> probe
// element, used to read the styles Prism applies to code blocks.
let cachedCodeStyle;

export function getComputedCodeStyle() {
  if (!cachedCodeStyle) {
    const probe = document.createElement("code");
    probe.style.display = "none";
    probe.className = "language-xxx";
    document.body.appendChild(probe);
    cachedCodeStyle = window.getComputedStyle(probe);
  }
  return cachedCodeStyle;
}
|
#!/bin/bash
# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Ensure that we have the desired version of the ginkgo test runner.
# Runs the upstream Kubernetes external-storage e2e suite against the
# azuredisk CSI driver installed under an alternative driver name.
set -xe

PROJECT_ROOT=$(git rev-parse --show-toplevel)
# Alternative driver name, to verify the chart works when the driver is renamed.
DRIVER="test"

install_ginkgo () {
    apt update -y
    apt install -y golang-ginkgo-dev
}

setup_e2e_binaries() {
    mkdir /tmp/csi-azuredisk
    # download k8s external e2e binary for kubernetes
    curl -sL https://storage.googleapis.com/kubernetes-release/release/v1.22.0/kubernetes-test-linux-amd64.tar.gz --output e2e-tests.tar.gz
    tar -xvf e2e-tests.tar.gz && rm e2e-tests.tar.gz
    # enable fsGroupPolicy (only available from k8s 1.20)
    export EXTRA_HELM_OPTIONS="--set feature.enableFSGroupPolicy=true --set image.csiProvisioner.tag=v3.0.0 --set snapshot.apiVersion=ga"
    # test on alternative driver name
    EXTRA_HELM_OPTIONS=$EXTRA_HELM_OPTIONS" --set driver.name=$DRIVER.csi.azure.com --set controller.name=csi-$DRIVER-controller --set linux.dsName=csi-$DRIVER-node --set windows.dsName=csi-$DRIVER-node-win"
    # install the azuredisk-csi-driver driver
    make e2e-bootstrap
    sed -i "s/csi-azuredisk-controller/csi-$DRIVER-controller/g" deploy/example/metrics/csi-azuredisk-controller-svc.yaml
    make create-metrics-svc
}

print_logs() {
    sed -i "s/disk.csi.azure.com/$DRIVER.csi.azure.com/g" deploy/example/storageclass-azuredisk-csi.yaml
    bash ./hack/verify-examples.sh linux azurepubliccloud ephemeral $DRIVER
    echo "print out driver logs ..."
    bash ./test/utils/azuredisk_log.sh $DRIVER
}

install_ginkgo
setup_e2e_binaries
# always dump driver logs, even when the test run fails
trap print_logs EXIT

ginkgo -p --progress --v -focus="External.Storage.*$DRIVER.csi.azure.com" \
    -skip='\[Disruptive\]|\[Slow\]|should resize volume when PVC is edited while pod is using it' kubernetes/test/bin/e2e.test -- \
    -storage.testdriver=$PROJECT_ROOT/test/external-e2e/manifest/testdriver.yaml \
    --kubeconfig=$KUBECONFIG
|
/*
TITLE Intrusive Doubly Linked List in C++ vs C Chapter27Exercise5.cpp
COMMENT
Objective: Compare the results of Exercises 3 and 4.
Input: -
Output: -
Author: <NAME>
Date: 13.06.2017
*/
#include <iostream>
/*
C++ Implementation:
- all functions encapsulated: less parameters, context provided (member functions).
- commonly / frequently used variables defined as member functions: front(); back().
- memory management easier and less error-prone through RAII - constructor; destructor.
- inheritance allows new Links containing data to be defined and integrated with ease.
C Implementation:
- all functions more general; applicable to more than one object (List).
- explicit and low level memory management.
- explicit casts neccessary; type convertion indicated.
---------------------------------------------
| DList | Time[sec] | Time [clicks] |
|-----------|--------------|------------------|
| C | 0.094 | 94 |
| C++ | 0.110 | 110 |
---------------------------------------------
C++ 0.100 100
Ex. 6
*/
int main()
{
	// Intentionally empty: this exercise is a written comparison of the C and
	// C++ list implementations (see the header comment above).
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.hadoop.rdf.mapreduce.count.namespaces;
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.jena.graph.Node ;
import org.apache.jena.hadoop.rdf.mapreduce.TextCountReducer;
import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
import org.apache.jena.hadoop.rdf.types.NodeWritable;
/**
* Abstract mapper class for mappers which split node tuple values and extract
* the namespace URIs they use and outputs pairs of namespaces keys with a long
* value of 1. Can be used in conjunction with a {@link TextCountReducer} to
* count the usages of each unique namespace.
*
*
*
* @param <TKey>
* @param <TValue>
* @param <T>
*/
public abstract class AbstractNodeTupleNamespaceCountMapper<TKey, TValue, T extends AbstractNodeTupleWritable<TValue>> extends
        Mapper<TKey, T, Text, LongWritable> {

    private LongWritable initialCount = new LongWritable(1);
    // Reused output key: Hadoop serialises the key inside context.write(), so
    // a single mutable Text avoids allocating one Text per emitted namespace
    // (the standard Writable-reuse mapper pattern).
    private final Text namespaceKey = new Text();
    protected static final String NO_NAMESPACE = null;

    @Override
    protected void map(TKey key, T value, Context context) throws IOException, InterruptedException {
        NodeWritable[] ns = this.getNodes(value);
        for (NodeWritable n : ns) {
            String namespace = this.extractNamespace(n);
            if (namespace != null) {
                this.namespaceKey.set(namespace);
                context.write(this.namespaceKey, this.initialCount);
            }
        }
    }

    /**
     * Extracts the namespace from a node
     * <p>
     * Finds the URI for the node (if any) and then invokes
     * {@link #extractNamespace(String)} to extract the actual namespace URI.
     * </p>
     * <p>
     * Derived classes may override this to change the logic of how namespaces
     * are extracted.
     * </p>
     *
     * @param nw
     *            Node
     * @return Namespace, or {@link #NO_NAMESPACE} for blank nodes, variables
     *         and plain literals without a datatype
     */
    protected String extractNamespace(NodeWritable nw) {
        Node n = nw.get();
        if (n.isBlank() || n.isVariable())
            return NO_NAMESPACE;
        if (n.isLiteral()) {
            // Literals contribute the namespace of their datatype URI, if any
            String dtUri = n.getLiteralDatatypeURI();
            if (dtUri == null)
                return NO_NAMESPACE;
            return extractNamespace(dtUri);
        }
        return extractNamespace(n.getURI());
    }

    /**
     * Extracts the namespace from a URI
     * <p>
     * First tries to extract a hash based namespace. If that is not possible it
     * tries to extract a slash based namespace, if this is not possible then
     * the full URI is returned.
     * </p>
     * <p>
     * Derived classes may override this to change the logic of how namespaces
     * are extracted.
     * </p>
     *
     * @param uri
     *            URI
     * @return Namespace
     */
    protected String extractNamespace(String uri) {
        if (uri.contains("#")) {
            // Extract hash namespace
            return uri.substring(0, uri.lastIndexOf('#') + 1);
        } else if (uri.contains("/")) {
            // Ensure that this is not immediately after the scheme component or
            // at end of URI
            int index = uri.lastIndexOf('/');
            int schemeSepIndex = uri.indexOf(':');
            if (index - schemeSepIndex <= 2 || index == uri.length() - 1) {
                // Use full URI
                return uri;
            }
            // Otherwise safe to extract slash namespace
            return uri.substring(0, uri.lastIndexOf('/') + 1);
        } else {
            // Use full URI
            return uri;
        }
    }

    /**
     * Gets the nodes of the tuple whose namespaces are to be counted
     *
     * @param tuple
     *            Tuple
     * @return Nodes
     */
    protected abstract NodeWritable[] getNodes(T tuple);
}
|
// Barrel file: re-export every component namespace from a single entry point.
import * as DependencyTable from "./DependencyTable/DependencyTable";
export { DependencyTable };
import * as Page from "./Page/Page";
export { Page };
import * as RankingPage from "./RankingPage/RankingPage";
export { RankingPage };
import * as TopPage from "./TopPage/TopPage";
export { TopPage };
import * as SideNavigation from "./SideNavigation/SideNavigation";
export { SideNavigation };
import * as LinkList from "./LinkList/LinkList";
export { LinkList };
import * as DependencyTableList from "./DependencyTableList/DependencyTableList";
export { DependencyTableList };
import * as PackageDetail from "./PackageDetail/PackageDetail";
export { PackageDetail };
|
#!/bin/bash
# Set Spacely P13-3 to 'off' state

# set the direction register so P13 pins 1-8 are all outputs
i2cset -y -f 0x00 0x77 0x07 0x00

# get the existing output register contents
OUTREG=$(i2cget -y -f 0x00 0x77 0x03)
printf 'existing output register is : 0x%X \n' ${OUTREG}

# clear bit 4 (P13-3) while leaving the other output bits untouched
HIREG=$(( ${OUTREG} & 2#11101111 ))

# show the register value about to be written
printf 'new output register will be : 0x%X \n' ${HIREG}
i2cset -y -f 0x00 0x77 0x03 ${HIREG}

# read the output register back to confirm the write took effect
NEWREG=$(i2cget -y -f 0x00 0x77 0x03)
printf 'new output register is : 0x%X \n' ${NEWREG}
|
package main
import (
"errors"
"fmt"
"log"
"sort"
"github.com/dmies/adventOfGo/filehandler"
)
// FindSum checks if the given numberToCheck is a sum of two numbers in previousNumbers
func FindSum(numberToCheck int, previousNumbers []int) bool {
	for i := 0; i < len(previousNumbers); i++ {
		for j := i + 1; j < len(previousNumbers); j++ {
			if previousNumbers[i]+previousNumbers[j] == numberToCheck {
				return true
			}
		}
	}
	return false
}
// FindFirstWrongNumber finds the first number in input that is no sum of two numbers of the previous preambleSize numbers
func FindFirstWrongNumber(input []int, preambleSize int) int {
	for i := preambleSize; i < len(input); i++ {
		// sliding window of the preambleSize values preceding input[i]
		if !FindSum(input[i], input[i-preambleSize:i]) {
			return input[i]
		}
	}
	return -1
}
// FindContiguousSetOfNumbersThatSumUpTo looks for a contiguous set of numbers in input that sum up to searched and returns them
func FindContiguousSetOfNumbersThatSumUpTo(searched int, input []int) ([]int, error) {
	for start := range input {
		sum := 0
		for end := start; end < len(input); end++ {
			sum += input[end]
			if sum == searched {
				// return a fresh copy so callers cannot alias input
				return append([]int(nil), input[start:end+1]...), nil
			}
			if sum > searched {
				// overshot: restart the run from the next start index
				break
			}
		}
	}
	return nil, errors.New("FindContiguousSetOfNumbersThatSumUpTo() couldn't find suitable set")
}
// GetMinAndMaxFromList returns the min and max in the given list
func GetMinAndMaxFromList(input []int) (int, int) {
	// sort a private copy so the caller's slice is left untouched
	ordered := append([]int(nil), input...)
	sort.Ints(ordered)
	return ordered[0], ordered[len(ordered)-1]
}
// FindEncryptionWeakness finds the set of contiguous numbers that sum up to the illegalNumber, gets the min and max off this list and returns the sum of them
func FindEncryptionWeakness(illegalNumber int, input []int) (int, error) {
	window, err := FindContiguousSetOfNumbersThatSumUpTo(illegalNumber, input)
	if err != nil {
		return -1, err
	}
	lo, hi := GetMinAndMaxFromList(window)
	return lo + hi, nil
}
func main() {
	// Read the puzzle input (one integer per line).
	numbers, err := filehandler.ImportNumberList("./input.txt")
	if err != nil {
		log.Fatal(err)
	}
	// Part 1: first value violating the preamble rule (window of 25).
	solution1 := FindFirstWrongNumber(numbers, 25)
	fmt.Printf("day 09, part1 %v\n", solution1)
	// Part 2: encryption weakness derived from part 1's answer.
	solution2, err := FindEncryptionWeakness(solution1, numbers)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("day 09, part2 %v\n", solution2)
}
|
<reponame>GbookingLTD/corev2-ts-sdk<filename>lib/client.ts
import * as GBookingCoreV2 from 'corev2-schemata/langs/typescript/GBookingCoreV2';
import {MedMeAPIBasic} from "./basic";
/**
 * Client-management API: create, find-or-create and update client records.
 */
export class MedMeAPIClient extends MedMeAPIBasic {
    /**
     * Registers a new client.
     * @param params request params for the "client.add_client" RPC
     */
    async addClient(params: GBookingCoreV2.ClientAddClientRequestParams):
        Promise<GBookingCoreV2.ClientAddClientResponseResult> {
        const res = await this.apiRequest_("client.add_client", params);
        return res.result;
    }
    /**
     * Looks up an existing client, creating one when no match is found.
     * @param params request params for the "client.find_or_create_client" RPC
     */
    async findOrCreateClient(params: GBookingCoreV2.ClientFindOrCreateClientRequestParams):
        Promise<GBookingCoreV2.ClientFindOfCreateClientResponseResult> {
        const res = await this.apiRequest_("client.find_or_create_client", params);
        return res.result;
    }
    /**
     * Updates an existing client's data.
     * @param params request params for the "client.update_client" RPC
     * @returns whether the server reported success
     */
    async updateClient(params: GBookingCoreV2.ClientUpdateClientRequestParams):
        Promise<boolean> {
        const res = await this.apiRequest_("client.update_client", params);
        return res.result.success;
    }
}
|
<filename>Snippets/Source/UITraitCollection+Extras.h<gh_stars>10-100
//
// UITraitCollection+Extras.h
// Micro.blog
//
// Created by <NAME> on 8/8/19.
// Copyright © 2019 Riverfold Software. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
// Convenience additions on UITraitCollection.
@interface UITraitCollection (Extras)
// YES when the current interface style is dark mode.
+ (BOOL) rf_isDarkMode;
@end
NS_ASSUME_NONNULL_END
|
#!/bin/sh
# Automatically attempt to fix linting issues
# Abort on the first failing command so a broken dapp lint does not silently
# skip the meter client.
set -e

# Fail fast if a required executable is not on PATH.
check_dep () {
  if ! [ -x "$(command -v "$1")" ]; then
    echo "You don't have $1"
    exit 1
  fi
}

check_dep eslint

PROJ_DIR=$(pwd)

# Quote paths so the script survives a checkout directory containing spaces.
cd "$PROJ_DIR/dapp" && eslint --ext .jsx src --fix
cd "$PROJ_DIR/client/meter" && eslint . --fix
|
<filename>frontend/src/pages/Registrations/index.js
import React, { useState, useEffect } from 'react';
import { confirmAlert } from 'react-confirm-alert';
import { toast } from 'react-toastify';
import { MdCheckCircle } from 'react-icons/md';
import 'react-confirm-alert/src/react-confirm-alert.css';
import api from '~/services/api';
import history from '~/services/history';
import { formatDate } from '~/util/format';
import List from '~/components/List';
export default function Students() {
const [registrations, setRegistrations] = useState([]);
const columns = [
{
key: 'studentName',
title: 'ALUNO',
align: 'left',
width: 20
},
{
key: 'planTitle',
title: 'PLANO',
align: 'center',
width: 15
},
{
key: 'startDateFormatted',
title: 'INÍCIO',
align: 'center',
width: 20
},
{
key: 'endDateFormatted',
title: 'TÉRMINO',
align: 'center',
width: 20
},
{
key: 'icon',
title: 'ATIVA',
align: 'center',
width: 10
},
{
key: 'act',
title: '',
align: 'right',
width: 15,
actions: ['edit', 'remove']
}
];
const actions = [
{
title: 'Cadastrar',
icon: 'MdAdd',
to: '/registrations/add'
}
];
useEffect(() => {
async function loadRegistrations() {
const response = await api.get('/registrations');
if (response && response.data && response.data.length > 0) {
setRegistrations(
response.data.map(registration => ({
...registration,
studentName: registration.student
? registration.student.name
: '--',
planTitle: registration.plan ? registration.plan.title : '--',
startDateFormatted: formatDate(registration.start_date),
endDateFormatted: formatDate(registration.end_date),
icon: MdCheckCircle,
iconColor: registration.active ? '#42cb59' : '#ddd'
}))
);
}
}
loadRegistrations();
}, []);
async function remove(id) {
try {
await api.delete(`/registrations/${id}`);
setRegistrations(registrations.filter(x => x.id !== id));
toast.success('Matrícula removida com sucesso!');
} catch (err) {
console.tron.error(err);
toast.error('Falha ao remover matrícula!');
}
}
function handleEdit(id) {
history.push(`/registration/${id}`);
}
function handleRemove(id) {
const registration = registrations.find(x => x.id === id);
if (registration) {
confirmAlert({
title: 'Confirme para remover',
message: `Tem certeza que deseja remover a matrícula do aluno ${registration.student.name}?`,
buttons: [
{
label: 'Sim',
onClick: () => remove(id)
},
{
label: 'Não'
}
]
});
}
}
return (
<List
title="Gerenciando matrículas"
emptyError="Nenhuma matrícula encontrada!"
columns={columns}
actions={actions}
data={registrations}
onEdit={handleEdit}
onRemove={handleRemove}
keyColumn="id"
larger
/>
);
}
|
<filename>web_second_project/src/java/servlets/SignInServlet.java<gh_stars>0
package servlets;
import db.DBManager;
import db.beans.User;
import java.io.IOException;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.tanesha.recaptcha.ReCaptchaImpl;
import net.tanesha.recaptcha.ReCaptchaResponse;
import static utilities.Constants.*;
/**
 * Handles new-user registration (despite the name, this servlet registers a
 * user rather than signing one in): validates the form fields and the
 * reCAPTCHA answer, stores the new user through {@link DBManager}, then
 * forwards to the login page on success or back to the registration page on
 * failure.
 */
public class SignInServlet extends HttpServlet {

    /** MySQL error code for a duplicate-key (unique constraint) violation. */
    private static final int MYSQL_DUPLICATE_ENTRY = 1062;

    private DBManager manager;

    @Override
    public void init() throws ServletException {
        // The shared DBManager is created at application startup and published
        // in the servlet context under DB_ATTRIBUTE_NAME.
        this.manager = (DBManager) super.getServletContext().getAttribute(DB_ATTRIBUTE_NAME);
    }

    /**
     * Processes requests for both HTTP
     * <code>GET</code> and
     * <code>POST</code> methods.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // Parameter validation. getParameter() returns null when a parameter
        // is absent entirely, so guard against null as well as the empty
        // string to avoid a NullPointerException on malformed requests.
        // Note: only the last failing check's message survives, since each
        // setAttribute overwrites the previous one (original behavior kept).
        String username = request.getParameter(USERNAME_PARAM_NAME);
        if (username == null || username.isEmpty()) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Username " + EMPTY_FIELD);
        }
        String email = request.getParameter(EMAIL_PARAM_NAME);
        if (email == null || email.isEmpty()) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Email " + EMPTY_FIELD);
        }
        String address = request.getParameter(ADDRESS_PARAM_NAME);
        if (address == null || address.isEmpty()) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Address " + EMPTY_FIELD);
        }
        String password = request.getParameter(PASSWORD_PARAM_NAME);
        if (password == null || password.isEmpty()) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Password " + EMPTY_FIELD);
        }
        // reCAPTCHA verification.
        // NOTE(review): the private key is hard-coded; it should live in
        // configuration so it can be rotated without a redeploy.
        String remoteAddr = request.getRemoteAddr();
        ReCaptchaImpl reCaptcha = new ReCaptchaImpl();
        reCaptcha.setPrivateKey("6LeMUdsSAAAAAPXozztyYzuM4axaQyHzrptWv29h");
        String challenge = request.getParameter("recaptcha_challenge_field");
        String uresponse = request.getParameter("recaptcha_response_field");
        ReCaptchaResponse reCaptchaResponse = reCaptcha.checkAnswer(remoteAddr, challenge, uresponse);
        if (!reCaptchaResponse.isValid()) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "CAPTCHA " + INVALID_VALUE);
        }
        // If all parameters are valid, create the user, add it to the
        // database, and forward to the login page.
        boolean toLoginPage = false;
        if (request.getAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME) == null) {
            User user = new User();
            // Reuse the already-validated locals instead of re-reading the
            // request parameters.
            user.setUsername(username);
            user.setEmail(email);
            user.setAddress(address);
            user.setRole(User.Role.USER);
            try {
                manager.registerNewUser(user, password);
                request.setAttribute(SUCCESS_MESSAGE_ATTRIBUTE_NAME, SUCCESSFULLY_SIGN_IN);
                toLoginPage = true;
            } catch (SQLException ex) {
                Logger.getLogger(SignInServlet.class.getName()).log(Level.SEVERE, null, ex);
                if (ex.getErrorCode() == MYSQL_DUPLICATE_ENTRY) {
                    request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, USERNAME_ALREADY_EXISTS);
                }
            }
        }
        RequestDispatcher reqDis = request.getRequestDispatcher(toLoginPage ? JSP_LOGIN : JSP_REGISTER);
        reqDis.forward(request, response);
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP
     * <code>GET</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP
     * <code>POST</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
|
// Seed script for the Highscore collection: connects to MongoDB, wipes any
// existing documents, inserts a fixed set of sample scores, then exits.
// Run with: node <this file>
const mongoose = require("mongoose");
const db = require("./models");

// Use the deployed database when MONGODB_URI is set (e.g. on Heroku),
// otherwise fall back to a local instance.
mongoose.connect(
  process.env.MONGODB_URI || 'mongodb://localhost/memorygame',
  {
    useNewUrlParser: true,
    useUnifiedTopology: true,
    useCreateIndex: true,
    useFindAndModify: false
  }
);

const highscoreSeed = [
  {
    name: "Morgan",
    highscore: 5
  },
  {
    name: "Matt",
    highscore: 10
  },
  {
    name: "Angela",
    highscore: 6
  },
  {
    name: "Susan",
    highscore: 8
  },
  {
    name: "Fred",
    highscore: 3
  },
];

// Replace the collection contents, report the write count, and exit with a
// conventional status code (0 = success, 1 = failure).
db.Highscore.deleteMany({})
  .then(() => db.Highscore.collection.insertMany(highscoreSeed))
  .then(data => {
    // NOTE(review): `result.n` is the legacy driver's write count; newer
    // MongoDB drivers expose `insertedCount` instead — confirm driver version.
    console.log(data.result.n + " records inserted!");
    process.exit(0);
  })
  .catch(err => {
    console.error(err);
    process.exit(1);
  });
import { CodeBlock } from 'mithril-materialized';
import { EditableTable, IEditableTable } from 'mithril-table';
import m from 'mithril';
/** Row shape used by the editable-table demo below. */
interface IPerson {
  id: number;
  first: string;
  last: string;
}
/**
 * Demo page: an EditableTable bound to a small in-memory dataset, followed by
 * a CodeBlock showing the equivalent source.
 */
export const InputPage = () => {
  // Closure state: the table replaces this array via its onchange callback.
  const state = {
    data: [
      {
        id: 1,
        first: 'John',
        last: 'Doe',
      },
      {
        id: 2,
        first: 'Jane',
        last: 'Doe',
      },
      {
        id: 3,
        first: 'Bob',
        last: 'Bear',
      },
    ] as IPerson[],
  };
  return {
    view: () =>
      m('.col.s12', [
        m('h2.header', 'EditableTable from mithril-table'),
        m(EditableTable, {
          headers: [
            { column: 'id', title: 'ID' },
            { column: 'first', title: 'First name' },
            { column: 'last', title: 'Last name' },
          ],
          data: state.data,
          addRows: true,
          deleteRows: true,
          moveRows: true,
          // disabled: true,
          // sortRows: false,
          onchange: data => {
            state.data = data;
            console.table(data);
          },
        } as IEditableTable<IPerson>),
        // The sample code below mirrors the live table configuration above.
        // (The last-name title previously contained a '<NAME>' scraping
        // artifact; restored to 'Last name' to match the real headers.)
        m(CodeBlock, {
          code: `import { EditableTable, IEditableTable } from 'mithril-table';
import m from 'mithril';
...
interface IPerson { id: number; first: string; last: string; }

const state = {
  data: [{
    id: 1,
    first: 'John',
    last: 'Doe',
  }, {
    id: 2,
    first: 'Jane',
    last: 'Doe',
  }, {
    id: 3,
    first: 'Bob',
    last: 'Bear',
  }] as IPerson[],
};

m(EditableTable, {
  headers: [
    { column: 'id', title: 'ID' },
    { column: 'first', title: 'First name' },
    { column: 'last', title: 'Last name' },
  ],
  data: state.data,
  addRows: true,
  deleteRows: true,
  moveRows: true,
  // disabled: true,
  // sortRows: false,
  onchange: (data) => {
    state.data = data;
    console.table(data);
  },
} as IEditableTable<IPerson>),
`,
        }),
      ]),
  };
};
|
import _ = require("lodash");
/**
* Utility to get environment variables from a list of possible keys.
*
* E.g. you may want to check if there is an environment variable defined for PORT or SERVER_PORT.
*/
export class ProcessEnvUtils {
/**
* Gets the environment variables
* @param defaultValue
* @param {string} possibleKeys
* @returns {any}
*/
static getEnvVar(defaultValue: any, ...possibleKeys: string[]): any {
if (!process || !process.env) {
return defaultValue;
}
let val: any;
for (const key of possibleKeys) {
val = process.env[key];
if (val) {
return val;
}
}
// try all lowercase...only do this once we have checked case sensitive for performance reasons.
const processEnv: object = {};
if (process && process.env) {
// tslint:disable-next-line:forin
for (const key in process.env) {
processEnv[key.toLowerCase()] = process.env[key];
}
}
for (const key of possibleKeys) {
val = processEnv[key.toLowerCase()];
if (val) {
return val;
}
}
return defaultValue;
}
static getEnvVarAsBoolean(defaultValue: boolean, ...possibleKeys: string[]): boolean {
const retVal: string = this.getEnvVar(defaultValue, ...possibleKeys);
if (_.isString(retVal)) {
return retVal === "true";
} else {
return !!retVal;
}
}
static getEnvVarAsString(defaultValue: string, ...possibleKeys: string[]): string {
const retVal: string = this.getEnvVar(defaultValue, ...possibleKeys);
if (retVal === undefined || retVal === null) {
return retVal;
} else {
return retVal + "";
}
}
static getEnvVarAsNumber(defaultValue: number, ...possibleKeys: string[]): number {
const retVal: string = this.getEnvVar(defaultValue, ...possibleKeys);
if (_.isString(retVal)) {
return parseInt(retVal, 10);
} else if (_.isNumber(retVal)) {
return retVal as number;
} else {
return Number.NaN;
}
}
} |
<reponame>pvormste/graphql-go-tools<filename>pkg/proxy/config.go
package proxy
import (
"context"
"net/url"
)
// RequestConfigProvider is the interface used to retrieve the configuration for handling a request.
// Based on the information provided in the context, the implementation may decide how to build the
// request config; this can be used to dynamically choose which backend satisfies a request.
// Alternatively, the context may be ignored and a static configuration returned for all requests.
// Any http middleware can be placed in front of the request config provider to set up the request
// context; after that it is up to the RequestConfigProvider implementation how to derive the
// configuration. This should give the user enough flexibility.
type RequestConfigProvider interface {
	GetRequestConfig(ctx context.Context) (*RequestConfig, error)
}
// RequestConfig configures how the proxy should handle a request.
type RequestConfig struct {
	// Schema is a pointer to the schema publicly exposed by the proxy.
	Schema *[]byte
	// BackendURL is the URL of the backend origin graphql server.
	BackendURL url.URL
	// AddHeadersToContext lists headers that should be extracted from a request to the proxy
	// and added to the context; from there they are available to graphql middleWares,
	// e.g. to set variables.
	AddHeadersToContext [][]byte
	// BackendHeaders are headers that should be statically set on backend requests.
	// This could be used to add authentication to securely communicate with the origin server.
	BackendHeaders map[string][]string
}
// StaticRequestConfigProvider is a RequestConfigProvider that returns the same
// fixed configuration for every request, ignoring the context.
type StaticRequestConfigProvider struct {
	config RequestConfig
}

// GetRequestConfig implements RequestConfigProvider; the ctx argument is ignored.
func (s *StaticRequestConfigProvider) GetRequestConfig(ctx context.Context) (*RequestConfig, error) {
	return &s.config, nil
}

// NewStaticRequestConfigProvider returns a provider that always serves the given config.
func NewStaticRequestConfigProvider(config RequestConfig) *StaticRequestConfigProvider {
	return &StaticRequestConfigProvider{
		config: config,
	}
}
|
#!/bin/bash
#
# This prepares an empty Ubuntu system for running Docker.
#
# Just van den Broecke - 2017
# DEPRECATED - 2021 update: there are much quicker ways
# See https://docs.docker.com/engine/install/ubuntu/
#
# Below was based on
# https://docs.docker.com/engine/installation/linux/ubuntu/
# as on may 26 2017.
# Run as root or prepend all commands with "sudo"!
#

# Optional, comment out for your locale
# set time right and configure timezone and locale
# echo "Europe/Amsterdam" > /etc/timezone
# dpkg-reconfigure -f noninteractive tzdata

# Bring system uptodate
apt-get update
apt-get -y upgrade

# Install packages to allow apt to use a repository over HTTPS
apt-get install -y software-properties-common apt-transport-https ca-certificates curl

# Add keys and extra repos
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -

# Verify key
apt-key fingerprint 0EBFCD88

# Add Docker repo to deb config
add-apt-repository \
   "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
   $(lsb_release -cs) \
   stable"

# Bring packages uptodate
apt-get update

# The linux-image-extra package allows you use the aufs storage driver.
# at popup keep locally installed config option
# apt-get install -y linux-image-extra-$(uname -r)
apt-get install -y linux-image-extra-$(uname -r) linux-image-extra-virtual

# https://askubuntu.com/questions/98416/error-kernel-headers-not-found-but-they-are-in-place
# ($(...) replaces the legacy backtick substitution used before)
apt-get install -y build-essential linux-headers-$(uname -r) dkms

# Install Docker CE
# -y added: every other install here is non-interactive and the original
# prompt would hang unattended provisioning runs.
apt-get install -y docker-ce

# If you are installing on Ubuntu 14.04 or 12.04, apparmor is required.
# You can install it using (usually already installed)
# apt-get install -y apparmor

# Start the docker daemon. Usually already running
# service docker start

# Docker compose
export dockerComposeVersion="1.20.1"
# -f makes curl fail on HTTP errors instead of writing an HTML error page
# into the docker-compose binary.
curl -fL "https://github.com/docker/compose/releases/download/${dockerComposeVersion}/docker-compose-$(uname -s)-$(uname -m)" > /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
|
package com.fasterxml.jackson.jr.annotationsupport;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.jr.ob.JSON;
import com.fasterxml.jackson.jr.ob.JSONObjectException;
public class BasicRenameTest extends ASTestBase
{
    // Simple bean with one explicit @JsonProperty rename ("_first" ->
    // "firstName") and one marker-only @JsonProperty (no rename).
    static class NameSimple {
        @JsonProperty("firstName")
        public String _first;

        @JsonProperty // just explicit marker, no rename
        public String _last;

        protected NameSimple() { }

        public NameSimple(String f, String l) {
            _first = f;
            _last = l;
        }
    }

    /*
    /**********************************************************************
    /* Test methods
    /**********************************************************************
     */

    // for stricter validation, fail on unknown properties
    private final JSON JSON_WITH_ANNO = jsonWithAnnotationSupport()
            .with(JSON.Feature.FAIL_ON_UNKNOWN_BEAN_PROPERTY);

    public void testBasicRenameOnSerialize() throws Exception
    {
        final NameSimple input = new NameSimple("Bob", "Burger");
        // default mapper has no annotation support: raw field names are used
        assertEquals(a2q("{'_first':'Bob','_last':'Burger'}"), JSON.std.asString(input));
        // with annotation support, @JsonProperty("firstName") renames _first
        assertEquals(a2q("{'_last':'Burger','firstName':'Bob'}"), JSON_WITH_ANNO.asString(input));
        // and ensure no leakage to default one:
        assertEquals(a2q("{'_first':'Bob','_last':'Burger'}"), JSON.std.asString(input));
    }

    public void testBasicRenameOnDeserialize() throws Exception
    {
        final String json = a2q("{'firstName':'Bob','_last':'Burger'}");

        // without annotation support, "firstName" is unknown and must fail
        final JSON j = JSON.std
                .with(JSON.Feature.FAIL_ON_UNKNOWN_BEAN_PROPERTY);
        try {
            j.beanFrom(NameSimple.class, json);
            fail("Should not pass");
        } catch (JSONObjectException e) {
            verifyException(e, "Unrecognized JSON property \"firstName\"");
        }

        // with annotation support, the renamed property maps back to _first
        NameSimple result = JSON_WITH_ANNO.beanFrom(NameSimple.class, json);
        assertEquals("Bob", result._first);
        assertEquals("Burger", result._last);
    }
}
|
<gh_stars>10-100
package io.opensphere.core.util.security;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.Security;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Test;
/** Test for {@link EncryptedByteArray}. */
public class EncryptedByteArrayTest
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(EncryptedByteArrayTest.class);

    /**
     * Test creating and decrypting a {@link EncryptedByteArray}.
     *
     * Iterates over every installed security provider, generates a key with
     * each parameterless KeyGenerator service, and round-trips random data
     * through every compatible Cipher transform (algorithm/mode/padding
     * combination), asserting the decrypted bytes equal the originals.
     *
     * @throws GeneralSecurityException If the test fails.
     * @throws IOException If the test fails.
     */
    @Test
    public void test() throws GeneralSecurityException, IOException
    {
        SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
        int successCount = 0;
        Provider[] providers = Security.getProviders();
        for (Provider keyGeneratorProvider : providers)
        {
            for (Provider.Service keyGeneratorService : keyGeneratorProvider.getServices())
            {
                if ("KeyGenerator".equals(keyGeneratorService.getType()))
                {
                    KeyGenerator keyGenerator = KeyGenerator.getInstance(keyGeneratorService.getAlgorithm());
                    SecretKey key;
                    try
                    {
                        key = keyGenerator.generateKey();
                    }
                    catch (IllegalStateException e)
                    {
                        // Skip key generators that require parameters.
                        if (LOGGER.isDebugEnabled())
                        {
                            LOGGER.debug(e, e);
                        }
                        continue;
                    }
                    // Find every Cipher service whose algorithm matches the
                    // generated key's algorithm, across all providers.
                    for (Provider algProvider : providers)
                    {
                        for (Provider.Service algService : algProvider.getServices())
                        {
                            if ("Cipher".equals(algService.getType())
                                    && algService.getAlgorithm().equals(keyGeneratorService.getAlgorithm()))
                            {
                                // "SupportedModes"/"SupportedPaddings" are
                                // optional, '|'-separated provider attributes.
                                String[] modes = algService.getAttribute("SupportedModes") == null ?
                                        new String[0] : algService.getAttribute("SupportedModes").split("\\|");
                                String[] paddings = algService.getAttribute("SupportedPaddings") == null ?
                                        new String[0] : algService.getAttribute("SupportedPaddings").split("\\|");
                                for (String mode : modes)
                                {
                                    for (String padding : paddings)
                                    {
                                        String transform = algService.getAlgorithm() + "/" + mode + "/" + padding;
                                        final Cipher encryptCipher;
                                        try
                                        {
                                            encryptCipher = Cipher.getInstance(transform, algProvider);
                                        }
                                        catch (NoSuchPaddingException e)
                                        {
                                            // Some modes are not compatible
                                            // with some paddings.
                                            continue;
                                        }
                                        CipherFactory cipherFactory = new CipherFactory(new DefaultSecretKeyProvider(key),
                                                transform, algProvider);
                                        encryptCipher.init(Cipher.ENCRYPT_MODE, key);
                                        // 1133 bytes: deliberately not a
                                        // multiple of common block sizes.
                                        byte[] testBytes = new byte[1133];
                                        random.nextBytes(testBytes);
                                        EncryptedByteArray encryptedByteArray;
                                        try
                                        {
                                            encryptedByteArray = new EncryptedByteArray(testBytes, cipherFactory);
                                        }
                                        catch (CipherException e)
                                        {
                                            // Try again using the cipher block
                                            // size.
                                            if ("NOPADDING".equals(padding))
                                            {
                                                testBytes = new byte[encryptCipher.getBlockSize() * 7];
                                                encryptedByteArray = new EncryptedByteArray(testBytes, cipherFactory);
                                            }
                                            else
                                            {
                                                // Shouldn't have to use the
                                                // cipher block size if padding
                                                // is enabled.
                                                throw e;
                                            }
                                        }
                                        byte[] decryptedData = encryptedByteArray.getDecryptedData(cipherFactory);
                                        Assert.assertTrue(Arrays.equals(testBytes, decryptedData));
                                        if (LOGGER.isDebugEnabled())
                                        {
                                            LOGGER.debug("Test succeeded for " + encryptCipher.getAlgorithm());
                                        }
                                        successCount++;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        // At least one provider/transform combination must have round-tripped.
        Assert.assertTrue(successCount > 0);
    }
}
}
|
#!/usr/bin/env bash
# Ask the user whether to continue; exports ABORT=1 when they decline.
# Callers should test "$ABORT" afterwards (it stays unset on "yes").

# Continue or exit
echo -n ">> Do you want to continue? [Y/n]: "
# -r prevents read from interpreting backslashes in the reply.
read -r ASKME
if [[ ${ASKME} =~ ^([yY][eE][sS]|[yY])$ ]]
then
    echo -e ">> Fine, let's move on..."
else
    echo -e ">> Aborting, goodbye."
    ABORT=1
fi
export ABORT
|
#!/bin/bash
# Workstation bootstrap: system update, common CLI tools, Node.js 12.x, vim.
# Run as root (or via sudo).

apt update
# -y keeps the whole script non-interactive, consistent with the installs below.
apt upgrade -y

echo "alias lint_me='npx tslint --fix -p tsconfig.json'" >> ~/.bashrc

# common
apt install -y nsnake
apt install -y git
apt install -y terminator
apt install -y taskwarrior
apt install -y figlet
apt install -y tree

# nodejs
curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -
apt install -y nodejs

# vim
apt install -y vim
|
from bs4 import BeautifulSoup
def extract_category_name(html):
    """Return the text of the first <a> inside the first <p class="path">.

    Raises AttributeError when no <p class="path"> exists (soup.find then
    returns None) or when that <p> contains no <a>.
    """
    soup = BeautifulSoup(html, 'html.parser')
    category_tag = soup.find('p', class_='path').find('a')
    category_name = category_tag.text
    return category_name
# Test the function: the anchor text inside <p class="path"> should be
# extracted even when nested in unrelated table markup.
html_snippet = """
<table>
<tbody>
<tr>
<p class="path">
<a>category3</a>
</p>
</tr>
</tbody>
</table>
"""
print(extract_category_name(html_snippet))  # Output: "category3"
# NOTE(review): this file has no shebang — confirm whether it is meant to be
# sourced or executed via an explicit shell before adding one.

# network interface on which to limit traffic
IF="eth0"
# capacity of the network interface in question
LINKCEIL="1gbit"
# limit outbound Bitcoin protocol traffic to this rate
LIMIT="160kbit"
# defines the address space for which you wish to disable rate limiting
LOCALNET="192.168.0.0/16"

# delete existing rules (prints a harmless error when no qdisc is installed yet)
tc qdisc del dev ${IF} root

# add root class
tc qdisc add dev ${IF} root handle 1: htb default 10

# add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}

# add our two classes. one unlimited, another limited
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1

# add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11

# delete any existing rules
# disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#	iptables -t mangle -D OUTPUT 1
#	ret=$?
#done

# limit outgoing traffic to and from port 33139, but not when dealing with a host on the local network
# (defined by $LOCALNET)
# --set-mark marks packets matching these criteria with the number "2"
# these packets are filtered by the tc filter with "handle 2"
# that filter sends the packets into the 1:11 class, and that class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 33139 ! -d ${LOCALNET} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 33139 ! -d ${LOCALNET} -j MARK --set-mark 0x2
|
# Experiment launcher: exports the configuration consumed by tpu/_base.sh and
# runs it. Earlier configuration variants are kept commented for reference:
# export BATCH_SIZE=64
# export ADDITIONAL='--critic_type=kplusone_fm --kplusone_mhinge_cond_discriminator_weight=1.0 --aux_mhinge_cond_generator_weight=0.05'
# export ADDITIONAL='--critic_type=kplusone_fm --generator_loss_fn=kplusone_ssl_featurematching_generator_loss --kplusone_mhinge_ssl_cond_discriminator_weight=1.0 --aux_mhinge_cond_generator_weight=0.05'

export EXPERIMENT_NAME=imagenette128
export BATCH_SIZE=512
export TRAIN_STEPS_PER_EVAL=2000

# NOTE(review): the backslash-newline sequences inside these single-quoted
# values are preserved literally in the variable; confirm tpu/_base.sh
# expects (or strips) them before changing anything here.
export DATASET_ARGS='--image_size=128 \
--dataset_name=imagenette/160px \
--num_classes=10 \
--dataset_val_split_name=validation \
--unlabelled_dataset_name=imagenet_resized/64x64 \
--unlabelled_dataset_split_name=train'

export ADDITIONAL='--critic_type=kplusone_fm \
--generator_loss_fn=kplusone_ssl_featurematching_generator_loss \
--kplusone_mhinge_ssl_cond_discriminator_weight=1.0 \
--aux_mhinge_cond_generator_weight=0.05 \
--tpu_gan_estimator_d_step=4'

bash tpu/_base.sh
def product_list(num_list):
    """Multiply all values in num_list together, coercing each to float.

    Accepts any iterable of float-convertible values (numbers or numeric
    strings). Returns 1 for an empty sequence.
    """
    result = 1
    for value in num_list:
        result = result * float(value)
    return result
# Example usage. `num_list` was previously undefined here, which raised a
# NameError at runtime; define a sample input before calling.
num_list = [1, 2, 3, 4, 5]
result = product_list(num_list)
print(result)
<gh_stars>1-10
package com.book.service;
import com.book.dao.ActiveDao;
import com.book.domain.ActiveRank;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
/**
 * Service layer for the activity ranking: thin delegations to
 * {@link ActiveDao}.
 */
@Service
public class ActiveService {
    private ActiveDao activeDao;

    // Setter injection of the DAO.
    @Autowired
    public void setacDao(ActiveDao activeDao){
        this.activeDao=activeDao;
    }

    /** Returns the activity ranking list in the DAO's default order. */
    public ArrayList<ActiveRank> acList(){
        return activeDao.acList();
    }

    /** Returns the activity ranking list in ascending order. */
    public ArrayList<ActiveRank> acList_asc(){
        return activeDao.acList_asc();
    }

    /**
     * Deletes the ranking entry for the given operator.
     *
     * @return the DAO's result code (presumably rows affected — confirm
     *         against ActiveDao)
     */
    public int deleteacList(String operatorId ){
        return activeDao.deleteacList(operatorId);
    }

    /** Returns ranking entries matching the given search keyword. */
    public ArrayList<ActiveRank> matchAC(String searchWord){
        return activeDao.matchac(searchWord);
    }

    /** @return true when the insert affected at least one row */
    public boolean addActiveItem(ActiveRank activeRank){
        return activeDao.addActiveItem(activeRank)>0;
    }

    /** @return true when the update affected at least one row */
    public boolean updateActiveItem(ActiveRank activeRank){
        return activeDao.updateActiveItem(activeRank)>0;
    }
}
|
#!/usr/bin/env bash
set -e

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null 2>&1 && pwd)"
LAUNCHER_DIR="$ROOT_DIR/../"

# NCTL compile requires casper-node-launcher
if [ ! -d "$LAUNCHER_DIR/casper-node-launcher" ]; then
    # Quote the path so pushd is safe under word splitting, and pop back so
    # the directory stack is left as we found it.
    pushd "$LAUNCHER_DIR"
    git clone https://github.com/CasperLabs/casper-node-launcher.git
    popd
fi

# Activate Environment
pushd "$ROOT_DIR"
source "$(pwd)/utils/nctl/activate"

# NCTL Build
nctl-compile

function main() {
    # Stage
    get_remotes
    stage_remotes "$1"
    build_from_settings_file
    # Start
    start_upgrade_scenario_1
    start_upgrade_scenario_3
}

# Pulls down all remotely staged files
# from s3 bucket to NCTL remotes directory.
function get_remotes() {
    local VERSION_ARRAY
    log "... downloading remote files and binaries"

    if [ -z "$AWS_SECRET_ACCESS_KEY" ] || [ -z "$AWS_ACCESS_KEY_ID" ]; then
        log "ERROR: AWS KEYS needed to run. Contact SRE."
        exit 1
    fi

    # NOTE(review): parsing `aws s3 ls` output with awk/grep/tr is fragile;
    # consider `aws s3api list-objects` with --query if this ever breaks.
    VERSION_ARRAY=(
        $(aws s3 ls s3://nctl.casperlabs.io/ | \
            awk '{ print $2 }' | \
            grep 'v\|rel' | \
            tr -d "[:alpha:]" | \
            tr -d '-' | \
            tr -d '/'
        )
    )
    if [ -z "${VERSION_ARRAY[*]}" ]; then
        log "ERROR: Version Array was blank. Exiting."
        exit 1
    fi
    nctl-stage-set-remotes "${VERSION_ARRAY[*]}"
}

# Sets up settings.sh for CI test.
# If local arg is passed it will skip this step
# and use whats currently in settings.sh
# arg: local is for debug testing only
function stage_remotes() {
    local BRANCH=${1}
    local PATH_TO_STAGE
    PATH_TO_STAGE="$(get_path_to_stage 1)"
    if [ "$BRANCH" != "local" ]; then
        log "... CI branch detected"
        log "... setting up stage dir: $PATH_TO_STAGE"
        dev_branch_settings "$PATH_TO_STAGE"
    fi
}

# Generates stage-1 directory for test execution
# Just here for a log message
function build_from_settings_file() {
    log "... setting build from settings.sh file"
    nctl-stage-build-from-settings
}

# Produces settings.sh needed for CI testing.
# It will always setup latest RC -> minor incremented by 1.
# i.e: if current RC is 1.2 then dev will be setup as 1.3
function dev_branch_settings() {
    local PATH_TO_STAGE=${1}
    local INCREMENT
    local RC_VERSION
    pushd "$(get_path_to_remotes)"
    RC_VERSION="$(ls --group-directories-first -d */ | sort -r | head -n 1 | tr -d '/')"
    # Split "X.Y" and bump the major-most digit group found by the regex.
    [[ "$RC_VERSION" =~ (.*[^0-9])([0-9])(.)([0-9]+) ]] && INCREMENT="${BASH_REMATCH[1]}$((${BASH_REMATCH[2]} + 1))${BASH_REMATCH[3]}${BASH_REMATCH[4]}"
    RC_VERSION=$(echo "$RC_VERSION" | sed 's/\./\_/g')
    INCREMENT=$(echo "$INCREMENT" | sed 's/\./\_/g')
    mkdir -p "$(get_path_to_stage '1')"
    cat <<EOF > "$(get_path_to_stage_settings 1)"
export NCTL_STAGE_SHORT_NAME="YOUR-SHORT-NAME"
export NCTL_STAGE_DESCRIPTION="YOUR-DESCRIPTION"
export NCTL_STAGE_TARGETS=(
"${RC_VERSION}:remote"
"${INCREMENT}:local"
)
EOF
    cat "$(get_path_to_stage_settings 1)"
    popd
}

# Kicks off the scenario
# Just here for a log message
function start_upgrade_scenario_1() {
    log "... Starting Upgrade Scenario 1"
    nctl-exec-upgrade-scenario-1
}

function start_upgrade_scenario_3() {
    log "... Starting Upgrade Scenario 3"
    nctl-exec-upgrade-scenario-3
}

main "$1"
|
<gh_stars>0
package controller;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import converter.RS2DatasetConverter;
import org.apache.jena.query.ResultSet;
import org.apache.jena.query.ResultSetFormatter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.arhscube.gameofcode.autocomplete.Autocomplete;
import com.arhscube.gameofcode.eurovoc.Parser.LANG;
import com.arhscube.gameofcode.eurovoc.Term;
import com.arhscube.gameofcode.search.Parser;
import com.arhscube.gameofcode.search.SearchTree;
import com.arhscube.gameofcode.search.Sparql;
import com.google.gson.Gson;
import model.DataSet;
import service.SparqlService;
/**
 * Endpoints for searching datasets via SPARQL: an HTML view, a raw-format
 * API, and a JSONP autocomplete helper.
 */
@Controller
@RequestMapping("/search")
public class SearchController {
    @Autowired
    private SparqlService sparqlService;

    /**
     * HTML search endpoint: parses the query into a search tree, converts it
     * to SPARQL, runs it, and renders the "search" view with the results.
     */
    @RequestMapping("/find")
    public String search(Map<String, Object> model, @RequestParam("search") String search,@RequestParam("lang") String lang) {
        // NOTE(review): System.out.println should be replaced with a logger.
        System.out.println("In SearchController.search()");
        LANG language = com.arhscube.gameofcode.eurovoc.Parser.getLangCode(lang);
        SearchTree st = Parser.parse(search, language);
        String sparql = Sparql.toSparql(st);
        System.out.println(sparql);
        List<DataSet> dataSetList = new ArrayList<>();
        ResultSet resultSet = sparqlService.readSparqlQuery(sparql);
        RS2DatasetConverter converter = new RS2DatasetConverter();
        dataSetList.addAll(converter.toDataSet(resultSet));
        model.put("results", dataSetList);
        model.put("searchValue", search);
        return "search";
    }

    /**
     * REST endpoint returning raw query results serialized as json, xml or
     * csv; an unknown format yields an empty body.
     */
    @ResponseBody
    @RequestMapping(value = "/api", method = RequestMethod.GET)
    public String searchApi(@RequestParam("search") String search,@RequestParam("lang") String lang,@RequestParam("format") String format){
        LANG language = com.arhscube.gameofcode.eurovoc.Parser.getLangCode(lang);
        SearchTree st = Parser.parse(search, language);
        String sparql = Sparql.toSparql(st);
        ResultSet resultSet = sparqlService.readSparqlQuery(sparql);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        switch (format){
            case "json" : ResultSetFormatter.outputAsJSON(outputStream, resultSet); break;
            case "xml": ResultSetFormatter.outputAsXML(outputStream, resultSet);break;
            case "csv": ResultSetFormatter.outputAsCSV(outputStream, resultSet);break;
            default:break;
        }
        // NOTE(review): new String(byte[]) uses the platform default charset;
        // consider new String(..., StandardCharsets.UTF_8) for portability.
        return new String(outputStream.toByteArray());
    }

    /**
     * JSONP autocomplete endpoint: wraps the term suggestions in the given
     * callback.
     * NOTE(review): the callback parameter is echoed unvalidated into the
     * response — a reflected-XSS vector; it should be validated against a
     * safe identifier pattern before use.
     */
    @ResponseBody
    @RequestMapping(value = "/autocomplete", method = RequestMethod.GET)
    public String autocomplete(Map<String, Object> model, @RequestParam("term") String term,
            @RequestParam("lang") String lang, @RequestParam("callback") String callback) {
        List<Term> terms = Autocomplete.getAutoComplete(term, lang);
        System.out.println("hello from AutocompleteController");
        String json = new Gson().toJson(terms);
        return callback + "(" + json + ");";
    }

    /** Builds a hard-coded dataset list (kept for demos / manual testing). */
    public List<DataSet> mockDataSet() {
        List<DataSet> dataSetList = new ArrayList<>();
        DataSet one = new DataSet();
        one.setTitle("Dataset one");
        one.setDescription("This is a description");
        DataSet two = new DataSet();
        two.setTitle("Dataset two");
        two.setDescription("This is a description");
        DataSet three = new DataSet();
        three.setTitle("Dataset three");
        three.setDescription("This is a description");
        dataSetList.add(one);
        dataSetList.add(two);
        dataSetList.add(three);
        return dataSetList;
    }
}
#!/bin/python
## Copyright (c) 2015:
## The Italian Natinal Institute of Nuclear Physics (INFN), Italy
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import os, sys, time, pwd, datetime, optparse, time, re, uuid, grp
import logging, logging.handlers
import keystoneclient.v2_0.client as ksclient
import swiftclient.client as swclient
import swiftclient.exceptions as swclientexceptions
"""
eu.egi.cloud.SWIFT.py
This is a new NAGIOS probe for testing some OpenStack SWIFT functionalities.
The new probe performs the following operations:
- Creates a new swift container;
- Create a new swift object file;
- Download the content of the object file locally;
- Delete the swift object file;
- Delete the swift container.
"""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__version__ = "$Revision: 0.0.3 $"
__date__ = "$Date: 22/10/2015 11:04:19 $"
__copyright__ = "Copyright (c) 2015 INFN"
__license__ = "Apache Licence v2.0"
def get_keystone_creds():
    """Read OpenStack Keystone credentials from the process environment.

    The password line had been corrupted by an anonymization pass
    (``<PASSWORD>['<PASSWORD>']``, a syntax error); restored to
    ``os.environ['OS_PASSWORD']`` to match the sibling OS_* lookups.

    Returns a dict with username, password, auth_url and tenant_name.
    Raises KeyError if any required OS_* variable is missing.
    """
    d = {}
    d['username'] = os.environ['OS_USERNAME']
    d['password'] = os.environ['OS_PASSWORD']
    d['auth_url'] = os.environ['OS_AUTH_URL']
    d['tenant_name'] = os.environ['OS_TENANT_NAME']
    return d
class OSSwift:
    # Shared swiftclient connection, created in __init__.
    swift = None

    def __init__(self, auth_url, username, password, auth_version, retries, insecure, creds, keystone, logger):
        """Initialise the class and establish a new connection with the
        OpenStack Object Storage (the Swift public URL is resolved through
        the given Keystone client's service catalog)."""
        self.auth_url = auth_url
        self.username = username
        self.password = password
        self.auth_version = auth_version
        self.retries = retries
        self.insecure = insecure
        self.creds = creds
        self.keystone = keystone
        self.logger = logger
        # Get Swift public URL from Keystone
        swift_endpoint = self.keystone.service_catalog.url_for(service_type='object-store', endpoint_type='publicURL')
        self.logger.debug ("\n[-] Establish a connection with the OpenStack Swift Object Storage")
        self.logger.debug ("[-] Swift public URL = %s " % swift_endpoint)
        self.logger.debug ("[-] Initialize the OSSwift() main class")
        self.swift = swclient.Connection(
            self.auth_url,
            self.username,
            self.password,
            auth_version=self.auth_version,
            retries=self.retries,
            insecure=self.insecure,
            os_options=self.creds)

    def create_container(self, containerID, logger):
        """NAGIOS metric: create a new Object Storage container."""
        self.logger.debug ("[-] Create a new OpenStack Swift Container = %s " % containerID)
        self.swift.put_container(containerID)

    def create_object(self, containerID, objectID, data, logger):
        """NAGIOS metric: create a new object file and log container stats."""
        self.logger.debug ("[+] Call the put_object() method")
        self.swift.put_object(containerID, objectID, data)
        self.logger.debug ("[-] Create the objectID = " + objectID)
        self.logger.debug ('_' * 71)
        self.logger.debug ("[-] Print container statistics")
        (container, containers) = self.swift.get_container(containerID)
        self.logger.debug ("ContainerID: %s " % containerID)
        self.logger.debug ("Objects: %(x-container-object-count)s" % container)
        for container in containers:
            self.logger.debug (">>> %(name)s [%(bytes)d bytes]" % container)
            self.logger.debug (">>> %(content_type)s [MD5SUM: %(hash)s]" % container)
        self.logger.debug ('_' * 71)

    def download_object(self, containerID, objectID, filename, logger):
        """NAGIOS metric: download the object's content into a local file."""
        self.logger.debug ("[+] Call the get_object() method")
        response, object_body = self.swift.get_object(containerID, objectID)
        self.logger.debug ("[-] Download the objectID in the local file [%s]" % filename)
        f = open(filename, 'wb')
        f.write(object_body)
        f.close()
        # Ownership/permission adjustment kept disabled (environment-specific):
        #uid = pwd.getpwnam("swift").pw_uid
        #gid = grp.getgrnam("swift").gr_gid
        #os.chown(filename, uid, gid)
        #os.chmod(filename, 750)

    def delete_object(self, containerID, objectID, logger):
        """NAGIOS metric: delete the object file from its container."""
        self.logger.debug ("[+] Call the delete_object() method")
        self.swift.delete_object(containerID, objectID)
        self.logger.debug ("[-] Delete the objectID = " + objectID)

    def delete_container(self, containerID, logger):
        """NAGIOS metric: delete an Object Storage container."""
        self.logger.debug ("[-] Delete the OpenStack Swift Container = " + containerID)
        self.swift.delete_container(containerID)

    def close(self, logger):
        """NAGIOS metric: close any connection with the Object Storage."""
        self.logger.debug ("[+] Call the close() method")
        self.swift.close()
|
// Rollup-generated UMD test bundle for the multiplex "all" LINQ operator.
// Detects CommonJS, AMD, or a browser global and hands the mx module to the factory.
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('../../multiplex')) :
    typeof define === 'function' && define.amd ? define(['../../multiplex'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.mx));
}(this, (function (mx) { 'use strict';

    // Normalize CJS/ESM default-export interop for the mx module.
    function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

    var mx__default = /*#__PURE__*/_interopDefaultLegacy(mx);

    // One instance of every multiplex collection type, all seeded with [1..5],
    // so the same predicate can be run against each of them below.
    var array = [1, 2, 3, 4, 5];
    var enumerable = mx__default['default'].range(1, 5);
    var collection = new mx__default['default'].Collection(array);
    var list = new mx__default['default'].List(array);
    var linkedList = new mx__default['default'].LinkedList(array);
    var hashSet = new mx__default['default'].HashSet(array);
    var stack = new mx__default['default'].Stack(array);
    var queue = new mx__default['default'].Queue(array);
    var set = new mx__default['default'].Set(array);
    var map = new mx__default['default'].Map();
    var dictionary = new mx__default['default'].Dictionary();
    var sortedList = new mx__default['default'].SortedList();
    var readOnlyCollection = list.asReadOnly();
    var lookup = new mx__default['default'].Lookup(array, function (t) {
        return t;
    });

    // Keyed collections are populated with identity key/value pairs.
    for (var i = 0; i < array.length; i++) {
        map.set(array[i], array[i]);
        dictionary.set(array[i], array[i]);
        sortedList.add(array[i], array[i]);
    }

    // Use the global QUnit in the browser, or require it under Node.
    var qunit = typeof QUnit === 'undefined' ? require('qunitjs') : QUnit;
    var qmodule = qunit.module;
    var qtest = qunit.test;
    qunit.expect;

    qmodule('linq-all');

    // Shared predicate: true for every seeded value (all are < 10).
    function simpleNumericPredicate(t) {
        return t < 10;
    }

    qtest('basic all test', function (assert) {
        assert.ok(mx__default['default'](array).all(simpleNumericPredicate), 'Test all numbers in an array are less than 10');
        assert.ok(!mx__default['default'](array).all(function (t) {
            return t < 2;
        }), 'Test all numbers in an array are less than 2');
        // Vacuous truth: all() over an empty sequence must return true.
        assert.ok(mx__default['default']([]).all(simpleNumericPredicate), 'Test all over an empty iterable results true');
    });

    qtest('collections all method tests', function (assert) {
        assert.ok(enumerable.all(simpleNumericPredicate), 'Test all numbers in an enumerable are less than 10');
        assert.ok(collection.all(simpleNumericPredicate), 'Test all numbers in a Collection are less than 10');
        assert.ok(list.all(simpleNumericPredicate), 'Test all numbers in a List are less than 10');
        assert.ok(readOnlyCollection.all(simpleNumericPredicate), 'Test all numbers in a ReadOnlyCollection are less than 10');
        assert.ok(linkedList.all(simpleNumericPredicate), 'Test all numbers in a LinkedList are less than 10');
        assert.ok(hashSet.all(simpleNumericPredicate), 'Test all numbers in a HashSet are less than 10');
        assert.ok(stack.all(simpleNumericPredicate), 'Test all numbers in a Stack are less than 10');
        assert.ok(queue.all(simpleNumericPredicate), 'Test all numbers in a Queue are less than 10');
        assert.ok(set.all(simpleNumericPredicate), 'Test all numbers in a Set are less than 10');
        // Map iteration yields [key, value] pairs, hence t[0].
        assert.ok(map.all(function (t) {
            return t[0] < 10;
        }), 'Test all numbers in a Map are less than 10');
        // Dictionary/Lookup/SortedList iteration yields key/value entry objects.
        assert.ok(dictionary.all(function (t) {
            return t.key < 10;
        }), 'Test all numbers in a Dictionary are less than 10');
        assert.ok(lookup.all(function (t) {
            return t.key < 10;
        }), 'Test all numbers in a Lookup are less than 10');
        assert.ok(sortedList.all(function (t) {
            return t.key < 10;
        }), 'Test all numbers in a SortedList are less than 10');
    });

    qtest('all method validations', function (assert) {
        assert.throws(function () {
            mx__default['default']([1]).all();
        }, 'null input');
        assert.throws(function () {
            mx__default['default']([1]).all([2], 1);
        }, 'non-function predicate');
    });

})));
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package shootergame.panel.mainmenu;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.UnsupportedAudioFileException;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JPanel;
import shootergame.Game;
import shootergame.filemanager.OptionManager;
import shootergame.world.GameFrame;
/**
* Create Main Menu. This is the first JPanel to load.
*
* @author Tahmid
*/
public class MainMenu extends JPanel
{
    /** Shared layout constraints for the vertically stacked menu buttons. */
    private GridBagConstraints gridConstraints = new GridBagConstraints();
    /** Window the menu is displayed in. */
    private GameFrame frame;
    /** Resolution of the supplied frame, formatted as "WIDTHxHEIGHT". */
    private String stringRes;
    /** Running game instance, created when the player presses Play. */
    private Game game;

    /**
     * Constructor for Main Menu. Creates a brand-new game window and
     * installs the Play/Option/Quit buttons into it.
     *
     * @param winName
     *            Title of window.
     * @param width
     *            width of the window.
     * @param height
     *            height of the window.
     */
    public MainMenu(String winName, int width, int height)
    {
        super();
        this.setBackground(new Color(0,96,255));
        JButton play = makeMenuButton("data/Menu/MainMenu/play.png");
        JButton option = makeMenuButton("data/Menu/MainMenu/option.png");
        JButton quit = makeMenuButton("data/Menu/quit.png");
        layoutButtons(play, option, quit);
        frame = new GameFrame(winName,width,height,this);
        frame.getContentPane().add(this, BorderLayout.CENTER);
        play.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent ae)
            {
                try {
                    destroyMainMenu();
                    int[] res = resolveResolution();
                    game = new Game("Rumble", res[0], res[1], getFrame());
                } catch (IOException ex) {
                    // Previously swallowed silently; log so a failed launch is visible.
                    Logger.getLogger(MainMenu.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        });
        option.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent ae)
            {
                try {
                    destroyMainMenu();
                    new Option(winName, width, height, frame);
                } catch (IOException | UnsupportedAudioFileException | LineUnavailableException ex) {
                    Logger.getLogger(MainMenu.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        });
        quit.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent ae)
            {
                System.exit(0);
            }
        });
    }

    /**
     * Constructor for Main Menu. Reuses the given window when its current
     * size still matches the configured resolution; otherwise disposes it
     * and opens a fresh one.
     *
     * @param winName
     *            Title of the window.
     * @param width
     *            Width of the window.
     * @param height
     *            Height of the window.
     * @param frame
     *            Frame the window uses.
     */
    @SuppressWarnings("LeakingThisInConstructor")
    public MainMenu(String winName, int width, int height, GameFrame frame)
    {
        super();
        this.setBackground(new Color(0,96,255));
        stringRes = frame.getWidth() + "x" + frame.getHeight();
        JButton play = makeMenuButton("data/Menu/MainMenu/play.png");
        JButton option = makeMenuButton("data/Menu/MainMenu/option.png");
        JButton quit = makeMenuButton("data/Menu/quit.png");
        layoutButtons(play, option, quit);
        OptionManager optionsManager = new OptionManager();
        optionsManager.read();
        if(stringRes.equals(optionsManager.getLine(1).substring(12)))
        {
            this.frame = frame;
        }else{
            // Configured resolution differs from the current window: replace it.
            frame.dispose();
            this.frame = new GameFrame(winName,width,height,this);
        }
        this.frame.getContentPane().add(this, BorderLayout.CENTER);
        this.frame.getContentPane().repaint();
        this.frame.getContentPane().validate();
        play.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent ae)
            {
                try {
                    destroyMainMenu();
                    int[] res = resolveResolution();
                    new Game("Rumble", res[0], res[1], getFrame());
                } catch (IOException ex) {
                    // Previously swallowed silently; log so a failed launch is visible.
                    Logger.getLogger(MainMenu.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        });
        option.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent ae)
            {
                try {
                    destroyMainMenu();
                    new Option(winName, width, height, getFrame());
                } catch (IOException | UnsupportedAudioFileException | LineUnavailableException ex) {
                    Logger.getLogger(MainMenu.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        });
        quit.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent ae)
            {
                System.exit(0);
            }
        });
    }

    /**
     * Remove Main Menu. Repaint and invalidate the current pane.
     */
    public void destroyMainMenu()
    {
        this.frame.getContentPane().remove(this);
        this.frame.getContentPane().repaint();
        this.frame.getContentPane().invalidate();
        System.out.println("Menu removed...");
    }

    /**
     * @return
     *         GameFrame - JPanel
     */
    public GameFrame getFrame()
    {
        return this.frame;
    }

    /**
     * Creates an icon-only menu button with the default Swing chrome
     * (border, focus ring, fill) removed.
     *
     * @param iconPath
     *            path of the button icon image.
     * @return the styled button.
     */
    private static JButton makeMenuButton(String iconPath)
    {
        JButton button = new JButton(new ImageIcon(iconPath));
        button.setBorderPainted(false);
        button.setFocusPainted(false);
        button.setContentAreaFilled(false);
        return button;
    }

    /**
     * Stacks the three menu buttons vertically, centered, with uniform
     * padding, using the shared GridBag constraints.
     */
    private void layoutButtons(JButton play, JButton option, JButton quit)
    {
        gridConstraints.gridx = 0;
        gridConstraints.insets = new Insets(10,10,10,10);
        gridConstraints.anchor = GridBagConstraints.CENTER;
        setLayout(new GridBagLayout());
        add(play,gridConstraints);
        gridConstraints.gridx = 0;
        gridConstraints.gridy = 10;
        add(option, gridConstraints);
        gridConstraints.gridx = 0;
        gridConstraints.gridy = 20;
        add(quit,gridConstraints);
    }

    /**
     * Reads the resolution string from line 1 of the options file
     * (text after column 12, e.g. "1280x720") and maps it to a window
     * size. Unknown values fall back to the 1280x720 default.
     *
     * @return two-element array {width, height}.
     */
    private static int[] resolveResolution()
    {
        int winX = 1280;
        int winY = 720;
        OptionManager optionsManager = new OptionManager();
        optionsManager.read();
        switch(optionsManager.getLine(1).substring(12))
        {
            case "800x600":
                winX = 800;
                winY = 600;
                break;
            case "1280x720":
                winX = 1280;
                winY = 720;
                break;
            case "1920x1080":
                winX = 1920;
                winY = 1080;
                break;
            default:
        }
        return new int[] {winX, winY};
    }
}
|
require "minitest/autorun"
require "test_helper"
require_relative "../../lib/dagoba"
# Unit tests for the Dagoba in-memory graph database: relationship
# declaration rules, edge creation, and the chainable query pipeline
# (filtering, marking, merging, backtracking, and saved queries).
class TestDagoba < MiniTest::Test
  # Compares query output to the expected rows, order-insensitively,
  # by sorting both sides on the given attribute (default :id).
  def assert_query_matches(query, expected_result, sort_by: :id)
    assert_equal(
      expected_result.sort_by { |n| n[sort_by] },
      query.run.sort_by { |n| n[sort_by] }
    )
  end

  # --- relationship declaration validation ---

  def test_relationship_must_have_inverse
    graph = Dagoba::Database.new
    assert_raises(ArgumentError) { graph.relationship(:knows) }
    assert_raises(ArgumentError) { graph.relationship(:knows, inverse: nil) }
  end

  def test_relationship_type_must_be_symbol
    graph = Dagoba::Database.new
    assert_raises(ArgumentError) { graph.relationship("knows", inverse: :known_by) }
    assert_raises(ArgumentError) { graph.relationship(:knows, inverse: "known_by") }
    assert_raises(ArgumentError) { graph.relationship("knows", inverse: "known_by") }
  end

  # Dynamically defines one test per reserved word: relationship names must
  # not shadow the query DSL's own method names.
  Dagoba::FindCommand.reserved_words.each do |word|
    define_method "test_cannot_establish_relationship_named_#{word}" do
      graph = Dagoba::Database.new
      assert_raises(ArgumentError) { graph.relationship(word, inverse: :foobar) }
      assert_raises(ArgumentError) { graph.relationship(:foobar, inverse: word) }
    end
  end

  def test_relationship_must_have_valid_starting_index
    graph = Dagoba::Database.new {
      relationship(:knows, inverse: :knows)
      add_entry("end")
    }
    assert_raises(ArgumentError, "Cannot establish relationship from nonexistent vertex start") do
      graph.establish("start").knows("end")
    end
  end

  def test_cannot_establish_relationship_with_invalid_type
    graph = Dagoba::Database.new {
      add_entry "start"
      add_entry "end"
    }
    assert_raises(NoMethodError) { graph.establish("start").knows("end") }
  end

  # --- edge creation ---

  def test_can_establish_relationship_between_two_vertices
    graph = Dagoba::Database.new {
      add_entry("start")
      add_entry("end")
      relationship(:knows, inverse: :knows)
    }
    graph.establish("start").knows("end")
  end

  def test_can_establish_inverse_relationships
    graph = Dagoba::Database.new {
      add_entry("start")
      add_entry("end")
      relationship(:knows, inverse: :known_by)
    }
    graph.establish("end").known_by("start")
  end

  def test_can_establish_self_relationships
    graph = Dagoba::Database.new {
      add_entry("start")
      relationship(:knows, inverse: :knows)
    }
    graph.establish("start").knows("start")
  end

  def test_can_establish_duplicate_relationships
    graph = Dagoba::Database.new {
      add_entry("start")
      add_entry("end")
      relationship(:knows, inverse: :knows)
    }
    graph.establish("start").knows("end")
    graph.establish("start").knows("end")
  end

  def test_can_establish_multiple_relationship_types
    graph = Dagoba::Database.new {
      add_entry("start")
      add_entry("end")
      relationship(:knows, inverse: :knows)
      relationship(:is_parent_of, inverse: :is_child_of)
    }
    graph.establish("start").knows("end")
    graph.establish("start").is_parent_of("end")
  end

  def test_cannot_declare_duplicate_relationship_types
    graph = Dagoba::Database.new {
      relationship(:knows, inverse: :knows)
    }
    assert_raises(ArgumentError, "A relationship type with the name knows already exists") do
      graph.relationship(:knows, inverse: :knows)
    end
    assert_raises(ArgumentError, "A relationship type with the name knows already exists") do
      graph.relationship(:known_by, inverse: :knows)
    end
  end

  # --- entry / query declaration validation ---

  def test_cannot_use_id_as_attribute
    graph = Dagoba::Database.new
    assert_raises(ArgumentError) { graph.add_entry("alice", {id: 1}) }
  end

  def test_requires_all_attributes_to_be_symbols
    graph = Dagoba::Database.new
    assert_raises(ArgumentError) { graph.add_entry("alice", {"foo" => 1}) }
  end

  def test_requires_query_types_to_be_symbols
    graph = Dagoba::Database.new
    assert_raises(ArgumentError) { graph.add_query("something") { |x| x } }
  end

  def test_requires_queries_to_be_defined_with_blocks
    graph = Dagoba::Database.new
    assert_raises(ArgumentError) { graph.add_query(:something) }
  end

  # --- query pipeline behavior ---

  def test_returns_empty_when_node_has_no_relations_of_given_type
    graph = Dagoba::Database.new {
      relationship(:knows, inverse: :knows)
      add_entry("start")
    }
    assert_empty(graph.find("start").knows.run)
  end

  def test_returns_correct_results_when_node_has_relations_of_given_type
    graph = Dagoba::Database.new {
      relationship(:knows, inverse: :knows)
      add_entry("start")
      add_entry("end")
      establish("start").knows("start")
      establish("start").knows("end")
    }
    assert_query_matches(
      graph.find("start").knows,
      [{id: "end"}, {id: "start"}]
    )
  end

  def test_allows_chaining_queries
    graph = Dagoba::Database.new {
      relationship(:parent_of, inverse: :child_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie", {age: 35, education: nil})
      add_attributes("alice", {age: 45, education: "Ph.D"})
      establish("alice").parent_of("bob")
      establish("charlie").parent_of("bob")
    }
    assert_query_matches(
      graph.find("alice").parent_of.child_of,
      [
        {id: "alice", age: 45, education: "Ph.D"},
        {id: "charlie", age: 35, education: nil}
      ]
    )
  end

  def test_allows_filtering_queries
    graph = Dagoba::Database.new {
      relationship(:parent_of, inverse: :child_of)
      add_entry("alice", {age: 40})
      add_entry("bob", {age: 12})
      add_entry("charlie", {age: 10})
      establish("alice").parent_of("bob")
      establish("alice").parent_of("charlie")
    }
    assert_query_matches(
      graph.find("alice").parent_of.where { |child| child.attributes[:age] > 10 },
      [{id: "bob", age: 12}]
    )
  end

  def test_allows_taking_vertices
    graph = Dagoba::Database.new {
      relationship(:parent_of, inverse: :child_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie")
      add_entry("daniel")
      add_entry("emilio")
      add_entry("frank")
      establish("alice").parent_of("bob")
      establish("alice").parent_of("charlie")
      establish("alice").parent_of("daniel")
      establish("alice").parent_of("emilio")
      establish("alice").parent_of("frank")
    }
    # TODO: Should ordering be defined?
    # NOTE(review): re-running the same take(2) query appears to resume where
    # the previous run stopped (paginating 2 + 2 + 1) — intentional, under test.
    base_query = graph.find("alice").parent_of.take(2)
    assert_query_matches(base_query, [{id: "emilio"}, {id: "frank"}])
    assert_query_matches(base_query, [{id: "charlie"}, {id: "daniel"}])
    assert_query_matches(base_query, [{id: "bob"}])
  end

  def test_marking_nodes_and_merging_into_single_result_set
    graph = Dagoba::Database.new {
      relationship(:parent_of, inverse: :child_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie")
      add_entry("daniel")
      establish("alice").parent_of("bob")
      establish("bob").parent_of("charlie")
      establish("charlie").parent_of("daniel")
    }
    query = graph.find("daniel")
      .child_of.as(:parent)
      .child_of.as(:grandparent)
      .child_of.as(:great_grandparent)
      .merge(:parent, :grandparent, :great_grandparent)
    assert_query_matches(query, [{id: "alice"}, {id: "bob"}, {id: "charlie"}])
  end

  def test_allows_marking_nodes_and_excluding_from_result_set
    graph = Dagoba::Database.new {
      relationship(:parent_of, inverse: :child_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie")
      add_entry("daniel")
      establish("alice").parent_of("bob")
      establish("alice").parent_of("charlie")
      establish("alice").parent_of("daniel")
    }
    query = graph.find("bob").as(:me)
      .child_of
      .parent_of
      .except(:me)
    assert_query_matches(query, [{id: "charlie"}, {id: "daniel"}])
  end

  def test_allows_making_result_sets_unique
    graph = Dagoba::Database.new {
      relationship(:parent_of, inverse: :child_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie")
      add_entry("daniel")
      establish("alice").parent_of("bob")
      establish("alice").parent_of("charlie")
      establish("alice").parent_of("daniel")
    }
    assert_query_matches(
      graph.find("alice").parent_of.child_of.unique,
      [{id: "alice"}]
    )
  end

  def test_allows_selecting_results_by_attribute
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice", {programmer: true, salaried: true})
      add_entry("bob", {programmer: false, salaried: true})
      add_entry("charlie", {programmer: true, salaried: false})
      add_entry("daniel")
      establish("alice").employee_of("daniel")
      establish("bob").employee_of("daniel")
      establish("charlie").employee_of("daniel")
    }
    assert_query_matches(
      graph.find("daniel").employer_of.with_attributes({programmer: true}),
      [
        {id: "alice", programmer: true, salaried: true},
        {id: "charlie", programmer: true, salaried: false}
      ]
    )
    assert_query_matches(
      graph.find("daniel").employer_of.with_attributes({programmer: true, salaried: false}),
      [{id: "charlie", programmer: true, salaried: false}]
    )
  end

  def test_allows_selecting_attributes_in_result
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice", {salary: 100_000})
      add_entry("bob", {salary: 70_000})
      add_entry("charlie", {salary: 1_000_000})
      establish("alice").employee_of("charlie")
      establish("bob").employee_of("charlie")
    }
    assert_query_matches(
      graph.find("charlie").employer_of.select_attributes(:salary),
      [
        {salary: 100_000},
        {salary: 70_000}
      ],
      sort_by: :salary
    )
    assert_query_matches(
      graph.find("charlie").employer_of.select_attributes(:id, :salary),
      [
        {id: "alice", salary: 100_000},
        {id: "bob", salary: 70_000}
      ]
    )
  end

  def test_allows_chaining_with_nonexistent_vertices
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice")
    }
    assert_query_matches(graph.find("bob").employee_of, [])
  end

  def test_allows_backtracking
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice", {programmer: true})
      add_entry("bob", {programmer: false})
      add_entry("charlie", {programmer: false})
      add_entry("daniel")
      add_entry("emilio")
      add_entry("frank")
      establish("alice").employee_of("daniel")
      establish("bob").employee_of("daniel")
      establish("charlie").employee_of("emilio")
      establish("daniel").employee_of("frank")
      establish("emilio").employee_of("frank")
    }
    query = graph.find("frank")
      .employer_of.as(:manager)
      .employer_of.with_attributes({programmer: true})
      .back(:manager)
    assert_query_matches(query, [{id: "daniel"}])
  end

  # --- saved (named) queries ---

  def test_evaluates_queries_correctly
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie")
      add_entry("daniel")
      establish("alice").employer_of("bob")
      establish("bob").employer_of("charlie")
      establish("alice").employer_of("daniel")
      add_query(:middle_managers) { |command| command.employer_of.as(:manager).employer_of.back(:manager) }
    }
    assert_query_matches(graph.find("alice").middle_managers, [{id: "bob"}])
  end

  def test_allows_redefining_queries
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice")
      add_entry("bob")
      add_entry("charlie")
      add_entry("daniel")
      establish("alice").employer_of("bob")
      establish("bob").employer_of("charlie")
      establish("alice").employer_of("daniel")
    }
    graph.add_query(:interesting_managers) do |command|
      command.employer_of.as(:manager).employer_of.back(:manager)
    end
    assert_query_matches(
      graph.find("alice").interesting_managers,
      [{id: "bob"}]
    )
    # Redefinition replaces the earlier query body under the same name.
    graph.add_query(:interesting_managers) do |command|
      command.employer_of.as(:manager)
    end
    assert_query_matches(
      graph.find("alice").interesting_managers,
      [
        {id: "bob"},
        {id: "daniel"}
      ]
    )
  end

  def test_allows_second_order_queries
    graph = Dagoba::Database.new {
      relationship(:employee_of, inverse: :employer_of)
      add_entry("alice", {title: "CTO", salary: 200_000})
      add_entry("bob", {title: "EM2", salary: 150_000})
      add_entry("charlie", {title: "SE2", salary: 90_000})
      add_entry("daniel", {title: "LSE2", salary: 120_000})
      establish("alice").employer_of("bob")
      establish("bob").employer_of("charlie")
      establish("alice").employer_of("daniel")
      add_query(:middle_managers) { |command| command.employer_of.as(:manager).employer_of.back(:manager) }
      add_query(:titles) { |command| command.select_attributes(:title) }
    }
    assert_query_matches(graph.find("alice").middle_managers.titles, [{title: "EM2"}])
  end
end
|
# frozen_string_literal: true

# Specs for the top-level ActiveWebhook API: version/identifier constants,
# the .configure DSL (including validation of enumerated options), and the
# .trigger entry point under various configurations.
RSpec.describe ActiveWebhook, config: :defaults do
  it "has a version number" do
    expect(ActiveWebhook::VERSION).not_to be nil
  end

  it "has an identifier" do
    expect(ActiveWebhook::IDENTIFIER).to eq "Active Webhook v#{ActiveWebhook::VERSION}"
  end

  describe ".configure" do
    # Table of configuration options. Each entry drives two generated
    # example groups below: one asserting a valid value is applied, one
    # asserting an invalid value (:xxx) raises InvalidOptionError when the
    # option is enumerated. `targets` is the nested config path.
    # NOTE: ":syncronous" presumably mirrors the library's own (misspelled)
    # adapter key — verify against the gem before "fixing" it here.
    [
      {
        option: :origin
      },
      {
        option: :enabled,
        default: true,
        value: false,
        valid_values: ["true", "false"]
      },
      {
        targets: :delivery,
        option: :adapter,
        default: :net_http,
        value: :faraday,
        valid_values: [":net_http", ":faraday"]
      },
      {
        targets: :formatting,
        option: :user_agent,
        default: ActiveWebhook::IDENTIFIER
      },
      {
        targets: :formatting,
        option: :adapter,
        default: :json,
        value: :url_encoded,
        valid_values: [":json", ":url_encoded"]
      },
      {
        targets: :formatting,
        option: :custom_header_prefix
      },
      {
        targets: :queueing,
        option: :adapter,
        default: :syncronous,
        value: :sidekiq,
        valid_values: [":syncronous", ":sidekiq", ":delayed_job", ":active_job"]
      },
      {
        targets: :queueing,
        option: :format_first,
        default: false,
        value: true,
        valid_values: ["true", "false"]
      },
      {
        targets: :verification,
        option: :adapter,
        default: :unsigned,
        value: :hmac_sha256,
        valid_values: [":unsigned", ":hmac_sha256"]
      }
    ].each do |option:, default: nil, value: nil, valid_values: [], targets: []|
      # NOTE(review): keyword arguments in this block rely on implicit
      # hash-to-keywords conversion (Ruby 2.x behavior) — confirm this still
      # destructures correctly under the Ruby version the suite targets.
      targets = Array.wrap(targets)
      unless valid_values.empty?
        context "with valid value" do
          subject do
            described_class.configure do |config|
              # Walk the nested config path (e.g. config.queueing) before setting.
              target = config
              targets.each { |msg| target = target.send(msg) }
              target.send("#{option}=", value)
            end
          end
          it do
            target = described_class.configuration
            targets.each { |msg| target = target.send(msg) }
            expect { subject }.to change(target, option).from(default).to(value)
          end
        end
      end

      context "with :xxx" do
        subject do
          described_class.configure do |config|
            target = config
            targets.each { |msg| target = target.send(msg) }
            target.send("#{option}=", :xxx)
          end
        end
        it do
          if valid_values.empty?
            # Free-form options accept any value, including :xxx.
            target = described_class.configuration
            targets.each { |msg| target = target.send(msg) }
            expect { subject }.to change(target, option).from(default).to(:xxx)
          else
            # Enumerated options must reject values outside valid_values.
            expect { subject }.to raise_error(
              ActiveWebhook::Configuration::InvalidOptionError,
              "Invalid option for #{(targets + [option]).join('.')}: xxx. Must be one of [#{valid_values.join(', ')}]."
            )
          end
        end
      end
    end
  end

  describe ".trigger", with_time: :frozen do
    let(:key) { "abcdef" }
    # Two topics share the same key; one is used to test version filtering.
    let(:topic) { create :topic, key: key }
    let(:versioned_topic) { create :topic, key: key }
    let(:subscription) { create :subscription, topic: topic }
    let(:other_subscription) { create :subscription, topic: topic }
    let(:versioned_subscription) { create :subscription, topic: versioned_topic }
    # Subscription on an unrelated topic; must never be delivered to.
    let(:ignored_subscription) { create :subscription }

    it "should succeed with default configuration" do
      expect_subscription_requests(
        subscription,
        other_subscription,
        versioned_subscription,
        skip: ignored_subscription
      ) { expect(described_class.trigger(key: key)).to be_truthy }
    end

    context "with version" do
      it do
        # Passing a version restricts delivery to subscriptions of that topic version.
        expect_subscription_requests(
          subscription,
          other_subscription,
          skip: [
            ignored_subscription,
            versioned_subscription
          ]
        ) { expect(described_class.trigger(key: key, version: topic.version)).to be_truthy }
      end
    end

    context "with disabled" do
      before do
        ActiveWebhook.configure { |config| config.enabled = false }
      end
      it do
        # No request expectations set: when disabled, trigger must not deliver.
        subscription
        expect(described_class.trigger(key: key, version: topic.version)).to be_truthy
      end
    end

    context "with origin" do
      before do
        ActiveWebhook.configure { |config| config.origin = "http://my-custom-domain.com" }
      end
      it do
        requests = subscription_requests(
          subscription,
          other_subscription,
          versioned_subscription,
          skip: ignored_subscription
        ) do |_subscription, _url, params, _requests|
          params[:headers]["Origin"] = "http://my-custom-domain.com"
        end
        expect_requests(requests) { expect(ActiveWebhook.trigger(key: key)).to be_truthy }
      end
    end

    context "with log_level == debug", log_level: :debug do
      before do
        ActiveWebhook.configure do |config|
          # make payload as complex as possible
          config.origin = "http://my-custom-domain.com"
          config.formatting.custom_header_prefix = "XXX"
          config.queueing.adapter = :sidekiq
          config.verification.adapter = :hmac_sha256
        end
        # Stub actual HTTP delivery; only the debug logging path is exercised.
        allow_any_instance_of(ActiveWebhook::Delivery::NetHTTPAdapter).to receive(:deliver!) do |i|
          instance_double("Response", :code => 200)
        end
      end
      it "should dump payloads" do
        ActiveWebhook.trigger(key: subscription.topic.key)
        Sidekiq::Worker.drain_all
      end
    end
  end
end
|
<filename>server/routes/rental.js
const express = require("express");
const multer = require("multer");
const { RentalInfo, RentalApply, User } = require("../models");
const { isLoggedIn, isUnionManager } = require("./middlewares");
const router = express.Router();
const upload = multer();
//--------사용자----------
// Read
// 개인 신청 내역 조회 (상세)
// itemId에는 대여 품목의 id가 들어갑니다.
router.get(
"/my-application/read/:itemId",
isLoggedIn,
async (req, res, next) => {
try {
const myRental = await RentaApply.findOne({
where: { id: req.params.itemId, user_id: req.user.id },
});
res.json(myRental);
} catch (error) {
console.error(error);
res.send(error);
}
}
);
// 개인 신청 내역 전체 조회
router.get(
"/my-application/readAll",
isLoggedIn,
async (req, res, next) => {
try {
const myRental = await RentalApply.findAll({
where: { user_id: req.user.id },
});
res.json(myRental);
} catch (error) {
console.error(error);
res.send(error);
}
}
);
// Read
// 대여 공간 및 물품 상세 조회 (게시판에서)
// itemId에는 대여 품목의 id가 들어갑니다.
router.get(
"/read/:itemId",
isLoggedIn,
async (req, res, next) => {
try {
const rental = await RentalInfo.findOne({
where: { id: req.params.itemId },
});
res.json(rental);
} catch (error) {
console.error(error);
res.send(error);
}
}
);
// 대여 공간 및 물품 전체 조회 (게시판에서)
// itemId에는 대여 품목의 id가 들어갑니다.
router.get(
"/readAll",
async (req, res, next) => {
try {
const rental = await RentalInfo.findAll({
attributes: [
"room_name",
"rental_state",
"room_img",
],
order: [["room_name", "DESC"]],
});
res.json(rental);
} catch (error) {
console.error(error);
res.send(error);
}
}
);
//--------사용자(대여 신청)----------
// Create
// 신청
// 날짜 먼저 선택 후 대여인지, 대여 신청페이지에서 날짜 선택인지..
// 름 선택 후 신청인지, 신청페이지에서 룸 선택인지
router.post(
// itemId에는 대여 품목의 id가 들어갑니다.
"/application/:itemId",
isLoggedIn,
upload.none(),
async (req, res, next) => {
try {
const rentalItem = await RentalInfo.findOne({
where: { id: req.params.itemId },
});
if (rentalItem.rental_state != 1) {
const rental = await RentalApply.create({
room_name: rentalItem.room_name,
rental_date: req.body.rental_date,
start: req.body.start,
end: req.body.end,
rental_time: req.body.rental_time, // 수정할것
rep_member_name: req.body.rep_member_name,
member_count: req.body.member_count,
apply_state: 0,
});
const user = await User.findByPk(req.user.id);
await user.addRentalApply(rental);
console.log("대여 신청(사용자)");
res.json(rental);
}
else {
const error = new Error("잔여 수량이 부족하여 신청할 수 없습니다.");
res.send(error);
}
} catch (error) {
console.error(error);
res.send(error);
}
}
);
// Update
// 신청 수정
// itemId에는 대여 품목의 id가 들어갑니다.
router.post(
"/application/update/:itemId",
isLoggedIn,
// apply_state -> 0,1,2에 따른 구분 설정 추가 필요
upload.none(),
async (req, res, next) => {
try {
const rentalItem = await RentalInfo.findOne({
where: { id: req.params.itemId },
});
const rental = await RentalApply.update({
rental_date: req.body.rental_date,
start: req.body.start,
end: req.body.end,
rental_time: req.body.rental_time,
rep_member_name: req.body.rep_member_name,
member_count: req.body.member_count,
},
{
where: { id: rentalItem.id }
});
console.log("신청 수정");
res.json(rental);
} catch (error) {
console.error(error);
}
}
);
// Delete
// 신청 취소(사용자)
// itemId에는 대여 품목의 id가 들어갑니다.
router.delete(
"/application/delete/:itemId",
isLoggedIn,
async (req, res, next) => {
try {
const rental = await RentalApply.destroy({
where: { id: req.params.itemId },
});
console.log("신청 취소");
res.json(rental);
} catch (err) {
console.error(err);
}
}
);
//--------관리자----------
// 대여서비스 공간 또는 물품 추가(관리자)
// Create
// Create — register a new rental space/item (admin).
router.post(
  "/create",
  isLoggedIn,
  upload.none(),
  async (req, res, next) => {
    try {
      const rental = await RentalInfo.create({
        room_name: req.body.room_name,
        rental_state: req.body.rental_state,
        room_img: req.body.room_img,
      });
      console.log("대여 공간/물품 등록");
      res.json(rental);
    } catch (error) {
      console.error(error);
      // `res.send(error)` serialized the Error to "{}" with HTTP 200;
      // answer 500 with the message instead.
      res.status(500).json({ message: error.message });
    }
  }
);
// Update
// 대여서비스 공간 또는 물품 수정(관리자)
// itemId에는 대여 품목의 id가 들어갑니다.
// Update — edit a rental space/item (admin).
// :itemId is the id of the rental item being edited.
router.post(
  "/update/:itemId",
  isLoggedIn,
  upload.none(),
  async (req, res, next) => {
    try {
      const rental = await RentalInfo.update(
        {
          room_name: req.body.room_name,
          rental_state: req.body.rental_state,
          room_img: req.body.room_img,
        },
        {
          // Bug fix: the id arrives as a route parameter, not in the body —
          // `req.body.itemId` was undefined, so the update matched nothing.
          where: { id: req.params.itemId },
        }
      );
      console.log("대여 공간/물품 수정");
      res.json(rental);
    } catch (error) {
      console.error(error);
      // The original catch sent no response, leaving the request hanging.
      res.status(500).json({ message: error.message });
    }
  }
);
// Delete
// 대여서비스 공간 또는 물품 삭제(관리자)
// itemId에는 대여 품목의 id가 들어갑니다.
// Delete — remove a rental space/item (admin).
// :itemId is the id of the rental item to delete.
router.delete(
  "/delete/:itemId",
  isLoggedIn,
  async (req, res, next) => {
    try {
      const rental = await RentalInfo.destroy({
        where: { id: req.params.itemId },
      });
      console.log("대여 공간/물품 삭제");
      res.json(rental);
    } catch (err) {
      console.error(err);
      // The original catch sent no response, leaving the request hanging.
      res.status(500).json({ message: err.message });
    }
  }
);

module.exports = router;
|
package com.it.zzb.niceweibo.ui.profile;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.it.zzb.niceweibo.R;
import com.it.zzb.niceweibo.activity.LoginActivity;
import com.it.zzb.niceweibo.api.StatusesAPI;
import com.it.zzb.niceweibo.api.UsersAPI;
import com.it.zzb.niceweibo.bean.Status;
import com.it.zzb.niceweibo.bean.StatusList;
import com.it.zzb.niceweibo.bean.User;
import com.it.zzb.niceweibo.constant.AccessTokenKeeper;
import com.it.zzb.niceweibo.constant.Constants;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.sina.weibo.sdk.auth.Oauth2AccessToken;
import com.sina.weibo.sdk.exception.WeiboException;
import com.sina.weibo.sdk.net.RequestListener;
/**
 * Profile screen for a Weibo user: shows the user's header information
 * (avatar, display name, description, status/friend/follower counts) and
 * the user's recent statuses in a RecyclerView.
 *
 * Created by zzb on 2017/4/16.
 */
public class UserActivity2 extends AppCompatActivity {
    private RecyclerView profile_list;
    private Oauth2AccessToken mAccessToken;
    private StatusesAPI mStatusApi;
    private UsersAPI userApi; // NOTE(review): never assigned in this class
    private ImageView icon_image; // avatar
    private TextView name; // display name
    private TextView description; // bio text
    private TextView weibo; // status count label
    private TextView friend; // following count label
    private TextView follower; // follower count label
    private LinearLayout ll_profile_friends; // NOTE(review): never assigned in this class
    private LinearLayout ll_profile_followers; // NOTE(review): never assigned in this class
    private User user; // the profiled user, passed in via the launching Intent
    private Status status; // NOTE(review): never assigned in this class
    private Context mContext;
    private ImageLoader imageLoader = ImageLoader.getInstance();
    // NOTE(review): an anonymous Handler holds an implicit reference to the
    // Activity; pending messages can leak the Activity after it is destroyed.
    public Handler mHandler;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.fragment_profile);
        initView();
    }

    /**
     * Binds all views, creates the header-update Handler, loads the profile
     * data and installs click listeners for the friends/followers counters.
     */
    public void initView() {
        profile_list = (RecyclerView) findViewById(R.id.profile_weibo_list);
        profile_list.setLayoutManager(new LinearLayoutManager(this));
        icon_image = (ImageView) findViewById(R.id.icon_image);
        name = (TextView) findViewById(R.id.name);
        description = (TextView) findViewById(R.id.description);
        weibo = (TextView) findViewById(R.id.profile_weibo);
        friend = (TextView) findViewById(R.id.profile_friends);
        follower = (TextView) findViewById(R.id.profile_follower);
        mHandler = new Handler()
        {
            public void handleMessage(Message msg)
            {
                switch (msg.what)
                {
                    case 1:
                        // Populate the header from `user` (set in loadData()).
                        name.setText(user.screen_name);
                        imageLoader.displayImage(user.avatar_hd, icon_image);
                        description.setText(user.description);
                        weibo.setText("微博 :" + user.statuses_count);
                        friend.setText("关注 :" + user.friends_count);
                        follower.setText("粉丝 :" + user.followers_count);
                        break;
                    default:
                        break;
                }
                super.handleMessage(msg);
            }
        };
        loadData();
        // NOTE(review): this thread performs no background work — it only
        // posts message 1 to trigger the header update. Since `user` is read
        // synchronously from the Intent in loadData() above, a direct
        // mHandler.sendEmptyMessage(1) would be equivalent.
        Thread thread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                Message message = new Message();
                message.what = 1;
                mHandler.sendMessage(message);
            }
        });
        thread.start();
        // Tapping the "following" counter opens the friends list.
        friend.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(UserActivity2.this, FriendsActivity.class);
                startActivity(intent);
            }
        });
        // Tapping the "followers" counter opens the follower list.
        follower.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(UserActivity2.this, FollowerActivity.class);
                startActivity(intent);
            }
        });
    }

    /**
     * Reads the previously saved OAuth token and requests the profiled
     * user's timeline; if the token is missing/expired, redirects to login.
     */
    private void loadData() {
        // Read the previously saved access token.
        mAccessToken = AccessTokenKeeper.readAccessToken(this);
        mStatusApi = new StatusesAPI(this, Constants.APP_KEY, mAccessToken);
        user = (User) getIntent().getSerializableExtra("user");
        if (mAccessToken != null && mAccessToken.isSessionValid()) {
            String screen_name = user.screen_name;
            mStatusApi.userTimeline(screen_name, 0, 0, 30, 1, false, 0, false, mListener);
        } else {
            Toast.makeText(this, "token不存在,请重新授权", Toast.LENGTH_LONG).show();
            Intent intent = new Intent(this, LoginActivity.class);
            startActivity(intent);
        }
    }

    // Handles the asynchronous timeline response from the Weibo SDK.
    private RequestListener mListener = new RequestListener() {
        @Override
        public void onComplete(final String response) {
            if (!TextUtils.isEmpty(response)) {
                if (response.startsWith("{\"statuses\"")) {
                    // Parse the JSON payload into a list of statuses.
                    StatusList statuses = StatusList.parse(response);
                    if (statuses != null && statuses.total_number > 0) {
                        ProfileAdapter profileAdpter = new ProfileAdapter(UserActivity2.this, statuses);
                        profile_list.setAdapter(profileAdpter);
                    }
                }
            }
        }

        @Override
        public void onWeiboException(WeiboException e) {
            e.printStackTrace();
        }
    };
}
|
from django.test import TestCase
from django.urls import reverse
class TeamMemberPermissionTestCase(TestCase):
    """Permission tests for the 'new team member' page.

    NOTE(review): several names used below (``fixture``, ``User``,
    ``self.team``, ``self.template``, ``self.admin``, ``self.owner``) are
    not defined or imported in this file — presumably supplied by a base
    class / mixin elsewhere. Confirm before running this module standalone.
    """

    @fixture
    def path(self):
        # URL of the new-team-member page for the fixture team.
        return reverse('sentry-new-team-member', kwargs={'team_slug': self.team.slug})

    def _assertPerm(self, path, template, username):
        # Implement permission assertion logic here
        # Staff users and the team owner should get the page (200 + template);
        # everyone else should be rejected with 403.
        # NOTE(review): the test client is never logged in as `username`, so
        # the GET below is anonymous — verify that is intended.
        user = User.objects.get(username=username)
        response = self.client.get(path)
        if user.is_staff or user == self.team.owner:
            self.assertEqual(response.status_code, 200, f"Expected {username} to have permission to access {path}")
            self.assertTemplateUsed(response, template, f"Expected {username} to see the {template} template")
        else:
            self.assertEqual(response.status_code, 403, f"Expected {username} to be denied access to {path}")

    def test_admin_can_load(self):
        # The staff/admin user must be able to load the page.
        self._assertPerm(self.path, self.template, self.admin.username)

    def test_owner_can_load(self):
        # The team owner must be able to load the page.
        self._assertPerm(self.path, self.template, self.owner.username)
package org.rs2server.util.functional;
import java.util.Objects;
/**
 * An operation that accepts three input arguments and returns no result;
 * the three-argument analogue of {@link java.util.function.BiConsumer}.
 *
 * @param <A> type of the first argument
 * @param <B> type of the second argument
 * @param <C> type of the third argument
 * @author twelve
 */
@FunctionalInterface
public interface TriConsumer<A, B, C> {

    /**
     * Performs this operation on the given arguments.
     */
    void accept(A a, B b, C c);

    /**
     * Returns a composed consumer that performs this operation followed by
     * {@code after}. If either operation throws, the exception propagates to
     * the caller; if this operation throws, {@code after} is not invoked.
     *
     * @param after the operation to perform after this one
     * @return the composed consumer
     * @throws NullPointerException if {@code after} is null
     */
    default TriConsumer<A, B, C> andThen(TriConsumer<? super A, ? super B, ? super C> after) {
        Objects.requireNonNull(after);
        return (first, second, third) -> {
            accept(first, second, third);
            after.accept(first, second, third);
        };
    }
}
|
#!/usr/bin/env bash
# Run the libFuzzer "listen" target with ASan configured to abort on error
# and keep core dumps for post-mortem analysis.
set -euo pipefail

export ASAN_OPTIONS=abort_on_error=1:disable_core=0:unmap_shadow_on_exit=1:disable_coredump=0
ulimit -c unlimited

# Corpus directory (created if missing).
mkdir -p CORPUS_LISTEN
./fuzzer_listen -jobs=36 -timeout=10 -max_len=4086 CORPUS_LISTEN
|
package net.community.chest.net.proto.text.imap4.test;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Collection;
import java.util.Map;
import net.community.chest.mail.address.MessageAddressType;
import net.community.chest.mail.headers.RFCHeaderDefinitions;
import net.community.chest.net.proto.text.NetServerWelcomeLine;
import net.community.chest.net.proto.text.imap4.IMAP4Accessor;
import net.community.chest.net.proto.text.imap4.IMAP4Capabilities;
import net.community.chest.net.proto.text.imap4.IMAP4FastMsgInfo;
import net.community.chest.net.proto.text.imap4.IMAP4FastResponse;
import net.community.chest.net.proto.text.imap4.IMAP4FetchModifier;
import net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler;
import net.community.chest.net.proto.text.imap4.IMAP4FolderFlag;
import net.community.chest.net.proto.text.imap4.IMAP4FolderInfo;
import net.community.chest.net.proto.text.imap4.IMAP4FolderSelectionInfo;
import net.community.chest.net.proto.text.imap4.IMAP4FoldersListInfo;
import net.community.chest.net.proto.text.imap4.IMAP4MessageFlag;
import net.community.chest.net.proto.text.imap4.IMAP4Namespace;
import net.community.chest.net.proto.text.imap4.IMAP4NamespacesInfo;
import net.community.chest.net.proto.text.imap4.IMAP4Protocol;
import net.community.chest.net.proto.text.imap4.IMAP4QuotarootInfo;
import net.community.chest.net.proto.text.imap4.IMAP4ServerIdentityAnalyzer;
import net.community.chest.net.proto.text.imap4.IMAP4Session;
import net.community.chest.net.proto.text.imap4.IMAP4StatusInfo;
import net.community.chest.net.proto.text.imap4.IMAP4TaggedResponse;
import net.community.chest.test.TestBase;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Apr 14, 2008 12:19:33 PM
*/
public class IMAP4Tester extends TestBase {
    /**
     * {@link IMAP4FetchResponseHandler} implementation that pretty-prints
     * every FETCH callback to the supplied stream (if non-null) and counts
     * the number of messages whose processing has started.
     */
    public static class TestIMAP4FetchRspHandler implements IMAP4FetchResponseHandler {
        private final PrintStream _out /* =null */;
        private int _numMsgs /* =0 */;
        protected TestIMAP4FetchRspHandler (PrintStream out)
        {
            _out=out;
        }
        /** @return number of messages whose processing has started so far */
        public int getProcessedMsgs ()
        {
            return _numMsgs;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleMsgResponseState(int, boolean)
         */
        @Override
        public int handleMsgResponseState (int msgSeqNo, boolean fStarting)
        {
            if (_out != null)
                _out.println('\t' + IMAP4Protocol.IMAP4FetchCmd + " " + (fStarting ? "start" : "end") + " " + msgSeqNo);
            if (fStarting)
                _numMsgs++;
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleUID(int, long)
         */
        @Override
        public int handleUID (int msgSeqNo, long msgUID)
        {
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\t" + IMAP4FetchModifier.IMAP4_UID + "=" + msgUID);
            return 0;
        }
        // Current nesting depth of message-part callbacks; used for display indentation.
        private int numIndent=0;
        // Builds the indented display string for a (possibly nested) message part id.
        private String getMsgPartDisplay (String msgPart)
        {
            final int partLen=(null == msgPart) ? 0 : msgPart.length();
            final StringBuilder sb=new StringBuilder(numIndent + 2 * partLen + 2);
            for (int index=0; index < numIndent; index++)
                sb.append('\t');
            for (int index=0; index < partLen; index += 2)
                sb.append('\t');
            if (partLen != 0)
                sb.append(msgPart);
            else
                sb.append("???");
            return sb.toString();
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleMsgPartStage(int, java.lang.String, boolean)
         */
        @Override
        public int handleMsgPartStage (int msgSeqNo, String msgPart, boolean fStarting)
        {
            if (!fStarting)
                numIndent--;
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\t" + "Part=" + getMsgPartDisplay(msgPart) + " " + (fStarting ? "start" : "end"));
            if (fStarting)
                numIndent++;
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleMsgPartHeader(int, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
         */
        @Override
        public int handleMsgPartHeader (int msgSeqNo, String msgPart, String hdrName, String attrName, String attrValue)
        {
            if (_out != null)
            {
                String p1="\t\t" + msgSeqNo + ":\tPart=" + getMsgPartDisplay(msgPart) + " " + hdrName + ' ';
                boolean emptyAttrName=(null == attrName) || (0 == attrName.length());
                String p2=(emptyAttrName ? "" : ('\t' + attrName)), p4=null;
                boolean emptyAttrValue=(null == attrValue) || (0 == attrValue.length());
                if (!emptyAttrValue)
                {
                    // Decode RFC 2047-style encoded header values; only show the
                    // decoded form when it differs from the raw value.
                    final String realValue=RFCHeaderDefinitions.decodeHdrValue(attrValue, true);
                    if (!realValue.equals(attrValue))
                        p4 = realValue;
                }
                String p3="";
                if (emptyAttrName)
                    p3 = emptyAttrValue ? "" : attrValue;
                else if (!emptyAttrValue)
                    p3 = "=\"" + attrValue + '\"';
                if (p4 != null)
                    _out.println(p1 + p2 + p3 + "[" + p4 + "]");
                else
                    _out.println(p1 + p2 + p3);
            }
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleMsgPartAddress(int, java.lang.String, net.community.chest.mail.address.MessageAddressType, java.lang.String, java.lang.String)
         */
        @Override
        public int handleMsgPartAddress (int msgSeqNo, String msgPart, MessageAddressType addrType, String dispName, String addrVal)
        {
            if (_out != null)
            {
                final String msgPartDisp=getMsgPartDisplay(msgPart);
                _out.println("\t\t" + msgSeqNo + ":\tPart=" + msgPartDisp + ":\ttype=" + addrType.toString());
                _out.println("\t\t\t\t\tName: " + ((null == dispName) ? "NONE" : dispName));
                _out.println("\t\t\t\t\tAddress: " + addrVal);
            }
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleFlagsStage(int, boolean)
         */
        @Override
        public int handleFlagsStage (int msgSeqNo, boolean fStarting)
        {
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\t" + IMAP4FetchModifier.IMAP4_FLAGS + " " + (fStarting ? "start" : "end"));
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleFlagValue(int, java.lang.String)
         */
        @Override
        public int handleFlagValue (int msgSeqNo, String flagValue)
        {
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\t\t" + flagValue);
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleInternalDate(int, java.lang.String)
         */
        @Override
        public int handleInternalDate (int msgSeqNo, String dateValue)
        {
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\t" + IMAP4FetchModifier.IMAP4_INTERNALDATE + "=" + dateValue);
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handleMsgPartSize(int, java.lang.String, long)
         */
        @Override
        public int handleMsgPartSize (int msgSeqNo, String msgPart, long partSize)
        {
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\tPart=" + getMsgPartDisplay(msgPart) + " [" + IMAP4FetchModifier.IMAP4_RFC822SIZE + "]=" + partSize);
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handlePartDataStage(int, java.lang.String, boolean)
         */
        @Override
        public int handlePartDataStage (int msgSeqNo, String msgPart, boolean fStarting)
        {
            if (_out != null)
                _out.println("\t\t" + msgSeqNo + ":\t" + "Data Part=" + getMsgPartDisplay(msgPart) + " " + (fStarting ? "start" : "end"));
            return 0;
        }
        /*
         * @see net.community.chest.net.proto.text.imap4.IMAP4FetchResponseHandler#handlePartData(int, java.lang.String, byte[], int, int)
         */
        @Override
        public int handlePartData (int msgSeqNo, String msgPart, byte[] bData, int nOffset, int nLen)
        {
            if (_out != null)
            {
                int ctlsCount=0;
                // run through the data buffer to make sure it is accessible, and count the non-printable characters num
                for (int index=0, offset=nOffset; index < nLen; index++, offset++)
                    if ((bData[offset] < 0x20) || (bData[offset] > 0x7E))
                        ctlsCount++;
                _out.println("\t\t" + msgSeqNo + ":\t\t" + "Data Part=" + getMsgPartDisplay(msgPart) + " len=" + nLen + " (CTLs=" + ctlsCount + ")");
            }
            return 0;
        }
    }
    // Prints a flags collection as "<type> ( flag flag ... )".
    private static final void showFlags (final PrintStream out, final String flagsType, final Collection<? extends IMAP4MessageFlag> flags)
    {
        out.print("\t" + flagsType + " (");
        if ((flags != null) && (flags.size() > 0))
        {
            for (final IMAP4MessageFlag f : flags)
                out.print(" " + f);
        }
        out.println(" )");
    }
    // Lists all folders on the server, printing each folder's hierarchy
    // separator and flags together with the LIST round-trip duration.
    private static final void showAllFolders (final PrintStream out, final IMAP4Accessor sess) throws IOException
    {
        final long listStart=System.currentTimeMillis();
        final IMAP4FoldersListInfo rsp=sess.listAllFolders();
        final long listEnd=System.currentTimeMillis(), listDuration=(listEnd - listStart);
        if (rsp.isOKResponse())
        {
            final Collection<IMAP4FolderInfo> fldrs=rsp.getFolders();
            final int numFldrs=(null == fldrs) ? 0 : fldrs.size();
            out.println(IMAP4Protocol.IMAP4ListCmd);
            if (numFldrs > 0)
            {
                for (final IMAP4FolderInfo f : fldrs)
                {
                    if (f != null)
                    {
                        char[] c={ f.getHierarchySeparator() };
                        out.println("\t" + f.toString() + " sep=" + (('\0' == c[0]) ? "NIL" : String.valueOf(c)));
                    }
                    final Collection<IMAP4FolderFlag> flags=(null == f) ? null : f.getFlags();
                    final int numFlags=(null == flags) ? 0 : flags.size();
                    if (numFlags > 0)
                    {
                        for (final IMAP4FolderFlag ff : flags)
                        {
                            if (ff != null)
                                out.println("\t\t" + ff);
                        }
                    }
                }
            }
            out.println(rsp + " (" + numFldrs + " folders in " + listDuration + " msec.)");
        }
        else
            System.err.println("Failed to LIST (after " + listDuration + " msec.) : " + rsp);
    }
    // Interactive loop over a selected folder: prints a FETCH FAST summary,
    // then repeatedly prompts for a message id and fetches its envelope or
    // body structure until the user quits.
    private static final int testIMAP4FolderAccess (final PrintStream out, final BufferedReader in, final String folder, final IMAP4Accessor sess) throws IOException
    {
        final TestIMAP4FetchRspHandler hndlr=new TestIMAP4FetchRspHandler(out);
        for ( ; ; )
        {
            final long fastStart=System.currentTimeMillis();
            final IMAP4FastResponse rspFast=sess.fetchFastMsgsAllInfo(true);
            final long fastEnd=System.currentTimeMillis(), fastDuration=(fastEnd - fastStart);
            if (rspFast.isOKResponse())
            {
                final Collection<? extends IMAP4FastMsgInfo> msgs=rspFast.getMessages();
                final int numMsgs=(null == msgs) ? 0 : msgs.size();
                out.println("Got[" + folder + "] " + numMsgs + " entries in " + fastDuration + " msec.: " + rspFast);
                if (numMsgs > 0)
                {
                    for (final IMAP4FastMsgInfo mi : msgs)
                    {
                        if (null == mi) // should not happen
                            continue;
                        out.println("\t" + mi.getSeqNo() + ": " + mi);
                    }
                }
            }
            else
                System.err.println("Failed to [" + folder + "] FETCH FAST (after " + fastDuration + " msec.): " + rspFast);
            final String msgId=getval(out, in, "message ID to FETCH (or Quit)");
            if ((null == msgId) || (msgId.length() <= 0))
                continue;
            if (isQuit(msgId)) break;
            final long idValue;
            try
            {
                if ((idValue=Long.parseLong(msgId)) <= 0L)
                    throw new NumberFormatException("ID must be POSITIVE");
            }
            catch(NumberFormatException e)
            {
                System.err.println("Bad/Illegal ID value: " + e.getMessage());
                continue;
            }
            final String ans=getval(out, in, "use UID (y)/[n]/q");
            if (isQuit(ans)) break;
            final boolean useUID=(ans != null) && (ans.length() > 0) && ('y' == Character.toLowerCase(ans.charAt(0)));
            final String part=getval(out, in, "[E]nvelope/(B)ody/(S)tructure/(Q)uit");
            if (isQuit(part)) break;
            final char pChar=((null == part) || (part.length() <= 0)) ? '\0' : Character.toUpperCase(part.charAt(0));
            final long fStart=System.currentTimeMillis();
            try
            {
                final IMAP4TaggedResponse rsp;
                switch(pChar)
                {
                    case '\0' :    // default choice is ENVELOPE
                    case 'E' :
                        rsp = sess.fetchMsgInfo(idValue, useUID, new IMAP4FetchModifier[] { IMAP4FetchModifier.ENVELOPE }, hndlr);
                        break;
                    case 'S' :
                        rsp = sess.fetchMsgInfo(idValue, useUID, new IMAP4FetchModifier[] { IMAP4FetchModifier.BODYSTRUCTURE }, hndlr);
                        break;
                    case 'B' :    // NOTE(review): (B)ody is offered in the prompt but not implemented
                    default :
                        throw new UnsupportedOperationException("Unknown part requested: " + part);
                }
                final long fEnd=System.currentTimeMillis(), fDuration=fEnd - fStart;
                out.println("Got response after " + fDuration + " msec.: " + rsp);
            }
            catch(Exception e)
            {
                final long fEnd=System.currentTimeMillis(), fDuration=fEnd - fStart;
                System.err.println(e.getClass().getName() + " on FETCH data after " + fDuration + " msec.: " + e.getMessage());
            }
        }
        return 0;
    }
    // Interactive loop over an authenticated session: lists folders, lets the
    // user SELECT one, prints its counters/flags, then drills into it.
    private static final int testIMAP4Access (final PrintStream out, final BufferedReader in, final IMAP4Accessor sess) throws IOException
    {
        for ( ; ; )
        {
            showAllFolders(out, sess);
            String ans=getval(out, in, "choose folder (ENTER=" + IMAP4FolderInfo.IMAP4_INBOX + "/(Q)uit)");
            if (isQuit(ans)) break;
            if ((null == ans) || (ans.length() <= 0))
                ans = IMAP4FolderInfo.IMAP4_INBOX;
            final long fStart=System.currentTimeMillis();
            final IMAP4FolderSelectionInfo rsp=sess.select(ans);
            final long fEnd=System.currentTimeMillis(), fDuration=fEnd - fStart;
            if (!rsp.isOKResponse())
            {
                // NOTE(review): on a failed SELECT the loop still falls through
                // and prints/uses the response fields below — confirm intended.
                System.err.println("Failed to select folder=" + ans + " after " + fDuration + " msec.: " + rsp);
            }
            out.println("Selected folder=" + ans + " in " + fDuration + " msec.: " + rsp);
            out.println("\t" + IMAP4FolderSelectionInfo.IMAP4_EXISTS + "=" + rsp.getNumExist());
            out.println("\t" + IMAP4StatusInfo.IMAP4_RECENT + "=" + rsp.getNumRecent());
            // NOTE(review): the UNSEEN label is printed with getNumRecent() —
            // almost certainly a copy/paste slip for an unseen-count accessor;
            // confirm against IMAP4FolderSelectionInfo's API before fixing.
            out.println("\t" + IMAP4StatusInfo.IMAP4_UNSEEN + "=" + rsp.getNumRecent());
            out.println("\t" + IMAP4StatusInfo.IMAP4_UIDNEXT + "=" + rsp.getUIDNext());
            out.println("\t" + IMAP4StatusInfo.IMAP4_UIDVALIDITY + "=" + rsp.getUIDValidity());
            showFlags(out, IMAP4FolderSelectionInfo.IMAP4_PERMANENTFLAGS, rsp.getPrmFlags());
            showFlags(out, IMAP4FetchModifier.IMAP4_FLAGS, rsp.getDynFlags());
            testIMAP4FolderAccess(out, in, ans, sess);
        }
        return 0;
    }
    // Prints a single NAMESPACE entry as "<type>: <prefix> delim=<char>".
    private static final void showNamespace (PrintStream out, String nsType, IMAP4Namespace nsInfo)
    {
        if (out != null)
        {
            final String nsPrefix=(null == nsInfo) ? null : nsInfo.getPrefix();
            final char[] delim={ (null == nsInfo) ? '\0' : nsInfo.getDelimiter() };
            out.println("\t" + nsType + ": "
                + (((null == nsPrefix) || (0 == nsPrefix.length())) ? "NIL" : nsPrefix) + " delim="
                + (('\0' == delim[0]) ? "EOS" : String.valueOf(delim)));
        }
    }
    // Connects to the given host[:port], identifies the server, queries
    // CAPABILITY, authenticates, and (when supported) shows QUOTAROOT and
    // NAMESPACE info before entering the interactive folder loop. Repeats
    // until the user declines to re-run.
    private static final int testIMAP4Access (final PrintStream out, final BufferedReader in,
            final String url, final String user, final String pass)
    {
        final int pp=url.lastIndexOf(':');
        final String host=(pp < 0) ? url : url.substring(0, pp);
        final int port=(pp < 0) ? IMAP4Protocol.IPPORT_IMAP4 : Integer.parseInt(url.substring(pp+1));
        final NetServerWelcomeLine wl=new NetServerWelcomeLine();
        for ( ; ; )
        {
            final String ans=getval(out, in, "(re-)run test ([y]/n)");
            if ((ans != null) && (ans.length() > 0) && (Character.toUpperCase(ans.charAt(0)) != 'Y'))
                break;
            final IMAP4Session sess=new IMAP4Session();
            try
            {
                sess.setReadTimeout(30 * 1000);
                {
                    final long cStart=System.currentTimeMillis();
                    sess.connect(host, port, wl);
                    final long cEnd=System.currentTimeMillis(), cDuration=cEnd - cStart;
                    out.println("Connected to " + host + " on port " + port + " in " + cDuration + " msec.: " + wl);
                }
                {
                    // Try to deduce server type/version from the welcome line.
                    final Map.Entry<String,String> ident=
                        IMAP4ServerIdentityAnalyzer.DEFAULT.getServerIdentity(wl.getLine());
                    if (null == ident)
                        System.err.println("Failed to identify server");
                    else
                        out.println("\tType=" + ident.getKey() + "/Version=" + ident.getValue());
                }
                boolean hasQuota=false, hasNamespace=false;
                {
                    final long capStart=System.currentTimeMillis();
                    final IMAP4Capabilities rsp=sess.capability();
                    final long capEnd=System.currentTimeMillis(), capDuration=(capEnd - capStart);
                    if (rsp.isOKResponse())
                    {
                        final Collection<String> caps=rsp.getCapabilities();
                        final int numCaps=(null == caps) ? 0 : caps.size();
                        out.println("got " + numCaps + " capabilities in " + capDuration + "msec.: " + rsp);
                        if (numCaps > 0)
                        {
                            out.println(IMAP4Protocol.IMAP4CapabilityCmd);
                            for (final String c : caps)
                                out.println("\t" + c);
                        }
                        hasQuota = rsp.hasQuota();
                        hasNamespace = rsp.hasNamespace();
                    }
                    else
                        System.err.println("Failed to get CAPABILITY after " + capDuration + " msec.: " + rsp);
                }
                {
                    final long aStart=System.currentTimeMillis();
                    final IMAP4TaggedResponse rsp=sess.login(user, pass);
                    final long aEnd=System.currentTimeMillis(), aDuration=aEnd - aStart;
                    if (!rsp.isOKResponse())
                    {
                        System.err.println("Authentication failed in " + aDuration + " msec.: " + rsp);
                        continue;
                    }
                    out.println("Authenticated in " + aDuration + " msec.: " + rsp);
                }
                if (hasQuota)
                {
                    final long qtStart=System.currentTimeMillis();
                    final IMAP4QuotarootInfo qtInfo=sess.getquotaroot();
                    final long qtEnd=System.currentTimeMillis(), qtDuration=(qtEnd - qtStart);
                    if (qtInfo.isOKResponse())
                    {
                        out.println("Got response in " + qtDuration + " msec.: " + qtInfo);
                        out.println(IMAP4Protocol.IMAP4GetQuotaRootCmd);
                        out.println("\t" + IMAP4Protocol.IMAP4QuotaStorageRes + ": " + qtInfo.getCurStorageKB() + " out of " + qtInfo.getMaxStorageKB());
                        out.println("\t" + IMAP4Protocol.IMAP4QuotaMessageRes + ": " + qtInfo.getCurMessages() + " out of " + qtInfo.getMaxMessages());
                    }
                    else
                        System.err.println("Failed to get QUOTAROOT (after " + qtDuration + " msec.) : " + qtInfo);
                }
                if (hasNamespace)
                {
                    final long nsStart=System.currentTimeMillis();
                    final IMAP4NamespacesInfo nsInfo=sess.namespace();
                    final long nsEnd=System.currentTimeMillis(), nsDuration=(nsEnd - nsStart);
                    if (nsInfo.isOKResponse())
                    {
                        out.println("Got response in " + nsDuration + " msec.: " + nsInfo);
                        out.println(IMAP4Protocol.IMAP4NamespaceCmd);
                        // NOTE(review): "Pesonal" below is a typo for "Personal"
                        // in the displayed label (runtime string left unchanged here).
                        showNamespace(out, "Pesonal", nsInfo.getPersonal());
                        showNamespace(out, "Shared", nsInfo.getShared());
                        showNamespace(out, "Other", nsInfo.getOther());
                    }
                    else
                        System.err.println("Failed to get NAMESPACE (after " + nsDuration + " msec.) : " + nsInfo);
                }
                testIMAP4Access(out, in, sess);
            }
            catch(IOException ce)
            {
                System.err.println(ce.getClass().getName() + " on handle session: " + ce.getMessage());
            }
            finally
            {
                try
                {
                    sess.close();
                }
                catch(IOException ce)
                {
                    System.err.println(ce.getClass().getName() + " on close session: " + ce.getMessage());
                }
            }
        }
        return 0;
    }
    // arg[0]=server, arg[1]=username, arg[2]=password
    private static final int testIMAP4Access (final PrintStream out, final BufferedReader in, final String[] args)
    {
        final String[] prompts={ "Server", "Username", "Password" },
            tpa=resolveTestParameters(out, in, args, prompts);
        if ((null == tpa) || (tpa.length < prompts.length))
            return (-1);
        return testIMAP4Access(out, in, tpa[0], tpa[1], tpa[2]);
    }
    //////////////////////////////////////////////////////////////////////////
    /** Entry point — prompts for any missing server/username/password args. */
    public static final void main (final String args[])
    {
        final BufferedReader in=getStdin();
        final int nErr=testIMAP4Access(System.out, in, args);
        if (nErr != 0)
            System.err.println("test failed (err=" + nErr + ")");
        else
            System.out.println("OK");
    }
}
|
#!/usr/bin/env bash
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

# Launch Jupyter Notebook, forwarding all CLI arguments. --allow-root permits
# running inside containers where the default user is root.
jupyter notebook "$@" --allow-root
package com.crowdin.client.projectsgroups.model;
import lombok.Data;
import java.util.List;
/**
 * Request payload for creating a project via the Crowdin API
 * (getters/setters/equals/hashCode generated by Lombok's {@code @Data}).
 */
@Data
public class AddProjectRequest {
    private String name;                    // project name
    private String sourceLanguageId;        // source language identifier
    private Long templateId;                // optional project template id
    private Long groupId;                   // optional parent group id
    private List<String> targetLanguageIds; // target language identifiers
    private Long vendorId;                  // optional vendor id
    private Long mtEngineId;                // optional machine-translation engine id
    private String description;             // free-form project description
}
|
import createHiDPICanvas from "../../lib/hidpi-canvas";
/**
 * Builds a HiDPI canvas demo stage that renders a line of text along an
 * elliptical path. Relies on the global `createjs` and `txt` libraries.
 *
 * @returns {createjs.Stage} the stage after its first update
 */
export default function init() {
  const canvas = createHiDPICanvas(500, 100, 2);
  document.body.appendChild(canvas);

  const stage = new createjs.Stage(canvas);
  stage.scaleX = 4;
  stage.scaleY = 4;

  // Text laid out along a closed elliptical bezier path.
  const pathText = new txt.PathText({
    x: -100,
    y: 20,
    text: "It's supercalifragilisticexpialidocious!",
    font: "lobster",
    size: 16,
    valign: txt.VerticalAlign.Center,
    path:
      "M 226 159.333333333333 C 350.816352746667 159.333333333333 452 123.665351484444 452 79.6666666666667 C 452 35.667981848889 350.816352746667 0 226 0 C 101.183647253333 0 0 35.667981848889 0 79.6666666666667 C 0 123.665351484444 101.183647253333 159.333333333333 226 159.333333333333 Z",
    start: 620.5843673934,
    end: 394.750579307083,
    tracking: 0
  });
  stage.addChild(pathText);

  stage.update();
  return stage;
}
|
import random
def generate_random(low=1, high=10):
    """Return a random integer N such that ``low <= N <= high``.

    Backward compatible with the original zero-argument form, which
    always drew from [1, 10]; the bounds are now parameters.

    Args:
        low: Inclusive lower bound (default 1).
        high: Inclusive upper bound (default 10).

    Returns:
        A random int in [low, high].

    Raises:
        ValueError: If ``low > high`` (propagated from ``random.randint``).
    """
    return random.randint(low, high)
<gh_stars>1-10
package yascaif.cli;
import java.util.List;
import yascaif.CA;
/**
 * CLI command that writes an array value to a PV: the first argument is the
 * PV name, the remaining arguments (possibly none) are the element values.
 */
public class SetArray implements Command {
    @Override
    public void process(CA ca, List<String> PVs) {
        if (PVs.isEmpty()) {
            System.out.println("Must provide a PV name and zero or more element values");
            System.exit(1);
        }
        String name = PVs.remove(0);
        // Remaining entries become the array elements, preserving order
        // (replaces the original hand-rolled index-copy loop).
        String[] arr = PVs.toArray(new String[0]);
        ca.write(name, arr);
    }
}
|
#!/bin/sh
set -e

# Image coordinates; the `namespace` env var may override the default owner.
image="${namespace:-minidocks}/caddy"
# One entry per line; an entry may be "version;plugins" (split on ';').
versions="
latest
"
#all;dns,docker,dyndns,hook.service,http.authz,http.awses,http.awslambda,http.cache,http.cgi,http.cors,http.datadog,http.expires,http.filter,http.forwardproxy,http.geoip,http.git,http.gopkg,http.grpc,http.ipfilter,http.jwt,http.locale,http.mailout,http.minify,http.nobots,http.prometheus,http.proxyprotocol,http.ratelimit,http.realip,http.reauth,http.restic,http.s3browser,http.supervisor,http.webdav,net,supervisor,tls.dns.auroradns,tls.dns.azure,tls.dns.cloudflare,tls.dns.cloudxns,tls.dns.digitalocean,tls.dns.dnsimple,tls.dns.dnsmadeeasy,tls.dns.dnspod,tls.dns.duckdns,tls.dns.dyn,tls.dns.exoscale,tls.dns.gandi,tls.dns.gandiv5,tls.dns.godaddy,tls.dns.googlecloud,tls.dns.lightsail,tls.dns.linode,tls.dns.namecheap,tls.dns.namedotcom,tls.dns.ns1,tls.dns.otc,tls.dns.ovh,tls.dns.powerdns,tls.dns.rackspace,tls.dns.rfc2136,tls.dns.route53,tls.dns.vultr

# build <tag> [plugins] — build and tag the image, forwarding the plugin list
# as a build argument. IFS is reset to a space so $docker_opts word-splits.
build() {
    IFS=" "
    docker buildx build $docker_opts --build-arg plugins="$2" -t "$image:$1" "$(dirname $0)"
}

# --versions: list known version tags.
# (no arg):   build every version; IFS=';' splits "version;plugins" pairs.
# <version>:  build the single matching version, or fail with a message.
case "$1" in
    --versions) echo "$versions" | awk 'NF' | cut -d';' -f1;;
    '') echo "$versions" | grep -v "^$" | while read -r version; do IFS=';'; build $version; done;;
    *) args="$(echo "$versions" | grep -E "^$1(;|$)")"; if [ -n "$args" ]; then IFS=';'; build $args; else echo "Version $1 does not exist." >/dev/stderr; exit 1; fi
esac
|
# Generated Cucumber step definitions for the <%= generator.send :table_name %> resource.

# TODO: flesh out the content assertion; currently marked pending.
Then(/^I should see <%= generator.send :table_name %> content$/) do
  pending
end

# Asserts navigation landed on the resource index page.
Then(/^I should be on the <%= generator.send :table_name %> page$/) do
  expect(current_path).to eq(<%= generator.send :table_name %>_path)
end
|
<filename>src/day01/main.js
// Advent of Code - Day 1
import { readFile } from 'fs/promises';
import { part1, part2 } from '.';
// Entry point: read the puzzle input and print both part results.
(async function () {
  try {
    const input = await readFile('src/day01/resources/input.txt', 'utf8');
    console.log("--- Part One ---");
    console.log("Result", part1(input));
    console.log("--- Part Two ---");
    console.log("Result", part2(input));
  } catch (err) {
    console.error(err);
    // Signal failure through the exit code; the original logged the error
    // but still exited 0, which hides failures from scripts/CI.
    process.exitCode = 1;
  }
})();
|
# Status symbols used by the log helpers below.
tick="✓"
cross="✗"

# step_log <message> — print a highlighted section header for a setup step.
step_log() {
  message=$1
  printf "\n\033[90;1m==> \033[0m\033[37;1m%s\033[0m\n" "$message"
}

# add_log <mark> <subject> <message> — print a green (tick) or red (cross)
# status line for an individual item.
add_log() {
  mark=$1
  subject=$2
  message=$3
  if [ "$mark" = "$tick" ]; then
    printf "\033[32;1m%s \033[0m\033[34;1m%s \033[0m\033[90;1m%s\033[0m\n" "$mark" "$subject" "$message"
  else
    printf "\033[31;1m%s \033[0m\033[34;1m%s \033[0m\033[90;1m%s\033[0m\n" "$mark" "$subject" "$message"
  fi
}

step_log "Setup PHP and Composer"
version=$1
export HOMEBREW_NO_INSTALL_CLEANUP=TRUE
# Install the requested PHP version and Composer from the shivammathur tap.
brew tap shivammathur/homebrew-php >/dev/null 2>&1
brew install shivammathur/php/php@"$1" composer >/dev/null 2>&1
brew link --force --overwrite php@"$1" >/dev/null 2>&1
# Locate the loaded php.ini and force a deterministic timezone.
# NOTE(review): the sed pattern "s|.*:s*||" looks like it was meant to be
# "s|.*:\s*||"; it only works because "s*" matches zero characters and the
# trailing "sed s/ //g" strips the leftover spaces — confirm before changing.
ini_file=$(php -d "date.timezone=UTC" --ini | grep "Loaded Configuration" | sed -e "s|.*:s*||" | sed "s/ //g")
echo "date.timezone=UTC" >> "$ini_file"
ext_dir=$(php -i | grep "extension_dir => /usr" | sed -e "s|.*=> s*||")
sudo chmod 777 "$ini_file"
mkdir -p "$(pecl config-get ext_dir)"
# NOTE(review): hirak/prestissimo is abandoned (Composer 2 ships parallel
# downloads); consider removing this step.
composer global require hirak/prestissimo >/dev/null 2>&1
semver=$(php -v | head -n 1 | cut -f 2 -d ' ')
add_log "$tick" "PHP" "Installed PHP $semver"
add_log "$tick" "Composer" "Installed"

# add_extension <extension> <install_command> <prefix>
# Enables an already-built extension (its .so exists in $ext_dir), or, if it
# is missing, tries to install it from PECL; logs the outcome either way.
add_extension() {
  extension=$1
  install_command=$2
  prefix=$3
  if ! php -m | grep -i -q "$extension" && [ -e "$ext_dir/$extension.so" ]; then
    echo "$prefix=$extension" >>"$ini_file" && add_log $tick "$extension" "Enabled"
  elif php -m | grep -i -q "$extension"; then
    add_log "$tick" "$extension" "Enabled"
  elif ! php -m | grep -i -q "$extension"; then
    # Check whether the package exists on PECL before attempting the install.
    exists=$(curl -sL https://pecl.php.net/json.php?package="$extension" -w "%{http_code}" -o /dev/null)
    if [ "$exists" = "200" ]; then
      (
        eval "$install_command" && \
        add_log "$tick" "$extension" "Installed and enabled"
      ) || add_log "$cross" "$extension" "Could not install $extension on PHP $semver"
    else
      if ! php -m | grep -i -q "$extension"; then
        add_log "$cross" "$extension" "Could not find $extension for PHP $semver on PECL"
      fi
    fi
  fi
}
#!/usr/bin/env bash
# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -o errexit
set -o nounset
set -o pipefail
#set -o verbose

root=$(dirname "${BASH_SOURCE[0]}")/..
kustomize="${root}/hack/tools/bin/kustomize"
test_dir_path="test/e2e/data/infrastructure-aws"
test_dir="${root}/${test_dir_path}"
src_dir="${test_dir}/kustomize_sources/"
generated_dir="${test_dir}/generated"
test_templates="${test_dir}/e2e_test_templates"

echo Checking for template sources in "$test_dir"
mkdir -p "${generated_dir}"

# Render every kustomize source directory into a cluster template.
# Helper directories (patches/addons/cni/base) are not kustomizations and
# are filtered out. All expansions are quoted so paths with spaces survive
# word splitting (the original left several of them unquoted).
find "${src_dir}"* -maxdepth 1 -type d \
  -print0 | xargs -0 -I {} basename {} | grep -v -E '(patches|addons|cni|base)' | xargs -t -I {} "${kustomize}" build --load-restrictor LoadRestrictionsNone --reorder none "${src_dir}{}" -o "${generated_dir}/cluster-template-{}.yaml"

## move the default template to the default file expected by clusterctl
mv "${generated_dir}/cluster-template-default.yaml" "${generated_dir}/cluster-template.yaml"
cp -r "${generated_dir}" "${test_templates}"
|
#!/bin/bash
# Download and run the "across" BBR setup script, auto-accepting its prompts.
# NOTE(review): --no-check-certificate disables TLS verification and the
# downloaded script is executed unreviewed with all prompts auto-confirmed —
# consider pinning a release tag and verifying a checksum instead.
wget --no-check-certificate https://github.com/Jacksgong/across/raw/master/bbr.sh
yes | bash bbr.sh
|
#!/bin/bash
echo "Compiling Okcash GUI: Okcash - from source on its latest version"
echo "This could take a minute, enjoy some coffee or water and come back soon..."
date
# Build okcash graphical client
# NOTE(review): assumes the script is invoked from its own directory, two
# levels below the repository root where the qmake project lives — confirm.
cd ../..
# Point the build at the Berkeley DB 4.8 installation (wallet backend).
export BDB_PREFIX="/usr/local/BerkeleyDB.4.8"
export BDB_INCLUDE_PATH="/usr/local/BerkeleyDB.4.8/include"
export BDB_LIB_PATH="/usr/local/BerkeleyDB.4.8/lib"
export CPATH="/usr/local/BerkeleyDB.4.8/include"
export LIBRARY_PATH="/usr/local/BerkeleyDB.4.8/lib"
qmake
make -j4
# Strip debug symbols to shrink the binary.
strip okcash
# Install okcash graphical client in the system
sudo cp okcash /usr/local/bin
# end Client
echo "Done compiling + installing: Okcash GUI > on its latest version for:"
uname -a
echo "Okcash is now installed in your /usr/local/bin directory"
echo "You are now empowered with Okcash!"
echo "enjoy your OK experience"
exit 0
|
<filename>QuienEsQuien/include/pregunta.h
#ifndef _PREGUNTA_H_
#define _PREGUNTA_H_
#include <string>
#include <iostream>
#include <cassert>
using namespace std;
/**
* @brief En cada estructura pregunta se almacena la cadena de la pregunta y el
* número de personajes que aún no han sido eliminados. Si el
* número de personajes es 1, entonces la cadena pregunta
* contiene el nombre del personaje.
*/
class Pregunta{
private:
/**
* @brief Atributo sobre el que se pregunta en este nodo. En el caso de que haya un único
* personaje restante, este campo almacena su nombre.
*/
string atributo;
/**
* @brief Número de personajes que quedan al llegar a esta pregunta.
*/
int num_personajes;
public:
/**
* @brief Constructor por defecto de la pregunta.
*
* Reserva los recursos.
*/
Pregunta();
/**
* @brief Constructor de copias
* @param pregunta Pregunta a copiar
*
* Construye la pregunta duplicando el contenido de @e pregunta en la pregunta
* receptora.
*/
Pregunta(const Pregunta & pregunta);
/**
* @brief Constructor de la pregunta
* @param atributo Atributo sobre el que se pregunta en este nodo. En el caso de que haya un único
* personaje restante, este campo almacena su nombre.
* @param num_personajes Número de personajes que quedan al llegar a esta pregunta.
*/
Pregunta(const string atributo, const int num_personajes);
/**
* @brief Destructor
*
* Libera los recursos ocupados por la pregunta receptora.
*/
~Pregunta();
/**
* @brief Operador de asignación
* @param pregunta Pregunta a copiar
* @return Referencia a la pregunta receptora.
*
* Asigna el valor de la pregunta duplicando el contenido de @e pregunta en la
* pregunta receptora.
*/
Pregunta & operator = (const Pregunta & pregunta);
/**
* @brief Operador de comparación
* @param pregunta Pregunta a comparar
* @return true o false.
*
* Compara dos preguntas comparando el valor de la pregunta y el número de
* personajes asociados.
*/
bool operator == (const Pregunta & pregunta) const;
/**
* @brief Operador de inserción de flujo.
* @param os Stream de salida.
* @param pregunta Pregunta a escribir.
* @return Referencia al stream de salida.
*
* Escribe en la salida la pregunta, escribiendo primero la cadena de la pregunta y
* después el número de personajes que quedan al llegar a esta pregunta.
*/
friend ostream& operator<< (ostream& os, const Pregunta &pregunta);
/**
* @brief Devuelve el atributo sobre el que se pregunta en el nodo.
*
* @pre El nodo debe ser un nodo de pregunta (num_personaje>1).
*/
string obtener_pregunta() const;
/**
* @brief Devuelve el personaje del nodo
*
* @pre El nodo debe ser un nodo de personaje (num_personaje==1).
*/
string obtener_personaje() const;
/**
* @brief Devuelve el número de personajes sin eliminar al llegar a esta pregunta.
*/
int obtener_num_personajes() const;
/**
* @brief Devuelve true si el nodo es de personaje.
*/
bool es_personaje() const;
/**
* @brief Devuelve true si el nodo es de pregunta.
*/
bool es_pregunta() const;
};
#endif |
<reponame>buland-usgs/neic-traveltime<filename>src/main/java/gov/usgs/traveltime/TTSessionLocal.java
package gov.usgs.traveltime;
import gov.usgs.traveltime.tables.MakeTables;
import gov.usgs.traveltime.tables.TablesUtil;
import gov.usgs.traveltime.tables.TauIntegralException;
import java.io.IOException;
import java.util.TreeMap;
/**
* Manage travel-time calculations locally, but in a manner similar to the travel time server pool.
*
* @author <NAME>
*/
public class TTSessionLocal {
  /** Name of the Earth model currently loaded into {@link #allBrn} ("" until first use). */
  String lastModel = "";
  /** Cache of immutable branch reference data, keyed by Earth model name. */
  TreeMap<String, AllBrnRef> modelData;
  /** Table generator used when model tables must be built from scratch. */
  MakeTables make;
  /** Status of the most recent table-generation run. */
  TtStatus status;
  /** Auxiliary travel-time data shared by all models. */
  AuxTtRef auxTT;
  /** Volatile (depth dependent) branch data for the current session. */
  AllBrnVol allBrn;
  // Set up serialization.
  String serName; // Serialized file name for this model
  String[] fileNames; // Raw input file names for this model
  /**
   * Initialize auxiliary data common to all models.
   *
   * @param readStats If true, read the phase statistics
   * @param readEllip If true, read the ellipticity corrections
   * @param readTopo If true, read the topography file
   * @param modelPath If not null, path to model files
   * @param serializedPath If not null, path to serialized files
   * @throws IOException On any read error
   * @throws ClassNotFoundException In input serialization is hosed
   */
  public TTSessionLocal(
      boolean readStats,
      boolean readEllip,
      boolean readTopo,
      String modelPath,
      String serializedPath)
      throws IOException, ClassNotFoundException {
    // Read in data common to all models.
    auxTT = new AuxTtRef(readStats, readEllip, readTopo, modelPath, serializedPath);
  }
  /**
   * Set up a "simple" travel-time session.
   *
   * @param earthModel Earth model name
   * @param sourceDepth Source depth in kilometers
   * @param phases Array of phase use commands
   * @param returnAllPhases If true, provide all phases
   * @param returnBackBranches If true, return all back branches
   * @param tectonic If true, map Pb and Sb onto Pg and Sg
   * @param useRSTT If true, use RSTT crustal phases
   * @throws BadDepthException If the depth is out of range
   * @throws TauIntegralException If the tau integrals fail
   */
  public void newSession(
      String earthModel,
      double sourceDepth,
      String[] phases,
      boolean returnAllPhases,
      boolean returnBackBranches,
      boolean tectonic,
      boolean useRSTT)
      throws BadDepthException, TauIntegralException {
    setModel(earthModel.toLowerCase());
    allBrn.newSession(sourceDepth, phases, returnAllPhases, returnBackBranches, tectonic, useRSTT);
  }
  /**
   * Set up a "complex" travel-time session.
   *
   * @param earthModel Earth model name
   * @param sourceDepth Source depth in kilometers
   * @param phases Array of phase use commands
   * @param srcLat Source geographical latitude in degrees
   * @param srcLong Source longitude in degrees
   * @param returnAllPhases If true, provide all phases
   * @param returnBackBranches If true, return all back branches
   * @param tectonic If true, map Pb and Sb onto Pg and Sg
   * @param useRSTT If true, use RSTT crustal phases
   * @throws BadDepthException If the depth is out of range
   * @throws TauIntegralException If the tau integrals fail
   */
  public void newSession(
      String earthModel,
      double sourceDepth,
      String[] phases,
      double srcLat,
      double srcLong,
      boolean returnAllPhases,
      boolean returnBackBranches,
      boolean tectonic,
      boolean useRSTT)
      throws BadDepthException, TauIntegralException {
    setModel(earthModel.toLowerCase());
    allBrn.newSession(
        srcLat,
        srcLong,
        sourceDepth,
        phases,
        returnAllPhases,
        returnBackBranches,
        tectonic,
        useRSTT);
  }
  /**
   * Get travel times for a "simple" session.
   *
   * @param recElev Station elevation in kilometers
   * @param delta Source receiver distance desired in degrees
   * @return An array list of travel times
   */
  public TTime getTT(double recElev, double delta) {
    return allBrn.getTT(recElev, delta);
  }
  /**
   * Get travel times for a "complex" session.
   *
   * @param recLat Receiver geographic latitude in degrees
   * @param recLong Receiver longitude in degrees
   * @param recElev Station elevation in kilometers
   * @param delta Source receiver distance desired in degrees
   * @param azimuth Receiver azimuth at the source in degrees
   * @return An array list of travel times
   */
  public TTime getTT(double recLat, double recLong, double recElev, double delta, double azimuth) {
    return allBrn.getTT(recLat, recLong, recElev, delta, azimuth);
  }
  /**
   * Get plot data suitable for a travel-time chart.
   *
   * @param earthModel Earth model name
   * @param sourceDepth Source depth in kilometers
   * @param phases Array of phase use commands
   * @param returnAllPhases If true, provide all phases
   * @param returnBackBranches If true, return all back branches
   * @param tectonic If true, map Pb and Sb onto Pg and Sg
   * @param maxDelta Maximum distance in degrees to generate
   * @param maxTime Maximum travel time in seconds to allow
   * @param deltaStep Distance increment in degrees for travel-time plots
   * @return Travel-time plot data
   * @throws BadDepthException If the depth is out of range
   * @throws TauIntegralException If the tau integrals fail
   */
  public TtPlot getPlot(
      String earthModel,
      double sourceDepth,
      String[] phases,
      boolean returnAllPhases,
      boolean returnBackBranches,
      boolean tectonic,
      double maxDelta,
      double maxTime,
      double deltaStep)
      throws BadDepthException, TauIntegralException {
    PlotData plotData;
    setModel(earthModel.toLowerCase());
    plotData = new PlotData(allBrn);
    plotData.makePlot(
        sourceDepth,
        phases,
        returnAllPhases,
        returnBackBranches,
        tectonic,
        maxDelta,
        maxTime,
        deltaStep);
    return plotData.getPlot();
  }
  /**
   * Set up for a new Earth model.
   *
   * <p>Reference data for each model is built at most once per session and cached in
   * {@link #modelData}; only the depth-dependent volatile part is recreated on a model switch.
   *
   * @param earthModel Earth model name
   */
  private void setModel(String earthModel) {
    AllBrnRef allRef;
    ReadTau readTau = null;
    if (!earthModel.equals(lastModel)) {
      lastModel = earthModel;
      // Initialize model storage if necessary.
      if (modelData == null) {
        modelData = new TreeMap<String, AllBrnRef>();
      }
      // See if we know this model.
      allRef = modelData.get(earthModel);
      // If not, set it up.
      if (allRef == null) {
        if (modelChanged(earthModel)) {
          if (TauUtil.useFortranFiles) {
            // Read the tables from the Fortran files.
            try {
              readTau = new ReadTau(earthModel);
              readTau.readHeader(fileNames[0]);
              readTau.readTable(fileNames[1]);
            } catch (IOException e) {
              System.out.println("Unable to read Earth model " + earthModel + ".");
              System.exit(202);
            }
            // Reorganize the reference data.
            try {
              allRef = new AllBrnRef(serName, readTau, auxTT);
            } catch (IOException e) {
              System.out.println(
                  "Unable to write Earth model " + earthModel + " serialization file.");
            }
          } else {
            // Generate the tables.
            TablesUtil.deBugLevel = 1;
            make = new MakeTables(earthModel);
            try {
              status = make.buildModel(fileNames[0], fileNames[1]);
            } catch (Exception e) {
              System.out.println(
                  "Unable to generate Earth model " + earthModel + " (" + status + ").");
              e.printStackTrace();
              System.exit(202);
            }
            // Build the branch reference classes.
            try {
              allRef = make.fillAllBrnRef(serName, auxTT);
            } catch (IOException e) {
              System.out.println(
                  "Unable to write Earth model " + earthModel + " serialization file.");
            }
          }
        } else {
          // If the model input hasn't changed, just serialize the model in.
          try {
            allRef = new AllBrnRef(serName, earthModel, auxTT);
          } catch (ClassNotFoundException | IOException e) {
            System.out.println("Unable to read Earth model " + earthModel + " serialization file.");
            System.exit(202);
          }
        }
        // allRef.dumpHead();
        // allRef.dumpMod('P', true);
        // allRef.dumpMod('S', true);
        // allRef.dumpBrn(true);
        // allRef.dumpBrn("pS", true);
        // allRef.dumpUp('P');
        // allRef.dumpUp('S');
        modelData.put(earthModel, allRef);
      }
      // Set up the (depth dependent) volatile part.
      allBrn = new AllBrnVol(allRef);
      // allBrn.dumpHead();
      // allBrn.dumpBrn("PnPn", false, false, true);
    }
  }
  /**
   * Determine if the input files have changed.
   *
   * <p>Also sets {@link #serName} and {@link #fileNames} for the chosen input flavor (Fortran
   * files vs. generated model) as a side effect.
   *
   * @param earthModel Earth model name
   * @return True if the input files have changed
   */
  private boolean modelChanged(String earthModel) {
    // We need two files in either case.
    fileNames = new String[2];
    if (TauUtil.useFortranFiles) {
      // Names for the Fortran files.
      serName = TauUtil.serialize(earthModel + "_for.ser");
      fileNames[0] = TauUtil.model(earthModel + ".hed");
      fileNames[1] = TauUtil.model(earthModel + ".tbl");
    } else {
      // Names for generating the model.
      serName = TauUtil.serialize(earthModel + "_gen.ser");
      fileNames[0] = TauUtil.model("m" + earthModel + ".mod");
      fileNames[1] = TauUtil.model("phases.txt");
    }
    return FileChanged.isChanged(serName, fileNames);
  }
  /**
   * Get a list of available Earth models.
   *
   * @return A list of available Earth model names
   */
  public String[] getAvailModels() {
    return TauUtil.availableModels();
  }
  /**
   * Get a pointer to the auxiliary travel-time information.
   *
   * @return Auxiliary travel-time data
   */
  public AuxTtRef getAuxTT() {
    return auxTT;
  }
  /** Print phase groups. */
  public void printGroups() {
    auxTT.printGroups();
  }
  /** Print phase statistics. */
  public void printStats() {
    auxTT.printStats();
  }
  /** Print phase flags. */
  public void printFlags() {
    auxTT.printFlags();
  }
  /**
   * Print phase table.
   *
   * @param returnAllPhases If false, only print "useful" phases.
   */
  public void logTable(boolean returnAllPhases) {
    allBrn.logTable(returnAllPhases);
  }
  /**
   * Get the number of phase branches in the current session.
   *
   * @param returnAllPhases If false, count only "useful" phases
   * @return The branch count
   */
  public int getBranchCount(boolean returnAllPhases) {
    return (allBrn.getBranchCount(returnAllPhases));
  }
  /**
   * Print volatile phase branch information.
   *
   * @param full If true, print the detailed branch specification as well
   * @param all If true print even more specifications
   * @param sci if true, print in scientific notation
   * @param returnAllPhases If false, only print "useful" crustal phases
   */
  public void printBranches(boolean full, boolean all, boolean sci, boolean returnAllPhases) {
    allBrn.dumpBrn(full, all, sci, returnAllPhases);
  }
  /**
   * Print volatile phase branches that have at least one caustic.
   *
   * @param full If true, print the detailed branch specification as well
   * @param all If true print even more specifications
   * @param sci if true, print in scientific notation
   * @param returnAllPhases If false, only print "useful" crustal phases
   */
  public void printCaustics(boolean full, boolean all, boolean sci, boolean returnAllPhases) {
    allBrn.dumpCaustics(full, all, sci, returnAllPhases);
  }
  /**
   * Print reference phase branch information.
   *
   * @param full If true, print the detailed branch specification as well
   */
  public void printRefBranches(boolean full) {
    allBrn.ref.dumpBrn(full);
  }
}
|
<reponame>sdsmnc221/nexus-tests-rn
import React from 'react';
import PropTypes from 'prop-types';
import * as icons from './svg';
function Icon(props) {
const IconType = icons[props.type];
if (!IconType) {
console.warn('The icon ', props.type, ' is not available.');
}
return IconType ? <IconType {...props} /> : null;
}
Icon.propTypes = {
type: PropTypes.string.isRequired,
};
export default Icon;
|
# Count the number of words in the sentence (words are separated by a
# single space, matching the original split-on-" " behavior).
def count_words(sentence)
  sentence.split(" ").length
end
# Get the most common word
#
# Returns the word with the highest frequency in +sentence+ (ties broken by
# first occurrence). Uses a single max_by scan instead of sorting the whole
# frequency table, and returns nil for an empty sentence instead of raising
# NoMethodError on nil (which the previous `frequencies.last[0]` did).
def get_most_common_word(sentence)
  frequencies = Hash.new(0)
  sentence.split(" ").each { |word| frequencies[word] += 1 }
  best = frequencies.max_by { |_word, frequency| frequency }
  best && best[0]
end
# main
# Demo: report the word count and most frequent word of a sample sentence.
sentence = "This is an example sentence"
word_count = count_words(sentence)
most_common_word = get_most_common_word(sentence)
puts "The number of words in the sentence is #{word_count}"
puts "The most common word is #{most_common_word}"
package model.player;
import model.objects.*;
import java.util.*;
/**
 * A player in the game world: a display name, a table of named statistics
 * and the contents the player is carrying.
 */
public class Player {
  /** Display name of the player. */
  String name;
  // Named statistics table.
  // NOTE(review): java.util.Dictionary is obsolete and raw-typed here; a
  // Map<String, ?> would be the conventional choice — confirm callers before
  // changing the field type.
  Dictionary stats;
  /** Items currently carried by the player. */
  ArrayList<Content> inventory;
}
|
#!/bin/bash
# Exercise the sign-up endpoint of a locally running API (port 3000).
# The first request reads EMAIL and PASSWORD from the environment (expected
# to succeed); the second sends fixed weak credentials to exercise the
# validation/error path. --include prints response headers for inspection.
curl --include --request POST http://localhost:3000/sign-up \
  --header "Content-Type: application/json" \
  --data "{
    \"credentials\": {
      \"email\": \"$EMAIL\",
      \"password\": \"$PASSWORD\",
      \"password_confirmation\": \"$PASSWORD\"
    }
  }"
curl --include --request POST http://localhost:3000/sign-up \
  --header "Content-Type: application/json" \
  --data '{
    "credentials": {
      "email": "jon",
      "password": "p",
      "password_confirmation": "p"
    }
  }'
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2019-10-04 09:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: makes Display.map_reload_limit optional
    # (nullable in the database and not required in forms).

    dependencies = [
        ('smartpanel', '0013_display_map_reload_limit'),
    ]

    operations = [
        migrations.AlterField(
            model_name='display',
            name='map_reload_limit',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
|
/*!
* jquery-toad v1.0.35
* jQuery TOAD - O velho e querido jQuery (https://e5r.github.io/jquery-toad)
* Copyright (c) <NAME>. All rights reserved.
* Licensed under the Apache-2.0 License. More license information in LICENSE.
*/
(function(global, factory ) { "use strict";
if(typeof module === 'object' && typeof module.exports === 'object' && typeof require === 'function') {
module.exports = function(w) {
/* DOM - Document Object Model é pré-requisito */
if (typeof w !== 'object' || typeof w.document !== 'object') {
throw new Error("jQuery TOAD\'s requires a DOM (Document Object Model)!");
}
var jQuery = require('jquery')(w);
return factory(w, jQuery, 'CommonJS');
}
}
else if(typeof define === 'function' && typeof define.amd === 'object') {
define('jquery-toad', ['jquery'], function(jQuery) {
return factory(global, jQuery, 'AMD');
});
}
else {
/* É necessário definir um valor para __TOAD__ explicitamente.
Esse será o nome do objeto de aplicação disponível em window. */
if (typeof global.__TOAD__ !== 'string') {
throw new Error('You have not set a value for __TOAD__!');
}
global[global.__TOAD__] = factory(global, global.jQuery, 'Browser');
}
})(typeof window !== "undefined" ? window : this, function(window, $, $ml) {
/* jQuery 1.12.4 or newer is a prerequisite */
(function () {
    'use strict';
    if (typeof $ !== 'function') {
        throw new Error('jQuery TOAD\'s requires jQuery!');
    }
    // Split "major.minor.patch" out of $.fn.jquery (e.g. "1.12.4").
    var versionAll = $.fn.jquery.split(' ')[0].split('.'),
        vMajor = versionAll[0],
        vMinor = versionAll[1],
        vPath = versionAll[2];
    // NOTE(review): the parts are strings; the `>`/`==`/`>=` comparisons below
    // rely on implicit numeric coercion — confirm behavior for two-digit
    // version components before changing.
    if (vMajor > 1) return;
    if (vMajor == 1 && vMinor > 12) return;
    if (vMajor == 1 && vMinor == 12 && vPath >= 4) return;
    throw new Error('jQuery TOAD\'s requires jQuery version 1.12.4 or higher!');
})();
// Library root object: exposes the jQuery instance in use plus metadata
// about the build and how the module was loaded (CommonJS/AMD/Browser).
var $toad = {
    '$jq': $,
    '$toad': {
        version: '1.0.35',
        author: '<NAME>',
        license: 'Apache-2.0',
        homepage: 'https://e5r.github.io/jquery-toad',
        moduleLoader: $ml
    }
};
var document = window.document;
// Registered namespace factories, later ordered by their `idx` priority.
var _NAMESPACES_ = [];
var _APP_NAMESPACE_KEY_ = '_app_namespace_';
// Initialize the namespace reserved exclusively for the user's application.
var _APP_ = $toad[_APP_NAMESPACE_KEY_] = {};
// Return the internal namespace object `name`, creating it on first use.
var $require = function (name) {
    if (!$toad[name]) {
        $toad[name] = {};
    }
    return $toad[name];
};
// Queue a namespace factory for later execution; `order` establishes the
// relative initialization priority and `cb` lazily creates the target object.
var $namespace = function (order, name, factory) {
    var entry = {
        idx: order,
        cb: function () {
            if (!$toad[name]) {
                $toad[name] = {};
            }
            factory($toad[name]);
        }
    };
    _NAMESPACES_.push(entry);
};
/**
 * Register application code under a named namespace.
 *
 * @code
{__TOAD__}.namespace('utils', function(exports) {
    var myData = {}
    var myFunction = function(){}
    exports.data = myData
    exports.func = myFunction
})
 */
$toad.namespace = function (name, factory) {
    if (typeof name !== 'string') {
        throw new Error('Invalid namespace.');
    }
    // TODO: Switch to an approach similar to $namespace (deferred factories)
    factory((_APP_[name] = _APP_[name] || {}));
}
/**
 * Import a previously registered namespace.
 *
 * Application members override same-named global members; "@"-prefixed names
 * address the library's own namespaces directly.
 *
 * @code
var utils = {__TOAD__}.require('utils')
utils.func(utils.data)
 */
$toad.require = function (name) {
    if (typeof name !== 'string' || $.trim(name).length < 1) {
        throw new Error('Invalid namespace to import.');
    }
    var required = {},
        // Global namespaces whose members are exported alongside app members.
        exportGlobals = [
            'core',
            'utils'
        ],
        // "@"-namespaces that must never be handed out.
        atIgnore = [
            '__internals__'
        ];
    if (name.charAt(0) === '@') {
        var parts = name.split('@');
        if (parts.length !== 2 || $.inArray(parts[1], atIgnore) >= 0)
            return;
        return $toad['@'][parts[1]];
    }
    // Global objects, except the application namespace
    if ($.inArray(name, exportGlobals) > -1) {
        for (var k in $toad[name]) {
            required[k] = $toad[name][k];
        }
    }
    // Application objects. These override the globals when both exist.
    for (var k in _APP_[name]) {
        required[k] = _APP_[name][k];
    }
    return required;
}
/**
 * Define a global immutable constant (delegates to the internal registry).
 *
 * @code
var MY_CONST = {__TOAD__}.constant('MY_CONST', VALUE_FOR_MY_CONST)
 */
$toad.constant = function (constName, constValue) {
    var internals = $require('@').__internals__;
    return internals.setConstant(constName, constValue);
}
// ========================================================================
// app.js
// ========================================================================
// Framework bootstrap: on DOM ready, instantiates each [data-controller]
// element's controller, wires declarative [data-events] bindings and
// attaches registered GUI components.
$namespace(9, 'core', function (exports) {
    var utils = $require('utils'),
        internals = $require('@').__internals__;
    var CONTROLLER_IDENTIFIER = 'controller',
        CONTROLLER_DATA_IDENTIFIER = 'data-' + CONTROLLER_IDENTIFIER,
        CONTROLLER_SELECTOR = '[' + CONTROLLER_DATA_IDENTIFIER + ']',
        CONTROLLER_ELEMENT_DATA = '$ctrl',
        CONTROLLER_VIEW_FIELD = '__view__',
        CONTROLLER_OPTIONS_FIELD = '$options',
        COMPONENT_SELECTOR_KEY = '$jqSelector',
        COMPONENT_NAME_KEY = '$name',
        BIND_DATA_IDENTIFIER = 'data-events',
        BIND_SELECTOR = '[' + BIND_DATA_IDENTIFIER + ']',
        BIND_EVENT_COLLECTION_SPLITER = ',',
        BIND_EVENT_SPLITER = '=>';
    // Instantiate a controller for every [data-controller] element in the page.
    function _installControllers() {
        $(CONTROLLER_SELECTOR, document).each(function () {
            var el = $(this),
                name = el.attr(CONTROLLER_DATA_IDENTIFIER),
                ctor = internals.getController(name),
                options = {},
                dataset = el.data();
            // Read options from the [data-*] attributes, except [data-controller]
            for (var opt in dataset) {
                if (opt === CONTROLLER_IDENTIFIER)
                    continue;
                options[opt] = dataset[opt];
            }
            var ctrl = new ctor(el, options);
            el.data(CONTROLLER_ELEMENT_DATA, ctrl);
            ctrl[CONTROLLER_VIEW_FIELD] = el;
            ctrl[CONTROLLER_OPTIONS_FIELD] = options;
            _setupEvents(el, ctrl)
            _setupComponents(el, ctrl);
            internals.callLazyTriggers(ctrl);
        });
    }
    // Wire "event => handlerName" pairs declared in [data-events] attributes,
    // binding each named controller method as the jQuery event handler.
    function _setupEvents(ctrlElm, ctrl) {
        $(BIND_SELECTOR, ctrlElm).each(function () {
            var el = $(this),
                binder = el.attr(BIND_DATA_IDENTIFIER);
            if (!utils.isString(binder) || 0 > binder.indexOf(BIND_EVENT_SPLITER))
                return;
            binder = binder.split(BIND_EVENT_COLLECTION_SPLITER);
            if (!utils.isArray(binder) || binder.length < 1)
                return;
            for (var b in binder) {
                var binderExpr = $.trim(binder[b]),
                    bind = binderExpr.split(BIND_EVENT_SPLITER);
                if (!utils.isArray(bind) || bind.length < 2)
                    continue;
                var bEvent = bind[0],
                    bHandler = ctrl[bind[1]];
                if (!utils.isString(bEvent) || !utils.isFunction(bHandler))
                    return;
                el.on(bEvent, bHandler);
                el.data(CONTROLLER_ELEMENT_DATA, ctrl);
            }
        });
    }
    // Attach every registered GUI component found inside the controller view,
    // invoking its jQuery plugin with the controller instance.
    function _setupComponents(ctrlElm, ctrl) {
        var cmpList = internals.listComponents();
        for (var c in cmpList) {
            var cmp = cmpList[c];
            if (!utils.isString(cmp.id)) return;
            if (!utils.isFunction(cmp.component)) return;
            if (!utils.isString(cmp.component[COMPONENT_SELECTOR_KEY])) return;
            if (!utils.isString(cmp.component[COMPONENT_NAME_KEY])) return;
            var jqSelector = cmp.component[COMPONENT_SELECTOR_KEY];
            var jqFn = cmp.component[COMPONENT_NAME_KEY];
            $(jqSelector, ctrlElm)[jqFn](ctrl);
        };
    }
    function _installToad() {
        _installControllers();
    }
    $(document).ready(_installToad);
})
// ========================================================================
// config.js
// ========================================================================
$namespace(1, '@', function (exports) {
var CONFIG = {},
utils = $require('utils');
function _getConfig(key, defaultValue) {
if (!utils.isString(key))
return;
return utils.getObjectItemByPath(CONFIG, key) || defaultValue;
}
function _setConfig(key, newValue) {
if (!utils.isString(key))
return;
return utils.setObjectItemByPath(CONFIG, key, newValue);
}
exports.config = {
get: _getConfig,
set: _setConfig
};
})
// ========================================================================
// constants.js
// ========================================================================
$namespace(1, '@', function (exports) {
    var constants = {},
        internals = exports.__internals__ = exports.__internals__ || {};
    internals.setConstant = _setConstant;
    /**
     * Constants registry
     */
    exports.constants = constants;
    /**
     * Define a global constant in the system
     *
     * @param {string} constName - Constant name
     * @param {any} constValue - Constant value (must be truthy)
     */
    function _setConstant(constName, constValue) {
        if (typeof constName !== 'string')
            throw new Error('Invalid constName "' + constName + '"');
        if (!constValue)
            throw new Error('constValue is required!');
        if (typeof constants[constName] !== 'undefined')
            throw new Error('Constant "' + constName + '" already exists!');
        // When [Object.defineProperty] is not available (e.g. older than IE9)
        // we simply store a plain value, with no immutability protection.
        //
        // NOTE: Although we use [Object.defineProperty] to define the constant,
        // we feature-test [Object.defineProperties] because IE8 implements
        // [Object.defineProperty] for DOM objects only; full support arrived
        // in IE9 together with the [Object.defineProperties] implementation.
        if (typeof Object.defineProperties !== 'function') {
            console.warn('WARNING!', 'Object.defineProperty is not supported!');
            return constants[constName] = constValue;
        }
        return Object.defineProperty(constants, constName, {
            // TODO: Change to [false]; that would break [for(var c in constants)],
            // so some other way to list the constants would then be needed
            enumerable: true,
            configurable: false,
            writable: false,
            value: constValue
        })[constName];
    }
})
// ========================================================================
// controller-component.js
// ========================================================================
$namespace(2, 'core', function (exports) {
    // jQuery plugin: $(el).controller() returns the controller instance the
    // bootstrap stored on the element under the '$ctrl' data key.
    var DATA_KEY = '$ctrl';
    function ControllerComponent() {
        return $(this).data(DATA_KEY);
    }
    $.fn['controller'] = ControllerComponent;
    exports.ControllerComponent = ControllerComponent;
})
// ========================================================================
// plain-object-cloner.js
// ========================================================================
$namespace(1, '@', function (exports) {
    var utils = $require('utils'),
        internals = exports.__internals__ = exports.__internals__ || {};
    internals.PlainObjectCloner = PlainObjectCloner;
    /**
     * Object cloner
     *
     * Deep-copies plain objects and arrays, keeping only boolean, number,
     * string and object members (functions etc. are dropped), and throws on
     * circular references.
     *
     * @param {object} target - Object to clone
     */
    function PlainObjectCloner(target) {
        this.target = target;
        // Stack of the values currently being cloned (ancestor chain);
        // membership here means a circular reference.
        this.cloning = [];
        this.validTypes = [
            (typeof true),
            (typeof 0),
            (typeof ''),
            (typeof {})
        ];
    }
    // True when the value's type is clonable.
    PlainObjectCloner.prototype.isValidProp = function (prop) {
        return this.validTypes.indexOf(typeof prop) >= 0;
    }
    // Clone an array recursively; returns undefined for non-arrays.
    PlainObjectCloner.prototype.cloneArray = function (target) {
        if (!utils.isArray(target))
            return;
        var arr = [];
        for (var p in target) {
            var prop = target[p];
            if (!this.isValidProp(prop))
                continue;
            if (this.cloning.indexOf(prop) >= 0)
                throw new Error('Circular reference detected!');
            // Push before descending, pop (splice) after, so the stack always
            // mirrors the current ancestor chain.
            this.cloning.push(prop);
            if (utils.isArray(prop)) {
                arr.push(this.cloneArray(prop));
            }
            else if (utils.isObject(prop)) {
                arr.push(this.cloneObject(prop));
            } else {
                arr.push(prop);
            }
            var cloningIdx = this.cloning.indexOf(prop);
            this.cloning.splice(cloningIdx, 1);
        }
        return arr;
    }
    // Clone an object (or array) recursively; with no argument, clones the
    // target passed to the constructor. Returns undefined for non-objects.
    PlainObjectCloner.prototype.cloneObject = function () {
        var target = arguments[0] || this.target;
        if (utils.isArray(target)) {
            return this.cloneArray(target);
        }
        if (!utils.isObject(target))
            return;
        var clone = {};
        for (var p in target) {
            var prop = target[p];
            if (!this.isValidProp(prop))
                continue;
            if (this.cloning.indexOf(prop) >= 0)
                throw new Error('Circular reference detected!');
            this.cloning.push(prop);
            if (utils.isArray(prop)) {
                clone[p] = this.cloneArray(prop);
            }
            else if (utils.isObject(prop)) {
                clone[p] = this.cloneObject(prop);
            } else {
                clone[p] = prop;
            }
            var cloningIdx = this.cloning.indexOf(prop);
            this.cloning.splice(cloningIdx, 1);
        }
        return clone;
    }
})
// ========================================================================
// register-component.js
// ========================================================================
$namespace(3, '@', function (exports) {
    var NAME_FIELD = 'name',
        COMPONENT_IDENTIFIER = 'gui',
        CONSTRUCTOR_FIELD = 'ctor',
        EXPORT_NAME_FIELD = '$name',
        EXPORT_SELECTOR_FIELD = '$jqSelector';
    // Registered component plugins, keyed by component name.
    var components = [];
    var internals = exports.__internals__ = exports.__internals__ || {};
    internals.getComponent = _getComponent;
    internals.listComponents = _listComponents;
    /**
     * Register a component
     *
     * Creates a jQuery plugin named "gui-<name>" targeting [data-gui="<name>"]
     * elements; the plugin forwards the controller and the element's [data-*]
     * options to the component constructor.
     *
     * @param {string} name - Component name
     * @param {function} ctor - Component constructor function
     */
    exports.registerComponent = function (name, ctor) {
        var options = ensureOptions({ name: name, ctor: ctor });
        var componentName = options[NAME_FIELD],
            componentJqName = 'gui-{name}'.replace('{name}', componentName),
            selector = '[data-gui="{name}"]'.replace('{name}', componentName);
        if (components[componentName]) {
            throw new Error('Component ' + componentName + ' already registered!');
        }
        var fnCmp = function (ctrl) {
            return this.each(function (_, htmlEl) {
                var dataOptions = {},
                    el = $(htmlEl),
                    dataset = el.data();
                // Read options from the [data-*] attributes, except [data-gui]
                for (var opt in dataset) {
                    if (opt === COMPONENT_IDENTIFIER)
                        continue;
                    dataOptions[opt] = dataset[opt];
                }
                // [apply] instead of [bind] for IE8 compatibility
                return options[CONSTRUCTOR_FIELD].apply(this, [ctrl, dataOptions])
            });
        };
        fnCmp[EXPORT_NAME_FIELD] = componentJqName;
        fnCmp[EXPORT_SELECTOR_FIELD] = selector;
        components[componentName] = fnCmp;
        $.fn[componentJqName] = fnCmp;
        return fnCmp;
    }
    // Validate the {name, ctor} options bag, throwing on bad types.
    function ensureOptions(options) {
        options = options || {};
        if (typeof options[NAME_FIELD] != 'string')
            throw invalidOptionMessage(NAME_FIELD, 'string');
        if (typeof options[CONSTRUCTOR_FIELD] != 'function')
            throw invalidOptionMessage(CONSTRUCTOR_FIELD, 'function');
        return options;
    }
    function invalidOptionMessage(fieldName, fieldType) {
        return 'Invalid @component.{name}. Must be a {type}.'
            .replace('{name}', fieldName)
            .replace('{type}', fieldType);
    }
    // Look up a registered component by name, throwing when missing.
    function _getComponent(componentName) {
        if (typeof componentName !== 'string' || componentName == '') {
            throw new Error('Parameter componentName is required.');
        }
        if (!components[componentName]) {
            // NOTE(review): message says "Controller" but this is a component
            // lookup — looks like a copy/paste slip; confirm before changing.
            throw new Error('Controller ' + componentName + ' not registered!');
        }
        return components[componentName];
    }
    // Return all registered components as {id, component} pairs.
    function _listComponents() {
        var list = []
        for (var c in components)
            list.push({
                id: c,
                component: components[c]
            })
        return list
    }
})
// ========================================================================
// register-controller.js
// ========================================================================
$namespace(3, '@', function (exports) {
var utils = $require('utils');
var NAME_FIELD = 'name',
CONSTRUCTOR_FIELD = 'ctor',
EXPORT_NAME_FIELD = '$name',
CONTROLLER_VIEW_FIELD_PRIVATE = '__view__',
CONTROLLER_VIEW_FIELD = '$view',
CONTROLLER_MODEL_FIELD_PRIVATE = '__model__',
CONTROLLER_MODEL_FIELD = '$model',
CONTROLLER_TRIGGER_FIELD_PRIVATE = '__triggers__',
CONTROLLER_TRIGGER_FIELD = '$onUpdateModel';
var lazyTriggers = [];
var controllers = [];
var internals = exports.__internals__ = exports.__internals__ || {};
internals.getController = _getController;
internals.callLazyTriggers = _callLazyTriggers;
// Registra constantes públicas
internals.setConstant('VIEW_BY_ID', 1);
/**
* Registra um controlador
*
* @param {string} name - Nome do controlador
* @param {function} ctor - Função construtora do controlador
*/
exports.registerController = function (name, ctor) {
var options = ensureOptions({ name: name, ctor: ctor });
var controllerName = options[NAME_FIELD];
if (controllers[controllerName]) {
throw new Error('Controller ' + controllerName + ' already registered!');
}
var fnCtrl = options[CONSTRUCTOR_FIELD];
fnCtrl[EXPORT_NAME_FIELD] = options[NAME_FIELD];
controllers[controllerName] = fnCtrl;
fnCtrl.prototype[CONTROLLER_VIEW_FIELD] = _getViewElement;
fnCtrl.prototype[CONTROLLER_MODEL_FIELD] = _manageModel;
fnCtrl.prototype[CONTROLLER_TRIGGER_FIELD] = _manageTriggers;
return fnCtrl;
}
function ensureOptions(options) {
options = options || {};
if (typeof options[NAME_FIELD] != 'string')
throw invalidOptionMessage(NAME_FIELD, 'string');
if (typeof options[CONSTRUCTOR_FIELD] != 'function')
throw invalidOptionMessage(CONSTRUCTOR_FIELD, 'function');
return options;
}
function invalidOptionMessage(fieldName, fieldType) {
return 'Invalid @controller.{name}. Must be a {type}.'
.replace('{name}', fieldName)
.replace('{type}', fieldType);
}
function _getController(controllerName) {
if (typeof controllerName !== 'string' || controllerName == '') {
throw new Error('Parameter controllerName is required.');
}
if (!controllers[controllerName]) {
throw new Error('Controller ' + controllerName + ' not registered!');
}
return controllers[controllerName];
}
/**
* Retorna uma coleção de elementos dentro do escopo da controller
*
* @param {DOM} elType - Elemento DOM
* @param {string} selector - jQuery selector
*/
function _getViewElement(elType, selector) {
var view = this[CONTROLLER_VIEW_FIELD_PRIVATE],
VIEW_BY_ID = $require('@').constants.VIEW_BY_ID;
if (!(view instanceof $))
return;
if (typeof elType === 'string' && arguments.length === 1)
selector = elType;
else if (typeof selector !== 'string')
throw new Error('Invalid view selector.');
else switch (elType) {
case VIEW_BY_ID:
selector = '[data-id="{id}"]'.replace('{id}', selector);
break;
default:
throw new Error('Invalid view type "' + elType + '".');
}
return $(selector, view);
}
/**
* Gerencia o modelo
*/
function _manageModel() {
var clonerCurrent = new internals.PlainObjectCloner(this[CONTROLLER_MODEL_FIELD_PRIVATE]);
// this.$model(): Get a full model
if (!arguments.length) {
return clonerCurrent.cloneObject();
}
// this.$model({ object }): Set a full model
if (arguments.length === 1
&& utils.isObject(arguments[0])) {
var clonerNew = new internals.PlainObjectCloner(arguments[0]),
newState = clonerNew.cloneObject();
this[CONTROLLER_MODEL_FIELD_PRIVATE] = newState;
_callTriggers(clonerCurrent.cloneObject(), newState, null, this);
return;
}
// this.$model('string'): Get path of model
if (arguments.length === 1
&& utils.isString(arguments[0])) {
var path = $.trim(arguments[0]),
stateFull = clonerCurrent.cloneObject();
if (path.length === 0)
return stateFull;
return utils.getObjectItemByPath(stateFull, path)
}
// this.$model('string', { object }): Get path of model
if (arguments.length === 2
&& utils.isString(arguments[0])
&& utils.isObject(arguments[1])) {
var path = arguments[0],
stateFull = clonerCurrent.cloneObject(),
clonerNew = new internals.PlainObjectCloner(arguments[1]),
newState = clonerNew.cloneObject();
utils.setObjectItemByPath(stateFull, path, newState);
this[CONTROLLER_MODEL_FIELD_PRIVATE] = stateFull;
_callTriggers(clonerCurrent.cloneObject(), stateFull, path, this);
return;
}
throw new Error('Call with invalid parameters for ' + CONTROLLER_MODEL_FIELD + '!');
}
function _attachTrigger(ctrl, path, trigger) {
if (!utils.isArray(ctrl[CONTROLLER_TRIGGER_FIELD_PRIVATE]))
ctrl[CONTROLLER_TRIGGER_FIELD_PRIVATE] = [];
var triggers = ctrl[CONTROLLER_TRIGGER_FIELD_PRIVATE];
for (var t in triggers) {
var trg = triggers[t];
if (trg.path === path && trg.trigger === trigger)
return;
}
triggers.push({ path: path, trigger: trigger });
}
function _manageTriggers() {
if (arguments.length === 1
&& utils.isFunction(arguments[0])) {
return _attachTrigger(this, null, arguments[0]);
}
if (arguments.length === 2
&& utils.isString(arguments[0])
&& utils.isFunction(arguments[1])) {
return _attachTrigger(this, arguments[0], arguments[1]);
}
throw new Error('Call with invalid parameters for ' + CONTROLLER_TRIGGER_FIELD + '!');
}
function _callTriggers(oldState, newState, modelPath, controller) {
if (!controller[CONTROLLER_VIEW_FIELD_PRIVATE]) {
_setLazyTriggers(oldState, newState, modelPath, controller);
return;
}
var eligibleTriggers = [],
path = (modelPath || ''),
pathParts = path === '' ? [] : path.split('.');
for (var idx = pathParts.length - 1; idx >= 0; idx--) {
var pathPartsBegin = pathParts.splice(0, idx + 1);
pathParts = pathPartsBegin.concat(pathParts);
eligibleTriggers.push(pathPartsBegin.join('.'));
}
eligibleTriggers.push('');
var triggers = controller[CONTROLLER_TRIGGER_FIELD_PRIVATE] || [],
triggerFilter = function (prefix) {
return $.grep(triggers, function (item) {
return (item.path || '') === prefix;
});
};
$.each(eligibleTriggers, function (_, itemPath) {
$.each(triggerFilter(itemPath), function (_, tgr) {
if (!utils.isFunction(tgr.trigger))
return;
var _oldState = oldState,
_newState = newState;
if (tgr.path) {
_oldState = utils.getObjectItemByPath(_oldState, tgr.path);
_newState = utils.getObjectItemByPath(_newState, tgr.path);
}
// function(oldState, newState, modelPath, controller) { }
tgr.trigger.call(
null, /* this -> null */
_oldState,
_newState,
modelPath,
controller);
});
});
}
function _setLazyTriggers(oldState, newState, modelPath, controller) {
var triggerIdx = -1,
triggerRecord = {
controller: controller,
trigger: {
oldState: oldState,
newState: newState,
modelPath: modelPath
}
};
$.each(lazyTriggers, function (idx, item) {
if (item.controller === controller)
triggerIdx = idx;
});
if (triggerIdx >= 0)
lazyTriggers[triggerIdx] = triggerRecord;
else
lazyTriggers.push(triggerRecord);
}
function _callLazyTriggers(controller) {
var triggerIdx = -1,
triggerRecord;
$.each(lazyTriggers, function (idx, record) {
if (record.controller === controller) {
triggerIdx = idx;
triggerRecord = record;
}
});
if (triggerRecord) {
try {
_callTriggers(
triggerRecord.trigger.oldState,
triggerRecord.trigger.newState,
triggerRecord.trigger.modelPath,
triggerRecord.controller
);
} catch (_) { }
}
if (triggerIdx >= 0) {
lazyTriggers.splice(triggerIdx, 1);
}
}
})
// ========================================================================
// utils.js
// ========================================================================
$namespace(0, 'utils', function (exports) {
/**
* Extrai o valor da propridade de um objeto por um caminho informado
*
* @param {object} obj - Objeto com a propriedade
* @param {string} path - Caminho da propriedade
*/
exports.getObjectItemByPath = function (obj, path) {
var value = obj,
keys = path.split('.'),
k = 0;
while (value && k < keys.length) {
value = value[keys[k]];
k++;
}
return value;
}
/**
* Altera o valor da propriedade de um objeto por um caminho informado
*
* @param {object} obj - Objeto com a propriedade
* @param {string} path - Caminho da propriedade
* @param {any} newValue - Novo valor da propriedade
*/
exports.setObjectItemByPath = function (obj, path, newValue) {
var value = obj,
keys = path.split('.'),
k = 0;
while (value && k < keys.length) {
if (typeof value[keys[k]] !== 'object')
value[keys[k]] = {};
if (k + 1 !== keys.length)
value = value[keys[k]];
k++;
}
return value[keys[--k]] = newValue;
}
/**
* Verifica se referencia uma string
*
* @param {any} value - Instância a verificar
*/
exports.isString = function (value) {
return typeof value === 'string';
}
/**
* Verifica se referencia uma função
*
* @param {any} value - Instância a verificar
*/
exports.isFunction = function (value) {
return typeof value === 'function';
}
/**
* Verifica se referencia uma indefinição
*
* @param {any} value - Instância a verificar
*/
exports.isUndefined = function (value) {
return typeof value === 'undefined';
}
/**
* Verifica se referencia um objeto
*
* @param {any} value - Instância a verificar
*/
exports.isObject = function (value) {
// http://jsperf.com/isobject4
return value !== null && typeof value === 'object';
}
/**
* Verifica se referencia um número
*
* @param {any} value - Instância a verificar
*/
exports.isNumber = function (value) {
return typeof value === 'number';
}
/**
* Verifica se referencia um array
*
* @param {any} value - Instância a verificar
*/
exports.isArray = function (value) {
return value instanceof Array
|| Object.prototype.toString.call(value) === "[object Array]";
}
/**
* Recupera o titulo do elemento principal (document)
*/
exports.getPageTitle = function (newTitle) {
return $(document).attr('title');
}
/**
* Altera o titulo do elemento principal (document)
*
* @param {string} newTitle - Novo título
*/
exports.setPageTitle = function (title) {
$(document).attr('title', title);
}
})
// Inicializa os namespaces na ordem especificada
_NAMESPACES_.sort(function (a, b) {
return a.idx - b.idx;
});
// Não usamos (map) para compatibilidade com IE8
for (var n in _NAMESPACES_) {
_NAMESPACES_[n].cb();
}
return $toad;
});
|
# ----------------------------------------------------------------------------
#
# Package : Apache Jsvc
# Version : latest (1.2.3)
# Source repo : https://github.com/apache/commons-daemon/tree/master/src/native/unix
# Tested on : rhel_7.6
# Script License: Apache License, Version 2
# Maintainer : Vrushali Inamdar <vrushali.inamdar@ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
#!/bin/bash
# NOTE(review): the shebang above is not on line 1 of the file, so it does not
# select the interpreter — run this script explicitly with bash.
# Leave empty to build the latest commit; set to a tag/branch to pin a version.
export COMMONS_DAEMON_VERSION=""
yum install -y git java-1.8.0-openjdk-devel maven
# set JAVA_HOME
export JAVA_HOME=/usr/lib/jvm/java-1.8.0
git clone https://github.com/apache/commons-daemon.git
cd commons-daemon
if [ "$COMMONS_DAEMON_VERSION" == "" ]
then
	echo "No specific version specified. Using latest ..."
else
	echo "Building the specified version $COMMONS_DAEMON_VERSION"
	git checkout ${COMMONS_DAEMON_VERSION}
fi
wrkdir=`pwd`
# Build commons-daemon JAR file from source code on Power
cd $wrkdir
mvn clean install
# Make sure commons-daemon-*.jar file is generated under target directory
ls target/commons-daemon-*.jar
# Run all the tests
mvn clean verify
# ---- Build Apache Jsvc from source -----
echo "Installing dependencies required for Jsvc ..."
# To build under a UNIX operating system you will need:
# GNU AutoConf (at least version 2.53)
# An ANSI-C compliant compiler (GCC is good)
# GNU Make
# A Java Platform 2 compliant SDK
# NOTE::GNU make is provided by the devtoolset-7-make package and is automatically installed with devtoolset-7-toolchain
# Install the required dependencies
yum install -y gcc autoconf automake devtoolset-7-make devtoolset-7-toolchain
echo "Done"
echo "Building jsvc binary from source .. "
cd $wrkdir/src/native/unix
sh support/buildconf.sh
echo "Built 'configure' program ... "
./configure
# Run make command. NOTE:: Path might change based on the installation path
/opt/rh/devtoolset-7/root/usr/bin/make
echo "Generated the executable file jsvc .. "
echo "Verifying jsvc binary ..."
./jsvc -help
echo "Jsvc built successfully !"
# The generated jsvc binary can be validated by implementing a src/samples/SimpleDaemon
# Refer https://github.com/apache/commons-daemon/tree/master/src/samples/README.txt to build the samples and execute them using 'jsvc' binary
|
<reponame>SirAlb3rt/react-mobx-universal-starter<filename>webpack/webpack.config.client.babel.js<gh_stars>1-10
// Client-side build entry: produces the client webpack configuration by
// combining the shared base config with the universal-webpack settings.
import { client } from 'universal-webpack/config';
import settings from './universal-webpack-settings';
import configuration from './webpack.config.babel';
export default client(configuration, settings);
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "double.h"
#include "mesh_qc.h"
/* Computes the order-1 elasticity cbd* operator for mesh m, prints its value
   array to `out` in "--raw" format, and releases the operator's shared parts.
   m_bd_1: boundary operator; m_inner_1 / m_inner_0: inner products on 1- and
   0-(co)chains; lambda, mu: presumably the Lame parameters — confirm against
   mesh_qc_elasticity_cbd_star_1's contract. */
static void mesh_qc_elasticity_cbd_star_1_fprint_raw(
  FILE * out, const mesh_qc * m, const matrix_sparse * m_bd_1,
  const double * m_inner_1, const double * m_inner_0, double lambda, double mu)
{
  int m_elasticity_cbd_star_1_nonzero_max;
  matrix_sparse * m_elasticity_cbd_star_1;
  m_elasticity_cbd_star_1 =
    mesh_qc_elasticity_cbd_star_1(m, m_bd_1, m_inner_1, m_inner_0, lambda, mu);
  /* NULL pointer check */
  /* Number of stored nonzeros = end of the column-offset array. */
  m_elasticity_cbd_star_1_nonzero_max
    = m_elasticity_cbd_star_1->cols_total[m_elasticity_cbd_star_1->cols];
  double_array_fprint(out, m_elasticity_cbd_star_1_nonzero_max,
    m_elasticity_cbd_star_1->values, "--raw");
  matrix_sparse_free_shared(m_elasticity_cbd_star_1);
}
/* Reads a quasi-cubical mesh, its boundary operators and inner products from
   stdin ("--raw" format), then prints the raw values of the order-1
   elasticity cbd* operator for the two parameters given on the command line.
   usage: <prog> lambda mu  < mesh_data */
int main(int argc, char * argv[])
{
  int m_dim;
  int * m_cn;
  mesh_qc * m;
  matrix_sparse ** m_bd;
  double lambda, mu;
  double ** m_inner;
  /* Guard against missing arguments: atof(argv[1]) with argc < 2 is
     undefined behavior. */
  if (argc < 3)
  {
    fprintf(stderr, "usage: %s lambda mu\n", argv[0]);
    return 1;
  }
  lambda = atof(argv[1]);
  mu = atof(argv[2]);
  m = mesh_fscan(stdin, "--raw");
  /* NULL pointer check */
  m_dim = m->dim;
  m_cn = m->cn;
  m_bd = mesh_fscan_bd(stdin, m);
  /* NULL pointer check */
  m_inner = double_array2_fscan(stdin, m_dim + 1, m_cn, "--raw");
  /* NULL pointer check */
  mesh_qc_elasticity_cbd_star_1_fprint_raw(
    stdout, m, m_bd[0], m_inner[1], m_inner[0], lambda, mu);
  /* NULL pointer check */
  double_array2_free(m_inner, m_dim + 1);
  matrix_sparse_array_free(m_bd, m_dim);
  mesh_free(m);
  return 0;
}
|
#! /bin/bash
#
# Copy the files from the source and build areas to create a UPS product.
#
# This script unconditionally deletes previously existing installs of the
# same product+version+qualifiers: use with care.
# NOTE(review): no rm/delete command is visible in this portion of the
# script — confirm whether the warning above is stale.
#
export COMPILER_CODE=e19
export DEBUG_LEVEL=prof
export PACKAGE_NAME=offline
export PACKAGE_SOURCE=${MU2E_BASE_RELEASE}
export PACKAGE_VERSION=v09_00_01
# Check that the installation directoy has been defined.
if [ "${PRODUCTS_INSTALL}" = '' ];then
  echo "The environment variable PRODUCTS_INSTALL is not set."
  echo "You must define where to install the products before sourcing this script."
  # return (not exit): this file is intended to be sourced.
  return 1
fi
# Learn if the extra products needed for the trigger are active.
# Use mu2e_artdaq_core as a proxy for the ensemble.
if [ "`ups active | grep mu2e_artdaq_core`" != "" ]; then
  haveTrigger=".trig"
else
  haveTrigger=""
fi
# There are two representations of operating system UPS flavor:
#  old style, for example: Linux64bit+2.6-2.12_e7
#  new style, for example: slf6.x86_64
# We need them both.
old_flavour=`ups flavor`
new_flavour=`get-directory-name subdir`
# Build the names of the directories into which we will write things
fq=${new_flavour}.${COMPILER_CODE}.${MU2E_ART_SQUALIFIER}${haveTrigger}.${DEBUG_LEVEL}
topdir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}
proddir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}/${PACKAGE_VERSION}
verdir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}/${PACKAGE_VERSION}.version
fqdir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}/${PACKAGE_VERSION}/${fq}
incdir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}/${PACKAGE_VERSION}/include
cfgdir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}/${PACKAGE_VERSION}/config
upsdir=${PRODUCTS_INSTALL}/${PACKAGE_NAME}/${PACKAGE_VERSION}/ups
# Make directories, if needed.
if ! [ -e ${topdir} ];then
  mkdir ${topdir}
fi
if ! [ -e ${proddir} ];then
  mkdir ${proddir}
fi
if ! [ -e ${verdir} ];then
  mkdir ${verdir}
fi
if ! [ -e ${fqdir} ];then
  mkdir ${fqdir}
fi
if ! [ -e ${cfgdir} ];then
  mkdir ${cfgdir}
fi
if ! [ -e ${incdir} ];then
  mkdir ${incdir}
fi
if ! [ -e ${upsdir} ];then
  mkdir ${upsdir}
fi
# Copy the required parts of the source directory to the installation area:
# Header files:
rsync -ar --exclude-from  ${PACKAGE_SOURCE}/ups_install/tar_exclude_for_include.txt \
      ${PACKAGE_SOURCE} ${proddir}/include
# UPS table file
${PACKAGE_SOURCE}/ups_install/installTableFile.sh ${upsdir}/${PACKAGE_NAME}.table
# Configuration files ( .fcl, .txt and all files that will go into databases).
rsync -ar --exclude-from  ${PACKAGE_SOURCE}/ups_install/tar_exclude_for_config.txt \
      ${PACKAGE_SOURCE} ${cfgdir}
# Libaries and binaries
rsync -ar lib ${fqdir}
rsync -ar bin ${fqdir}
# A copy of the full source
rsync -ar --exclude-from  ${PACKAGE_SOURCE}/ups_install/tar_exclude_for_source.txt \
      ${PACKAGE_SOURCE} ${proddir}/source
# Create the ups fq files.
${PACKAGE_SOURCE}/ups_install/installFQFile.sh \
    ${verdir}/${old_flavour}_${COMPILER_CODE}_${MU2E_ART_SQUALIFIER} \
    ${COMPILER_CODE}:${MU2E_ART_SQUALIFIER}
# Repeat for the trig qualified fq files.
${PACKAGE_SOURCE}/ups_install/installFQFile.sh \
    ${verdir}/${old_flavour}_${COMPILER_CODE}_${MU2E_ART_SQUALIFIER}_trig \
    ${COMPILER_CODE}:${MU2E_ART_SQUALIFIER}:trig
# Clean up: this script is sourced, so unset the working variables to avoid
# polluting the caller's environment.
unset old_flavour
unset new_flavour
unset fq
unset topdir
unset proddir
unset verdir
unset fqdir
unset incdir
unset upsdir
|
#!/bin/bash
set -e
# Docker entrypoint: when launching the daemon, prepare the data directory
# and a default config, then drop privileges to the tokenpay user.
if [[ "$1" == "tokenpayd" ]]; then
	mkdir -p "$TOKENPAY_DATA"
	# Only write a default config when none exists (or it is empty).
	if [[ ! -s "$TOKENPAY_DATA/tokenpay.conf" ]]; then
		cat <<-EOF > "$TOKENPAY_DATA/tokenpay.conf"
		printtoconsole=1
		rpcallowip=*
		txindex=1
		rpcpassword=${TOKENPAY_RPC_PASSWORD:-password}
		rpcuser=${TOKENPAY_RPC_USER:-tokenpay}
		EOF
		chown tokenpay:tokenpay "$TOKENPAY_DATA/tokenpay.conf"
	fi
	# ensure correct ownership and linking of data directory
	# we do not update group ownership here, in case users want to mount
	# a host directory and still retain access to it
	chown -R tokenpay "$TOKENPAY_DATA"
	ln -sfn "$TOKENPAY_DATA" /home/tokenpay/.tokenpay
	chown -h tokenpay:tokenpay /home/tokenpay/.tokenpay
	# Re-exec as the unprivileged user.
	exec gosu tokenpay "$@"
fi
# Any other command runs as-is.
exec "$@"
|
# Colour configuration for BSD/macOS `ls`.
export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true
# Load custom functions shipped in $ZSH/functions.
fpath=($ZSH/functions $fpath)
autoload -U $ZSH/functions/*(:t)
# Better history
# Credits to https://coderwall.com/p/jpj_6q/zsh-better-history-searching-with-arrow-keys
autoload -U up-line-or-beginning-search
autoload -U down-line-or-beginning-search
zle -N up-line-or-beginning-search
zle -N down-line-or-beginning-search
HISTFILE=~/.zsh_history
HISTSIZE=10000
SAVEHIST=10000
setopt NO_BG_NICE # don't nice background tasks
setopt NO_HUP
setopt NO_LIST_BEEP
setopt LOCAL_OPTIONS # allow functions to have local options
setopt LOCAL_TRAPS # allow functions to have local traps
setopt HIST_VERIFY
setopt EXTENDED_HISTORY # add timestamps to history
setopt PROMPT_SUBST
setopt CORRECT
setopt COMPLETE_IN_WORD
setopt IGNORE_EOF
setopt INC_APPEND_HISTORY # append history incrementally
setopt SHARE_HISTORY # share history across sessions
setopt HIST_IGNORE_ALL_DUPS # don't record dupes in history
setopt HIST_REDUCE_BLANKS
# don't expand aliases _before_ completion has finished
# like: git comm-[tab]
setopt complete_aliases
# Word/line movement and deletion bindings.
bindkey '^[^[[D' backward-word
bindkey '^[^[[C' forward-word
bindkey '^[[5D' beginning-of-line
bindkey '^[[5C' end-of-line
bindkey '^[[3~' delete-char
bindkey '^?' backward-delete-char
# History-related bindings
# Credit: https://superuser.com/questions/446594/separate-up-arrow-lookback-for-local-and-global-zsh-history
# Widget: search up through the current session's history only.
up-line-or-local-history() {
    zle set-local-history 1
    zle up-line-or-search
    zle set-local-history 0
}
zle -N up-line-or-local-history
# Widget: search down through the current session's history only.
down-line-or-local-history() {
    zle set-local-history 1
    zle down-line-or-search
    zle set-local-history 0
}
zle -N down-line-or-local-history
# The up[down]-line-or-beginning-search commands does not work well when toggling local history in between so have this
# separately for when trying to search a multi-word command from the history. This search will be on the global history.
# The escape keys can be checked with ctrl+v.
# Partly taken from https://superuser.com/questions/446594/separate-up-arrow-lookback-for-local-and-global-zsh-history
bindkey '^[[A' up-line-or-local-history     # Cursor up
bindkey '^[[B' down-line-or-local-history   # Cursor down
bindkey '^[[5~' up-line-or-beginning-search    # [ALT] + Cursor up
bindkey '^[[6~' down-line-or-beginning-search  # [ALT] + Cursor down
bindkey "^[[1;5A" up-line-or-search   # [CTRL] + Cursor up
bindkey "^[[1;5B" down-line-or-search # [CTRL] + Cursor down
|
package com.attendancefortjit.tjitattendance;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
/**
 * Displays the static "Contact us" screen.
 *
 * Created by jbran on 29-01-2016.
 */
public class Contactus extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.contactus);
    }
    @Override
    public void onBackPressed() {
        super.onBackPressed();
        // Navigate explicitly back to the Welcome screen and remove this
        // activity from the back stack via finish().
        Intent intent = new Intent(Contactus.this,Welcome.class);
        startActivity(intent);
        finish();
    }
}
|
<filename>src/utils/GraphQlApiError.ts<gh_stars>1-10
import { ApolloError } from 'apollo-server-errors';
import { EGraphQlErrorCode } from '../types';
/**
 * Application-level GraphQL error. Falls back to
 * EGraphQlErrorCode.INTERNAL_SERVER_ERROR when no code is supplied.
 */
export default class GraphQlApiError extends ApolloError {
  constructor(message: string, code?: string) {
    super(message, code || EGraphQlErrorCode.INTERNAL_SERVER_ERROR);
    // Make the error's name reflect the subclass in logs and stack traces.
    Object.defineProperty(this, 'name', { value: 'GraphQlApiError' });
  }
}
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import java.time.ZonedDateTime
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import base.ControllerUnitSpec
import connectors.CustomsDataStoreConnector
import models.VerifiedEmailAddress
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito._
import play.api.libs.json.Json
import play.api.test.Helpers._
import testdata.TestData
import uk.gov.hmrc.http.{HeaderCarrier, UpstreamErrorResponse}
/** Unit tests for EmailByEoriController's "email if verified" endpoint,
  * with the CustomsDataStoreConnector mocked out.
  */
class EmailByEoriControllerUnitSpec extends ControllerUnitSpec {
  private val connector = mock[CustomsDataStoreConnector]
  private val controller = new EmailByEoriController(authAction, connector, stubControllerComponents())
  override protected def beforeEach(): Unit = {
    super.beforeEach()
    // Fresh mock state and an authorised user for every test.
    reset(connector)
    authorisedUser()
  }
  override protected def afterEach(): Unit = {
    reset(connector)
    super.afterEach()
  }
  "GET EmailIfVerified endpoint" should {
    "return 200(OK) status if the email address for the given EORI is verified" in {
      val expectedEmailAddress = VerifiedEmailAddress("<EMAIL>", ZonedDateTime.now)
      when(connector.getEmailAddress(any[String])(any[HeaderCarrier]))
        .thenReturn(Future.successful(Some(expectedEmailAddress)))
      val response = controller.getEmailIfVerified(TestData.eori)(getRequest())
      status(response) mustBe OK
      contentAsJson(response) mustBe Json.toJson(expectedEmailAddress)
      // The EORI from the URL must be forwarded to the connector verbatim.
      verify(connector).getEmailAddress(meq(TestData.eori))(any[HeaderCarrier])
    }
    "return 404(NOT_FOUND) status if the email address for the given EORI was not provided or was not verified yet" in {
      when(connector.getEmailAddress(any[String])(any[HeaderCarrier])).thenReturn(Future.successful(None))
      val response = controller.getEmailIfVerified(TestData.eori)(getRequest())
      status(response) mustBe NOT_FOUND
    }
    "return 500(INTERNAL_SERVER_ERROR) status for any 4xx returned by the downstream service, let apart 404" in {
      when(connector.getEmailAddress(any[String])(any[HeaderCarrier])).thenAnswer(_ => upstreamErrorResponse(BAD_REQUEST))
      val response = controller.getEmailIfVerified(TestData.eori)(getRequest())
      status(response) mustBe INTERNAL_SERVER_ERROR
    }
    "return 500(INTERNAL_SERVER_ERROR) status for any 5xx http error code returned by the downstream service" in {
      when(connector.getEmailAddress(any[String])(any[HeaderCarrier])).thenAnswer(_ => upstreamErrorResponse(BAD_GATEWAY))
      val response = controller.getEmailIfVerified(TestData.eori)(getRequest())
      status(response) mustBe INTERNAL_SERVER_ERROR
    }
  }
  // Helper: a *successful* future carrying an UpstreamErrorResponse, matching
  // how the connector surfaces downstream HTTP errors in these stubs.
  def upstreamErrorResponse(status: Int): Future[UpstreamErrorResponse] =
    Future.successful(UpstreamErrorResponse("An error", status))
}
|
echo "Running rest.sh"
echo "Configuring REST Proxy..."
# Here's the install doc:
# https://docs.confluent.io/current/installation/installing_cp/rhel-centos.html#crest-long
restConfig="/etc/kafka-rest/kafka-rest.properties"
# Point the proxy at the cluster by rewriting the commented-out defaults in
# the stock properties file. $zookeeperConnect, $brokerConnect and
# $schemaRegistryConnect are expected to be set by the calling environment.
sed -i "s/^#zookeeper\.connect=localhost\:2181/zookeeper\.connect=$zookeeperConnect/g" $restConfig
sed -i "s/^bootstrap.servers=PLAINTEXT\:\/\/localhost\:9092/bootstrap.servers=$brokerConnect/g" $restConfig
sed -i "s/^#schema\.registry\.url=http\:\/\/localhost\:8081/schema.registry.url=http\:\/\/$schemaRegistryConnect/g" $restConfig
# Derive a per-node id from the hostname suffix (e.g. rest-2 -> 2).
nodeIndex=`hostname | sed 's/rest-//'`
sed -i "s/^#id=kafka-rest-test-server/id=kafka-rest-${nodeIndex}/g" $restConfig
# wait for all zookeepers to be up and running
wait_for_zk_quorum
# wait for all brokers to be up and running
wait_for_brokers
# wait for schema registry to be up and running
wait_for_schema_registry
echo "Starting REST Proxy service"
systemctl enable confluent-kafka-rest
systemctl start confluent-kafka-rest
|
<gh_stars>0
module FortniteExperience
  # Aggregates weekly-challenge XP per point of interest (POI).
  class Calculator
    def initialize(api_key)
      @api_key = api_key
    end

    # Returns a hash mapping each POI name to the summed XP of the weekly
    # challenges whose title mentions that POI (case-insensitive).
    def experience_by_pois
      result = {}
      # Fetch the challenge list once: the call is loop-invariant, and the
      # previous code re-issued this remote API request for every POI.
      challenges = Services::FortniteAPI.get_weekly_challenges(@api_key)
      Services::FortniteAPI.get_poi_names(@api_key).each do |poi|
        # Escape the POI name so regex metacharacters (e.g. parentheses in a
        # location name) cannot break or distort the title match.
        pattern = Regexp.new(Regexp.escape(poi), Regexp::IGNORECASE)
        challenges.each do |challenge|
          if challenge['title'].match?(pattern)
            result[poi] = (result[poi] || 0) + challenge['xp']
          end
        end
      end
      result
    end
  end
end
|
def sort_dict_by_value(dictionary):
    """Return a new dict whose items are ordered by ascending value.

    Relies on the insertion-order guarantee of Python dicts (3.7+).
    """
    ordered_items = sorted(dictionary.items(), key=lambda item: item[1])
    return dict(ordered_items)
<reponame>daniOrtiz11/EDA
#include <stdio.h>
#include <iostream>
using namespace std;
//{P == n >= 0 ^ 0 <= longitud(v) < 10000 }
// Reads n followed by n integers from stdin and prints how many are even.
// Streams the values instead of staging them in a fixed int v[10000] buffer,
// removing ~40KB of stack usage and the silent buffer overflow that the old
// code had whenever n exceeded the buffer size.
void casoDePrueba() {
    int n = 0;
    cin >> n;
    int cont = 0;
    for (int i = 0; i < n; i++) {
        int valor;
        cin >> valor;
        if (valor % 2 == 0)
            cont++;
    }
    cout << cont << endl;
} // resuelve
// {Q == cont = (#w : 0<=w<n : a[w]%2 = 0)}
// Reads the number of test cases and runs casoDePrueba once per case.
int main() {
    unsigned int numCasos, i;
    cin>> numCasos;
    for(i = 0; i < numCasos; ++i)
    {
        casoDePrueba();
    }
    return 0;
} // main
#!/usr/bin/env bash
# script to get categorization for all websites/domain names.
# this script invokes a python program which in turn invokes a Symantec K9
# REST API (which is currently free, probably will remain so) for each
# domain name. Because this is a very slow process especially when we
# have upwards of 100,000 domain names, this script splits the
# input file into 700-line chunks (number chosen by trial and error
# because we cannot feed the entire file as a command line argument
# to the python program; this size works) and then creates another
# script which needs to be run manually which would launch multiple python
# programs, one for each split of the original file.
echo input file name is $1
# create a folder called categories, we will do all our business
# in this folder and not change anything in this current folder
WORK_FOLDER="categories"
# -p: don't fail when the folder is left over from a previous run
mkdir -p ${WORK_FOLDER}
# copy input file to the new folder
cp $1 ${WORK_FOLDER}
# copy code to the new folder
cp k9.py ${WORK_FOLDER}
# change to the new folder
cd ./${WORK_FOLDER}
# NOTE(review): $1 is read again after the cd, which only works when it is
# an absolute path (or perhaps the copy above was meant to be read) — confirm.
# delete any malformed domains that begin with "-", they mess up
# the command line handling when passed to the python program
grep -v "^-" $1 > websites1.txt
# delete any lines with ":" could be due to IPv6 address
# or hostname:port number, bottom line is that it causes the python
# program to crash so we dont need it
grep -v ":" websites1.txt > websites.txt
# split into multiple 700-line files (fixed names: xaa, xab, ...)
split -l 700 websites.txt
# delete any previously existing version of the script
GENERATED_SCRIPT="cmd.sh"
rm -f ${GENERATED_SCRIPT}
# for each split create a command line
# (removed a dead `filelist=` assignment here — the loop below runs its
# own ls and the variable was never used)
for f in `ls x??`
do
    # cant put the exact command with "`" because the shell tried to
    # execute it right here, we just want to echo the command so instead of
    # ` just put a __ as placeholder which we will later replace
    cmd="/bin/python k9.py __cat ${f}__ > website_categories_${f}.csv &"
    echo ${cmd} >> ${GENERATED_SCRIPT}
done
# swap the placeholders back to backticks; the generated script is now
# ready, give it execute permissions
sed -i 's/__/`/g' ${GENERATED_SCRIPT}
chmod +x ${GENERATED_SCRIPT}
echo ./${WORK_FOLDER}/${GENERATED_SCRIPT} is ready....
echo you would need to run ./${WORK_FOLDER}/${GENERATED_SCRIPT} manually and then
echo once all domain categories have been retrieved i.e. website_categories_x??.csv
echo files have stopped updating and there is no python program running for k9.py
echo then join the generated website_categories_x??.csv files as
echo "cat website_categories_x??.csv > website_categories_k9.csv"
# all done back to the original dir
cd -
|
import java.io.Serializable;
import java.util.Random;
/**
* The basis for an opponent
* Created by claudedaniel on 2/27/17.
*/
public class Opponent extends Trainer implements Serializable{
    /**
     * The opponent's attack speech.
     */
    String atkSpeech;
    /**
     * The opponent's win speech.
     */
    String winSpeech;
    /**
     * The opponent's loss speech.
     */
    String lossSpeech;
    // Shared RNG for style/move selection.
    Random random = new Random();
    /**
     * The constructor for the opponent. Assigns a random
     * pokemon to opponent.
     * @param name Name of the opponent
     * @param hp HP of the opponent
     * @param atk Attack speech of the opponent
     * @param win Win speech of the opponent
     * @param loss Loss speech of the opponent
     */
    public Opponent(String name, int hp, String atk, String win, String loss){
        super(name, hp);
        atkSpeech = atk;
        winSpeech = win;
        lossSpeech = loss;
        addPokemon(PokemonMaker.makeWildPokemon());
    }
    /**
     * Return the attack speech
     * @return the attack speech
     */
    public String getAttackSpeech(){
        return atkSpeech;
    }
    /**
     * Return the win speech
     * @return the win speech
     */
    public String getWinSpeech(){
        return winSpeech;
    }
    /**
     * Return the loss speech
     * @return the loss speech
     */
    public String getLossSpeech(){
        return lossSpeech;
    }
    /**
     * Chooses a random style for the opponent
     * @return the chosen style, a random value in [1, 2]
     */
    public int chooseStyle(){
        return (random.nextInt(2)+1);
    }
    /**
     * Chooses a random move for the opponent
     * @param style the style that was already chosen
     *              (currently ignored by this implementation)
     * @return a random move index in [1, 3]
     */
    public int chooseMove(int style){
        return (random.nextInt(3) + 1);
    }
}
|
import { types } from "taggr-shared";
import dbFactory from "./database";
// Fixture data: three indexed images with representative tag sets
// (people / animals / untagged).
const IMAGES: types.Image[] = [
  {
    hash: "10c483cc2ef59dcc2009ae662917e704",
    path:
      "file:///Users/alain/Library/Application Support/taggr-nodejs/10c483cc2ef59dcc2009ae662917e704.jpeg",
    rawPath: "/Users/alain/temp/pictures/surface-aqdPtCtq3dY-unsplash.jpg",
    tags: ["people"],
    location: null,
    creationDate: 1613300791762,
  },
  {
    hash: "1469690b94ff799038735e2813ea607f",
    path:
      "file:///Users/alain/Library/Application Support/taggr-nodejs/1469690b94ff799038735e2813ea607f.jpeg",
    rawPath: "/Users/alain/temp/pictures/wexor-tmg-L-2p8fapOA8-unsplash.jpg",
    tags: ["animals"],
    location: null,
    creationDate: 1613300789393,
  },
  {
    hash: "f3a868effff645384d46dabaf7d9dcaf",
    path:
      "file:///Users/alain/Library/Application Support/taggr-nodejs/f3a868effff645384d46dabaf7d9dcaf.jpeg",
    rawPath: "/Users/alain/temp/pictures/will-norbury--aDYQJdETkA-unsplash.jpg",
    tags: [],
    location: null,
    creationDate: 1616707235139,
  },
];
// NOTE(review): suite is currently disabled via describe.skip — re-enable or
// document why it is skipped.
describe.skip("database module", () => {
  // clean up dbs
  beforeEach(() => {
    dbFactory(true).clear();
    dbFactory(false).clear();
  });
  afterEach(() => {
    dbFactory(true).clear();
    dbFactory(false).clear();
  });
  it("should create db with default values", () => {
    const db = dbFactory(true);
    expect(db.get("allImages")).toEqual({});
    expect(db.get("currentImageHashes")).toEqual([]);
  });
  it("should create db when in development mode", () => {
    const db = dbFactory(true);
    const insertedImages = {
      hash1: IMAGES[0],
    };
    db.set("allImages", insertedImages);
    const images = db.get("allImages");
    expect(images).toEqual(insertedImages);
  });
  it("should create db when in non-development mode", () => {
    const db = dbFactory(false);
    const insertedImages = {
      hash1: IMAGES[0],
    };
    db.set("allImages", insertedImages);
    const images = db.get("allImages");
    expect(images).toEqual(insertedImages);
  });
});
|
def calculate_initial_total_volume(E_Na_sg, E_K_sn, E_K_sg, E_Cl_sn, E_Cl_sg, E_Ca_sn, psi_se, psi_sn, psi_sg, psi_de, psi_dn, psi_dg, V_sn, V_se, V_sg, V_dn, V_de, V_dg) -> float:
    """Return the total volume of the six compartments.

    Only the six ``V_*`` arguments contribute to the result; the reversal
    potentials (``E_*``) and membrane potentials (``psi_*``) are accepted to
    keep the call signature stable but are not used in the computation.
    """
    # sum() starts from 0 and adds in the same left-to-right order as the
    # original chained '+' expression, so the float result is bit-identical.
    return sum((V_sn, V_se, V_sg, V_dn, V_de, V_dg))
# Exercise the function with representative values.  Only the V_* volumes
# affect the result; the potentials are required by the signature.
E_Na_sg, E_K_sn, E_K_sg = 0.05, -0.07, 0.03
E_Cl_sn, E_Cl_sg, E_Ca_sn = -0.05, 0.06, 0.04
psi_se, psi_sn, psi_sg = 0.02, 0.03, 0.01
psi_de, psi_dn, psi_dg = 0.04, 0.02, 0.03
V_sn, V_se, V_sg = 0.1, 0.2, 0.15
V_dn, V_de, V_dg = 0.08, 0.18, 0.12

print(calculate_initial_total_volume(E_Na_sg, E_K_sn, E_K_sg, E_Cl_sn, E_Cl_sg, E_Ca_sn, psi_se, psi_sn, psi_sg, psi_de, psi_dn, psi_dg, V_sn, V_se, V_sg, V_dn, V_de, V_dg))
<filename>core/src/main/java/brooklyn/internal/storage/Reference.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.internal.storage;
/**
 * A reference to a value, backed by the storage-medium.
 *
 * @see BrooklynStorage#getReference(String)
 *
 * @author aled
 */
public interface Reference<T> {

    // TODO We can add compareAndSet(T,T) as and when required

    /** @return the current value (may be null). */
    T get();

    /**
     * Replaces the stored value.
     *
     * @param val the new value
     * @return presumably the previous value — not stated here; confirm against
     *         implementations before relying on it
     */
    T set(T val);

    /**
     * @return true if the value is null; false otherwise.
     */
    boolean isNull();

    /**
     * Sets the value back to null. Similar to {@code set(null)}.
     */
    void clear();

    /**
     * @return true if the value equals the given parameter; false otherwise
     */
    boolean contains(Object other);
}
|
<gh_stars>0
/* **** Notes
Schedule
Remarks:
Currently under construction
//*/
# define CALEND
# define CAR
# include "../../../incl/config.h"
/*
 * Schedule the event described by a calendar content buffer.
 *
 * content - raw event content (format handled by project parsers — the
 *           "after parsing" note below suggests parsing is still TODO)
 * argp    - roll/context record used to locate this client's event slot
 *
 * Returns 0x01 on success, 0x00 on any failure (null argument, scheduling
 * failure, or persistence failure).
 *
 * NOTE(review): R() and CLI_INDEX are project macros whose exact semantics
 * are not visible in this file — confirm before modifying the indexing.
 */
signed(__cdecl cals_sched_events(signed char(*content),cals_roll_t(*argp))) {
/* **** DATA, BSS and STACK */
auto cals_event_t *event;
auto signed char *b;
auto time_t t;
auto signed i,r;         /* NOTE(review): `i` is declared but never used */
auto signed short flag;  /* NOTE(review): `flag` is declared but never used */
/* **** CODE/TEXT */
if(!content) return(0x00);  /* guard: nothing to schedule */
if(!argp) return(0x00);     /* guard: no context supplied */
/* Resolve this context's event slot via the CLI_INDEX table. */
event = (*(CLI_INDEX+(R(event,*argp))));
// after parsing the content..
time(&t);                   /* schedule relative to the current time */
r = cals_sched(t,event);
if(!r) return(0x00);        /* scheduling failed */
b = (content);
/* Persist the raw content alongside the event record. */
r = keep(&(R(b,*event)),b);
if(!r) return(0x00);        /* persistence failed */
return(0x01);
}
|
import requests
from bs4 import BeautifulSoup

# Page whose visible paragraph text we want to count.
URL = "https://www.example.com/"

# Fetch the page body and hand it to Beautiful Soup's HTML parser.
html = requests.get(URL).text
soup = BeautifulSoup(html, 'html.parser')

# Total the whitespace-separated words across every <p> element.
word_count = sum(len(paragraph.text.split()) for paragraph in soup.find_all('p'))

print("Total words in the page:", word_count)
<reponame>devilry/devilry-django<filename>devilry/devilry_markup/urls.py
from django.urls import path
from django.contrib.auth.decorators import login_required
from devilry.devilry_markup.views import DevilryFlavouredMarkdownFull

# Single endpoint that renders Devilry-flavoured markdown; wrapped in
# login_required so only authenticated users can invoke the renderer.
urlpatterns = [
    path('devilry_flavoured_markdown_full', login_required(DevilryFlavouredMarkdownFull.as_view()))
]

# Legacy django.conf.urls-based definition, kept commented for reference:
# from django.conf.urls import url
# from django.contrib.auth.decorators import login_required
# from devilry.devilry_markup.views import DevilryFlavouredMarkdownFull
# urlpatterns = [
#     url(r'^devilry_flavoured_markdown_full$', login_required(DevilryFlavouredMarkdownFull.as_view()))
# ]
|
#!/bin/bash
############################################################################
# (C) Copyright IBM Corporation 2015, 2019 #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
############################################################################
# Traveler Docker Build Script
# Usage : ./build.sh <URL for download repository> [ Traveler Version ]
# Example: ./build-image.sh http://192.168.1.1
# Positional arguments: $1 = download repository URL, $2 = optional version.
SCRIPT_NAME=$0
DOWNLOAD_FROM=$1

# Select product to install
PROD_NAME=traveler

# Get product name from file name
# (fallback only — PROD_NAME is hard-coded above, so this branch never fires
#  unless the assignment is removed; pattern: build_<product>.sh)
if [ -z $PROD_NAME ]; then
  PROD_NAME=`basename $0 | cut -f 2 -d"_" | cut -f 1 -d"."`
fi

# Specify Version to install
# Can be overwritten on command-line
PROD_VER=10.0.1.1

# Upper-case the user-supplied version so tags are consistent.
CUSTOM_VER=`echo "$2" | awk '{print toupper($0)}'`

if [ ! -z "$CUSTOM_VER" ]; then
  PROD_VER=$CUSTOM_VER
fi

DOCKER_IMAGE_NAME="ibmcom/$PROD_NAME"
DOCKER_IMAGE_VERSION=$PROD_VER
DOCKER_FILE=dockerfile

# Latest Tag not set when specifying explicit version
if [ -z "$CUSTOM_VER" ]; then
  DOCKER_TAG_LATEST="$DOCKER_IMAGE_NAME:latest"
fi
# Print the invocation synopsis, padded with blank lines.
usage ()
{
  printf '\nUsage: %s <URL for download repository> [TRAVELER-VERSION] \n\n' "`basename $SCRIPT_NAME`"
  return 0
}
# Report elapsed wall-clock time from bash's $SECONDS, with correct
# singular/plural unit names. (Avoids `date -d@`, which fails on OSX.)
print_runtime()
{
  echo
  # Split total elapsed seconds into hour/minute/second components.
  hours=$((SECONDS / 3600))
  minutes=$(( (SECONDS % 3600) / 60 ))
  seconds=$((SECONDS % 60))
  # Default to plural; strip the "s" only for exactly one unit.
  h="s"; m="s"; s="s"
  if [ $hours = "1" ] ; then h=""; fi
  if [ $minutes = "1" ] ; then m=""; fi
  if [ $seconds = "1" ] ; then s=""; fi
  # Emit only the significant leading units.
  if [ ! $hours = 0 ] ; then echo "Completed in $hours hour$h, $minutes minute$m and $seconds second$s"
  elif [ ! $minutes = 0 ] ; then echo "Completed in $minutes minute$m and $seconds second$s"
  else echo "Completed in $seconds second$s"; fi
}
# Build the product Docker image.
# Reads: PROD_NAME, DOCKER_IMAGE_NAME, DOCKER_IMAGE_VERSION, DOCKER_TAG_LATEST
#        (optional), DOCKER_FILE, DOWNLOAD_FROM, SCRIPT_NAME.
docker_build ()
{
  # Fixed: previously echoed the undefined variable $IMAGENAME.
  echo "Building Image : " "$DOCKER_IMAGE_NAME:$DOCKER_IMAGE_VERSION"

  if [ -z "$DOCKER_TAG_LATEST" ]; then
    # Fixed: original read $DOCKER_IMAGE_NAMEVERSION (undefined — the ':' and
    # braces between name and version were missing).
    DOCKER_IMAGE="$DOCKER_IMAGE_NAME:$DOCKER_IMAGE_VERSION"
    DOCKER_TAG_LATEST_CMD=""
  else
    DOCKER_IMAGE=$DOCKER_TAG_LATEST
    DOCKER_TAG_LATEST_CMD="-t $DOCKER_TAG_LATEST"
  fi

  # Get Build Time (recorded as an image label)
  BUILDTIME=`date +"%d.%m.%Y %H:%M:%S"`

  case "$PROD_NAME" in
    traveler)
      DOCKER_DESCRIPTION="IBM Traveler"
      ;;
    *)
      echo "Unknown product [$PROD_NAME] - Terminating installation"
      exit 1
      ;;
  esac

  # Get build arguments (note: DOCKER_IMAGE is unconditionally set to
  # name:version here, matching the original behavior).
  DOCKER_IMAGE=$DOCKER_IMAGE_NAME:$DOCKER_IMAGE_VERSION
  BUILD_ARG_PROD_NAME="--build-arg PROD_NAME=$PROD_NAME"
  BUILD_ARG_PROD_VER="--build-arg PROD_VER=$PROD_VER"
  BUILD_ARG_DOWNLOAD_FROM="--build-arg DownloadFrom=$DOWNLOAD_FROM"

  # Switch to current directory and remember current directory
  pushd .
  CURRENT_DIR=`dirname $SCRIPT_NAME`
  cd $CURRENT_DIR

  # Finally build the image
  docker build --no-cache --label "TravelerDocker.description"="$DOCKER_DESCRIPTION" --label "TravelerDocker.version"="$DOCKER_IMAGE_VERSION" --label "TravelerDocker.buildtime"="$BUILDTIME" -t $DOCKER_IMAGE $DOCKER_TAG_LATEST_CMD -f $DOCKER_FILE $BUILD_ARG_DOWNLOAD_FROM $BUILD_ARG_PROD_NAME $BUILD_ARG_PROD_VER .

  popd
  echo
  return 0
}
# --- Main ---
# The download URL is mandatory; bail out (exit 0, matching original) with
# usage help when it is missing.
if [ -z "$DOWNLOAD_FROM" ]; then
  echo
  echo "No download location specified!"
  echo
  usage
  exit 0
fi

docker_build

echo
print_runtime
echo

exit 0
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the directory this script lives in (works regardless of CWD).
current_dir=`dirname "$0"`
current_dir=`cd "$current_dir"; pwd`
root_dir=${current_dir}/../../../../..

# Workload configuration for the micro/terasort benchmark.
workload_config=${root_dir}/conf/workloads/micro/terasort.conf
. "${root_dir}/bin/functions/load_bench_config.sh"

# enter_bench/show_bannar/rmr_hdfs/run_hadoop_job/timestamp/leave_bench all
# come from the sourced benchmark function library above.
enter_bench HadoopPrepareTerasort ${workload_config} ${current_dir}
show_bannar start

# Remove any previous input data; `|| true` tolerates a missing path.
rmr_hdfs $INPUT_HDFS || true

START_TIME=`timestamp`
# Generate the terasort input dataset with teragen.
run_hadoop_job ${HADOOP_EXAMPLES_JAR} teragen \
    -D mapreduce.job.maps=${NUM_MAPS} \
    -D mapreduce.job.reduces=${NUM_REDS} \
    ${DATASIZE} ${INPUT_HDFS}
END_TIME=`timestamp`

show_bannar finish
leave_bench
|
def search_for_pattern(long_string, pattern):
    """Return the index of the first occurrence of ``pattern`` in ``long_string``.

    Args:
        long_string: The string to search in.
        pattern: The substring to look for.

    Returns:
        The zero-based index of the first match, or -1 if ``pattern`` does
        not occur (an empty pattern matches at index 0).
    """
    # str.find implements exactly this contract (index or -1), replacing the
    # original try/except around str.index.
    return long_string.find(pattern)
"""Prediction tasks for MIMIC."""
import os
import pandas as pd
from dask import dataframe as dd
from .task import TaskMeta
from .transform import Transform
from database import dbs
# Define overall tables
MIMIC = dbs['MIMIC']
try:
    # Lazy dask frames over the MIMIC CSV dumps; nothing is computed here.
    patients = dd.read_csv(MIMIC.frame_paths['patients']).set_index('ROW_ID')
    diagnoses_icd = dd.read_csv(MIMIC.frame_paths['diagnoses_icd'], assume_missing=True).set_index('ROW_ID')
    patients_diagnosis = patients.merge(diagnoses_icd.drop(['SEQ_NUM'], axis=1), how='left', on='SUBJECT_ID')
    # Tasks specific tables
    # ICD-9 78552 is septic shock; the hemo codes are presumably hemorrhagic /
    # other shock and surgical-complication codes — TODO confirm against ICD-9.
    septic_shock = dd.from_pandas(pd.DataFrame({'ICD9_CODE': ['78552']}), npartitions=1)
    hemo_shock = dd.from_pandas(pd.DataFrame({'ICD9_CODE': ['78559', '99809', '9584']}), npartitions=1)
except FileNotFoundError:
    # CSVs absent (e.g. environment without the MIMIC data): keep the module
    # importable; the task factories will fail only if actually used.
    patients = None
    diagnoses_icd = None
    patients_diagnosis = None
    septic_shock = None
    hemo_shock = None
# Task 1: Septic shock prediciton
# -------------------------------
def septic_task(**kwargs):
    """Return TaskMeta for septic shock prediction.

    Keyword Args:
        n_top_pvals (int or None): number of top (lowest) p-value features to
            keep; None disables p-value-based feature/index selection.
        RS, T: identifiers used to locate the precomputed p-value and
            used-index CSV files (required when n_top_pvals is not None).
    """
    # Define y
    def define_predict_septic(df):
        """Compute y (1 = septic shock diagnosis) indexed like ``df``."""
        # Ignore given df content; only its index is used below.
        positives = patients_diagnosis.merge(septic_shock, how='inner', on='ICD9_CODE')
        positives = positives.drop_duplicates(subset=['SUBJECT_ID']).set_index('SUBJECT_ID').index
        positives_idx = positives.compute()

        # Get full idx from df and set the complementary to 0
        idx = df.index
        # need to intersect because one index of positives_idx is not in idx
        negatives_idx = idx.difference(positives_idx).intersection(idx)
        positives_idx = positives_idx.intersection(idx)

        positives = pd.DataFrame({'y': 1}, index=positives_idx)
        negatives = pd.DataFrame({'y': 0}, index=negatives_idx)

        df = pd.concat((positives, negatives), axis=0).sort_index()
        return df

    septic_predict_transform = Transform(
        input_features=[],
        transform=define_predict_septic,
        output_features=['y'],
    )

    assert 'n_top_pvals' in kwargs
    n_top_pvals = kwargs['n_top_pvals']

    if n_top_pvals is None:
        septic_pvals_keep_transform = None
        septic_idx_transform = None
    else:
        assert 'RS' in kwargs
        assert 'T' in kwargs
        RS = kwargs['RS']
        T = kwargs['T']

        septic_pvals_dir = 'pvals/MIMIC/septic_pvals/'
        septic_idx_path = f'{septic_pvals_dir}RS{RS}-T{T}-used_idx.csv'
        septic_pvals_path = f'{septic_pvals_dir}RS{RS}-T{T}-pvals.csv'

        assert os.path.exists(septic_idx_path)
        assert os.path.exists(septic_pvals_path)

        # read_csv's squeeze= keyword was removed in pandas 2.0; squeezing the
        # single-column frame afterwards yields the same Series.
        pvals = pd.read_csv(septic_pvals_path, header=None,
                            index_col=0).squeeze('columns')
        pvals = pvals.sort_values()[:n_top_pvals]
        septic_top_pvals = list(pvals.index.astype(str))

        septic_pvals_keep_transform = Transform(
            output_features=septic_top_pvals
        )

        septic_drop_idx = pd.read_csv(septic_idx_path,
                                      index_col=0).squeeze('columns')
        septic_idx_transform = Transform(
            input_features=[],
            transform=lambda df: df.drop(septic_drop_idx.index, axis=0),
        )

    return TaskMeta(
        name='septic_pvals',
        db='MIMIC',
        df_name='X_labevents',
        classif=True,
        idx_column='subject_id',
        idx_selection=septic_idx_transform,
        predict=septic_predict_transform,
        transform=None,
        select=septic_pvals_keep_transform,
        encode_select=None,
        encode_transform=None,
    )
# Task 2: Hemorrhagic shock prediciton
# ------------------------------------
def hemo_task(**kwargs):
    """Return TaskMeta for Hemorrhagic shock prediction.

    Keyword Args:
        n_top_pvals (int or None): number of top (lowest) p-value features to
            keep; None disables p-value-based feature/index selection.
        RS, T: identifiers used to locate the precomputed p-value and
            used-index CSV files (required when n_top_pvals is not None).
    """
    # Define y
    def define_predict_hemo(df):
        """Compute y (1 = hemorrhagic shock diagnosis) indexed like ``df``."""
        # Ignore given df content; only its index is used below.
        positives = patients_diagnosis.merge(hemo_shock, how='inner', on='ICD9_CODE')
        positives = positives.drop_duplicates(subset=['SUBJECT_ID']).set_index('SUBJECT_ID').index
        positives_idx = positives.compute()

        # Get full idx from df and set the complementary to 0
        idx = df.index
        # need to intersect because one index of positives_idx is not in idx
        negatives_idx = idx.difference(positives_idx).intersection(idx)
        positives_idx = positives_idx.intersection(idx)

        positives = pd.DataFrame({'y': 1}, index=positives_idx)
        negatives = pd.DataFrame({'y': 0}, index=negatives_idx)

        df = pd.concat((positives, negatives), axis=0).sort_index()
        return df

    hemo_predict_transform = Transform(
        input_features=[],
        transform=define_predict_hemo,
        output_features=['y'],
    )

    assert 'n_top_pvals' in kwargs
    n_top_pvals = kwargs['n_top_pvals']

    if n_top_pvals is None:
        hemo_pvals_keep_transform = None
        hemo_idx_transform = None
    else:
        assert 'RS' in kwargs
        assert 'T' in kwargs
        RS = kwargs['RS']
        T = kwargs['T']

        hemo_pvals_dir = 'pvals/MIMIC/hemo_pvals/'
        hemo_idx_path = f'{hemo_pvals_dir}RS{RS}-T{T}-used_idx.csv'
        hemo_pvals_path = f'{hemo_pvals_dir}RS{RS}-T{T}-pvals.csv'

        assert os.path.exists(hemo_idx_path)
        assert os.path.exists(hemo_pvals_path)

        # read_csv's squeeze= keyword was removed in pandas 2.0; squeezing the
        # single-column frame afterwards yields the same Series.
        pvals = pd.read_csv(hemo_pvals_path, header=None,
                            index_col=0).squeeze('columns')
        pvals = pvals.sort_values()[:n_top_pvals]
        hemo_top_pvals = list(pvals.index.astype(str))

        hemo_pvals_keep_transform = Transform(
            output_features=hemo_top_pvals
        )

        hemo_drop_idx = pd.read_csv(hemo_idx_path,
                                    index_col=0).squeeze('columns')
        hemo_idx_transform = Transform(
            input_features=[],
            transform=lambda df: df.drop(hemo_drop_idx.index, axis=0),
        )

    return TaskMeta(
        name='hemo_pvals',
        db='MIMIC',
        df_name='X_labevents',
        classif=True,
        idx_column='subject_id',
        idx_selection=hemo_idx_transform,
        predict=hemo_predict_transform,
        transform=None,
        select=hemo_pvals_keep_transform,
        encode_select=None,
        encode_transform=None,
    )
# Registry: task name -> factory returning that task's TaskMeta.
task_metas = {
    'septic_pvals': septic_task,
    'hemo_pvals': hemo_task,
}
|
<reponame>LiuFang07/bk-cmdb
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package service
import (
"context"
"net/http"
restful "github.com/emicklei/go-restful"
"configcenter/src/common/backbone"
"configcenter/src/common/blog"
"configcenter/src/storage/mongodb"
"configcenter/src/storage/rpc"
"configcenter/src/storage/tmserver/app/options"
"configcenter/src/storage/tmserver/core"
"configcenter/src/storage/tmserver/core/session"
"configcenter/src/storage/types"
)
// Service service methods
type Service interface {
	// WebService returns the restful web service exposing the txn RPC endpoint.
	WebService() *restful.WebService
	// SetConfig wires the engine, database client and transaction config into
	// the service; must be called before WebService is served.
	SetConfig(engin *backbone.Engine, db mongodb.Client, txnCfg options.TransactionConfig) error
}

// New create a new service instance
func New(ip string, port uint) Service {
	return &coreService{
		listenIP:   ip,
		listenPort: port,
	}
}

// coreService is the concrete Service implementation; fields are populated
// by SetConfig.
type coreService struct {
	engine     *backbone.Engine
	rpc        *rpc.Server
	dbProxy    mongodb.Client
	core       core.Core
	listenIP   string
	listenPort uint
}
// SetConfig stores the engine and db client, registers the RPC handlers, and
// starts the transaction session in a background goroutine.
func (s *coreService) SetConfig(engin *backbone.Engine, db mongodb.Client, txnCfg options.TransactionConfig) error {
	// set config
	s.engine = engin
	s.dbProxy = db
	s.rpc = rpc.NewServer()

	// init all handlers
	s.rpc.Handle(types.CommandRDBOperation, s.DBOperation)
	s.rpc.HandleStream(types.CommandWatchTransactionOperation, s.WatchTransaction)

	// create a new core instance
	sess, err := session.New(
		core.ContextParams{
			Context:  context.Background(),
			ListenIP: s.listenIP,
		}, txnCfg, db, s.listenIP)
	if err != nil {
		return err
	}

	// Run the session loop in the background; errors are only logged — the
	// service keeps running even if the session stops.
	go func() {
		if err := sess.Run(); err != nil {
			blog.Errorf("tmserver stoped with error: %v", err)
		} else {
			blog.Errorf("tmserver stoped")
		}
	}()

	s.core = core.New(sess, db)
	return nil
}
// WebService builds the restful web service: JSON in/out by default, metrics
// middleware on /txn/v3, and a CONNECT-method "rpc" route that hands the raw
// HTTP connection to the RPC server.
func (s *coreService) WebService() *restful.WebService {
	restful.DefaultRequestContentType(restful.MIME_JSON)
	restful.DefaultResponseContentType(restful.MIME_JSON)
	restful.SetLogger(&blog.GlogWriter{})
	restful.TraceLogger(&blog.GlogWriter{})

	ws := &restful.WebService{}
	ws.Path("/txn/v3").Filter(s.engine.Metric().RestfulMiddleWare)
	ws.Route(ws.Method(http.MethodConnect).Path("rpc").To(func(req *restful.Request, resp *restful.Response) {
		// Unwrap restful's Response so the RPC server writes to the
		// underlying http.ResponseWriter directly (needed for hijacking).
		if sub, ok := resp.ResponseWriter.(*restful.Response); ok {
			s.rpc.ServeHTTP(sub.ResponseWriter, req.Request)
			return
		}
		s.rpc.ServeHTTP(resp.ResponseWriter, req.Request)
	}))

	return ws
}
|
import os
import pandas as pd
def upload_to_database(data_df, user, password, host, port, database):
    """Upload ``data_df`` to the given database and report the outcome.

    Returns a ``(status, message)`` tuple; ``status`` is False and the
    message carries the exception text if anything goes wrong.
    """
    try:
        # Normalise all connection parameters: strip surrounding whitespace
        # and coerce to str (same "{}".format(x.strip()) treatment as before).
        user, password, host, port, database = (
            "{}".format(value.strip())
            for value in (user, password, host, port, database)
        )

        # Hand the frame to the importer (import_data / CONST_ADVANTICSYS are
        # defined elsewhere in this module's scope).
        status, log = import_data(data_df, CONST_ADVANTICSYS, user, password, host, port, database)

        # Build the human-readable outcome message.
        if status:
            outcome = "Data uploaded to database '{}' successfully.".format(database)
        else:
            outcome = "Failed to upload data to database '{}'.".format(database)
        return status, outcome
    except Exception as e:
        return False, "Error occurred during database upload: {}".format(str(e))
# Sample usage
data_df = pd.DataFrame(...) # Create or load a pandas DataFrame
user = os.environ["CROP_SQL_USER"]
password = os.environ["CROP_SQL_PASS"]
host = os.environ["CROP_SQL_HOST"]
port = os.environ["CROP_SQL_PORT"]
database = os.environ["CROP_SQL_DBNAME"]
status, log_message = upload_to_database(data_df, user, password, host, port, database)
print(status, log_message) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.