| text (string, lengths 30 to 1.67M) |
|---|
<s> package com . senseidb . search . node . impl ; import java . util . Collection ; import java . util . Comparator ; import java . util . LinkedList ; import java . util . List ; import proj . zoie . api . DataConsumer ; import proj . zoie . api . ZoieException ; public class CompositeDataConsumer < T > implements DataConsumer < T > { private List < DataConsumer < T > > _consumerList ; private Comparator < String > _versionComparator ; public CompositeDataConsumer ( Comparator < String > versionComparator ) { _consumerList = new LinkedList < DataConsumer < T > > ( ) ; _versionComparator = versionComparator ; } public void addDataConsumer ( DataConsumer < T > dataConsumer ) { _consumerList . add ( dataConsumer ) ; } @ Override public void consume ( Collection < DataEvent < T > > events ) throws ZoieException { for ( DataConsumer < T > consumer : _consumerList ) { consumer . consume ( events ) ; } } @ Override public String getVersion ( ) { String version = null ; if ( _consumerList != null ) { for ( DataConsumer < T > consumer : _consumerList ) { String ver = consumer . getVersion ( ) ; if ( _versionComparator . compare ( ver , version ) < <NUM_LIT:0> ) { version = ver ; } } } return version ; } @ Override public Comparator < String > getVersionComparator ( ) { return _versionComparator ; } } </s>
|
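A minimal, hypothetical sketch of wiring up the CompositeDataConsumer in the row above, written as a fragment. The event type MyEvent, the two consumer instances, the event batch, and the comparator are assumptions for illustration; only the CompositeDataConsumer API itself comes from the code above.

```java
// Hypothetical wiring: every batch of events is forwarded to each registered
// consumer in insertion order. getVersion() compares candidate versions against
// an initial null, so the comparator must tolerate null; with null sorted last,
// the composite ends up reporting the smallest version across its consumers.
Comparator<String> versionComparator = new Comparator<String>() {
  public int compare(String v1, String v2) {
    if (v1 == null) return (v2 == null) ? 0 : 1;  // null sorts last
    if (v2 == null) return -1;
    return v1.compareTo(v2);
  }
};
CompositeDataConsumer<MyEvent> composite = new CompositeDataConsumer<MyEvent>(versionComparator);
composite.addDataConsumer(indexingConsumer);  // hypothetical DataConsumer<MyEvent>
composite.addDataConsumer(auditConsumer);     // hypothetical DataConsumer<MyEvent>
composite.consume(eventBatch);                // may throw ZoieException
```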
<s> package com . senseidb . search . node . impl ; import org . json . JSONObject ; import com . senseidb . search . node . SenseiQueryBuilder ; import com . senseidb . search . node . SenseiQueryBuilderFactory ; import com . senseidb . search . req . SenseiQuery ; public abstract class AbstractJsonQueryBuilderFactory implements SenseiQueryBuilderFactory { @ Override public SenseiQueryBuilder getQueryBuilder ( SenseiQuery query ) throws Exception { JSONObject jsonQuery = null ; if ( query != null ) { byte [ ] bytes = query . toBytes ( ) ; jsonQuery = new JSONObject ( new String ( bytes , SenseiQuery . utf8Charset ) ) ; } return buildQueryBuilder ( jsonQuery ) ; } public abstract SenseiQueryBuilder buildQueryBuilder ( JSONObject jsonQuery ) ; } </s>
|
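To show how the abstract factory in the row above is typically filled in, here is a hypothetical concrete subclass. The class name and the match-all behavior are illustrative assumptions; the overridden method signature comes from AbstractJsonQueryBuilderFactory above, and the SenseiQueryBuilder interface appears later in this listing.

```java
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.json.JSONObject;

import com.senseidb.search.node.SenseiQueryBuilder;
import com.senseidb.search.node.impl.AbstractJsonQueryBuilderFactory;

// Hypothetical subclass: ignores the JSON payload and always matches all documents.
public class MatchAllJsonQueryBuilderFactory extends AbstractJsonQueryBuilderFactory {
  @Override
  public SenseiQueryBuilder buildQueryBuilder(JSONObject jsonQuery) {
    return new SenseiQueryBuilder() {
      public Query buildQuery() {
        return new MatchAllDocsQuery();
      }
      public Filter buildFilter() {
        return null;  // no additional filtering
      }
    };
  }
}
```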
<s> package com . senseidb . search . node . impl ; import java . util . Set ; import com . linkedin . norbert . javacompat . cluster . Node ; import com . senseidb . search . node . SenseiRequestScatterRewriter ; import com . senseidb . search . req . SenseiRequest ; public class NoopRequestScatterRewriter implements SenseiRequestScatterRewriter { @ Override public SenseiRequest rewrite ( SenseiRequest origReq , Node node , Set < Integer > partitions ) { return origReq ; } } </s>
|
<s> package com . senseidb . search . node . impl ; import com . linkedin . norbert . javacompat . cluster . ClusterClient ; import com . linkedin . norbert . javacompat . network . NettyNetworkServer ; import com . linkedin . norbert . javacompat . network . NetworkServer ; import com . linkedin . norbert . javacompat . network . NetworkServerConfig ; public class SenseiBuilderHelper { private SenseiBuilderHelper ( ) { } public static NetworkServer buildDefaultNetworkServer ( ClusterClient clusterClient ) { NetworkServerConfig serverConfig = new NetworkServerConfig ( ) ; serverConfig . setClusterClient ( clusterClient ) ; serverConfig . setRequestThreadCorePoolSize ( <NUM_LIT:5> ) ; serverConfig . setRequestThreadKeepAliveTimeSecs ( <NUM_LIT> ) ; serverConfig . setRequestThreadMaxPoolSize ( <NUM_LIT:10> ) ; return new NettyNetworkServer ( serverConfig ) ; } } </s>
|
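A one-line, hypothetical use of the helper in the row above; clusterClient is assumed to be an existing Norbert ClusterClient.

```java
// Hypothetical: a Netty-backed NetworkServer using the helper's default
// request thread-pool settings.
NetworkServer networkServer = SenseiBuilderHelper.buildDefaultNetworkServer(clusterClient);
```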
<s> package com . senseidb . search . node ; import com . senseidb . search . req . SenseiQuery ; public interface SenseiQueryBuilderFactory { SenseiQueryBuilder getQueryBuilder ( SenseiQuery query ) throws Exception ; } </s>
|
<s> package com . senseidb . search . node ; public interface SenseiServerAdminMBean { public int getId ( ) ; public int getPort ( ) ; public String getPartitions ( ) ; boolean isAvailable ( ) ; void setAvailable ( boolean available ) ; } </s>
|
<s> package com . senseidb . search . node ; import java . io . Serializable ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . Collection ; import java . util . Collections ; import java . util . Comparator ; import java . util . HashMap ; import java . util . HashSet ; import java . util . Iterator ; import java . util . LinkedList ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . Set ; import org . apache . log4j . Logger ; import org . apache . lucene . index . IndexReader ; import org . apache . lucene . search . ScoreDoc ; import org . apache . lucene . search . SortField ; import org . apache . lucene . util . PriorityQueue ; import proj . zoie . api . ZoieIndexReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . BrowseFacet ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . api . FacetAccessible ; import com . browseengine . bobo . api . FacetIterator ; import com . browseengine . bobo . api . FacetSpec ; import com . browseengine . bobo . api . FacetSpec . FacetSortSpec ; import com . browseengine . bobo . facets . CombinedFacetAccessible ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . data . FacetDataCache ; import com . browseengine . bobo . facets . data . PrimitiveLongArrayWrapper ; import com . browseengine . bobo . sort . DocComparator ; import com . browseengine . bobo . sort . DocIDPriorityQueue ; import com . browseengine . bobo . sort . SortCollector ; import com . browseengine . bobo . sort . SortCollector . CollectorContext ; import com . browseengine . bobo . util . ListMerger ; import com . senseidb . search . req . ErrorType ; import com . senseidb . search . req . SenseiError ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . mapred . impl . SenseiReduceFunctionWrapper ; public class ResultMerger { private final static Logger logger = Logger . getLogger ( ResultMerger . class . getName ( ) ) ; private final static class MyScoreDoc extends ScoreDoc { private static final long serialVersionUID = <NUM_LIT:1L> ; private BoboIndexReader reader ; private int finalDoc ; public int groupPos ; public Object rawGroupValue ; public Comparable sortValue ; public MyScoreDoc ( int docid , float score , int finalDoc , BoboIndexReader reader ) { super ( docid , score ) ; this . finalDoc = finalDoc ; this . reader = reader ; } SenseiHit getSenseiHit ( SenseiRequest req ) { SenseiHit hit = new SenseiHit ( ) ; if ( req . isFetchStoredFields ( ) || req . isFetchStoredValue ( ) ) { if ( req . isFetchStoredFields ( ) ) { try { hit . setStoredFields ( reader . document ( doc ) ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } try { IndexReader innerReader = reader . getInnerReader ( ) ; if ( innerReader instanceof ZoieIndexReader ) { hit . setStoredValue ( ( ( ZoieIndexReader ) innerReader ) . getStoredValue ( ( ( ZoieIndexReader ) innerReader ) . getUID ( doc ) ) ) ; } } catch ( Exception e ) { } } Collection < FacetHandler < ? > > facetHandlers = reader . getFacetHandlerMap ( ) . 
values ( ) ; Map < String , String [ ] > map = new HashMap < String , String [ ] > ( ) ; Map < String , Object [ ] > rawMap = new HashMap < String , Object [ ] > ( ) ; for ( FacetHandler < ? > facetHandler : facetHandlers ) { map . put ( facetHandler . getName ( ) , facetHandler . getFieldValues ( reader , doc ) ) ; rawMap . put ( facetHandler . getName ( ) , facetHandler . getRawFieldValues ( reader , doc ) ) ; } hit . setFieldValues ( map ) ; hit . setRawFieldValues ( rawMap ) ; hit . setUID ( ( ( ZoieIndexReader < BoboIndexReader > ) reader . getInnerReader ( ) ) . getUID ( doc ) ) ; hit . setDocid ( finalDoc ) ; hit . setScore ( score ) ; hit . setComparable ( sortValue ) ; hit . setGroupPosition ( groupPos ) ; String [ ] groupBy = req . getGroupBy ( ) ; if ( groupBy != null && groupBy . length > groupPos && groupBy [ groupPos ] != null ) { hit . setGroupField ( groupBy [ groupPos ] ) ; hit . setGroupValue ( hit . getField ( groupBy [ groupPos ] ) ) ; hit . setRawGroupValue ( hit . getRawField ( groupBy [ groupPos ] ) ) ; } return hit ; } } private final static class HitWithGroupQueue { public SenseiHit hit ; public PriorityQueue < MyScoreDoc > queue ; public ArrayList < Iterator < SenseiHit > > iterList = new ArrayList < Iterator < SenseiHit > > ( ) ; public HitWithGroupQueue ( SenseiHit hit , PriorityQueue < MyScoreDoc > queue ) { this . hit = hit ; this . queue = queue ; } } private static Map < String , FacetAccessible > mergeFacetContainer ( Collection < Map < String , FacetAccessible > > subMaps , SenseiRequest req ) { Map < String , Map < String , Integer > > counts = new HashMap < String , Map < String , Integer > > ( ) ; for ( Map < String , FacetAccessible > subMap : subMaps ) { for ( Map . Entry < String , FacetAccessible > entry : subMap . entrySet ( ) ) { String facetname = entry . getKey ( ) ; Map < String , Integer > count = counts . get ( facetname ) ; if ( count == null ) { count = new HashMap < String , Integer > ( ) ; counts . put ( facetname , count ) ; } Set < String > values = new HashSet < String > ( ) ; String [ ] rawvalues = null ; BrowseSelection selection = req . getSelection ( facetname ) ; if ( selection != null && ( rawvalues = selection . getValues ( ) ) != null ) { values . addAll ( Arrays . asList ( rawvalues ) ) ; } FacetAccessible facetAccessible = entry . getValue ( ) ; for ( BrowseFacet facet : facetAccessible . getFacets ( ) ) { if ( facet == null ) continue ; String val = facet . getValue ( ) ; int oldValue = count . containsKey ( val ) ? count . get ( val ) : <NUM_LIT:0> ; count . put ( val , oldValue + facet . getFacetValueHitCount ( ) ) ; values . remove ( val ) ; } if ( ! values . isEmpty ( ) ) { for ( String val : values ) { int oldValue = count . containsKey ( val ) ? count . get ( val ) : <NUM_LIT:0> ; BrowseFacet facet = facetAccessible . getFacet ( val ) ; int delta = <NUM_LIT:0> ; if ( facet != null ) { delta = facet . getFacetValueHitCount ( ) ; count . put ( val , oldValue + delta ) ; } } } facetAccessible . close ( ) ; } } Map < String , FacetAccessible > mergedFacetMap = new HashMap < String , FacetAccessible > ( ) ; for ( Entry < String , Map < String , Integer > > entry : counts . entrySet ( ) ) { String facet = entry . getKey ( ) ; Map < String , Integer > facetValueCounts = entry . getValue ( ) ; List < BrowseFacet > facets = new ArrayList < BrowseFacet > ( facetValueCounts . size ( ) ) ; for ( Entry < String , Integer > subEntry : facetValueCounts . entrySet ( ) ) { facets . add ( new BrowseFacet ( subEntry . 
getKey ( ) , subEntry . getValue ( ) ) ) ; } FacetSpec fspec = null ; Set < String > values = new HashSet < String > ( ) ; String [ ] rawvalues = null ; if ( req != null ) { fspec = req . getFacetSpec ( facet ) ; BrowseSelection selection = req . getSelection ( facet ) ; if ( selection != null && ( rawvalues = selection . getValues ( ) ) != null ) { values . addAll ( Arrays . asList ( rawvalues ) ) ; } } Comparator < BrowseFacet > facetComp = getComparator ( fspec ) ; Collections . sort ( facets , facetComp ) ; if ( fspec != null ) { int maxCount = fspec . getMaxCount ( ) ; int numToShow = facets . size ( ) ; if ( maxCount > <NUM_LIT:0> ) { numToShow = Math . min ( maxCount , numToShow ) ; } for ( int i = facets . size ( ) - <NUM_LIT:1> ; i >= numToShow ; i -- ) { if ( ! values . contains ( facets . get ( i ) . getValue ( ) ) ) { facets . remove ( i ) ; } } } MappedFacetAccessible mergedFacetAccessible = new MappedFacetAccessible ( facets . toArray ( new BrowseFacet [ facets . size ( ) ] ) ) ; mergedFacetMap . put ( facet , mergedFacetAccessible ) ; } return mergedFacetMap ; } private static Map < String , FacetAccessible > mergeFacetContainerServerSide ( Collection < Map < String , FacetAccessible > > subMaps , SenseiRequest req ) { Map < String , List < FacetAccessible > > counts = new HashMap < String , List < FacetAccessible > > ( ) ; for ( Map < String , FacetAccessible > subMap : subMaps ) { for ( Map . Entry < String , FacetAccessible > entry : subMap . entrySet ( ) ) { String facetname = entry . getKey ( ) ; List < FacetAccessible > count = counts . get ( facetname ) ; if ( count == null ) { count = new LinkedList < FacetAccessible > ( ) ; counts . put ( facetname , count ) ; } count . add ( entry . getValue ( ) ) ; } } Map < String , FacetAccessible > fieldMap = new HashMap < String , FacetAccessible > ( ) ; for ( Entry < String , List < FacetAccessible > > entry : counts . entrySet ( ) ) { String fieldname = entry . getKey ( ) ; List < FacetAccessible > facetAccs = entry . getValue ( ) ; if ( facetAccs . size ( ) == <NUM_LIT:1> ) { fieldMap . put ( fieldname , facetAccs . get ( <NUM_LIT:0> ) ) ; } else { fieldMap . put ( fieldname , new CombinedFacetAccessible ( req . getFacetSpec ( fieldname ) , facetAccs ) ) ; } } Map < String , FacetAccessible > mergedFacetMap = new HashMap < String , FacetAccessible > ( ) ; for ( Entry < String , FacetAccessible > entry : fieldMap . entrySet ( ) ) { String fieldname = entry . getKey ( ) ; FacetAccessible facetAcc = entry . getValue ( ) ; FacetSpec fspec = req . getFacetSpec ( fieldname ) ; BrowseSelection sel = req . getSelection ( fieldname ) ; Set < String > values = new HashSet < String > ( ) ; String [ ] rawvalues = null ; if ( sel != null && ( rawvalues = sel . getValues ( ) ) != null ) { values . addAll ( Arrays . asList ( rawvalues ) ) ; } List < BrowseFacet > facets = new ArrayList < BrowseFacet > ( ) ; facets . addAll ( facetAcc . getFacets ( ) ) ; for ( BrowseFacet bf : facets ) { values . remove ( bf . getValue ( ) ) ; } if ( values . size ( ) > <NUM_LIT:0> ) { for ( String value : values ) { facets . add ( facetAcc . getFacet ( value ) ) ; } } facetAcc . close ( ) ; Comparator < BrowseFacet > facetComp = getComparator ( fspec ) ; Collections . sort ( facets , facetComp ) ; MappedFacetAccessible mergedFacetAccessible = new MappedFacetAccessible ( facets . toArray ( new BrowseFacet [ facets . size ( ) ] ) ) ; mergedFacetMap . 
put ( fieldname , mergedFacetAccessible ) ; } return mergedFacetMap ; } private static Comparator < BrowseFacet > getComparator ( FacetSpec fspec ) { Comparator < BrowseFacet > facetComp ; if ( ( fspec == null ) || fspec . getOrderBy ( ) == FacetSortSpec . OrderHitsDesc ) { facetComp = new BrowseFacetHitsDescComparator ( ) ; } else { if ( fspec . getOrderBy ( ) == FacetSortSpec . OrderValueAsc ) { facetComp = new BrowseFacetValueAscComparator ( ) ; } else { facetComp = fspec . getCustomComparatorFactory ( ) . newComparator ( ) ; } } return facetComp ; } private static final class BrowseFacetValueAscComparator implements Comparator < BrowseFacet > { public int compare ( BrowseFacet f1 , BrowseFacet f2 ) { if ( f1 == null && f2 == null ) { return <NUM_LIT:0> ; } if ( f1 == null ) { return - <NUM_LIT:1> ; } if ( f2 == null ) { return <NUM_LIT:1> ; } int ret = f1 . getValue ( ) . compareTo ( f2 . getValue ( ) ) ; if ( f1 . getValue ( ) . startsWith ( "<STR_LIT:->" ) && f2 . getValue ( ) . startsWith ( "<STR_LIT:->" ) ) { ret *= - <NUM_LIT:1> ; } return ret ; } } private static final class BrowseFacetHitsDescComparator implements Comparator < BrowseFacet > { public int compare ( BrowseFacet f1 , BrowseFacet f2 ) { if ( f1 == null && f2 == null ) { return <NUM_LIT:0> ; } if ( f1 == null ) { return - <NUM_LIT:1> ; } if ( f2 == null ) { return <NUM_LIT:1> ; } int h1 = f1 . getFacetValueHitCount ( ) ; int h2 = f2 . getFacetValueHitCount ( ) ; int val = h2 - h1 ; if ( val == <NUM_LIT:0> ) { val = f1 . getValue ( ) . compareTo ( f2 . getValue ( ) ) ; } return val ; } } private static final class SenseiHitComparator implements Comparator < SenseiHit > { SortField [ ] _sortFields ; public SenseiHitComparator ( SortField [ ] sortFields ) { _sortFields = sortFields ; } public int compare ( SenseiHit o1 , SenseiHit o2 ) { if ( _sortFields . length == <NUM_LIT:0> ) { return o1 . getDocid ( ) - o2 . getDocid ( ) ; } else { int equalCount = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; i < _sortFields . length ; ++ i ) { String field = _sortFields [ i ] . getField ( ) ; int reverse = _sortFields [ i ] . getReverse ( ) ? - <NUM_LIT:1> : <NUM_LIT:1> ; if ( _sortFields [ i ] . getType ( ) == SortField . SCORE ) { float score1 = o1 . getScore ( ) ; float score2 = o2 . getScore ( ) ; if ( score1 == score2 ) { equalCount ++ ; continue ; } else { return ( score1 > score2 ) ? - reverse : reverse ; } } else if ( _sortFields [ i ] . getType ( ) == SortField . DOC ) { return o1 . getDocid ( ) - o2 . getDocid ( ) ; } else { String value1 = o1 . getField ( field ) ; String value2 = o2 . getField ( field ) ; if ( value1 == null && value2 == null ) { equalCount ++ ; continue ; } else if ( value1 == null ) return - reverse ; else if ( value2 == null ) return reverse ; else { int comp = value1 . compareTo ( value2 ) ; if ( value1 . startsWith ( "<STR_LIT:->" ) && value2 . startsWith ( "<STR_LIT:->" ) ) { comp *= - <NUM_LIT:1> ; } if ( comp != <NUM_LIT:0> ) { return comp * reverse ; } else { equalCount ++ ; continue ; } } } } if ( equalCount == _sortFields . length ) { return o1 . getDocid ( ) - o2 . 
getDocid ( ) ; } else { return <NUM_LIT:0> ; } } } } private static class MappedFacetAccessible implements FacetAccessible , Serializable { private static final long serialVersionUID = <NUM_LIT:1L> ; private final HashMap < String , BrowseFacet > _facetMap ; private final BrowseFacet [ ] _facets ; public MappedFacetAccessible ( BrowseFacet [ ] facets ) { _facetMap = new HashMap < String , BrowseFacet > ( ) ; for ( BrowseFacet facet : facets ) { if ( facet != null ) { _facetMap . put ( facet . getValue ( ) , facet ) ; } } _facets = facets ; } public BrowseFacet getFacet ( String value ) { return _facetMap . get ( value ) ; } public int getFacetHitsCount ( Object value ) { BrowseFacet facet = _facetMap . get ( value ) ; if ( facet != null ) return facet . getHitCount ( ) ; return <NUM_LIT:0> ; } public List < BrowseFacet > getFacets ( ) { return Arrays . asList ( _facets ) ; } @ Override public void close ( ) { } @ Override public FacetIterator iterator ( ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } public static int getNumHits ( Collection < SenseiResult > results ) { int numHits = <NUM_LIT:0> ; for ( SenseiResult res : results ) { numHits += res . getNumHits ( ) ; } return numHits ; } public static int getTotalDocs ( Collection < SenseiResult > results ) { int totalDocs = <NUM_LIT:0> ; for ( SenseiResult res : results ) { totalDocs += res . getTotalDocs ( ) ; } return totalDocs ; } public static int getNumGroups ( Collection < SenseiResult > results ) { int numGroups = <NUM_LIT:0> ; for ( SenseiResult res : results ) { numGroups += res . getNumGroups ( ) ; } return numGroups ; } public static long findLongestTime ( Collection < SenseiResult > results ) { long time = <NUM_LIT> ; for ( SenseiResult res : results ) { time = Math . max ( time , res . getTime ( ) ) ; } return time ; } public static String findParsedQuery ( Collection < SenseiResult > results ) { for ( SenseiResult res : results ) { return res . getParsedQuery ( ) ; } return "<STR_LIT>" ; } public static boolean hasSortCollector ( Collection < SenseiResult > results ) { for ( SenseiResult res : results ) { if ( res . getSortCollector ( ) != null && res . getSortCollector ( ) . contextList != null ) { return true ; } } return false ; } public static void createUniqueDocIds ( Collection < SenseiResult > results ) { int totalDocs = <NUM_LIT:0> ; for ( SenseiResult res : results ) { SenseiHit [ ] hits = res . getSenseiHits ( ) ; if ( hits != null ) { for ( SenseiHit hit : hits ) { hit . setDocid ( hit . getDocid ( ) + totalDocs ) ; } } totalDocs += res . getTotalDocs ( ) ; } } public static List < Iterator < SenseiHit > > flattenHits ( Collection < SenseiResult > results ) { List < Iterator < SenseiHit > > hitList = new ArrayList < Iterator < SenseiHit > > ( results . size ( ) ) ; for ( SenseiResult res : results ) { hitList . add ( Arrays . asList ( res . getSenseiHits ( ) ) . iterator ( ) ) ; } return hitList ; } private static final int UNKNOWN_GROUP_VALUE_TYPE = <NUM_LIT:0> ; private static final int NORMAL_GROUP_VALUE_TYPE = <NUM_LIT:1> ; private static final int LONG_ARRAY_GROUP_VALUE_TYPE = <NUM_LIT:2> ; public static SenseiResult merge ( final SenseiRequest req , Collection < SenseiResult > results , boolean onSearchNode ) { long start = System . currentTimeMillis ( ) ; List < Map < String , FacetAccessible > > facetList = new ArrayList < Map < String , FacetAccessible > > ( results . size ( ) ) ; final int topHits = req . getOffset ( ) + req . 
getCount ( ) ; final int numHits = getNumHits ( results ) ; final int numGroups = getNumGroups ( results ) ; int totalDocs = getTotalDocs ( results ) ; final long longestTime = findLongestTime ( results ) ; final String parsedQuery = findParsedQuery ( results ) ; final boolean hasSortCollector = hasSortCollector ( results ) ; createUniqueDocIds ( results ) ; List < Iterator < SenseiHit > > hitLists = flattenHits ( results ) ; List < FacetAccessible > [ ] groupAccessibles = extractFacetAccessible ( results ) ; for ( SenseiResult res : results ) { Map < String , FacetAccessible > facetMap = res . getFacetMap ( ) ; if ( facetMap != null ) { facetList . add ( facetMap ) ; } } Map < String , FacetAccessible > mergedFacetMap = null ; if ( onSearchNode ) { mergedFacetMap = mergeFacetContainerServerSide ( facetList , req ) ; } else { mergedFacetMap = mergeFacetContainer ( facetList , req ) ; } Comparator < SenseiHit > comparator = new SenseiHitComparator ( req . getSort ( ) ) ; SenseiHit [ ] hits ; if ( req . getGroupBy ( ) == null || req . getGroupBy ( ) . length == <NUM_LIT:0> ) { List < SenseiHit > mergedList = ListMerger . mergeLists ( req . getOffset ( ) , req . getCount ( ) , hitLists . toArray ( new Iterator [ hitLists . size ( ) ] ) , comparator ) ; hits = mergedList . toArray ( new SenseiHit [ mergedList . size ( ) ] ) ; } else { int [ ] rawGroupValueType = new int [ req . getGroupBy ( ) . length ] ; PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp = new PrimitiveLongArrayWrapper ( null ) ; Iterator < SenseiHit > mergedIter = ListMerger . mergeLists ( hitLists , comparator ) ; List < SenseiHit > hitsList = null ; if ( ! hasSortCollector ) { hitsList = buildHitsListNoSortCollector ( req , topHits , rawGroupValueType , mergedIter , req . getOffset ( ) ) ; } else { int offsetLeft = req . getOffset ( ) ; MyScoreDoc pre = null ; if ( topHits > <NUM_LIT:0> && groupAccessibles != null && groupAccessibles . length > <NUM_LIT:1> ) { hitsList = buildHitsList ( req , results , topHits , groupAccessibles , rawGroupValueType , primitiveLongArrayWrapperTmp ) ; } else { hitsList = buildHitsListNoGroupAccessibles ( req , topHits , rawGroupValueType , primitiveLongArrayWrapperTmp , mergedIter , offsetLeft ) ; } } hits = hitsList . toArray ( new SenseiHit [ hitsList . size ( ) ] ) ; PrepareGroupMappings prepareGroupMappings = new PrepareGroupMappings ( req , results , hasSortCollector , hits , rawGroupValueType , primitiveLongArrayWrapperTmp ) . invoke ( ) ; Map < Object , HitWithGroupQueue > [ ] groupMaps = prepareGroupMappings . getGroupMaps ( ) ; totalDocs = prepareGroupMappings . getTotalDocs ( ) ; if ( hasSortCollector ) { for ( Map < Object , HitWithGroupQueue > map : groupMaps ) { for ( HitWithGroupQueue hwg : map . values ( ) ) { int index = hwg . queue . size ( ) - <NUM_LIT:1> ; if ( index >= <NUM_LIT:0> ) { SenseiHit [ ] groupHits = new SenseiHit [ index + <NUM_LIT:1> ] ; while ( index >= <NUM_LIT:0> ) { groupHits [ index ] = hwg . queue . pop ( ) . getSenseiHit ( req ) ; -- index ; } hwg . hit . setGroupHits ( groupHits ) ; } } } } else { for ( Map < Object , HitWithGroupQueue > map : groupMaps ) { for ( HitWithGroupQueue hwg : map . values ( ) ) { List < SenseiHit > mergedList = ListMerger . mergeLists ( <NUM_LIT:0> , req . getMaxPerGroup ( ) , hwg . iterList . toArray ( new Iterator [ hwg . iterList . size ( ) ] ) , comparator ) ; SenseiHit [ ] groupHits = mergedList . toArray ( new SenseiHit [ mergedList . size ( ) ] ) ; hwg . hit . 
setGroupHits ( groupHits ) ; } } } } if ( groupAccessibles != null ) { for ( List < FacetAccessible > list : groupAccessibles ) { if ( list != null ) { for ( FacetAccessible acc : list ) { if ( acc != null ) acc . close ( ) ; } } } } SenseiResult merged = new SenseiResult ( ) ; merged . setHits ( hits ) ; merged . setNumHits ( numHits ) ; merged . setNumGroups ( numGroups ) ; merged . setTotalDocs ( totalDocs ) ; merged . addAll ( mergedFacetMap ) ; long end = System . currentTimeMillis ( ) ; merged . setTime ( longestTime + end - start ) ; mergerErrors ( merged , req , results , parsedQuery ) ; if ( req . getMapReduceFunction ( ) != null ) { if ( onSearchNode ) { merged . setMapReduceResult ( SenseiReduceFunctionWrapper . combine ( req . getMapReduceFunction ( ) , SenseiReduceFunctionWrapper . extractMapReduceResults ( results ) ) ) ; } else { merged . setMapReduceResult ( SenseiReduceFunctionWrapper . reduce ( req . getMapReduceFunction ( ) , SenseiReduceFunctionWrapper . extractMapReduceResults ( results ) ) ) ; } } return merged ; } private static List < SenseiHit > buildHitsListNoGroupAccessibles ( SenseiRequest req , int topHits , int [ ] rawGroupValueType , PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp , Iterator < SenseiHit > mergedIter , int offsetLeft ) { List < SenseiHit > hitsList = new ArrayList < SenseiHit > ( req . getCount ( ) ) ; Object rawGroupValue = null ; Object firstRawGroupValue = null ; Set < Object > [ ] groupSets = new Set [ <NUM_LIT:1> ] ; groupSets [ <NUM_LIT:0> ] = new HashSet < Object > ( topHits ) ; while ( mergedIter . hasNext ( ) ) { SenseiHit hit = mergedIter . next ( ) ; firstRawGroupValue = null ; int i = <NUM_LIT:0> ; for ( ; i < groupSets . length ; ++ i ) { rawGroupValue = extractRawGroupValue ( rawGroupValueType , i , primitiveLongArrayWrapperTmp , hit ) ; if ( firstRawGroupValue == null ) firstRawGroupValue = rawGroupValue ; if ( groupSets [ i ] . contains ( rawGroupValue ) ) { i = - <NUM_LIT:1> ; break ; } } if ( i >= <NUM_LIT:0> ) { if ( i >= groupSets . length ) { i = <NUM_LIT:0> ; rawGroupValue = firstRawGroupValue ; } if ( offsetLeft > <NUM_LIT:0> ) -- offsetLeft ; else { hitsList . add ( hit ) ; if ( hitsList . size ( ) >= req . getCount ( ) ) break ; } if ( rawGroupValueType [ i ] == LONG_ARRAY_GROUP_VALUE_TYPE ) groupSets [ i ] . add ( new PrimitiveLongArrayWrapper ( primitiveLongArrayWrapperTmp . data ) ) ; else groupSets [ i ] . add ( rawGroupValue ) ; } } return hitsList ; } private static List < SenseiHit > buildHitsList ( SenseiRequest req , Collection < SenseiResult > results , int topHits , List < FacetAccessible > [ ] groupAccessibles , int [ ] rawGroupValueType , PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp ) { List < SenseiHit > hitsList = new ArrayList < SenseiHit > ( req . getCount ( ) ) ; MyScoreDoc pre = null ; Object rawGroupValue = null ; Object firstRawGroupValue = null ; CombinedFacetAccessible [ ] combinedFacetAccessibles = new CombinedFacetAccessible [ groupAccessibles . length ] ; for ( int i = <NUM_LIT:0> ; i < groupAccessibles . length ; i ++ ) { combinedFacetAccessibles [ i ] = new CombinedFacetAccessible ( new FacetSpec ( ) , groupAccessibles [ i ] ) ; } Set < Object > [ ] groupSets = new Set [ groupAccessibles . length ] ; for ( int i = <NUM_LIT:0> ; i < groupAccessibles . length ; ++ i ) { groupSets [ i ] = new HashSet < Object > ( topHits ) ; } Map < Object , MyScoreDoc > [ ] valueDocMaps = new Map [ groupAccessibles . length ] ; for ( int i = <NUM_LIT:0> ; i < groupAccessibles . 
length ; ++ i ) { valueDocMaps [ i ] = new HashMap < Object , MyScoreDoc > ( topHits ) ; } int totalDocs = <NUM_LIT:0> ; MyScoreDoc tmpScoreDoc = new MyScoreDoc ( <NUM_LIT:0> , <NUM_LIT:0.0f> , <NUM_LIT:0> , null ) ; MyScoreDoc bottom = null ; boolean queueFull = false ; DocIDPriorityQueue docQueue = new DocIDPriorityQueue ( new DocComparator ( ) { public int compare ( ScoreDoc doc1 , ScoreDoc doc2 ) { return ( ( MyScoreDoc ) doc1 ) . sortValue . compareTo ( ( ( MyScoreDoc ) doc2 ) . sortValue ) ; } public Comparable value ( ScoreDoc doc ) { return ( ( MyScoreDoc ) doc ) . sortValue ; } } , topHits , <NUM_LIT:0> ) ; for ( SenseiResult res : results ) { SortCollector sortCollector = res . getSortCollector ( ) ; if ( sortCollector == null ) continue ; Iterator < CollectorContext > contextIter = sortCollector . contextList . iterator ( ) ; CollectorContext currentContext = null ; int contextLeft = <NUM_LIT:0> ; FacetDataCache [ ] dataCaches = new FacetDataCache [ sortCollector . groupByMulti . length ] ; while ( contextIter . hasNext ( ) ) { currentContext = contextIter . next ( ) ; contextLeft = currentContext . length ; if ( contextLeft > <NUM_LIT:0> ) { for ( int j = <NUM_LIT:0> ; j < sortCollector . groupByMulti . length ; ++ j ) dataCaches [ j ] = ( FacetDataCache ) sortCollector . groupByMulti [ j ] . getFacetData ( currentContext . reader ) ; break ; } } Iterator < float [ ] > scoreArrayIter = sortCollector . scorearraylist != null ? sortCollector . scorearraylist . iterator ( ) : null ; if ( contextLeft > <NUM_LIT:0> ) { for ( int [ ] docs : sortCollector . docidarraylist ) { float [ ] scores = scoreArrayIter != null ? scoreArrayIter . next ( ) : null ; for ( int i = <NUM_LIT:0> ; i < SortCollector . BLOCK_SIZE ; ++ i ) { tmpScoreDoc . doc = docs [ i ] ; tmpScoreDoc . score = scores != null ? scores [ i ] : <NUM_LIT:0.0f> ; tmpScoreDoc . finalDoc = currentContext . base + totalDocs + tmpScoreDoc . doc ; tmpScoreDoc . reader = currentContext . reader ; tmpScoreDoc . sortValue = currentContext . comparator . value ( tmpScoreDoc ) ; firstRawGroupValue = null ; int j = <NUM_LIT:0> ; for ( ; j < sortCollector . groupByMulti . length ; ++ j ) { rawGroupValue = dataCaches [ j ] . valArray . getRawValue ( dataCaches [ j ] . orderArray . get ( tmpScoreDoc . doc ) ) ; rawGroupValue = extractRawGroupValue ( rawGroupValueType , j , primitiveLongArrayWrapperTmp , rawGroupValue ) ; if ( firstRawGroupValue == null ) firstRawGroupValue = rawGroupValue ; pre = valueDocMaps [ j ] . get ( rawGroupValue ) ; if ( pre != null ) { j = - <NUM_LIT:1> ; break ; } if ( rawGroupValueType [ j ] == LONG_ARRAY_GROUP_VALUE_TYPE ) { if ( combinedFacetAccessibles [ j ] . getCappedFacetCount ( primitiveLongArrayWrapperTmp . data , <NUM_LIT:2> ) != <NUM_LIT:1> ) break ; } else { if ( combinedFacetAccessibles [ j ] . getCappedFacetCount ( rawGroupValue , <NUM_LIT:2> ) != <NUM_LIT:1> ) break ; } } if ( j < <NUM_LIT:0> ) { if ( tmpScoreDoc . sortValue . compareTo ( pre . sortValue ) < <NUM_LIT:0> ) { tmpScoreDoc . groupPos = pre . groupPos ; tmpScoreDoc . rawGroupValue = rawGroupValue ; MyScoreDoc tmp = pre ; bottom = ( MyScoreDoc ) docQueue . replace ( tmpScoreDoc , pre ) ; valueDocMaps [ tmpScoreDoc . groupPos ] . put ( rawGroupValue , tmpScoreDoc ) ; tmpScoreDoc = tmp ; } } else { if ( j >= sortCollector . groupByMulti . length ) { j = <NUM_LIT:0> ; rawGroupValue = firstRawGroupValue ; } if ( ! queueFull || tmpScoreDoc . sortValue . compareTo ( bottom . sortValue ) < <NUM_LIT:0> ) { if ( queueFull ) { tmpScoreDoc . 
groupPos = j ; tmpScoreDoc . rawGroupValue = rawGroupValue ; MyScoreDoc tmp = bottom ; valueDocMaps [ tmp . groupPos ] . remove ( tmp . rawGroupValue ) ; bottom = ( MyScoreDoc ) docQueue . replace ( tmpScoreDoc ) ; valueDocMaps [ j ] . put ( rawGroupValue , tmpScoreDoc ) ; tmpScoreDoc = tmp ; } else { MyScoreDoc tmp = new MyScoreDoc ( tmpScoreDoc . doc , tmpScoreDoc . score , currentContext . base + totalDocs + tmpScoreDoc . doc , currentContext . reader ) ; tmp . groupPos = j ; tmp . rawGroupValue = rawGroupValue ; tmp . sortValue = tmpScoreDoc . sortValue ; bottom = ( MyScoreDoc ) docQueue . add ( tmp ) ; valueDocMaps [ j ] . put ( rawGroupValue , tmp ) ; queueFull = ( docQueue . size >= topHits ) ; } } } -- contextLeft ; if ( contextLeft <= <NUM_LIT:0> ) { while ( contextIter . hasNext ( ) ) { currentContext = contextIter . next ( ) ; contextLeft = currentContext . length ; if ( contextLeft > <NUM_LIT:0> ) { for ( j = <NUM_LIT:0> ; j < sortCollector . groupByMulti . length ; ++ j ) dataCaches [ j ] = ( FacetDataCache ) sortCollector . groupByMulti [ j ] . getFacetData ( currentContext . reader ) ; break ; } } if ( contextLeft <= <NUM_LIT:0> ) break ; } } } } totalDocs += res . getTotalDocs ( ) ; } int len = docQueue . size ( ) - req . getOffset ( ) ; if ( len < <NUM_LIT:0> ) len = <NUM_LIT:0> ; SenseiHit [ ] hitArray = new SenseiHit [ len ] ; for ( int i = hitArray . length - <NUM_LIT:1> ; i >= <NUM_LIT:0> ; -- i ) { tmpScoreDoc = ( MyScoreDoc ) docQueue . pop ( ) ; hitArray [ i ] = tmpScoreDoc . getSenseiHit ( req ) ; } for ( int i = <NUM_LIT:0> ; i < hitArray . length ; ++ i ) hitsList . add ( hitArray [ i ] ) ; return hitsList ; } private static List < SenseiHit > buildHitsListNoSortCollector ( SenseiRequest req , int topHits , int [ ] rawGroupValueType , Iterator < SenseiHit > mergedIter , int offsetLeft ) { List < SenseiHit > hitsList = new ArrayList < SenseiHit > ( req . getCount ( ) ) ; PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp = new PrimitiveLongArrayWrapper ( null ) ; Map < Object , SenseiHit > [ ] groupHitMaps = new Map [ req . getGroupBy ( ) . length ] ; for ( int i = <NUM_LIT:0> ; i < groupHitMaps . length ; ++ i ) { groupHitMaps [ i ] = new HashMap < Object , SenseiHit > ( topHits ) ; } while ( mergedIter . hasNext ( ) ) { SenseiHit hit = mergedIter . next ( ) ; Object rawGroupValue = extractRawGroupValue ( rawGroupValueType , hit . getGroupPosition ( ) , primitiveLongArrayWrapperTmp , hit ) ; SenseiHit pre = groupHitMaps [ hit . getGroupPosition ( ) ] . get ( rawGroupValue ) ; if ( pre != null ) { if ( offsetLeft <= <NUM_LIT:0> ) { pre . setGroupHitsCount ( pre . getGroupHitsCount ( ) + hit . getGroupHitsCount ( ) ) ; } } else { if ( offsetLeft > <NUM_LIT:0> ) -- offsetLeft ; else if ( hitsList . size ( ) < req . getCount ( ) ) hitsList . add ( hit ) ; if ( rawGroupValueType [ <NUM_LIT:0> ] == <NUM_LIT:2> ) groupHitMaps [ hit . getGroupPosition ( ) ] . put ( new PrimitiveLongArrayWrapper ( primitiveLongArrayWrapperTmp . data ) , hit ) ; else groupHitMaps [ hit . getGroupPosition ( ) ] . put ( rawGroupValue , hit ) ; } } return hitsList ; } private static Object extractRawGroupValue ( int [ ] rawGroupValueType , int groupPosition , PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp , SenseiHit hit ) { return extractRawGroupValue ( rawGroupValueType , groupPosition , primitiveLongArrayWrapperTmp , hit . 
getRawGroupValue ( ) ) ; } private static Object extractRawGroupValue ( int [ ] rawGroupValueType , int groupPosition , PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp , Object rawGroupValue ) { if ( rawGroupValueType [ groupPosition ] == LONG_ARRAY_GROUP_VALUE_TYPE ) { primitiveLongArrayWrapperTmp . data = ( long [ ] ) rawGroupValue ; rawGroupValue = primitiveLongArrayWrapperTmp ; } else if ( rawGroupValueType [ groupPosition ] == UNKNOWN_GROUP_VALUE_TYPE ) { if ( rawGroupValue != null ) { if ( rawGroupValue instanceof long [ ] ) { rawGroupValueType [ groupPosition ] = LONG_ARRAY_GROUP_VALUE_TYPE ; primitiveLongArrayWrapperTmp . data = ( long [ ] ) rawGroupValue ; rawGroupValue = primitiveLongArrayWrapperTmp ; } else rawGroupValueType [ groupPosition ] = NORMAL_GROUP_VALUE_TYPE ; } } return rawGroupValue ; } private static List < FacetAccessible > [ ] extractFacetAccessible ( Collection < SenseiResult > results ) { List < FacetAccessible > [ ] groupAccessibles = null ; for ( SenseiResult res : results ) { if ( res . getGroupAccessibles ( ) != null ) { if ( groupAccessibles == null ) { groupAccessibles = new List [ res . getGroupAccessibles ( ) . length ] ; for ( int i = <NUM_LIT:0> ; i < groupAccessibles . length ; ++ i ) { groupAccessibles [ i ] = new ArrayList < FacetAccessible > ( results . size ( ) ) ; } } for ( int i = <NUM_LIT:0> ; i < groupAccessibles . length ; ++ i ) { groupAccessibles [ i ] . add ( res . getGroupAccessibles ( ) [ i ] ) ; } } } return groupAccessibles ; } public static class PrepareGroupMappings { private final SenseiRequest req ; private final Collection < SenseiResult > results ; private final boolean hasSortCollector ; private final SenseiHit [ ] hits ; private final int [ ] rawGroupValueType ; private final PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp ; private int totalDocs ; private Map < Object , HitWithGroupQueue > [ ] groupMaps ; public PrepareGroupMappings ( SenseiRequest req , Collection < SenseiResult > results , boolean hasSortCollector , SenseiHit [ ] hits , int [ ] rawGroupValueType , PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp ) { this . req = req ; this . results = results ; this . hasSortCollector = hasSortCollector ; this . hits = hits ; this . rawGroupValueType = rawGroupValueType ; this . primitiveLongArrayWrapperTmp = primitiveLongArrayWrapperTmp ; groupMaps = new Map [ req . getGroupBy ( ) . length ] ; for ( int i = <NUM_LIT:0> ; i < groupMaps . length ; ++ i ) { groupMaps [ i ] = new HashMap < Object , HitWithGroupQueue > ( hits . length * <NUM_LIT:2> ) ; } } public int getTotalDocs ( ) { return totalDocs ; } public Map < Object , HitWithGroupQueue > [ ] getGroupMaps ( ) { return groupMaps ; } public PrepareGroupMappings invoke ( ) { Object rawGroupValue ; for ( SenseiHit hit : hits ) { rawGroupValue = hit . getRawField ( req . getGroupBy ( ) [ hit . getGroupPosition ( ) ] ) ; rawGroupValue = extractRawGroupValue ( rawGroupValueType , hit . getGroupPosition ( ) , primitiveLongArrayWrapperTmp , rawGroupValue ) ; groupMaps [ hit . getGroupPosition ( ) ] . put ( rawGroupValue , new HitWithGroupQueue ( hit , new PriorityQueue < MyScoreDoc > ( ) { private int r ; { this . initialize ( req . getMaxPerGroup ( ) <= <NUM_LIT:1> ? <NUM_LIT:0> : req . getMaxPerGroup ( ) ) ; } protected boolean lessThan ( MyScoreDoc a , MyScoreDoc b ) { r = a . sortValue . compareTo ( b . sortValue ) ; if ( r > <NUM_LIT:0> ) return true ; else if ( r < <NUM_LIT:0> ) return false ; else return ( a . finalDoc > b . 
finalDoc ) ; } } ) ) ; } MyScoreDoc tmpScoreDoc = null ; int doc = <NUM_LIT:0> ; float score = <NUM_LIT:0.0f> ; HitWithGroupQueue hitWithGroupQueue = null ; totalDocs = <NUM_LIT:0> ; for ( SenseiResult res : results ) { if ( hasSortCollector ) { SortCollector sortCollector = res . getSortCollector ( ) ; if ( sortCollector == null ) continue ; Iterator < CollectorContext > contextIter = sortCollector . contextList . iterator ( ) ; CollectorContext currentContext = null ; int contextLeft = <NUM_LIT:0> ; FacetDataCache [ ] dataCaches = new FacetDataCache [ sortCollector . groupByMulti . length ] ; while ( contextIter . hasNext ( ) ) { currentContext = contextIter . next ( ) ; contextLeft = currentContext . length ; if ( contextLeft > <NUM_LIT:0> ) { for ( int j = <NUM_LIT:0> ; j < sortCollector . groupByMulti . length ; ++ j ) dataCaches [ j ] = ( FacetDataCache ) sortCollector . groupByMulti [ j ] . getFacetData ( currentContext . reader ) ; break ; } } Iterator < float [ ] > scoreArrayIter = sortCollector . scorearraylist != null ? sortCollector . scorearraylist . iterator ( ) : null ; if ( contextLeft > <NUM_LIT:0> ) { for ( int [ ] docs : sortCollector . docidarraylist ) { float [ ] scores = scoreArrayIter != null ? scoreArrayIter . next ( ) : null ; for ( int i = <NUM_LIT:0> ; i < SortCollector . BLOCK_SIZE ; ++ i ) { doc = docs [ i ] ; score = scores != null ? scores [ i ] : <NUM_LIT:0.0f> ; int j = <NUM_LIT:0> ; for ( ; j < sortCollector . groupByMulti . length ; ++ j ) { rawGroupValue = extractRawGroupValue ( rawGroupValueType , j , primitiveLongArrayWrapperTmp , dataCaches [ j ] . valArray . getRawValue ( dataCaches [ j ] . orderArray . get ( doc ) ) ) ; hitWithGroupQueue = groupMaps [ j ] . get ( rawGroupValue ) ; if ( hitWithGroupQueue != null ) { hitWithGroupQueue . hit . setGroupHitsCount ( hitWithGroupQueue . hit . getGroupHitsCount ( ) + <NUM_LIT:1> ) ; if ( tmpScoreDoc == null ) tmpScoreDoc = new MyScoreDoc ( doc , score , currentContext . base + totalDocs + doc , currentContext . reader ) ; else { tmpScoreDoc . doc = doc ; tmpScoreDoc . score = score ; tmpScoreDoc . finalDoc = currentContext . base + totalDocs + doc ; tmpScoreDoc . reader = currentContext . reader ; } tmpScoreDoc . sortValue = currentContext . comparator . value ( tmpScoreDoc ) ; tmpScoreDoc . groupPos = j ; tmpScoreDoc . rawGroupValue = rawGroupValue ; tmpScoreDoc = hitWithGroupQueue . queue . insertWithOverflow ( tmpScoreDoc ) ; break ; } } -- contextLeft ; if ( contextLeft <= <NUM_LIT:0> ) { while ( contextIter . hasNext ( ) ) { currentContext = contextIter . next ( ) ; contextLeft = currentContext . length ; if ( contextLeft > <NUM_LIT:0> ) { for ( j = <NUM_LIT:0> ; j < sortCollector . groupByMulti . length ; ++ j ) dataCaches [ j ] = ( FacetDataCache ) sortCollector . groupByMulti [ j ] . getFacetData ( currentContext . reader ) ; break ; } } if ( contextLeft <= <NUM_LIT:0> ) break ; } } } } sortCollector . close ( ) ; } else { if ( res . getSenseiHits ( ) != null ) { for ( SenseiHit hit : res . getSenseiHits ( ) ) { if ( hit . getGroupHits ( ) != null ) { rawGroupValue = hit . getRawGroupValue ( ) ; if ( rawGroupValueType [ hit . getGroupPosition ( ) ] == LONG_ARRAY_GROUP_VALUE_TYPE ) { primitiveLongArrayWrapperTmp . data = ( long [ ] ) rawGroupValue ; rawGroupValue = primitiveLongArrayWrapperTmp ; } hitWithGroupQueue = groupMaps [ hit . getGroupPosition ( ) ] . get ( rawGroupValue ) ; if ( hitWithGroupQueue != null ) hitWithGroupQueue . iterList . add ( Arrays . asList ( hit . 
getSenseiGroupHits ( ) ) . iterator ( ) ) ; } } } } totalDocs += res . getTotalDocs ( ) ; } return this ; } } private static void mergerErrors ( SenseiResult merged , final SenseiRequest req , Collection < SenseiResult > results , String parsedQuery ) { merged . setParsedQuery ( parsedQuery ) ; merged . getErrors ( ) . addAll ( req . getErrors ( ) ) ; for ( SenseiResult res : results ) { merged . getErrors ( ) . addAll ( res . getErrors ( ) ) ; if ( res . getBoboErrors ( ) . size ( ) > <NUM_LIT:0> ) { for ( String boboError : res . getBoboErrors ( ) ) { merged . addError ( new SenseiError ( boboError , ErrorType . BoboExecutionError ) ) ; } } } } } </s>
|
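As a pointer to how the ResultMerger above is invoked, a minimal hypothetical call; request and partialResults are assumed to exist. The third argument, onSearchNode, selects the facet-merge path (mergeFacetContainerServerSide when true, mergeFacetContainer when false); the broker-side call sites later in this listing pass false.

```java
// Hypothetical merge of per-partition partial results into a single response.
SenseiResult merged = ResultMerger.merge(request, partialResults, false /* onSearchNode */);
```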
<s> package com . senseidb . search . node ; import java . util . Set ; import com . linkedin . norbert . javacompat . cluster . Node ; import com . senseidb . search . req . SenseiRequest ; public interface SenseiRequestScatterRewriter { SenseiRequest rewrite ( SenseiRequest origReq , Node node , Set < Integer > partitions ) ; } </s>
|
<s> package com . senseidb . search . node ; import org . apache . lucene . queryParser . ParseException ; import org . apache . lucene . search . Filter ; import org . apache . lucene . search . Query ; public interface SenseiQueryBuilder { Query buildQuery ( ) throws ParseException ; Filter buildFilter ( ) throws ParseException ; } </s>
|
<s> package com . senseidb . search . node ; import com . senseidb . metrics . MetricFactory ; import it . unimi . dsi . fastutil . ints . IntOpenHashSet ; import java . lang . management . ManagementFactory ; import java . util . List ; import java . util . Map ; import java . util . Set ; import javax . management . MBeanServer ; import javax . management . ObjectName ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Document ; import proj . zoie . api . indexing . AbstractZoieIndexable ; import com . browseengine . bobo . api . FacetSpec ; import com . linkedin . norbert . NorbertException ; import com . linkedin . norbert . javacompat . cluster . ClusterClient ; import com . linkedin . norbert . javacompat . cluster . Node ; import com . linkedin . norbert . javacompat . network . PartitionedNetworkClient ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . indexing . DefaultJsonSchemaInterpreter ; import com . senseidb . search . req . ErrorType ; import com . senseidb . search . req . SenseiError ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . svc . impl . CoreSenseiServiceImpl ; import com . yammer . metrics . core . Counter ; import com . yammer . metrics . core . MetricName ; public class SenseiBroker extends AbstractConsistentHashBroker < SenseiRequest , SenseiResult > { private final static Logger logger = Logger . getLogger ( SenseiBroker . class ) ; private final boolean allowPartialMerge ; private final ClusterClient clusterClient ; private final Counter numberOfNodesInTheCluster = MetricFactory . newCounter ( new MetricName ( SenseiBroker . class , "<STR_LIT>" ) ) ; public SenseiBroker ( PartitionedNetworkClient < String > networkClient , ClusterClient clusterClient , boolean allowPartialMerge ) throws NorbertException { super ( networkClient , CoreSenseiServiceImpl . JAVA_SERIALIZER ) ; this . clusterClient = clusterClient ; this . allowPartialMerge = allowPartialMerge ; clusterClient . addListener ( this ) ; logger . info ( "<STR_LIT>" + networkClient + "<STR_LIT:U+0020>" + clusterClient ) ; } public static void recoverSrcData ( SenseiResult res , SenseiHit [ ] hits , boolean isFetchStoredFields ) { if ( hits != null ) { for ( SenseiHit hit : hits ) { try { byte [ ] dataBytes = hit . getStoredValue ( ) ; if ( dataBytes == null || dataBytes . length == <NUM_LIT:0> ) { Document doc = hit . getStoredFields ( ) ; if ( doc != null ) { dataBytes = doc . getBinaryValue ( AbstractZoieIndexable . DOCUMENT_STORE_FIELD ) ; if ( dataBytes == null || dataBytes . length == <NUM_LIT:0> ) { dataBytes = doc . getBinaryValue ( SenseiSchema . SRC_DATA_COMPRESSED_FIELD_NAME ) ; if ( dataBytes == null || dataBytes . length == <NUM_LIT:0> ) { dataBytes = doc . getBinaryValue ( SenseiSchema . SRC_DATA_FIELD_NAME ) ; if ( dataBytes != null && dataBytes . length > <NUM_LIT:0> ) { hit . setSrcData ( new String ( dataBytes , "<STR_LIT:UTF-8>" ) ) ; dataBytes = null ; } } doc . removeFields ( SenseiSchema . SRC_DATA_COMPRESSED_FIELD_NAME ) ; doc . removeFields ( SenseiSchema . SRC_DATA_FIELD_NAME ) ; } } } if ( dataBytes != null && dataBytes . length > <NUM_LIT:0> ) { byte [ ] data ; try { data = DefaultJsonSchemaInterpreter . decompress ( dataBytes ) ; } catch ( Exception ex ) { data = dataBytes ; } hit . setSrcData ( new String ( data , "<STR_LIT:UTF-8>" ) ) ; } } catch ( Exception e ) { logger . error ( e . 
getMessage ( ) , e ) ; res . getErrors ( ) . add ( new SenseiError ( e . getMessage ( ) , ErrorType . BrokerGatherError ) ) ; } recoverSrcData ( res , hit . getSenseiGroupHits ( ) , isFetchStoredFields ) ; if ( ! isFetchStoredFields ) hit . setStoredFields ( null ) ; } } } @ Override public SenseiResult mergeResults ( SenseiRequest request , List < SenseiResult > resultList ) { SenseiResult res = ResultMerger . merge ( request , resultList , false ) ; if ( request . isFetchStoredFields ( ) || request . isFetchStoredValue ( ) ) recoverSrcData ( res , res . getSenseiHits ( ) , request . isFetchStoredFields ( ) ) ; return res ; } @ Override public SenseiResult getEmptyResultInstance ( ) { return new SenseiResult ( ) ; } @ Override public SenseiRequest customizeRequest ( SenseiRequest request ) { request . setCount ( request . getOffset ( ) + request . getCount ( ) ) ; request . setOffset ( <NUM_LIT:0> ) ; Map < String , FacetSpec > facetSpecs = request . getFacetSpecs ( ) ; if ( facetSpecs != null ) { for ( Map . Entry < String , FacetSpec > entry : facetSpecs . entrySet ( ) ) { FacetSpec spec = entry . getValue ( ) ; if ( spec != null && spec . getMaxCount ( ) < <NUM_LIT> ) spec . setMaxCount ( <NUM_LIT> ) ; } } if ( ! request . isFetchStoredFields ( ) ) request . setFetchStoredFields ( request . isFetchStoredValue ( ) ) ; return request ; } public void handleClusterConnected ( Set < Node > nodes ) { _partitions = getPartitions ( nodes ) ; numberOfNodesInTheCluster . clear ( ) ; numberOfNodesInTheCluster . inc ( getNumberOfNodes ( ) ) ; logger . info ( "<STR_LIT>" + nodes . toString ( ) ) ; logger . info ( "<STR_LIT>" + _partitions . toString ( ) ) ; } public void handleClusterDisconnected ( ) { logger . info ( "<STR_LIT>" ) ; _partitions = new IntOpenHashSet ( ) ; } public void handleClusterNodesChanged ( Set < Node > nodes ) { _partitions = getPartitions ( nodes ) ; numberOfNodesInTheCluster . clear ( ) ; numberOfNodesInTheCluster . inc ( getNumberOfNodes ( ) ) ; logger . info ( "<STR_LIT>" + nodes . toString ( ) ) ; logger . info ( "<STR_LIT>" + _partitions . toString ( ) ) ; } @ Override public void handleClusterShutdown ( ) { logger . info ( "<STR_LIT>" ) ; } @ Override public boolean allowPartialMerge ( ) { return allowPartialMerge ; } public int getNumberOfNodes ( ) { return clusterClient . getNodes ( ) . size ( ) ; } } </s>
|
<s> package com . senseidb . search . node ; import com . senseidb . metrics . MetricFactory ; import java . io . File ; import java . util . HashMap ; import java . util . Map ; import java . util . concurrent . TimeUnit ; import org . apache . log4j . Logger ; import org . apache . lucene . search . Filter ; import proj . zoie . api . DefaultDirectoryManager ; import proj . zoie . api . DirectoryManager ; import proj . zoie . api . DirectoryManager . DIRECTORY_MODE ; import proj . zoie . api . indexing . IndexingEventListener ; import proj . zoie . api . indexing . ZoieIndexableInterpreter ; import proj . zoie . impl . indexing . IndexUpdatedEvent ; import proj . zoie . impl . indexing . ZoieConfig ; import proj . zoie . impl . indexing . ZoieSystem ; import com . browseengine . bobo . api . BoboIndexReader ; import com . senseidb . metrics . MetricsConstants ; import com . yammer . metrics . core . Histogram ; import com . yammer . metrics . core . Meter ; import com . yammer . metrics . core . MetricName ; public class SenseiZoieSystemFactory < T > extends SenseiZoieFactory < T > { private static Logger log = Logger . getLogger ( SenseiZoieSystemFactory . class ) ; private Filter _purgeFilter = null ; private Map < Integer , IndexingMetrics > metricsMap = new HashMap < Integer , IndexingMetrics > ( ) ; public SenseiZoieSystemFactory ( File idxDir , DIRECTORY_MODE dirMode , ZoieIndexableInterpreter < T > interpreter , SenseiIndexReaderDecorator indexReaderDecorator , ZoieConfig zoieConfig ) { super ( idxDir , dirMode , interpreter , indexReaderDecorator , zoieConfig ) ; } public void setPurgeFilter ( Filter purgeFilter ) { _purgeFilter = purgeFilter ; } @ Override public ZoieSystem < BoboIndexReader , T > getZoieInstance ( int nodeId , final int partitionId ) { File partDir = getPath ( nodeId , partitionId ) ; if ( ! partDir . exists ( ) ) { partDir . mkdirs ( ) ; log . info ( "<STR_LIT>" + nodeId + "<STR_LIT>" + partitionId + "<STR_LIT>" ) ; } DirectoryManager dirMgr = new DefaultDirectoryManager ( partDir , _dirMode ) ; ZoieSystem < BoboIndexReader , T > zoie = new ZoieSystem < BoboIndexReader , T > ( dirMgr , _interpreter , _indexReaderDecorator , _zoieConfig ) ; if ( _purgeFilter != null ) { zoie . setPurgeFilter ( _purgeFilter ) ; } metricsMap . put ( partitionId , new IndexingMetrics ( partitionId ) ) ; zoie . addIndexingEventListener ( new IndexingEventListener ( ) { @ Override public void handleUpdatedDiskVersion ( String updateDiskVersion ) { } @ Override public void handleIndexingEvent ( IndexingEvent evt ) { if ( evt instanceof IndexUpdatedEvent ) { IndexingMetrics metrics = SenseiZoieSystemFactory . this . metricsMap . get ( partitionId ) ; IndexUpdatedEvent updateEvent = ( IndexUpdatedEvent ) evt ; metrics . docsIndexedMetric . mark ( updateEvent . getNumDocsIndexed ( ) ) ; metrics . docsLeftoverMetric . mark ( updateEvent . getNumDocsLeftInQueue ( ) ) ; metrics . flushTimeHistogram . update ( updateEvent . getEndIndexingTime ( ) - updateEvent . getStartIndexingTime ( ) ) ; } } } ) ; return zoie ; } public File getPath ( int nodeId , int partitionId ) { return getPath ( _idxDir , nodeId , partitionId ) ; } private static class IndexingMetrics { final Meter docsIndexedMetric ; final Meter docsLeftoverMetric ; final Histogram flushTimeHistogram ; IndexingMetrics ( int partition ) { MetricName docsIndexedName = new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ; docsIndexedMetric = MetricFactory . 
newMeter ( docsIndexedName , "<STR_LIT>" , TimeUnit . SECONDS ) ; MetricName docsLeftoverName = new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ; docsLeftoverMetric = MetricFactory . newMeter ( docsLeftoverName , "<STR_LIT>" , TimeUnit . SECONDS ) ; MetricName flushTimeName = new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ; flushTimeHistogram = MetricFactory . newHistogram ( flushTimeName , false ) ; } } } </s>
|
<s> package com . senseidb . search . node ; import java . io . ByteArrayInputStream ; import java . io . ByteArrayOutputStream ; import java . util . List ; import java . util . Map ; import java . util . Set ; import java . util . zip . GZIPInputStream ; import com . browseengine . bobo . api . FacetSpec ; import com . linkedin . norbert . javacompat . cluster . Node ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Document ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . search . req . ErrorType ; import com . senseidb . search . req . SenseiError ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; public class SenseiScatterGatherHandler extends AbstractSenseiScatterGatherHandler < SenseiRequest , SenseiResult > { private final static Logger logger = Logger . getLogger ( SenseiScatterGatherHandler . class ) ; private final static long TIMEOUT_MILLIS = <NUM_LIT> ; private final SenseiRequestScatterRewriter _reqRewriter ; private long _timeoutMillis = TIMEOUT_MILLIS ; public SenseiScatterGatherHandler ( SenseiRequest request , SenseiRequestScatterRewriter reqRewriter ) { super ( request ) ; _reqRewriter = reqRewriter ; } public void setTimeoutMillis ( long timeoutMillis ) { _timeoutMillis = timeoutMillis ; } public long getTimeoutMillis ( ) { return _timeoutMillis ; } @ Override public SenseiResult mergeResults ( SenseiRequest request , List < SenseiResult > resultList ) { SenseiResult res = ResultMerger . merge ( request , resultList , false ) ; if ( request . isFetchStoredFields ( ) ) { for ( SenseiHit hit : res . getSenseiHits ( ) ) { try { Document doc = hit . getStoredFields ( ) ; byte [ ] dataBytes = doc . getBinaryValue ( SenseiSchema . SRC_DATA_COMPRESSED_FIELD_NAME ) ; if ( dataBytes != null && dataBytes . length > <NUM_LIT:0> ) { ByteArrayOutputStream bout = new ByteArrayOutputStream ( ) ; byte [ ] buf = new byte [ <NUM_LIT> ] ; ByteArrayInputStream bin = new ByteArrayInputStream ( dataBytes ) ; GZIPInputStream gzipStream = new GZIPInputStream ( bin ) ; int len ; while ( ( len = gzipStream . read ( buf ) ) > <NUM_LIT:0> ) { bout . write ( buf , <NUM_LIT:0> , len ) ; } bout . flush ( ) ; byte [ ] uncompressed = bout . toByteArray ( ) ; hit . setSrcData ( new String ( uncompressed , "<STR_LIT:UTF-8>" ) ) ; } else { dataBytes = doc . getBinaryValue ( SenseiSchema . SRC_DATA_FIELD_NAME ) ; if ( dataBytes != null && dataBytes . length > <NUM_LIT:0> ) { hit . setSrcData ( new String ( dataBytes , "<STR_LIT:UTF-8>" ) ) ; } } doc . removeFields ( SenseiSchema . SRC_DATA_COMPRESSED_FIELD_NAME ) ; doc . removeFields ( SenseiSchema . SRC_DATA_FIELD_NAME ) ; } catch ( Exception e ) { res . addError ( new SenseiError ( e . getMessage ( ) , ErrorType . BrokerGatherError ) ) ; logger . error ( e . getMessage ( ) , e ) ; } } } return res ; } @ Override public SenseiRequest customizeRequest ( SenseiRequest senseiReq , Node node , Set < Integer > partitions ) { Map < String , FacetSpec > facetSpecs = senseiReq . getFacetSpecs ( ) ; if ( facetSpecs != null ) { for ( Map . Entry < String , FacetSpec > entry : facetSpecs . entrySet ( ) ) { FacetSpec spec = entry . getValue ( ) ; if ( spec != null ) spec . setMaxCount ( <NUM_LIT:0> ) ; } } int oldOffset = senseiReq . getOffset ( ) ; int oldCount = senseiReq . getCount ( ) ; if ( _reqRewriter != null ) { senseiReq = _reqRewriter . 
rewrite ( senseiReq , node , partitions ) ; } if ( oldCount > <NUM_LIT:0> ) { senseiReq . setOffset ( <NUM_LIT:0> ) ; senseiReq . setCount ( oldOffset + oldCount ) ; } senseiReq . setPartitions ( partitions ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "<STR_LIT>" + partitions . toString ( ) ) ; } return senseiReq ; } } </s>
|
<s> package com . senseidb . search . node ; import java . io . File ; import java . util . List ; import org . apache . log4j . Logger ; import proj . zoie . api . DirectoryManager . DIRECTORY_MODE ; import proj . zoie . api . indexing . ZoieIndexableInterpreter ; import proj . zoie . hourglass . impl . HourGlassScheduler ; import proj . zoie . hourglass . impl . HourGlassScheduler . FREQUENCY ; import proj . zoie . hourglass . impl . Hourglass ; import proj . zoie . hourglass . impl . HourglassDirectoryManagerFactory ; import proj . zoie . hourglass . impl . HourglassListener ; import proj . zoie . impl . indexing . ZoieConfig ; import com . browseengine . bobo . api . BoboIndexReader ; public class SenseiHourglassFactory < T > extends SenseiZoieFactory < T > { private static Logger log = Logger . getLogger ( SenseiHourglassFactory . class ) ; private final String schedule ; private final boolean appendOnly ; private final int trimThreshold ; private final HourGlassScheduler . FREQUENCY frequency ; private final List < HourglassListener > hourglassListeners ; @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public SenseiHourglassFactory ( File idxDir , DIRECTORY_MODE dirMode , ZoieIndexableInterpreter < T > interpreter , SenseiIndexReaderDecorator indexReaderDecorator , ZoieConfig zoieConfig , String schedule , boolean appendOnly , int trimThreshold , FREQUENCY frequency , List < HourglassListener > hourglassListeners ) { super ( idxDir , dirMode , interpreter , indexReaderDecorator , zoieConfig ) ; this . schedule = schedule ; this . appendOnly = appendOnly ; this . trimThreshold = trimThreshold ; this . frequency = frequency ; this . hourglassListeners = hourglassListeners ; log . info ( "<STR_LIT>" + this . getClass ( ) . getName ( ) + "<STR_LIT>" + schedule + "<STR_LIT>" + frequency + "<STR_LIT>" + trimThreshold ) ; } @ Override public Hourglass < BoboIndexReader , T > getZoieInstance ( int nodeId , int partitionId ) { File partDir = getPath ( nodeId , partitionId ) ; if ( ! partDir . exists ( ) ) { partDir . mkdirs ( ) ; log . info ( "<STR_LIT>" + nodeId + "<STR_LIT>" + partitionId + "<STR_LIT>" ) ; } HourGlassScheduler scheduler = new HourGlassScheduler ( frequency , schedule , appendOnly , trimThreshold ) ; HourglassDirectoryManagerFactory dirmgr = new HourglassDirectoryManagerFactory ( partDir , scheduler , _dirMode ) ; log . info ( "<STR_LIT>" + nodeId + "<STR_LIT>" + partitionId ) ; return new Hourglass < BoboIndexReader , T > ( dirmgr , _interpreter , _indexReaderDecorator , _zoieConfig , hourglassListeners ) ; } public File getPath ( int nodeId , int partitionId ) { return getPath ( _idxDir , nodeId , partitionId ) ; } } </s>
|
<s> package com . senseidb . search . node ; public class SenseiClusterConfig { private static final String DEFAULT_ZK_URL = "<STR_LIT>" ; private String _clusterName ; private String _zooKeeperURL ; private int zooKeeperSessionTimeoutMillis ; public void setClusterName ( String clusterName ) { _clusterName = clusterName ; } public void setZooKeeperSessionTimeoutMillis ( int zooKeeperSessionTimeout ) { zooKeeperSessionTimeoutMillis = zooKeeperSessionTimeout ; } public int getZooKeeperSessionTimeoutMillis ( ) { return zooKeeperSessionTimeoutMillis ; } public String getClusterName ( ) { return _clusterName ; } public void setZooKeeperURL ( String zookeeperURL ) { _zooKeeperURL = zookeeperURL ; } public String getZooKeeperURL ( ) { return ( _zooKeeperURL != null ? _zooKeeperURL : DEFAULT_ZK_URL ) ; } } </s>
|
<s> package com . senseidb . search . node . inmemory ; import java . io . IOException ; import java . util . List ; import org . apache . lucene . analysis . Analyzer ; import org . apache . lucene . analysis . standard . StandardAnalyzer ; import org . apache . lucene . util . Version ; import proj . zoie . api . IndexReaderFactory ; import proj . zoie . api . ZoieIndexReader ; import com . browseengine . bobo . api . BoboIndexReader ; public class MockIndexReaderFactory < T > implements IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > { private Analyzer analyzer = new StandardAnalyzer ( Version . LUCENE_35 ) ; private final List < ZoieIndexReader < BoboIndexReader > > readers ; public MockIndexReaderFactory ( List < ZoieIndexReader < BoboIndexReader > > readers ) { this . readers = readers ; } @ Override public List < ZoieIndexReader < BoboIndexReader > > getIndexReaders ( ) throws IOException { return readers ; } @ Override public Analyzer getAnalyzer ( ) { return analyzer ; } @ Override public void returnIndexReaders ( List < ZoieIndexReader < BoboIndexReader > > r ) { } @ Override public String getCurrentReaderVersion ( ) { return null ; } } </s>
|
<s> package com . senseidb . search . node . inmemory ; import java . io . File ; import java . lang . management . ManagementFactory ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . List ; import javax . management . MBeanServer ; import org . apache . commons . configuration . Configuration ; import org . apache . commons . configuration . ConfigurationException ; import org . apache . commons . configuration . PropertiesConfiguration ; import org . apache . lucene . analysis . standard . StandardAnalyzer ; import org . apache . lucene . document . Document ; import org . apache . lucene . document . Field ; import org . apache . lucene . index . IndexReader ; import org . apache . lucene . index . IndexWriter ; import org . apache . lucene . index . IndexWriterConfig ; import org . apache . lucene . store . Directory ; import org . apache . lucene . store . RAMDirectory ; import org . apache . lucene . util . Version ; import org . json . JSONException ; import org . json . JSONObject ; import proj . zoie . api . ZoieIndexReader ; import proj . zoie . api . ZoieMultiReader ; import proj . zoie . api . ZoieSegmentReader ; import proj . zoie . api . indexing . AbstractZoieIndexable ; import proj . zoie . api . indexing . ZoieIndexable ; import proj . zoie . impl . indexing . ZoieConfig ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . RuntimeFacetHandlerFactory ; import com . senseidb . conf . SenseiConfParams ; import com . senseidb . conf . SenseiFacetHandlerBuilder ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . conf . SenseiServerBuilder ; import com . senseidb . indexing . DefaultJsonSchemaInterpreter ; import com . senseidb . indexing . ShardingStrategy ; import com . senseidb . indexing . activity . ActivityPersistenceFactory ; import com . senseidb . jmx . JmxUtil ; import com . senseidb . jmx . MockJMXServer ; import com . senseidb . plugin . SenseiPluginRegistry ; import com . senseidb . search . node . SenseiIndexReaderDecorator ; import com . senseidb . search . plugin . PluggableSearchEngineManager ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . svc . impl . CoreSenseiServiceImpl ; public class InMemorySenseiService { private DefaultJsonSchemaInterpreter defaultJsonSchemaInterpreter ; private List < FacetHandler < ? > > facets ; private List < RuntimeFacetHandlerFactory < ? , ? > > runtimeFacets ; private CoreSenseiServiceImpl coreSenseiServiceImpl ; private PluggableSearchEngineManager pluggableSearchEngineManager ; private MockSenseiCore mockSenseiCore ; private SenseiSystemInfo senseiSystemInfo ; public InMemorySenseiService ( SenseiSchema schema , SenseiPluginRegistry pluginRegistry ) { MBeanServer platformMBeanServer = ManagementFactory . getPlatformMBeanServer ( ) ; schema . setCompressSrcData ( false ) ; try { platformMBeanServer = JmxUtil . registerNewJmxServer ( new MockJMXServer ( ) ) ; defaultJsonSchemaInterpreter = new DefaultJsonSchemaInterpreter ( schema ) ; facets = new ArrayList < FacetHandler < ? > > ( ) ; runtimeFacets = new ArrayList < RuntimeFacetHandlerFactory < ? , ? 
> > ( ) ; ShardingStrategy strategy = new ShardingStrategy ( ) { public int caculateShard ( int maxShardId , JSONObject dataObj ) throws JSONException { return <NUM_LIT:0> ; } } ; ActivityPersistenceFactory . setOverrideForCurrentThread ( ActivityPersistenceFactory . getInMemoryInstance ( ) ) ; pluggableSearchEngineManager = new PluggableSearchEngineManager ( ) ; pluggableSearchEngineManager . init ( "<STR_LIT>" , <NUM_LIT:0> , schema , ZoieConfig . DEFAULT_VERSION_COMPARATOR , pluginRegistry , strategy ) ; senseiSystemInfo = SenseiFacetHandlerBuilder . buildFacets ( schema . getSchemaObj ( ) , pluginRegistry , facets , runtimeFacets , pluggableSearchEngineManager ) ; int [ ] partitions = new int [ ] { <NUM_LIT:0> } ; mockSenseiCore = new MockSenseiCore ( partitions ) ; pluggableSearchEngineManager . start ( mockSenseiCore ) ; coreSenseiServiceImpl = new CoreSenseiServiceImpl ( mockSenseiCore ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } finally { JmxUtil . registerNewJmxServer ( platformMBeanServer ) ; ActivityPersistenceFactory . setOverrideForCurrentThread ( null ) ; } } private void addDocuments ( Directory directory , IndexWriter writer , List < JSONObject > documents ) { try { writer . deleteAll ( ) ; for ( JSONObject doc : documents ) { if ( doc == null ) continue ; writer . addDocument ( buildDoc ( doc ) ) ; pluggableSearchEngineManager . update ( doc , "<STR_LIT>" ) ; } writer . commit ( ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public SenseiResult doQuery ( SenseiRequest senseiRequest , List < JSONObject > documents ) { Directory directory = null ; IndexWriter writer = null ; try { directory = new RAMDirectory ( ) ; writer = new IndexWriter ( directory , new IndexWriterConfig ( Version . LUCENE_35 , new StandardAnalyzer ( Version . LUCENE_35 ) ) ) ; addDocuments ( directory , writer , documents ) ; ZoieIndexReader < BoboIndexReader > zoieMultiReader = new ZoieMultiReader < BoboIndexReader > ( IndexReader . open ( directory ) , new SenseiIndexReaderDecorator ( facets , runtimeFacets ) ) ; MockIndexReaderFactory mockIndexReaderFactory = new MockIndexReaderFactory < ZoieIndexReader < BoboIndexReader > > ( Arrays . asList ( zoieMultiReader ) ) ; mockSenseiCore . setIndexReaderFactory ( mockIndexReaderFactory ) ; SenseiResult result = coreSenseiServiceImpl . execute ( senseiRequest ) ; mockSenseiCore . setIndexReaderFactory ( null ) ; return result ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } finally { try { if ( writer != null ) { writer . close ( ) ; } if ( directory != null ) { directory . close ( ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } } public Document buildDoc ( JSONObject json ) { ZoieIndexable indexable = defaultJsonSchemaInterpreter . convertAndInterpret ( json ) ; Document ret = indexable . buildIndexingReqs ( ) [ <NUM_LIT:0> ] . getDocument ( ) ; ret . add ( new Field ( AbstractZoieIndexable . DOCUMENT_STORE_FIELD , indexable . getStoreValue ( ) ) ) ; ZoieSegmentReader . fillDocumentID ( ret , indexable . getUID ( ) ) ; return ret ; } public static InMemorySenseiService valueOf ( File confDir ) { try { JSONObject schema = SenseiServerBuilder . loadSchema ( confDir ) ; File senseiConfFile = new File ( confDir , SenseiServerBuilder . SENSEI_PROPERTIES ) ; if ( ! senseiConfFile . exists ( ) ) { throw new ConfigurationException ( "<STR_LIT>" + senseiConfFile . 
getAbsolutePath ( ) + "<STR_LIT>" ) ; } Configuration senseiConf = new PropertiesConfiguration ( ) ; ( ( PropertiesConfiguration ) senseiConf ) . setDelimiterParsingDisabled ( true ) ; ( ( PropertiesConfiguration ) senseiConf ) . load ( senseiConfFile ) ; return new InMemorySenseiService ( SenseiSchema . build ( schema ) , SenseiPluginRegistry . build ( senseiConf ) ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } } public SenseiSystemInfo getSenseiSystemInfo ( ) { return senseiSystemInfo ; } public void setSenseiSystemInfo ( SenseiSystemInfo senseiSystemInfo ) { this . senseiSystemInfo = senseiSystemInfo ; } } </s>
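A minimal usage sketch for InMemorySenseiService, assuming a hypothetical configuration directory and made-up document fields (the conf path, field names, and values below are illustrative, not taken from the source):

    import java.io.File;
    import java.util.Arrays;
    import org.json.JSONObject;
    import com.senseidb.search.node.inmemory.InMemorySenseiService;
    import com.senseidb.search.req.SenseiRequest;
    import com.senseidb.search.req.SenseiResult;

    public class InMemorySenseiExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical conf directory containing sensei.properties and the schema.
        InMemorySenseiService service = InMemorySenseiService.valueOf(new File("conf"));

        // Documents are plain JSON objects; the fields here are illustrative.
        JSONObject doc = new JSONObject().put("id", 1L).put("color", "red");

        SenseiRequest request = new SenseiRequest();
        request.setCount(10);

        // Each call indexes the supplied documents into a throwaway RAMDirectory
        // and executes the request against that temporary index.
        SenseiResult result = service.doQuery(request, Arrays.asList(doc));
        System.out.println(result.getNumHits());
      }
    }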
|
<s> package com . senseidb . search . node . inmemory ; import java . util . Collections ; import java . util . List ; import org . apache . lucene . analysis . standard . StandardAnalyzer ; import org . apache . lucene . queryParser . QueryParser ; import org . apache . lucene . util . Version ; import proj . zoie . api . IndexReaderFactory ; import proj . zoie . api . ZoieIndexReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . senseidb . indexing . SenseiIndexPruner ; import com . senseidb . search . node . SenseiCore ; import com . senseidb . search . node . impl . DefaultJsonQueryBuilderFactory ; public class MockSenseiCore extends SenseiCore { private final ThreadLocal < MockIndexReaderFactory < ZoieIndexReader < BoboIndexReader > > > mockIndexReaderFactory = new ThreadLocal < MockIndexReaderFactory < ZoieIndexReader < BoboIndexReader > > > ( ) ; private final int [ ] partitions ; private static MockIndexReaderFactory < ZoieIndexReader < BoboIndexReader > > emptyIndexFactory = new MockIndexReaderFactory < ZoieIndexReader < BoboIndexReader > > ( Collections . EMPTY_LIST ) ; public MockSenseiCore ( int [ ] partitions ) { super ( <NUM_LIT:0> , new int [ ] { <NUM_LIT:0> } , null , null , new DefaultJsonQueryBuilderFactory ( new QueryParser ( Version . LUCENE_35 , "<STR_LIT>" , new StandardAnalyzer ( Version . LUCENE_35 ) ) ) ) ; this . partitions = partitions ; setIndexPruner ( new SenseiIndexPruner . DefaultSenseiIndexPruner ( ) ) ; } @ Override public IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > getIndexReaderFactory ( int partition ) { if ( partition == partitions [ <NUM_LIT:0> ] ) return mockIndexReaderFactory . get ( ) ; else { return emptyIndexFactory ; } } public void setIndexReaderFactory ( IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > indexReaderFactory ) { mockIndexReaderFactory . set ( ( MockIndexReaderFactory < ZoieIndexReader < BoboIndexReader > > ) indexReaderFactory ) ; } @ Override public int [ ] getPartitions ( ) { return partitions ; } } </s>
|
<s> package com . senseidb . search . node ; import com . linkedin . norbert . javacompat . network . NetworkClientConfig ; public class SenseiNetworkClientConfig { private String _serviceName ; private String _zooKeeperURL ; private int _zooKeeperSessionTimeoutMillis ; private int _connectTimeoutMillis ; private int _writeTimeoutMillis ; private int _maxConnectionsPerNode ; private int _staleRequestTimeoutMins ; private int _staleRequestCleanupFrequencyMins ; public String getserviceName ( ) { return _serviceName ; } public void setserviceName ( String serviceName ) { _serviceName = serviceName ; } public String getZooKeeperURL ( ) { return _zooKeeperURL ; } public void setZooKeeperURL ( String zookeeperURL ) { _zooKeeperURL = zookeeperURL ; } public int getZooKeeperSessionTimeoutMillis ( ) { return _zooKeeperSessionTimeoutMillis ; } public void setZooKeeperSessionTimeoutMillis ( int zooKeeperSessionTimeoutMillis ) { _zooKeeperSessionTimeoutMillis = zooKeeperSessionTimeoutMillis ; } public int getConnectTimeoutMillis ( ) { return _connectTimeoutMillis ; } public void setConnectTimeoutMillis ( int connectTimeoutMillis ) { this . _connectTimeoutMillis = connectTimeoutMillis ; } public int getWriteTimeoutMillis ( ) { return _writeTimeoutMillis ; } public void setWriteTimeoutMillis ( int writeTimeoutMillis ) { this . _writeTimeoutMillis = writeTimeoutMillis ; } public int getMaxConnectionsPerNode ( ) { return _maxConnectionsPerNode ; } public void setMaxConnectionsPerNode ( int maxConnectionsPerNode ) { this . _maxConnectionsPerNode = maxConnectionsPerNode ; } public int getStaleRequestTimeoutMins ( ) { return _staleRequestTimeoutMins ; } public void setStaleRequestTimeoutMins ( int staleRequestTimeoutMins ) { this . _staleRequestTimeoutMins = staleRequestTimeoutMins ; } public int getStaleRequestCleanupFrequencyMins ( ) { return _staleRequestCleanupFrequencyMins ; } public void setStaleRequestCleanupFrequencyMins ( int staleRequestCleanupFrequencyMins ) { this . _staleRequestCleanupFrequencyMins = staleRequestCleanupFrequencyMins ; } public NetworkClientConfig getNetworkConfigObject ( ) { NetworkClientConfig netConfig = new NetworkClientConfig ( ) ; netConfig . setServiceName ( _serviceName ) ; netConfig . setZooKeeperSessionTimeoutMillis ( _zooKeeperSessionTimeoutMillis ) ; netConfig . setZooKeeperConnectString ( _zooKeeperURL ) ; netConfig . setConnectTimeoutMillis ( _connectTimeoutMillis ) ; netConfig . setMaxConnectionsPerNode ( _maxConnectionsPerNode ) ; netConfig . setStaleRequestCleanupFrequencyMins ( _staleRequestCleanupFrequencyMins ) ; netConfig . setStaleRequestTimeoutMins ( _staleRequestTimeoutMins ) ; netConfig . setWriteTimeoutMillis ( _writeTimeoutMillis ) ; return netConfig ; } } </s>
|
<s> package com . senseidb . search . node ; import java . util . Map ; import proj . zoie . api . DataProvider ; import proj . zoie . api . Zoie ; import proj . zoie . api . ZoieException ; import com . browseengine . bobo . api . BoboIndexReader ; public interface SenseiIndexingManager < D > { void initialize ( Map < Integer , Zoie < BoboIndexReader , D > > zoieSystemMap ) throws Exception ; void start ( ) throws Exception ; void shutdown ( ) ; DataProvider < D > getDataProvider ( ) ; void syncWithVersion ( long timeToWait , String version ) throws ZoieException ; } </s>
|
<s> package com . senseidb . search . node ; import java . io . File ; import java . util . Comparator ; import proj . zoie . api . Zoie ; import proj . zoie . api . DirectoryManager . DIRECTORY_MODE ; import proj . zoie . api . indexing . ZoieIndexableInterpreter ; import proj . zoie . impl . indexing . ZoieConfig ; import com . browseengine . bobo . api . BoboIndexReader ; public abstract class SenseiZoieFactory < D > { protected final File _idxDir ; protected final ZoieIndexableInterpreter < D > _interpreter ; protected final SenseiIndexReaderDecorator _indexReaderDecorator ; protected final ZoieConfig _zoieConfig ; protected final DIRECTORY_MODE _dirMode ; public SenseiZoieFactory ( File idxDir , DIRECTORY_MODE dirMode , ZoieIndexableInterpreter < D > interpreter , SenseiIndexReaderDecorator indexReaderDecorator , ZoieConfig zoieConfig ) { _idxDir = idxDir ; _interpreter = interpreter ; _indexReaderDecorator = indexReaderDecorator ; _zoieConfig = zoieConfig ; _dirMode = dirMode ; } public static File getPath ( File idxDir , int nodeId , int partitionId ) { File nodeLevelFile = new File ( idxDir , "<STR_LIT>" + nodeId ) ; return new File ( nodeLevelFile , "<STR_LIT>" + partitionId ) ; } public SenseiIndexReaderDecorator getDecorator ( ) { return _indexReaderDecorator ; } public ZoieIndexableInterpreter < D > getInterpreter ( ) { return _interpreter ; } public Comparator < String > getVersionComparator ( ) { return _zoieConfig . getVersionComparator ( ) ; } public abstract Zoie < BoboIndexReader , D > getZoieInstance ( int nodeId , int partitionId ) ; public abstract File getPath ( int nodeId , int partitionId ) ; } </s>
|
<s> package com . senseidb . search . node ; import com . linkedin . norbert . javacompat . network . RequestBuilder ; import com . linkedin . norbert . network . ResponseIterator ; import com . linkedin . norbert . network . common . TimeoutIterator ; import it . unimi . dsi . fastutil . ints . IntOpenHashSet ; import it . unimi . dsi . fastutil . ints . IntSet ; import java . util . ArrayList ; import java . util . Collections ; import java . util . Comparator ; import java . util . HashMap ; import java . util . HashSet ; import java . util . List ; import java . util . Map ; import java . util . Set ; import java . util . concurrent . Callable ; import java . util . concurrent . ExecutionException ; import java . util . concurrent . Future ; import java . util . concurrent . TimeUnit ; import org . apache . log4j . Logger ; import com . linkedin . norbert . NorbertException ; import com . linkedin . norbert . javacompat . cluster . ClusterClient ; import com . linkedin . norbert . javacompat . cluster . Node ; import com . linkedin . norbert . javacompat . network . PartitionedNetworkClient ; import com . senseidb . cluster . routing . RoutingInfo ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . svc . impl . SysSenseiCoreServiceImpl ; public class SenseiSysBroker extends AbstractConsistentHashBroker < SenseiRequest , SenseiSystemInfo > { private final static Logger logger = Logger . getLogger ( SenseiSysBroker . class ) ; private final static long TIMEOUT_MILLIS = <NUM_LIT> ; private long _timeoutMillis = TIMEOUT_MILLIS ; private final Comparator < String > _versionComparator ; private final boolean allowPartialMerge ; protected Set < Node > _nodes = Collections . EMPTY_SET ; public SenseiSysBroker ( PartitionedNetworkClient < String > networkClient , ClusterClient clusterClient , Comparator < String > versionComparator , boolean allowPartialMerge ) throws NorbertException { super ( networkClient , SysSenseiCoreServiceImpl . JAVA_SERIALIZER ) ; _versionComparator = versionComparator ; this . allowPartialMerge = allowPartialMerge ; clusterClient . addListener ( this ) ; logger . info ( "<STR_LIT>" + networkClient + "<STR_LIT:U+0020>" + clusterClient ) ; } @ Override public SenseiSystemInfo mergeResults ( SenseiRequest request , List < SenseiSystemInfo > resultList ) { SenseiSystemInfo result = new SenseiSystemInfo ( ) ; if ( resultList == null ) return result ; for ( SenseiSystemInfo res : resultList ) { result . setNumDocs ( result . getNumDocs ( ) + res . getNumDocs ( ) ) ; result . setSchema ( res . getSchema ( ) ) ; if ( result . getLastModified ( ) < res . getLastModified ( ) ) result . setLastModified ( res . getLastModified ( ) ) ; if ( result . getVersion ( ) == null || _versionComparator . compare ( result . getVersion ( ) , res . getVersion ( ) ) < <NUM_LIT:0> ) result . setVersion ( res . getVersion ( ) ) ; if ( res . getFacetInfos ( ) != null ) result . setFacetInfos ( res . getFacetInfos ( ) ) ; if ( res . getClusterInfo ( ) != null ) { if ( result . getClusterInfo ( ) != null ) result . getClusterInfo ( ) . addAll ( res . getClusterInfo ( ) ) ; else result . setClusterInfo ( res . 
getClusterInfo ( ) ) ; } } return result ; } @ Override protected List < SenseiSystemInfo > doCall ( final SenseiRequest req ) throws ExecutionException { final List < SenseiSystemInfo > resultList = new ArrayList < SenseiSystemInfo > ( ) ; List < Future < SenseiSystemInfo > > futures = new ArrayList < Future < SenseiSystemInfo > > ( _nodes . size ( ) ) ; for ( Node n : _nodes ) { futures . add ( _networkClient . sendRequestToNode ( req , n , _serializer ) ) ; } for ( Future < SenseiSystemInfo > future : futures ) { try { resultList . add ( future . get ( <NUM_LIT> , TimeUnit . MILLISECONDS ) ) ; } catch ( Exception e ) { logger . error ( "<STR_LIT>" , e ) ; } } logger . debug ( String . format ( "<STR_LIT>" , resultList . size ( ) ) ) ; return resultList ; } @ Override public SenseiSystemInfo getEmptyResultInstance ( ) { return new SenseiSystemInfo ( ) ; } public void handleClusterConnected ( Set < Node > nodes ) { _partitions = getPartitions ( nodes ) ; _nodes = nodes ; logger . info ( "<STR_LIT>" + nodes . toString ( ) ) ; logger . info ( "<STR_LIT>" + _partitions . toString ( ) ) ; } public void handleClusterDisconnected ( ) { logger . info ( "<STR_LIT>" ) ; _partitions = new IntOpenHashSet ( ) ; _nodes = Collections . EMPTY_SET ; } public void handleClusterNodesChanged ( Set < Node > nodes ) { _partitions = getPartitions ( nodes ) ; _nodes = nodes ; logger . info ( "<STR_LIT>" + nodes . toString ( ) ) ; logger . info ( "<STR_LIT>" + _partitions . toString ( ) ) ; } @ Override public void handleClusterShutdown ( ) { logger . info ( "<STR_LIT>" ) ; } @ Override public boolean allowPartialMerge ( ) { return allowPartialMerge ; } } </s>
|
<s> package com . senseidb . search . node ; import java . io . IOException ; import java . util . List ; import org . apache . log4j . Logger ; import proj . zoie . api . ZoieIndexReader ; import proj . zoie . impl . indexing . AbstractIndexReaderDecorator ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . RuntimeFacetHandlerFactory ; public class SenseiIndexReaderDecorator extends AbstractIndexReaderDecorator < BoboIndexReader > { private final List < FacetHandler < ? > > _facetHandlers ; private static final Logger logger = Logger . getLogger ( SenseiIndexReaderDecorator . class ) ; private final List < RuntimeFacetHandlerFactory < ? , ? > > _facetHandlerFactories ; public SenseiIndexReaderDecorator ( List < FacetHandler < ? > > facetHandlers , List < RuntimeFacetHandlerFactory < ? , ? > > facetHandlerFactories ) { _facetHandlers = facetHandlers ; _facetHandlerFactories = facetHandlerFactories ; } public SenseiIndexReaderDecorator ( ) { this ( null , null ) ; } public List < FacetHandler < ? > > getFacetHandlerList ( ) { return _facetHandlers ; } public List < RuntimeFacetHandlerFactory < ? , ? > > getFacetHandlerFactories ( ) { return _facetHandlerFactories ; } public BoboIndexReader decorate ( ZoieIndexReader < BoboIndexReader > zoieReader ) throws IOException { BoboIndexReader boboReader = null ; if ( zoieReader != null ) { boboReader = BoboIndexReader . getInstanceAsSubReader ( zoieReader , _facetHandlers , _facetHandlerFactories ) ; } return boboReader ; } @ Override public BoboIndexReader redecorate ( BoboIndexReader reader , ZoieIndexReader < BoboIndexReader > newReader , boolean withDeletes ) throws IOException { return reader . copy ( newReader ) ; } } </s>
|
<s> package com . senseidb . search . node ; import java . util . ArrayList ; import java . util . List ; import java . util . Set ; import java . util . concurrent . TimeUnit ; import org . apache . log4j . Logger ; import com . linkedin . norbert . javacompat . cluster . Node ; import com . linkedin . norbert . network . ResponseIterator ; import com . linkedin . norbert . javacompat . network . ScatterGatherHandler ; import com . senseidb . search . req . AbstractSenseiRequest ; import com . senseidb . search . req . AbstractSenseiResult ; import com . senseidb . search . req . ErrorType ; import com . senseidb . search . req . SenseiError ; public abstract class AbstractSenseiScatterGatherHandler < REQUEST extends AbstractSenseiRequest , RESULT extends AbstractSenseiResult > implements ScatterGatherHandler < REQUEST , RESULT , RESULT , Integer > { private final static Logger logger = Logger . getLogger ( AbstractSenseiScatterGatherHandler . class ) ; private final static long TIMEOUT_MILLIS = <NUM_LIT> ; private final REQUEST _request ; private long _timeoutMillis = TIMEOUT_MILLIS ; public AbstractSenseiScatterGatherHandler ( REQUEST request ) { _request = request ; } public void setTimeoutMillis ( long timeoutMillis ) { _timeoutMillis = timeoutMillis ; } public long getTimeoutMillis ( ) { return _timeoutMillis ; } public abstract RESULT mergeResults ( REQUEST request , List < RESULT > resultList ) ; public abstract REQUEST customizeRequest ( REQUEST request , Node node , Set < Integer > partitions ) ; @ Override public RESULT gatherResponses ( ResponseIterator < RESULT > iter ) throws Exception { boolean debugmode = logger . isDebugEnabled ( ) ; int timeOuts = <NUM_LIT:0> ; List < RESULT > boboBrowseList = new ArrayList < RESULT > ( ) ; while ( iter . hasNext ( ) ) { RESULT result = iter . next ( _timeoutMillis > <NUM_LIT:0> ? _timeoutMillis : Long . MAX_VALUE , TimeUnit . MILLISECONDS ) ; if ( result == null ) { timeOuts ++ ; logger . error ( "<STR_LIT>" ) ; } else { boboBrowseList . add ( result ) ; } } RESULT res = mergeResults ( _request , boboBrowseList ) ; if ( timeOuts > <NUM_LIT:0> ) { res . addError ( new SenseiError ( "<STR_LIT>" , ErrorType . BrokerTimeout ) ) ; } if ( debugmode ) { logger . debug ( "<STR_LIT>" + res ) ; logger . debug ( "<STR_LIT>" ) ; } return res ; } } </s>
|
<s> package com . senseidb . search . node ; import java . io . File ; import org . apache . log4j . Logger ; import proj . zoie . api . indexing . ZoieIndexableInterpreter ; import proj . zoie . api . DirectoryManager . DIRECTORY_MODE ; import proj . zoie . api . IndexCopier ; import proj . zoie . api . Zoie ; import proj . zoie . impl . indexing . ZoieConfig ; import proj . zoie . pair . impl . Pair ; import com . browseengine . bobo . api . BoboIndexReader ; public class SenseiPairFactory < T > extends SenseiZoieFactory < T > { private static Logger log = Logger . getLogger ( SenseiPairFactory . class ) ; private final static String ZOIEONE_DIR = "<STR_LIT>" ; private File _zoieOneRoot ; private IndexCopier _indexCopier ; private SenseiZoieFactory _zoieTwoFactory ; public SenseiPairFactory ( File idxDir , DIRECTORY_MODE dirMode , IndexCopier indexCopier , ZoieIndexableInterpreter < T > interpreter , SenseiIndexReaderDecorator indexReaderDecorator , ZoieConfig zoieConfig , SenseiZoieFactory < T > zoieTwoFactory ) { super ( idxDir , dirMode , interpreter , indexReaderDecorator , zoieConfig ) ; _zoieOneRoot = new File ( idxDir , ZOIEONE_DIR ) ; _indexCopier = indexCopier ; _zoieTwoFactory = zoieTwoFactory ; } @ Override public Pair < BoboIndexReader , T > getZoieInstance ( int nodeId , int partitionId ) { File zoieOneDir = getPath ( nodeId , partitionId ) ; if ( ! zoieOneDir . exists ( ) ) { zoieOneDir . mkdirs ( ) ; log . info ( "<STR_LIT>" + nodeId + "<STR_LIT>" + partitionId + "<STR_LIT>" ) ; } Zoie < BoboIndexReader , T > zoieTwo = _zoieTwoFactory . getZoieInstance ( nodeId , partitionId ) ; Pair < BoboIndexReader , T > zoie = new Pair < BoboIndexReader , T > ( zoieOneDir , _dirMode , _indexCopier , _interpreter , _indexReaderDecorator , _zoieConfig , zoieTwo ) ; return zoie ; } public File getPath ( int nodeId , int partitionId ) { return getPath ( _zoieOneRoot , nodeId , partitionId ) ; } } </s>
|
<s> package com . senseidb . search . node ; import com . senseidb . metrics . MetricFactory ; import java . io . File ; import java . net . SocketException ; import java . net . UnknownHostException ; import java . util . Arrays ; import java . util . HashSet ; import java . util . List ; import javax . management . StandardMBean ; import org . apache . log4j . Logger ; import org . mortbay . jetty . Server ; import proj . zoie . api . DataProvider ; import com . linkedin . norbert . javacompat . cluster . ClusterClient ; import com . linkedin . norbert . javacompat . cluster . Node ; import com . linkedin . norbert . javacompat . network . NetworkServer ; import com . linkedin . norbert . network . NetworkingException ; import com . senseidb . conf . SenseiServerBuilder ; import com . senseidb . jmx . JmxUtil ; import com . senseidb . plugin . SenseiPluginRegistry ; import com . senseidb . search . req . AbstractSenseiRequest ; import com . senseidb . search . req . AbstractSenseiResult ; import com . senseidb . svc . impl . AbstractSenseiCoreService ; import com . senseidb . svc . impl . CoreSenseiServiceImpl ; import com . senseidb . svc . impl . SenseiCoreServiceMessageHandler ; import com . senseidb . svc . impl . SysSenseiCoreServiceImpl ; import com . senseidb . util . NetUtil ; public class SenseiServer { private static final Logger logger = Logger . getLogger ( SenseiServer . class ) ; private static final String AVAILABLE = "<STR_LIT>" ; private static final String UNAVAILABLE = "<STR_LIT>" ; private static final String DUMMY_OUT_IP = "<STR_LIT>" ; private int _id ; private int _port ; private int [ ] _partitions ; private NetworkServer _networkServer ; private ClusterClient _clusterClient ; private final SenseiCore _core ; protected volatile Node _serverNode ; private final List < AbstractSenseiCoreService < AbstractSenseiRequest , AbstractSenseiResult > > _externalSvc ; protected volatile boolean _available = false ; private final SenseiPluginRegistry pluginRegistry ; public SenseiServer ( int id , int port , int [ ] partitions , NetworkServer networkServer , ClusterClient clusterClient , SenseiZoieFactory < ? > zoieSystemFactory , SenseiIndexingManager indexingManager , SenseiQueryBuilderFactory queryBuilderFactory , List < AbstractSenseiCoreService < AbstractSenseiRequest , AbstractSenseiResult > > externalSvc , SenseiPluginRegistry pluginRegistry ) { this ( port , networkServer , clusterClient , new SenseiCore ( id , partitions , zoieSystemFactory , indexingManager , queryBuilderFactory ) , externalSvc , pluginRegistry ) ; } public SenseiServer ( int port , NetworkServer networkServer , ClusterClient clusterClient , SenseiCore senseiCore , List < AbstractSenseiCoreService < AbstractSenseiRequest , AbstractSenseiResult > > externalSvc , SenseiPluginRegistry pluginRegistry ) { _core = senseiCore ; this . pluginRegistry = pluginRegistry ; _id = senseiCore . getNodeId ( ) ; _port = port ; _partitions = senseiCore . getPartitions ( ) ; _networkServer = networkServer ; _clusterClient = clusterClient ; _externalSvc = externalSvc ; } private static String help ( ) { StringBuffer buffer = new StringBuffer ( ) ; buffer . append ( "<STR_LIT>" ) ; buffer . append ( "<STR_LIT>" ) ; buffer . append ( "<STR_LIT>" ) ; buffer . append ( "<STR_LIT>" ) ; buffer . append ( "<STR_LIT>" ) ; return buffer . toString ( ) ; } public DataProvider getDataProvider ( ) { return _core . 
getDataProvider ( ) ; } public SenseiCore getSenseiCore ( ) { return _core ; } public void shutdown ( ) { try { logger . info ( "<STR_LIT>" ) ; try { _core . shutdown ( ) ; pluginRegistry . stop ( ) ; _clusterClient . removeNode ( _id ) ; _clusterClient . shutdown ( ) ; _serverNode = null ; _core . getPluggableSearchEngineManager ( ) . close ( ) ; } catch ( Exception e ) { logger . warn ( e . getMessage ( ) ) ; } finally { if ( _networkServer != null ) { _networkServer . shutdown ( ) ; } } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } MetricFactory . stop ( ) ; JmxUtil . unregisterMBeans ( ) ; } public void start ( boolean available ) throws Exception { MetricFactory . start ( ) ; _core . start ( ) ; String clusterName = _clusterClient . getServiceName ( ) ; logger . info ( "<STR_LIT>" + clusterName ) ; logger . info ( "<STR_LIT>" + _clusterClient . toString ( ) ) ; AbstractSenseiCoreService coreSenseiService = new CoreSenseiServiceImpl ( _core ) ; AbstractSenseiCoreService sysSenseiCoreService = new SysSenseiCoreServiceImpl ( _core ) ; SenseiCoreServiceMessageHandler senseiMsgHandler = new SenseiCoreServiceMessageHandler ( coreSenseiService ) ; SenseiCoreServiceMessageHandler senseiSysMsgHandler = new SenseiCoreServiceMessageHandler ( sysSenseiCoreService ) ; _networkServer . registerHandler ( senseiMsgHandler , coreSenseiService . getSerializer ( ) ) ; _networkServer . registerHandler ( senseiSysMsgHandler , sysSenseiCoreService . getSerializer ( ) ) ; _networkServer . registerHandler ( senseiMsgHandler , CoreSenseiServiceImpl . JAVA_SERIALIZER ) ; _networkServer . registerHandler ( senseiSysMsgHandler , SysSenseiCoreServiceImpl . JAVA_SERIALIZER ) ; if ( _externalSvc != null ) { for ( AbstractSenseiCoreService svc : _externalSvc ) { _networkServer . registerHandler ( new SenseiCoreServiceMessageHandler ( svc ) , svc . getSerializer ( ) ) ; } } HashSet < Integer > partition = new HashSet < Integer > ( ) ; for ( int partId : _partitions ) { partition . add ( partId ) ; } boolean nodeExists = false ; try { logger . info ( "<STR_LIT>" ) ; _clusterClient . awaitConnectionUninterruptibly ( ) ; _serverNode = _clusterClient . getNodeWithId ( _id ) ; nodeExists = ( _serverNode != null ) ; if ( ! nodeExists ) { String ipAddr = getLocalIpAddress ( ) ; logger . info ( "<STR_LIT>" + _id + "<STR_LIT>" + ipAddr ) ; _serverNode = _clusterClient . addNode ( _id , ipAddr , partition ) ; logger . info ( "<STR_LIT>" + _id ) ; } else { } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; throw e ; } try { logger . info ( "<STR_LIT>" ) ; _networkServer . bind ( _id , available ) ; Thread . sleep ( <NUM_LIT:1000> ) ; _available = available ; logger . info ( "<STR_LIT>" + available + "<STR_LIT>" ) ; if ( nodeExists ) { logger . warn ( "<STR_LIT>" ) ; try { _clusterClient . removeNode ( _id ) ; _serverNode = null ; } catch ( Exception e ) { logger . error ( "<STR_LIT>" + e . getMessage ( ) , e ) ; } String ipAddr = getLocalIpAddress ( ) ; _serverNode = _clusterClient . addNode ( _id , ipAddr , partition ) ; Thread . sleep ( <NUM_LIT:1000> ) ; logger . info ( "<STR_LIT>" + _id ) ; } } catch ( NetworkingException e ) { logger . error ( e . getMessage ( ) , e ) ; try { if ( ! nodeExists ) { _clusterClient . removeNode ( _id ) ; _serverNode = null ; } } catch ( Exception ex ) { logger . warn ( ex . getMessage ( ) ) ; } finally { try { _networkServer . shutdown ( ) ; _networkServer = null ; } finally { _clusterClient . 
shutdown ( ) ; _clusterClient = null ; } } throw e ; } SenseiServerAdminMBean senseiAdminMBean = getAdminMBean ( ) ; StandardMBean bean = new StandardMBean ( senseiAdminMBean , SenseiServerAdminMBean . class ) ; JmxUtil . registerMBean ( bean , "<STR_LIT:name>" , "<STR_LIT>" + _id ) ; } private String getLocalIpAddress ( ) throws SocketException , UnknownHostException { String addr = NetUtil . getHostAddress ( ) ; return String . format ( "<STR_LIT>" , addr , _port ) ; } private SenseiServerAdminMBean getAdminMBean ( ) { return new SenseiServerAdminMBean ( ) { @ Override public int getId ( ) { return _id ; } @ Override public int getPort ( ) { return _port ; } @ Override public String getPartitions ( ) { StringBuffer sb = new StringBuffer ( ) ; if ( _partitions . length > <NUM_LIT:0> ) sb . append ( String . valueOf ( _partitions [ <NUM_LIT:0> ] ) ) ; for ( int i = <NUM_LIT:1> ; i < _partitions . length ; i ++ ) { sb . append ( '<CHAR_LIT:U+002C>' ) ; sb . append ( String . valueOf ( _partitions [ i ] ) ) ; } return sb . toString ( ) ; } @ Override public boolean isAvailable ( ) { return SenseiServer . this . isAvailable ( ) ; } @ Override public void setAvailable ( boolean available ) { SenseiServer . this . setAvailable ( available ) ; } } ; } public void setAvailable ( boolean available ) { if ( available ) { logger . info ( "<STR_LIT>" + _id + "<STR_LIT>" + _port + "<STR_LIT>" + Arrays . toString ( _partitions ) ) ; _networkServer . markAvailable ( ) ; try { Thread . sleep ( <NUM_LIT:1000> ) ; } catch ( InterruptedException e ) { } } else { logger . info ( "<STR_LIT>" + _id + "<STR_LIT>" + _port + "<STR_LIT>" + Arrays . toString ( _partitions ) ) ; _networkServer . markUnavailable ( ) ; } _available = available ; } public boolean isAvailable ( ) { if ( _serverNode != null && _serverNode . isAvailable ( ) == _available ) return _available ; try { Thread . sleep ( <NUM_LIT:1000> ) ; _serverNode = _clusterClient . getNodeWithId ( _id ) ; if ( _serverNode != null && _serverNode . isAvailable ( ) == _available ) return _available ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } _available = ( _serverNode != null ? _serverNode . isAvailable ( ) : false ) ; return _available ; } public static void main ( String [ ] args ) throws Exception { if ( args . length < <NUM_LIT:1> ) { System . out . println ( help ( ) ) ; System . exit ( <NUM_LIT:1> ) ; } File confDir = null ; try { confDir = new File ( args [ <NUM_LIT:0> ] ) ; } catch ( Exception e ) { System . out . println ( help ( ) ) ; System . exit ( <NUM_LIT:1> ) ; } boolean available = true ; for ( int i = <NUM_LIT:1> ; i < args . length ; i ++ ) { if ( args [ i ] != null ) { if ( AVAILABLE . equalsIgnoreCase ( args [ i ] ) ) { available = true ; } if ( UNAVAILABLE . equalsIgnoreCase ( args [ i ] ) ) { available = false ; } } } SenseiServerBuilder senseiServerBuilder = new SenseiServerBuilder ( confDir , null ) ; final SenseiServer server = senseiServerBuilder . buildServer ( ) ; final Server jettyServer = senseiServerBuilder . buildHttpRestServer ( ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ) { @ Override public void run ( ) { try { jettyServer . stop ( ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } finally { try { server . shutdown ( ) ; } finally { } } } } ) ; server . start ( available ) ; jettyServer . start ( ) ; } } </s>
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; public class SenseiGenericRequest implements Serializable { private static final long serialVersionUID = <NUM_LIT:1L> ; private String classname ; private Serializable request ; public String getClassname ( ) { return classname ; } public void setClassname ( String classname ) { this . classname = classname ; } public Serializable getRequest ( ) { return request ; } public void setRequest ( Serializable request ) { this . request = request ; } } </s>
|
<s> package com . senseidb . search . req ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . HashMap ; import java . util . Iterator ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . Random ; import java . util . Set ; import org . apache . lucene . search . SortField ; import org . json . JSONObject ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . api . FacetSpec ; import com . browseengine . bobo . facets . FacetHandlerInitializerParam ; import com . senseidb . search . req . mapred . SenseiMapReduce ; import com . senseidb . util . RequestConverter2 ; public class SenseiRequest implements AbstractSenseiRequest , Cloneable { private static final long serialVersionUID = <NUM_LIT:1L> ; private long tid = - <NUM_LIT:1> ; private HashMap < String , BrowseSelection > _selections ; private ArrayList < SortField > _sortSpecs ; private Map < String , FacetSpec > _facetSpecMap ; private Map < String , Integer > _origFacetSpecMaxCounts ; private SenseiQuery _query ; private int _offset ; private int _count ; private int _origOffset ; private int _origCount ; private boolean _fetchStoredFields ; private boolean _origFetchStoredFields ; private boolean _fetchStoredValue ; private Map < String , FacetHandlerInitializerParam > _facetInitParamMap ; private Set < Integer > _partitions ; private boolean _showExplanation ; private static Random _rand = new Random ( System . nanoTime ( ) ) ; private String _routeParam ; private String _groupBy ; private String [ ] _groupByMulti ; private int _maxPerGroup ; private Set < String > _termVectorsToFetch ; private List < String > _selectList ; private SenseiMapReduce mapReduceFunction ; private List < SenseiError > errors ; public SenseiRequest ( ) { _facetInitParamMap = new HashMap < String , FacetHandlerInitializerParam > ( ) ; _selections = new HashMap < String , BrowseSelection > ( ) ; _sortSpecs = new ArrayList < SortField > ( ) ; _facetSpecMap = new HashMap < String , FacetSpec > ( ) ; _fetchStoredFields = false ; _fetchStoredValue = false ; _partitions = null ; _showExplanation = false ; _routeParam = null ; _groupBy = null ; _groupByMulti = null ; _maxPerGroup = <NUM_LIT:0> ; _termVectorsToFetch = null ; _selectList = null ; } public Set < String > getTermVectorsToFetch ( ) { return _termVectorsToFetch ; } public void setTermVectorsToFetch ( Set < String > termVectorsToFetch ) { _termVectorsToFetch = termVectorsToFetch ; } public final long getTid ( ) { return tid ; } public final void setTid ( long tid ) { this . tid = tid ; } public boolean isShowExplanation ( ) { return _showExplanation ; } public void setShowExplanation ( boolean showExplanation ) { _showExplanation = showExplanation ; } public void setPartitions ( Set < Integer > partitions ) { _partitions = partitions ; } public Set < Integer > getPartitions ( ) { return _partitions ; } public void setRouteParam ( String routeParam ) { _routeParam = routeParam ; } public String getRouteParam ( ) { if ( _routeParam != null ) return _routeParam ; return String . valueOf ( _rand . nextInt ( ) ) ; } public void setGroupBy ( String [ ] groupBy ) { _groupByMulti = groupBy ; if ( _groupByMulti != null && _groupByMulti . 
length != <NUM_LIT:0> ) _groupBy = _groupByMulti [ <NUM_LIT:0> ] ; } public String [ ] getGroupBy ( ) { if ( _groupByMulti == null && _groupBy != null ) _groupByMulti = new String [ ] { _groupBy } ; return _groupByMulti ; } public void setMaxPerGroup ( int maxPerGroup ) { _maxPerGroup = maxPerGroup ; } public int getMaxPerGroup ( ) { return _maxPerGroup ; } public Map < String , FacetHandlerInitializerParam > getFacetHandlerInitParamMap ( ) { return _facetInitParamMap ; } public void setFacetHandlerInitParamMap ( Map < String , FacetHandlerInitializerParam > paramMap ) { _facetInitParamMap = paramMap ; } public void putAllFacetHandlerInitializerParams ( Map < String , FacetHandlerInitializerParam > params ) { _facetInitParamMap . putAll ( params ) ; } public void setFacetHandlerInitializerParam ( String name , FacetHandlerInitializerParam param ) { _facetInitParamMap . put ( name , param ) ; } public FacetHandlerInitializerParam getFacetHandlerInitializerParam ( String name ) { return _facetInitParamMap . get ( name ) ; } public Set < String > getSelectionNames ( ) { return _selections . keySet ( ) ; } public void removeSelection ( String name ) { _selections . remove ( name ) ; } public void setFacetSpecs ( Map < String , FacetSpec > facetSpecMap ) { _facetSpecMap = facetSpecMap ; } public Map < String , FacetSpec > getFacetSpecs ( ) { return _facetSpecMap ; } public void saveState ( ) { _origOffset = _offset ; _origCount = _count ; _origFetchStoredFields = _fetchStoredFields ; if ( _origFacetSpecMaxCounts == null && _facetSpecMap != null ) { _origFacetSpecMaxCounts = new HashMap < String , Integer > ( ) ; for ( Map . Entry < String , FacetSpec > entry : _facetSpecMap . entrySet ( ) ) { FacetSpec spec = entry . getValue ( ) ; if ( spec != null ) { _origFacetSpecMaxCounts . put ( entry . getKey ( ) , spec . getMaxCount ( ) ) ; } } } } public void restoreState ( ) { _offset = _origOffset ; _count = _origCount ; _fetchStoredFields = _origFetchStoredFields ; if ( _facetSpecMap != null ) { for ( Map . Entry < String , FacetSpec > entry : _facetSpecMap . entrySet ( ) ) { FacetSpec spec = entry . getValue ( ) ; if ( spec != null ) { spec . setMaxCount ( _origFacetSpecMaxCounts . get ( entry . getKey ( ) ) ) ; } } } } public int getSelectionCount ( ) { return _selections . size ( ) ; } public void clearSelections ( ) { _selections . clear ( ) ; } public int getFacetSpecCount ( ) { return _facetSpecMap . size ( ) ; } public void clearSort ( ) { _sortSpecs . clear ( ) ; } public boolean isFetchStoredFields ( ) { return _fetchStoredFields ; } public void setFetchStoredFields ( boolean fetchStoredFields ) { _fetchStoredFields = fetchStoredFields ; } public boolean isFetchStoredValue ( ) { return _fetchStoredValue ; } public void setFetchStoredValue ( boolean fetchStoredValue ) { _fetchStoredValue = fetchStoredValue ; } public void setFacetSpec ( String name , FacetSpec facetSpec ) { _facetSpecMap . put ( name , facetSpec ) ; } public FacetSpec getFacetSpec ( String name ) { return _facetSpecMap . 
get ( name ) ; } public int getCount ( ) { return _count ; } public void setCount ( int count ) { _count = count ; } public int getOffset ( ) { return _offset ; } public void setOffset ( int offset ) { _offset = offset ; } public void setQuery ( SenseiQuery query ) { _query = query ; } public SenseiQuery getQuery ( ) { return _query ; } public void addSelections ( BrowseSelection [ ] selections ) { for ( BrowseSelection selection : selections ) { addSelection ( selection ) ; } } public void addSelection ( BrowseSelection sel ) { _selections . put ( sel . getFieldName ( ) , sel ) ; } public BrowseSelection [ ] getSelections ( ) { return _selections . values ( ) . toArray ( new BrowseSelection [ _selections . size ( ) ] ) ; } public BrowseSelection getSelection ( String fieldname ) { return _selections . get ( fieldname ) ; } public void addSortField ( SortField sortSpec ) { _sortSpecs . add ( sortSpec ) ; } public void addSortFields ( SortField [ ] sortSpecs ) { for ( SortField field : sortSpecs ) { addSortField ( field ) ; } } public SortField [ ] getSort ( ) { return _sortSpecs . toArray ( new SortField [ _sortSpecs . size ( ) ] ) ; } public void setSort ( SortField [ ] sorts ) { _sortSpecs . clear ( ) ; for ( int i = <NUM_LIT:0> ; i < sorts . length ; ++ i ) { _sortSpecs . add ( sorts [ i ] ) ; } } public void setSelectList ( List < String > selectList ) { _selectList = selectList ; } public List < String > getSelectList ( ) { return _selectList ; } public static final SortField FIELD_SCORE = new SortField ( null , SortField . SCORE ) ; public static final SortField FIELD_SCORE_REVERSE = new SortField ( null , SortField . SCORE , true ) ; public static final SortField FIELD_DOC = new SortField ( null , SortField . DOC ) ; public static final SortField FIELD_DOC_REVERSE = new SortField ( null , SortField . DOC , true ) ; @ Override public String toString ( ) { StringBuilder buf = new StringBuilder ( ) ; if ( _query != null ) buf . append ( "<STR_LIT>" ) . append ( _query . toString ( ) ) . append ( '<STR_LIT:\n>' ) ; buf . append ( "<STR_LIT>" ) . append ( _offset ) . append ( '<CHAR_LIT:U+002C>' ) . append ( _count ) . append ( "<STR_LIT>" ) ; if ( _sortSpecs != null ) buf . append ( "<STR_LIT>" ) . append ( _sortSpecs ) . append ( '<STR_LIT:\n>' ) ; if ( _selections != null ) buf . append ( "<STR_LIT>" ) . append ( _selections ) . append ( '<STR_LIT:\n>' ) ; if ( _facetSpecMap != null ) buf . append ( "<STR_LIT>" ) . append ( _facetSpecMap ) . append ( '<STR_LIT:\n>' ) ; if ( _routeParam != null ) buf . append ( "<STR_LIT>" ) . append ( _routeParam ) . append ( '<STR_LIT:\n>' ) ; if ( _groupBy != null ) buf . append ( "<STR_LIT>" ) . append ( _groupBy ) . append ( '<STR_LIT:\n>' ) ; buf . append ( "<STR_LIT>" ) . append ( _maxPerGroup ) . append ( '<STR_LIT:\n>' ) ; buf . append ( "<STR_LIT>" ) . append ( _fetchStoredFields ) . append ( '<STR_LIT:\n>' ) ; buf . append ( "<STR_LIT>" ) . append ( _fetchStoredValue ) ; return buf . toString ( ) ; } @ Override public SenseiRequest clone ( ) { SenseiRequest clone = new SenseiRequest ( ) ; clone . setTid ( this . getTid ( ) ) ; BrowseSelection [ ] selections = this . getSelections ( ) ; for ( BrowseSelection selection : selections ) clone . addSelection ( selection ) ; for ( SortField sort : this . getSort ( ) ) clone . addSortField ( sort ) ; Map < String , FacetSpec > cloneFacetSpecs = new HashMap < String , FacetSpec > ( ) ; for ( Entry < String , FacetSpec > facetSpec : this . getFacetSpecs ( ) . entrySet ( ) ) { cloneFacetSpecs . 
put ( facetSpec . getKey ( ) , facetSpec . getValue ( ) . clone ( ) ) ; } clone . setFacetSpecs ( cloneFacetSpecs ) ; clone . setQuery ( this . getQuery ( ) ) ; clone . setOffset ( this . getOffset ( ) ) ; clone . setCount ( this . getCount ( ) ) ; clone . setFetchStoredFields ( this . isFetchStoredFields ( ) ) ; clone . setFetchStoredValue ( this . isFetchStoredValue ( ) ) ; clone . setFacetHandlerInitParamMap ( this . getFacetHandlerInitParamMap ( ) ) ; clone . setPartitions ( this . getPartitions ( ) ) ; clone . setShowExplanation ( this . isShowExplanation ( ) ) ; clone . setRouteParam ( this . getRouteParam ( ) ) ; clone . setGroupBy ( this . getGroupBy ( ) ) ; clone . setMaxPerGroup ( this . getMaxPerGroup ( ) ) ; clone . setTermVectorsToFetch ( this . getTermVectorsToFetch ( ) ) ; clone . setSelectList ( this . getSelectList ( ) ) ; clone . setMapReduceFunction ( this . getMapReduceFunction ( ) ) ; return clone ; } @ Override public boolean equals ( Object o ) { if ( ! ( o instanceof SenseiRequest ) ) return false ; SenseiRequest b = ( SenseiRequest ) o ; if ( getCount ( ) != b . getCount ( ) ) return false ; if ( getOffset ( ) != b . getOffset ( ) ) return false ; if ( ! facetSpecsAreEqual ( getFacetSpecs ( ) , b . getFacetSpecs ( ) ) ) return false ; if ( ! selectionsAreEqual ( getSelections ( ) , b . getSelections ( ) ) ) return false ; if ( ! initParamsAreEqual ( getFacetHandlerInitParamMap ( ) , b . getFacetHandlerInitParamMap ( ) ) ) return false ; if ( ! Arrays . equals ( getSort ( ) , b . getSort ( ) ) ) return false ; if ( getQuery ( ) == null ) { if ( b . getQuery ( ) != null ) return false ; } else { if ( ! getQuery ( ) . toString ( ) . equals ( b . getQuery ( ) . toString ( ) ) ) return false ; } if ( getGroupBy ( ) == null ) { if ( b . getGroupBy ( ) != null ) return false ; } else { if ( ! Arrays . equals ( getGroupBy ( ) , b . getGroupBy ( ) ) ) return false ; } if ( getMaxPerGroup ( ) != b . getMaxPerGroup ( ) ) return false ; if ( getPartitions ( ) == null ) { if ( b . getPartitions ( ) != null ) return false ; } else { if ( ! setsAreEqual ( getPartitions ( ) , b . getPartitions ( ) ) ) return false ; } return true ; } private boolean initParamsAreEqual ( Map < String , FacetHandlerInitializerParam > a , Map < String , FacetHandlerInitializerParam > b ) { if ( a . size ( ) != b . size ( ) ) return false ; for ( Entry < String , FacetHandlerInitializerParam > entry : a . entrySet ( ) ) { String key = entry . getKey ( ) ; if ( ! b . containsKey ( key ) ) return false ; if ( ! areFacetHandlerInitializerParamsEqual ( entry . getValue ( ) , b . get ( key ) ) ) return false ; } return true ; } private boolean areFacetHandlerInitializerParamsEqual ( FacetHandlerInitializerParam a , FacetHandlerInitializerParam b ) { if ( ! setsAreEqual ( a . getBooleanParamNames ( ) , b . getBooleanParamNames ( ) ) ) return false ; if ( ! setsAreEqual ( a . getIntParamNames ( ) , b . getIntParamNames ( ) ) ) return false ; if ( ! setsAreEqual ( a . getDoubleParamNames ( ) , b . getDoubleParamNames ( ) ) ) return false ; if ( ! setsAreEqual ( a . getLongParamNames ( ) , b . getLongParamNames ( ) ) ) return false ; if ( ! setsAreEqual ( a . getStringParamNames ( ) , b . getStringParamNames ( ) ) ) return false ; if ( ! setsAreEqual ( a . getByteArrayParamNames ( ) , b . getByteArrayParamNames ( ) ) ) return false ; for ( String name : a . getBooleanParamNames ( ) ) { if ( ! Arrays . equals ( a . getBooleanParam ( name ) , b . getBooleanParam ( name ) ) ) return false ; } for ( String name : a .
getIntParamNames ( ) ) { if ( ! Arrays . equals ( a . getIntParam ( name ) , b . getIntParam ( name ) ) ) return false ; } for ( String name : a . getDoubleParamNames ( ) ) { if ( ! Arrays . equals ( a . getDoubleParam ( name ) , b . getDoubleParam ( name ) ) ) return false ; } for ( String name : a . getLongParamNames ( ) ) { if ( ! Arrays . equals ( a . getLongParam ( name ) , b . getLongParam ( name ) ) ) return false ; } for ( String name : a . getStringParamNames ( ) ) { if ( ! Arrays . equals ( a . getStringParam ( name ) . toArray ( new String [ <NUM_LIT:0> ] ) , b . getStringParam ( name ) . toArray ( new String [ <NUM_LIT:0> ] ) ) ) return false ; } return true ; } private boolean facetSpecsAreEqual ( Map < String , FacetSpec > a , Map < String , FacetSpec > b ) { if ( a . size ( ) != b . size ( ) ) return false ; for ( Entry < String , FacetSpec > entry : a . entrySet ( ) ) { String key = entry . getKey ( ) ; if ( ! ( b . containsKey ( key ) ) ) return false ; if ( ! facetSpecsAreEqual ( entry . getValue ( ) , b . get ( key ) ) ) return false ; } return true ; } private boolean facetSpecsAreEqual ( FacetSpec a , FacetSpec b ) { return ( a . getMaxCount ( ) == b . getMaxCount ( ) ) && ( a . getMinHitCount ( ) == b . getMinHitCount ( ) ) && ( a . getOrderBy ( ) == b . getOrderBy ( ) ) && ( a . isExpandSelection ( ) == b . isExpandSelection ( ) ) ; } private boolean selectionsAreEqual ( BrowseSelection [ ] a , BrowseSelection [ ] b ) { if ( a . length != b . length ) return false ; for ( int i = <NUM_LIT:0> ; i < a . length ; i ++ ) { if ( ! selectionsAreEqual ( a [ i ] , b [ i ] ) ) return false ; } return true ; } private boolean selectionsAreEqual ( BrowseSelection a , BrowseSelection b ) { return ( a . getFieldName ( ) . equals ( b . getFieldName ( ) ) ) && ( Arrays . equals ( a . getValues ( ) , b . getValues ( ) ) ) && ( Arrays . equals ( a . getNotValues ( ) , b . getNotValues ( ) ) ) && ( a . getSelectionOperation ( ) . equals ( b . getSelectionOperation ( ) ) ) && ( a . getSelectionProperties ( ) . equals ( b . getSelectionProperties ( ) ) ) ; } public SenseiMapReduce getMapReduceFunction ( ) { return mapReduceFunction ; } public void setMapReduceFunction ( SenseiMapReduce mapReduceFunction ) { this . mapReduceFunction = mapReduceFunction ; } public List < SenseiError > getErrors ( ) { if ( errors == null ) errors = new ArrayList < SenseiError > ( ) ; return errors ; } public void addError ( SenseiError error ) { if ( errors == null ) errors = new ArrayList < SenseiError > ( ) ; errors . add ( error ) ; } private < T > boolean setsAreEqual ( Set < T > a , Set < T > b ) { if ( a . size ( ) != b . size ( ) ) return false ; Iterator < T > iter = a . iterator ( ) ; while ( iter . hasNext ( ) ) { T val = iter . next ( ) ; if ( ! b . contains ( val ) ) return false ; } return true ; } public static SenseiRequest fromJSON ( final JSONObject json , final Map < String , String [ ] > facetInfoMap ) throws Exception { return RequestConverter2 . fromJSON ( json , facetInfoMap ) ; } } </s>
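For reference, a small sketch of building a SenseiRequest by hand. The field names and values are hypothetical, and the selection/facet classes come from Bobo as imported above:

    import org.apache.lucene.search.SortField;
    import com.browseengine.bobo.api.BrowseSelection;
    import com.browseengine.bobo.api.FacetSpec;
    import com.senseidb.search.req.SenseiRequest;

    public class SenseiRequestExample {
      public static SenseiRequest buildExampleRequest() {
        SenseiRequest req = new SenseiRequest();
        req.setOffset(0);
        req.setCount(10);
        req.setFetchStoredFields(true);

        // Restrict to documents whose "color" facet is "red" (illustrative field).
        BrowseSelection sel = new BrowseSelection("color");
        sel.addValue("red");
        req.addSelection(sel);

        // Ask for the top 5 counts of the "category" facet (illustrative field).
        FacetSpec spec = new FacetSpec();
        spec.setMaxCount(5);
        req.setFacetSpec("category", spec);

        // Sort by relevance, then by document order.
        req.addSortFields(new SortField[] { SenseiRequest.FIELD_SCORE, SenseiRequest.FIELD_DOC });
        return req;
      }
    }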
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; public class SenseiGenericResult implements Serializable { private static final long serialVersionUID = <NUM_LIT:1L> ; private String classname ; private Serializable result ; public String getClassname ( ) { return classname ; } public void setClassname ( String classname ) { this . classname = classname ; } public Serializable getResult ( ) { return result ; } public void setResult ( Serializable result ) { this . result = result ; } } </s>
|
<s> package com . senseidb . search . req . mapred ; public enum CombinerStage { partitionLevel , nodeLevel } </s>
|
<s> package com . senseidb . search . req . mapred ; import java . io . Serializable ; import java . util . List ; import org . json . JSONObject ; public interface SenseiMapReduce < MapResult extends Serializable , ReduceResult extends Serializable > extends Serializable { public void init ( JSONObject params ) ; public MapResult map ( int [ ] docIds , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountsAccessor ) ; public List < MapResult > combine ( List < MapResult > mapResults , CombinerStage combinerStage ) ; public ReduceResult reduce ( List < MapResult > combineResults ) ; public JSONObject render ( ReduceResult reduceResult ) ; } </s>
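For illustration, a minimal sketch of how the SenseiMapReduce lifecycle above (init, map over document batches, combine per partition/node, reduce, render) can be implemented. The CountMapReduce class below is hypothetical and not part of the SenseiDB sources in this corpus; it assumes only the interface and the supporting types shown here.

import java.util.Collections;
import java.util.List;
import org.json.JSONException;
import org.json.JSONObject;
import com.senseidb.search.req.mapred.CombinerStage;
import com.senseidb.search.req.mapred.FacetCountAccessor;
import com.senseidb.search.req.mapred.FieldAccessor;
import com.senseidb.search.req.mapred.SenseiMapReduce;

// Hypothetical example: counts matching documents across all partitions and nodes.
public class CountMapReduce implements SenseiMapReduce<Long, Long> {
  @Override
  public void init(JSONObject params) {
    // a plain document count needs no parameters
  }

  @Override
  public Long map(int[] docIds, int docIdCount, long[] uids,
                  FieldAccessor accessor, FacetCountAccessor facetCounts) {
    return (long) docIdCount; // each map call sees one batch of matching docs
  }

  @Override
  public List<Long> combine(List<Long> mapResults, CombinerStage stage) {
    long sum = 0;
    for (Long partial : mapResults) {
      sum += partial;
    }
    return Collections.singletonList(sum); // collapse partials at partition or node level
  }

  @Override
  public Long reduce(List<Long> combineResults) {
    long total = 0;
    for (Long partial : combineResults) {
      total += partial;
    }
    return total; // cluster-wide count
  }

  @Override
  public JSONObject render(Long reduceResult) {
    try {
      return new JSONObject().put("count", reduceResult);
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }
}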
|
<s> package com . senseidb . search . req . mapred . functions ; import java . io . Serializable ; import java . util . List ; import org . json . JSONException ; import org . json . JSONObject ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class AvgMapReduce implements SenseiMapReduce < AvgResult , AvgResult > { private String column ; @ Override public void init ( JSONObject params ) { column = params . optString ( "<STR_LIT>" ) ; if ( column == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } @ Override public AvgResult map ( int [ ] docId , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountAccessor ) { double ret = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; i < docIdCount ; i ++ ) { ret += accessor . getDouble ( column , docId [ i ] ) ; } return new AvgResult ( ret / docIdCount , docIdCount ) ; } @ Override public List < AvgResult > combine ( List < AvgResult > mapResults , CombinerStage combinerStage ) { AvgResult avgResult = reduce ( mapResults ) ; mapResults . clear ( ) ; mapResults . add ( avgResult ) ; return mapResults ; } @ Override public AvgResult reduce ( List < AvgResult > combineResults ) { if ( combineResults . isEmpty ( ) ) { return null ; } int minCount = Integer . MAX_VALUE ; for ( AvgResult avgResult : combineResults ) { if ( avgResult == null || avgResult . count == <NUM_LIT:0> ) { continue ; } if ( minCount > avgResult . count ) { minCount = avgResult . count ; } } if ( minCount == Integer . MAX_VALUE ) { return null ; } double accumulatedValue = <NUM_LIT:0> ; int accumulatedCount = <NUM_LIT:0> ; for ( AvgResult avgResult : combineResults ) { if ( avgResult == null || avgResult . count == <NUM_LIT:0> ) { continue ; } accumulatedValue += avgResult . value / minCount * avgResult . count ; accumulatedCount += avgResult . count ; } double ret = accumulatedValue / ( ( double ) accumulatedCount / minCount ) ; return new AvgResult ( ret , accumulatedCount ) ; } @ Override public JSONObject render ( AvgResult reduceResult ) { try { return new JSONObject ( ) . put ( "<STR_LIT>" , reduceResult . value ) . put ( "<STR_LIT:count>" , reduceResult . count ) ; } catch ( JSONException e ) { throw new RuntimeException ( e ) ; } } } class AvgResult implements Serializable { public double value ; public int count ; public AvgResult ( double value , int count ) { super ( ) ; this . value = value ; this . count = count ; } } </s>
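A note on the reduce step in AvgMapReduce above, since the scaling by minCount is easy to misread: dividing each partial's value by the smallest non-zero count before accumulating only rescales the arithmetic (presumably to keep intermediate sums small), and the final result is still the ordinary weighted mean sum(value_i * count_i) / sum(count_i). For example, given two partials (value=2.0, count=10) and (value=5.0, count=30): minCount = 10, accumulatedValue = 2.0/10*10 + 5.0/10*30 = 2.0 + 15.0 = 17.0, accumulatedCount = 40, and the result is 17.0 / (40/10) = 4.25, which matches (2.0*10 + 5.0*30) / 40 = 170/40 = 4.25.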
|
<s> package com . senseidb . search . req . mapred . functions ; import java . util . List ; import javax . management . RuntimeErrorException ; import org . json . JSONException ; import org . json . JSONObject ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class SumMapReduce implements SenseiMapReduce < Double , Double > { private String column ; @ Override public void init ( JSONObject params ) { column = params . optString ( "<STR_LIT>" ) ; if ( column == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } @ Override public Double map ( int [ ] docIds , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountAccessor ) { double ret = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; i < docIdCount ; i ++ ) { ret += accessor . getDouble ( column , docIds [ i ] ) ; } return ret ; } @ Override public List < Double > combine ( List < Double > mapResults , CombinerStage combinerStage ) { double ret = <NUM_LIT:0> ; for ( Double count : mapResults ) { ret += count ; } mapResults . clear ( ) ; mapResults . add ( ret ) ; return mapResults ; } @ Override public Double reduce ( List < Double > combineResults ) { double ret = <NUM_LIT:0> ; for ( Double count : combineResults ) { ret += count ; } return ret ; } @ Override public JSONObject render ( Double reduceResult ) { try { return new JSONObject ( ) . put ( "<STR_LIT>" , reduceResult ) ; } catch ( JSONException e ) { throw new RuntimeException ( e ) ; } } } </s>
|
<s> package com . senseidb . search . req . mapred . functions ; import java . io . Serializable ; import java . util . List ; import org . json . JSONException ; import org . json . JSONObject ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class MinMapReduce implements SenseiMapReduce < MinResult , MinResult > { private String column ; @ Override public MinResult map ( int [ ] docIds , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountAccessor ) { double min = Double . MAX_VALUE ; double tmp = <NUM_LIT:0> ; long uid = <NUM_LIT> ; for ( int i = <NUM_LIT:0> ; i < docIdCount ; i ++ ) { tmp = accessor . getDouble ( column , docIds [ i ] ) ; if ( min > tmp ) { min = tmp ; uid = uids [ docIds [ i ] ] ; } } return new MinResult ( min , uid ) ; } @ Override public List < MinResult > combine ( List < MinResult > mapResults , CombinerStage combinerStage ) { if ( mapResults . isEmpty ( ) ) { return mapResults ; } MinResult ret = mapResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < mapResults . size ( ) ; i ++ ) { if ( ret . value > mapResults . get ( i ) . value ) { ret = mapResults . get ( i ) ; } } mapResults . clear ( ) ; mapResults . add ( ret ) ; return mapResults ; } @ Override public MinResult reduce ( List < MinResult > combineResults ) { if ( combineResults . isEmpty ( ) ) { return null ; } MinResult ret = combineResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < combineResults . size ( ) ; i ++ ) { if ( ret . value > combineResults . get ( i ) . value ) { ret = combineResults . get ( i ) ; } } return ret ; } @ Override public JSONObject render ( MinResult reduceResult ) { try { return new JSONObject ( ) . put ( "<STR_LIT>" , reduceResult . value ) . put ( "<STR_LIT>" , reduceResult . uid ) ; } catch ( JSONException ex ) { throw new RuntimeException ( ex ) ; } } @ Override public void init ( JSONObject params ) { column = params . optString ( "<STR_LIT>" ) ; if ( column == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } } class MinResult implements Serializable { public double value ; public long uid ; public MinResult ( double value , long uid ) { super ( ) ; this . value = value ; this . uid = uid ; } } </s>
|
<s> package com . senseidb . search . req . mapred . functions ; import java . io . Serializable ; import java . util . List ; import org . json . JSONException ; import org . json . JSONObject ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class MaxMapReduce implements SenseiMapReduce < MaxResult , MaxResult > { private String column ; @ Override public MaxResult map ( int [ ] docIds , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountAccessor ) { double max = - Double . MAX_VALUE ; double tmp = <NUM_LIT:0> ; long uid = <NUM_LIT> ; for ( int i = <NUM_LIT:0> ; i < docIdCount ; i ++ ) { tmp = accessor . getDouble ( column , docIds [ i ] ) ; if ( max < tmp ) { max = tmp ; uid = uids [ docIds [ i ] ] ; } } return new MaxResult ( max , uid ) ; } @ Override public List < MaxResult > combine ( List < MaxResult > mapResults , CombinerStage combinerStage ) { if ( mapResults . isEmpty ( ) ) { return mapResults ; } MaxResult ret = mapResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < mapResults . size ( ) ; i ++ ) { if ( ret . value < mapResults . get ( i ) . value ) { ret = mapResults . get ( i ) ; } } return java . util . Arrays . asList ( ret ) ; } @ Override public MaxResult reduce ( List < MaxResult > combineResults ) { if ( combineResults . isEmpty ( ) ) { return null ; } MaxResult ret = combineResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < combineResults . size ( ) ; i ++ ) { if ( ret . value < combineResults . get ( i ) . value ) { ret = combineResults . get ( i ) ; } } return ret ; } @ Override public JSONObject render ( MaxResult reduceResult ) { try { return new JSONObject ( ) . put ( "<STR_LIT>" , reduceResult . value ) . put ( "<STR_LIT>" , reduceResult . uid ) ; } catch ( JSONException ex ) { throw new RuntimeException ( ex ) ; } } @ Override public void init ( JSONObject params ) { column = params . optString ( "<STR_LIT>" ) ; if ( column == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } } class MaxResult implements Serializable { public double value ; public long uid ; public MaxResult ( double value , long uid ) { super ( ) ; this . value = value ; this . uid = uid ; } } </s>
|
<s> package com . senseidb . search . req . mapred . functions ; import it . unimi . dsi . fastutil . longs . LongOpenHashSet ; import java . util . List ; import org . json . JSONException ; import org . json . JSONObject ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class DistinctCountMapReduce implements SenseiMapReduce < LongOpenHashSet , Integer > { private String column ; @ Override public void init ( JSONObject params ) { column = params . optString ( "<STR_LIT>" ) ; if ( column == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } @ Override public LongOpenHashSet map ( int [ ] docId , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountAccessor ) { LongOpenHashSet hashSet = new LongOpenHashSet ( docIdCount ) ; for ( int i = <NUM_LIT:0> ; i < docIdCount ; i ++ ) { hashSet . add ( accessor . getLong ( column , docId [ i ] ) ) ; } return hashSet ; } @ Override public List < LongOpenHashSet > combine ( List < LongOpenHashSet > mapResults , CombinerStage combinerStage ) { if ( mapResults . isEmpty ( ) ) { return mapResults ; } LongOpenHashSet ret = mapResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < mapResults . size ( ) ; i ++ ) { ret . addAll ( mapResults . get ( i ) ) ; } mapResults . clear ( ) ; mapResults . add ( ret ) ; return mapResults ; } @ Override public Integer reduce ( List < LongOpenHashSet > combineResults ) { if ( combineResults . isEmpty ( ) ) { return <NUM_LIT:0> ; } LongOpenHashSet ret = combineResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < combineResults . size ( ) ; i ++ ) { ret . addAll ( combineResults . get ( i ) ) ; } return ret . size ( ) ; } @ Override public JSONObject render ( Integer reduceResult ) { try { return new JSONObject ( ) . put ( "<STR_LIT>" , reduceResult ) ; } catch ( JSONException ex ) { throw new RuntimeException ( ex ) ; } } } </s>
|
<s> package com . senseidb . search . req . mapred . functions ; import it . unimi . dsi . fastutil . longs . LongOpenHashSet ; import java . util . Arrays ; import java . util . HashSet ; import java . util . List ; import org . json . JSONException ; import org . json . JSONObject ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class HashSetDistinctCountMapReduce implements SenseiMapReduce < HashSet , Integer > { private String column ; @ Override public void init ( JSONObject params ) { column = params . optString ( "<STR_LIT>" ) ; if ( column == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } @ Override public HashSet map ( int [ ] docId , int docIdCount , long [ ] uids , FieldAccessor accessor , FacetCountAccessor facetCountAccessor ) { HashSet hashSet = new HashSet ( docIdCount ) ; for ( int i = <NUM_LIT:0> ; i < docIdCount ; i ++ ) { hashSet . add ( accessor . getLong ( column , docId [ i ] ) ) ; } return hashSet ; } @ Override public List < HashSet > combine ( List < HashSet > mapResults , CombinerStage combinerStage ) { if ( mapResults . isEmpty ( ) ) { return mapResults ; } HashSet ret = mapResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < mapResults . size ( ) ; i ++ ) { ret . addAll ( mapResults . get ( i ) ) ; } return Arrays . asList ( ret ) ; } @ Override public Integer reduce ( List < HashSet > combineResults ) { if ( combineResults . isEmpty ( ) ) { return <NUM_LIT:0> ; } HashSet ret = combineResults . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < combineResults . size ( ) ; i ++ ) { ret . addAll ( combineResults . get ( i ) ) ; } return ret . size ( ) ; } @ Override public JSONObject render ( Integer reduceResult ) { try { return new JSONObject ( ) . put ( "<STR_LIT>" , reduceResult ) ; } catch ( JSONException ex ) { throw new RuntimeException ( ex ) ; } } } </s>
|
<s> package com . senseidb . search . req . mapred ; import java . util . HashMap ; import java . util . HashSet ; import java . util . Map ; import java . util . Set ; import com . browseengine . bobo . facets . FacetCountCollector ; public class FacetCountAccessor { private Map < String , FacetCountCollector > facetCountCollectors = new HashMap < String , FacetCountCollector > ( ) ; ; public FacetCountAccessor ( FacetCountCollector [ ] facetCountCollectors ) { if ( facetCountCollectors != null ) { for ( FacetCountCollector facetCountCollector : facetCountCollectors ) { this . facetCountCollectors . put ( facetCountCollector . getName ( ) , facetCountCollector ) ; } } } public boolean areFacetCountsPresent ( ) { return ! facetCountCollectors . isEmpty ( ) ; } public int getFacetCount ( String facetName , Object value ) { if ( ! facetCountCollectors . containsKey ( facetName ) ) { return - <NUM_LIT:1> ; } return facetCountCollectors . get ( facetName ) . getFacetHitsCount ( value ) ; } public int getFacetCount ( String facetName , String value ) { if ( ! facetCountCollectors . containsKey ( facetName ) ) { return - <NUM_LIT:1> ; } return facetCountCollectors . get ( facetName ) . getFacet ( value ) . getFacetValueHitCount ( ) ; } public int getFacetCount ( String facetName , int valIndex ) { if ( ! facetCountCollectors . containsKey ( facetName ) ) { return - <NUM_LIT:1> ; } return facetCountCollectors . get ( facetName ) . getCountDistribution ( ) . get ( valIndex ) ; } public FacetCountCollector getFacetCollector ( String facetName ) { return facetCountCollectors . get ( facetName ) ; } public Set < FacetCountCollector > getFacetCountCollectors ( ) { return new HashSet < FacetCountCollector > ( facetCountCollectors . values ( ) ) ; } public static final FacetCountAccessor EMPTY = new FacetCountAccessor ( new FacetCountCollector [ <NUM_LIT:0> ] ) ; } </s>
|
<s> package com . senseidb . search . req . mapred ; import java . util . HashMap ; import java . util . HashSet ; import java . util . Map ; import java . util . Set ; import proj . zoie . api . DocIDMapper ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . data . FacetDataCache ; import com . browseengine . bobo . facets . data . MultiValueFacetDataCache ; import com . browseengine . bobo . facets . data . TermFloatList ; import com . browseengine . bobo . facets . data . TermIntList ; import com . browseengine . bobo . facets . data . TermLongList ; import com . browseengine . bobo . facets . data . TermNumberList ; import com . browseengine . bobo . facets . data . TermShortList ; import com . browseengine . bobo . facets . data . TermValueList ; import com . senseidb . search . req . SenseiSystemInfo . SenseiFacetInfo ; @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public final class FieldAccessor { private final Set < String > facets = new HashSet < String > ( ) ; private final BoboIndexReader boboIndexReader ; private FacetDataCache lastFacetDataCache ; private String lastFacetDataCacheName ; private Map < String , FacetDataCache > facetDataMap = new HashMap < String , FacetDataCache > ( ) ; private final DocIDMapper mapper ; public FieldAccessor ( Set < SenseiFacetInfo > facetInfos , BoboIndexReader boboIndexReader , DocIDMapper mapper ) { this . mapper = mapper ; for ( SenseiFacetInfo facetInfo : facetInfos ) { facets . add ( facetInfo . getName ( ) ) ; } this . boboIndexReader = boboIndexReader ; } public final FacetDataCache getValueCache ( String name ) { if ( name . equals ( lastFacetDataCacheName ) ) { return lastFacetDataCache ; } FacetDataCache ret = facetDataMap . get ( name ) ; if ( ret != null ) { lastFacetDataCache = ret ; lastFacetDataCacheName = name ; return ret ; } Object rawFacetData = boboIndexReader . getFacetData ( name ) ; if ( ! ( rawFacetData instanceof FacetDataCache ) ) { return null ; } ret = ( FacetDataCache ) rawFacetData ; facetDataMap . put ( name , ret ) ; return ret ; } public final Object get ( String fieldName , int docId ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache instanceof MultiValueFacetDataCache ) { return getArray ( fieldName , docId ) ; } if ( valueCache != null ) { return valueCache . valArray . getRawValue ( valueCache . orderArray . get ( docId ) ) ; } return getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) ; } public final String getString ( String fieldName , int docId ) { return getFacetHandler ( fieldName ) . getFieldValue ( boboIndexReader , docId ) ; } public final long getLong ( String fieldName , int docId ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache != null ) { if ( valueCache . valArray instanceof TermLongList ) { return ( ( TermLongList ) valueCache . valArray ) . getPrimitiveValue ( valueCache . orderArray . get ( docId ) ) ; } else { return ( long ) ( ( TermNumberList ) valueCache . valArray ) . getDoubleValue ( valueCache . orderArray . get ( docId ) ) ; } } else { Object value = getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) [ <NUM_LIT:0> ] ; if ( value instanceof Long ) { return ( Long ) value ; } if ( value instanceof Number ) { return ( ( Number ) value ) . longValue ( ) ; } if ( value instanceof String ) { return Long . 
parseLong ( ( String ) value ) ; } throw new UnsupportedOperationException ( "<STR_LIT>" + value . getClass ( ) + "<STR_LIT>" ) ; } } public final double getDouble ( String fieldName , int docId ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache != null ) { return ( ( TermNumberList ) valueCache . valArray ) . getDoubleValue ( valueCache . orderArray . get ( docId ) ) ; } else { Object value = getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) [ <NUM_LIT:0> ] ; if ( value instanceof Double ) { return ( Double ) value ; } if ( value instanceof Number ) { return ( ( Number ) value ) . doubleValue ( ) ; } if ( value instanceof String ) { return Double . parseDouble ( ( String ) value ) ; } throw new UnsupportedOperationException ( "<STR_LIT>" + value . getClass ( ) + "<STR_LIT>" ) ; } } public final short getShort ( String fieldName , int docId ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache != null ) { if ( valueCache . valArray instanceof TermShortList ) { return ( ( TermShortList ) valueCache . valArray ) . getPrimitiveValue ( valueCache . orderArray . get ( docId ) ) ; } else { return ( short ) ( ( TermNumberList ) valueCache . valArray ) . getDoubleValue ( valueCache . orderArray . get ( docId ) ) ; } } else { Object value = getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) [ <NUM_LIT:0> ] ; if ( value instanceof Short ) { return ( Short ) value ; } if ( value instanceof Number ) { return ( ( Number ) value ) . shortValue ( ) ; } if ( value instanceof String ) { return Short . parseShort ( ( String ) value ) ; } throw new UnsupportedOperationException ( "<STR_LIT>" + value . getClass ( ) + "<STR_LIT>" ) ; } } public final int getInteger ( String fieldName , int docId ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache != null ) { if ( valueCache . valArray instanceof TermIntList ) { return ( ( TermIntList ) valueCache . valArray ) . getPrimitiveValue ( valueCache . orderArray . get ( docId ) ) ; } else { return ( int ) ( ( TermNumberList ) valueCache . valArray ) . getDoubleValue ( valueCache . orderArray . get ( docId ) ) ; } } else { Object value = getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) [ <NUM_LIT:0> ] ; if ( value instanceof Integer ) { return ( Integer ) value ; } if ( value instanceof Number ) { return ( ( Number ) value ) . intValue ( ) ; } if ( value instanceof String ) { return Integer . parseInt ( ( String ) value ) ; } throw new UnsupportedOperationException ( "<STR_LIT>" + value . getClass ( ) + "<STR_LIT>" ) ; } } public final float getFloat ( String fieldName , int docId ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache != null ) { if ( valueCache . valArray instanceof TermFloatList ) { return ( ( TermFloatList ) valueCache . valArray ) . getPrimitiveValue ( valueCache . orderArray . get ( docId ) ) ; } else { return ( float ) ( ( TermNumberList ) valueCache . valArray ) . getDoubleValue ( valueCache . orderArray . get ( docId ) ) ; } } else { Object value = getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) [ <NUM_LIT:0> ] ; if ( value instanceof Float ) { return ( Float ) value ; } if ( value instanceof Number ) { return ( ( Number ) value ) . floatValue ( ) ; } if ( value instanceof String ) { return Float . parseFloat ( ( String ) value ) ; } throw new UnsupportedOperationException ( "<STR_LIT>" + value . 
getClass ( ) + "<STR_LIT>" ) ; } } public final Object [ ] getArray ( String fieldName , int docId ) { return getFacetHandler ( fieldName ) . getRawFieldValues ( boboIndexReader , docId ) ; } public final Object getByUID ( String fieldName , long uid ) { return get ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final String getStringByUID ( String fieldName , long uid ) { return getString ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final long getLongByUID ( String fieldName , long uid ) { return getLong ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final double getDoubleByUID ( String fieldName , long uid ) { return getDouble ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final short getShortByUID ( String fieldName , long uid ) { return getShort ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final int getIntegerByUID ( String fieldName , long uid ) { return getInteger ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final float getFloatByUID ( String fieldName , long uid ) { return getFloat ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final Object [ ] getArrayByUID ( String fieldName , long uid ) { return getArray ( fieldName , mapper . quickGetDocID ( uid ) ) ; } public final TermValueList getTermValueList ( String fieldName ) { FacetDataCache valueCache = getValueCache ( fieldName ) ; if ( valueCache == null ) { return null ; } return valueCache . valArray ; } private String lastFacetHandlerName ; private FacetHandler lastFacetHandler ; public final FacetHandler getFacetHandler ( String facetName ) { if ( ! facetName . equals ( lastFacetHandlerName ) ) { lastFacetHandler = boboIndexReader . getFacetHandler ( facetName ) ; lastFacetHandlerName = facetName ; } if ( lastFacetHandler == null ) { throw new IllegalStateException ( "<STR_LIT>" + facetName + "<STR_LIT>" ) ; } return lastFacetHandler ; } public BoboIndexReader getBoboIndexReader ( ) { return boboIndexReader ; } public DocIDMapper getMapper ( ) { return mapper ; } } </s>
|
<s> package com . senseidb . search . req . mapred . impl ; import java . util . Map ; import java . util . concurrent . ConcurrentHashMap ; import com . senseidb . search . req . mapred . SenseiMapReduce ; import com . senseidb . search . req . mapred . functions . AvgMapReduce ; import com . senseidb . search . req . mapred . functions . DistinctCountMapReduce ; import com . senseidb . search . req . mapred . functions . MaxMapReduce ; import com . senseidb . search . req . mapred . functions . MinMapReduce ; import com . senseidb . search . req . mapred . functions . SumMapReduce ; @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public class MapReduceRegistry { private static Map < String , Class < ? extends SenseiMapReduce > > keyToFunction = new ConcurrentHashMap < String , Class < ? extends SenseiMapReduce > > ( ) ; static { keyToFunction . put ( "<STR_LIT>" , MaxMapReduce . class ) ; keyToFunction . put ( "<STR_LIT>" , DistinctCountMapReduce . class ) ; keyToFunction . put ( "<STR_LIT>" , DistinctCountMapReduce . class ) ; keyToFunction . put ( "<STR_LIT>" , MinMapReduce . class ) ; keyToFunction . put ( "<STR_LIT>" , AvgMapReduce . class ) ; keyToFunction . put ( "<STR_LIT>" , SumMapReduce . class ) ; } public static void register ( String mapReduceKey , Class < ? extends SenseiMapReduce > mapReduceClass ) { keyToFunction . put ( mapReduceKey , mapReduceClass ) ; } public static SenseiMapReduce get ( String mapReduceKey ) { try { Class < ? extends SenseiMapReduce > cls = keyToFunction . get ( mapReduceKey ) ; if ( cls != null ) { return ( SenseiMapReduce ) cls . newInstance ( ) ; } cls = ( Class < ? extends SenseiMapReduce > ) Class . forName ( mapReduceKey ) ; keyToFunction . put ( mapReduceKey , cls ) ; return cls . newInstance ( ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } } } </s>
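For illustration, a short hypothetical usage sketch of the MapReduceRegistry above: custom functions can be registered under a short key, and unknown keys fall back to Class.forName, so a fully qualified class name also resolves. The "myCount" key and the CountMapReduce class (the hypothetical sketch shown earlier) are assumptions, not part of the SenseiDB sources.

import com.senseidb.search.req.mapred.SenseiMapReduce;
import com.senseidb.search.req.mapred.impl.MapReduceRegistry;

public class RegistryUsageSketch {
  public static void main(String[] args) {
    // register a custom function under a short key (CountMapReduce is the hypothetical sketch above)
    MapReduceRegistry.register("myCount", CountMapReduce.class);

    // resolve by key; the registry instantiates a fresh object per call
    SenseiMapReduce byKey = MapReduceRegistry.get("myCount");

    // unknown keys are treated as class names, so a fully qualified name also works
    SenseiMapReduce byClassName =
        MapReduceRegistry.get("com.senseidb.search.req.mapred.functions.SumMapReduce");
  }
}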
|
<s> package com . senseidb . search . req . mapred . impl ; import java . util . ArrayList ; import java . util . Set ; import proj . zoie . api . ZoieSegmentReader ; import proj . zoie . api . impl . DocIDMapperImpl ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . facets . FacetCountCollector ; import com . browseengine . bobo . mapred . BoboMapFunctionWrapper ; import com . browseengine . bobo . mapred . MapReduceResult ; import com . browseengine . bobo . util . MemoryManager ; import com . senseidb . search . req . SenseiSystemInfo . SenseiFacetInfo ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . FacetCountAccessor ; import com . senseidb . search . req . mapred . FieldAccessor ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class SenseiMapFunctionWrapper implements BoboMapFunctionWrapper { private MapReduceResult result ; private SenseiMapReduce mapReduceStrategy ; private Set < SenseiFacetInfo > facetInfos ; public static final int BUFFER_SIZE = <NUM_LIT> ; private int [ ] partialDocIds ; ; private int docIdIndex = <NUM_LIT:0> ; public SenseiMapFunctionWrapper ( SenseiMapReduce mapReduceStrategy , Set < SenseiFacetInfo > facetInfos ) { super ( ) ; this . mapReduceStrategy = mapReduceStrategy ; partialDocIds = new int [ BUFFER_SIZE ] ; result = new MapReduceResult ( ) ; this . facetInfos = facetInfos ; } @ Override public void mapFullIndexReader ( BoboIndexReader reader , FacetCountCollector [ ] facetCountCollectors ) { ZoieSegmentReader < ? > zoieReader = ( ZoieSegmentReader < ? > ) ( reader . getInnerReader ( ) ) ; DocIDMapperImpl docIDMapper = ( DocIDMapperImpl ) zoieReader . getDocIDMaper ( ) ; result . getMapResults ( ) . add ( mapReduceStrategy . map ( docIDMapper . getDocArray ( ) , docIDMapper . getDocArray ( ) . length , zoieReader . getUIDArray ( ) , new FieldAccessor ( facetInfos , reader , docIDMapper ) , new FacetCountAccessor ( facetCountCollectors ) ) ) ; } @ Override public final void mapSingleDocument ( int docId , BoboIndexReader reader ) { if ( docIdIndex < BUFFER_SIZE - <NUM_LIT:1> ) { partialDocIds [ docIdIndex ++ ] = docId ; return ; } if ( docIdIndex == BUFFER_SIZE - <NUM_LIT:1> ) { partialDocIds [ docIdIndex ++ ] = docId ; ZoieSegmentReader < ? > zoieReader = ( ZoieSegmentReader < ? > ) ( reader . getInnerReader ( ) ) ; DocIDMapperImpl docIDMapper = ( DocIDMapperImpl ) zoieReader . getDocIDMaper ( ) ; result . getMapResults ( ) . add ( mapReduceStrategy . map ( partialDocIds , BUFFER_SIZE , zoieReader . getUIDArray ( ) , new FieldAccessor ( facetInfos , reader , docIDMapper ) , FacetCountAccessor . EMPTY ) ) ; docIdIndex = <NUM_LIT:0> ; } } @ Override public void finalizeSegment ( BoboIndexReader reader , FacetCountCollector [ ] facetCountCollectors ) { if ( docIdIndex > <NUM_LIT:0> ) { ZoieSegmentReader < ? > zoieReader = ( ZoieSegmentReader < ? > ) ( reader . getInnerReader ( ) ) ; DocIDMapperImpl docIDMapper = ( DocIDMapperImpl ) zoieReader . getDocIDMaper ( ) ; result . getMapResults ( ) . add ( mapReduceStrategy . map ( partialDocIds , docIdIndex , zoieReader . getUIDArray ( ) , new FieldAccessor ( facetInfos , reader , docIDMapper ) , new FacetCountAccessor ( facetCountCollectors ) ) ) ; } docIdIndex = <NUM_LIT:0> ; } @ Override public void finalizePartition ( ) { result . setMapResults ( new ArrayList ( mapReduceStrategy . combine ( result . getMapResults ( ) , CombinerStage . 
partitionLevel ) ) ) ; } @ Override public MapReduceResult getResult ( ) { return result ; } } </s>
|
<s> package com . senseidb . search . req . mapred . impl ; import java . util . ArrayList ; import java . util . Collection ; import java . util . List ; import com . browseengine . bobo . mapred . MapReduceResult ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . mapred . CombinerStage ; import com . senseidb . search . req . mapred . SenseiMapReduce ; public class SenseiReduceFunctionWrapper { public static MapReduceResult combine ( SenseiMapReduce mapReduceFunction , List < MapReduceResult > results ) { MapReduceResult ret = null ; if ( results . isEmpty ( ) ) { return null ; } ret = results . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < results . size ( ) ; i ++ ) { ret . getMapResults ( ) . addAll ( results . get ( i ) . getMapResults ( ) ) ; } ret . setMapResults ( new ArrayList ( mapReduceFunction . combine ( ret . getMapResults ( ) , CombinerStage . nodeLevel ) ) ) ; return ret ; } public static MapReduceResult reduce ( SenseiMapReduce mapReduceFunction , List < MapReduceResult > results ) { MapReduceResult ret = null ; if ( results . isEmpty ( ) ) { return ret ; } ret = results . get ( <NUM_LIT:0> ) ; for ( int i = <NUM_LIT:1> ; i < results . size ( ) ; i ++ ) { ret . getMapResults ( ) . addAll ( results . get ( i ) . getMapResults ( ) ) ; } ret . setReduceResult ( mapReduceFunction . reduce ( ret . getMapResults ( ) ) ) ; ret . setMapResults ( null ) ; return ret ; } public static List < MapReduceResult > extractMapReduceResults ( Collection < SenseiResult > senseiResults ) { List < MapReduceResult > ret = new ArrayList < MapReduceResult > ( senseiResults . size ( ) ) ; for ( SenseiResult senseiResult : senseiResults ) { if ( senseiResult . getMapReduceResult ( ) != null ) { ret . add ( senseiResult . getMapReduceResult ( ) ) ; } } return ret ; } } </s>
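For illustration, a hypothetical driver sketch showing the order in which the two wrappers above are applied: per-partition results are first merged on a node (SenseiReduceFunctionWrapper.combine, which runs the function's CombinerStage.nodeLevel combine), and the broker presumably extracts every node's MapReduceResult from the gathered SenseiResult objects and runs the final reduce. Only the static methods of SenseiReduceFunctionWrapper are real; the surrounding class and method names are assumptions.

import java.util.List;
import com.browseengine.bobo.mapred.MapReduceResult;
import com.senseidb.search.req.SenseiResult;
import com.senseidb.search.req.mapred.SenseiMapReduce;
import com.senseidb.search.req.mapred.impl.SenseiReduceFunctionWrapper;

public class ReduceFlowSketch {
  // node side: merge the per-partition results produced by SenseiMapFunctionWrapper
  static MapReduceResult onNode(SenseiMapReduce fn, List<MapReduceResult> partitionResults) {
    return SenseiReduceFunctionWrapper.combine(fn, partitionResults);
  }

  // broker side: gather each node's SenseiResult and run the final reduce
  static MapReduceResult onBroker(SenseiMapReduce fn, List<SenseiResult> nodeResults) {
    return SenseiReduceFunctionWrapper.reduce(fn,
        SenseiReduceFunctionWrapper.extractMapReduceResults(nodeResults));
  }
}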
|
<s> package com . senseidb . search . req ; import org . json . JSONObject ; public class SenseiJSONQuery extends SenseiQuery { private static final long serialVersionUID = <NUM_LIT:1L> ; public SenseiJSONQuery ( JSONObject jsonObj ) { super ( jsonObj . toString ( ) . getBytes ( SenseiQuery . utf8Charset ) ) ; } } </s>
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; import java . text . SimpleDateFormat ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . Collections ; import java . util . Date ; import java . util . Iterator ; import java . util . LinkedHashMap ; import java . util . Comparator ; import java . util . List ; import java . util . Map ; import java . util . Set ; public class SenseiSystemInfo implements AbstractSenseiResult { public static class SenseiFacetInfo implements Serializable { private static final long serialVersionUID = <NUM_LIT:1L> ; private String _name ; private boolean _runTime ; private Map < String , String > _props ; public SenseiFacetInfo ( String name ) { _name = name ; _runTime = false ; _props = null ; } public String getName ( ) { return _name ; } public void setName ( String name ) { _name = name ; } public boolean isRunTime ( ) { return _runTime ; } public void setRunTime ( boolean runTime ) { _runTime = runTime ; } public Map < String , String > getProps ( ) { return _props ; } public void setProps ( Map < String , String > props ) { _props = props ; } @ Override public String toString ( ) { StringBuffer buf = new StringBuffer ( ) ; buf . append ( "<STR_LIT>" ) . append ( _name ) ; buf . append ( "<STR_LIT>" ) . append ( _runTime ) ; buf . append ( "<STR_LIT>" ) . append ( _props ) ; return buf . toString ( ) ; } } public static class SenseiNodeInfo implements Serializable { private static final long serialVersionUID = <NUM_LIT:1L> ; private int _id ; private int [ ] _partitions ; private String _nodeLink ; private String _adminLink ; public SenseiNodeInfo ( int id , int [ ] partitions , String nodeLink , String adminLink ) { _id = id ; _partitions = partitions ; _nodeLink = nodeLink ; _adminLink = adminLink ; } public int getId ( ) { return _id ; } public int [ ] getPartitions ( ) { return _partitions ; } public String getNodeLink ( ) { return _nodeLink ; } public String getAdminLink ( ) { return _adminLink ; } public String toString ( ) { StringBuffer buf = new StringBuffer ( ) ; buf . append ( "<STR_LIT>" ) . append ( _id ) . append ( "<STR_LIT>" ) . append ( Arrays . toString ( _partitions ) ) . append ( "<STR_LIT>" ) . append ( _nodeLink ) . append ( "<STR_LIT>" ) . append ( _adminLink ) . append ( "<STR_LIT:}>" ) ; return buf . 
toString ( ) ; } } private static final long serialVersionUID = <NUM_LIT:1L> ; private long _searchTimeMillis ; private int _numDocs ; private long _lastModified ; private String _version ; private Set < SenseiFacetInfo > _facetInfos ; private String _schema ; private List < SenseiNodeInfo > _clusterInfo ; private List < SenseiError > errors ; public SenseiSystemInfo ( ) { _numDocs = <NUM_LIT:0> ; _lastModified = <NUM_LIT> ; _version = null ; _facetInfos = null ; _schema = null ; _clusterInfo = null ; } public long getTime ( ) { return _searchTimeMillis ; } public void setTime ( long searchTimeMillis ) { _searchTimeMillis = searchTimeMillis ; } public int getNumDocs ( ) { return _numDocs ; } public void setNumDocs ( int numDocs ) { _numDocs = numDocs ; } public long getLastModified ( ) { return _lastModified ; } public void setLastModified ( long lastModified ) { _lastModified = lastModified ; } public String getSchema ( ) { return _schema ; } public void setSchema ( String schema ) { _schema = schema ; } public Set < SenseiFacetInfo > getFacetInfos ( ) { return _facetInfos ; } public void setFacetInfos ( Set < SenseiFacetInfo > facetInfos ) { _facetInfos = facetInfos ; } public String getVersion ( ) { return _version ; } public void setVersion ( String version ) { _version = version ; } public List < SenseiNodeInfo > getClusterInfo ( ) { return _clusterInfo ; } public void setClusterInfo ( List < SenseiNodeInfo > clusterInfo ) { _clusterInfo = clusterInfo ; } public List < SenseiError > getErrors ( ) { if ( errors == null ) errors = new ArrayList < SenseiError > ( ) ; return errors ; } public void addError ( SenseiError error ) { if ( errors == null ) errors = new ArrayList < SenseiError > ( ) ; errors . add ( error ) ; } @ Override public String toString ( ) { StringBuffer buf = new StringBuffer ( ) ; buf . append ( "<STR_LIT>" ) . append ( _numDocs ) ; buf . append ( "<STR_LIT>" ) . append ( new SimpleDateFormat ( "<STR_LIT>" ) . format ( new Date ( _lastModified ) ) ) ; buf . append ( "<STR_LIT>" ) . append ( _version ) ; if ( _schema != null && _schema . length ( ) != <NUM_LIT:0> ) buf . append ( "<STR_LIT>" ) . append ( _schema ) ; buf . append ( "<STR_LIT>" ) . append ( getCmdOutPutofSet ( _facetInfos ) ) ; buf . append ( "<STR_LIT>" ) . append ( _clusterInfo ) ; return buf . toString ( ) ; } private String getCmdOutPutofSet ( Set < SenseiFacetInfo > facetInfos ) { if ( facetInfos == null ) return "<STR_LIT:null>" ; StringBuffer buf = new StringBuffer ( ) ; Iterator < SenseiFacetInfo > it = facetInfos . iterator ( ) ; int count = <NUM_LIT:0> ; while ( it . hasNext ( ) ) { count ++ ; SenseiFacetInfo senseiFacetInfo = it . next ( ) ; String _name = senseiFacetInfo . getName ( ) ; boolean _runTime = senseiFacetInfo . isRunTime ( ) ; Map < String , String > _props = senseiFacetInfo . getProps ( ) ; Map < String , String > _sorted_props = sortByKey ( _props ) ; buf . append ( "<STR_LIT>" + padString ( "<STR_LIT:(>" + count + "<STR_LIT:)>" , <NUM_LIT:4> ) + "<STR_LIT>" ) . append ( _name ) ; buf . append ( "<STR_LIT>" ) . append ( _runTime ) ; buf . append ( "<STR_LIT>" ) . append ( _sorted_props ) ; } return buf . toString ( ) ; } private String padString ( String input , int length ) { if ( input . length ( ) > length ) return input ; else { StringBuffer sb = new StringBuffer ( input ) ; for ( int i = <NUM_LIT:0> ; i < ( length - input . length ( ) ) ; i ++ ) { sb . append ( "<STR_LIT:U+0020>" ) ; } return sb . 
toString ( ) ; } } private Map < String , String > sortByKey ( Map < String , String > _props ) { Map < String , String > map = new LinkedHashMap < String , String > ( ) ; ArrayList < String > sortedKeys = new ArrayList < String > ( _props . keySet ( ) ) ; Collections . sort ( sortedKeys ) ; for ( String key : sortedKeys ) { map . put ( key , _props . get ( key ) ) ; } return map ; } } </s>
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; public interface AbstractSenseiResult extends Serializable { public abstract long getTime ( ) ; public abstract void setTime ( long searchTimeMillis ) ; public void addError ( SenseiError error ) ; } </s>
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; public class SenseiError implements Serializable { private final String message ; private final ErrorType errorType ; private final int errorCode ; public SenseiError ( String message , ErrorType errorType ) { this . message = message ; this . errorType = errorType ; this . errorCode = errorType . getDefaultErrorCode ( ) ; } public SenseiError ( String message , ErrorType errorType , int errorCode ) { this . message = message ; this . errorType = errorType ; this . errorCode = errorCode ; } @ Override public int hashCode ( ) { final int prime = <NUM_LIT:31> ; int result = <NUM_LIT:1> ; result = prime * result + ( ( errorType == null ) ? <NUM_LIT:0> : errorType . hashCode ( ) ) ; result = prime * result + ( ( message == null ) ? <NUM_LIT:0> : message . hashCode ( ) ) ; return result ; } @ Override public boolean equals ( Object obj ) { if ( this == obj ) return true ; if ( obj == null ) return false ; if ( getClass ( ) != obj . getClass ( ) ) return false ; SenseiError other = ( SenseiError ) obj ; if ( errorType != other . errorType ) return false ; if ( message == null ) { if ( other . message != null ) return false ; } else if ( ! message . equals ( other . message ) ) return false ; return true ; } public String getMessage ( ) { return message ; } public ErrorType getErrorType ( ) { return errorType ; } public int getErrorCode ( ) { return errorCode ; } @ Override public String toString ( ) { ErrorType et = errorType ; if ( et == null ) et = ErrorType . UnknownError ; return String . format ( "<STR_LIT>" , et . name ( ) , errorCode , message ) ; } } </s>
|
<s> package com . senseidb . search . req ; public enum ErrorType { JsonParsingError ( <NUM_LIT:100> ) , JsonCompilationError ( <NUM_LIT> ) , BQLParsingError ( <NUM_LIT> ) , BoboExecutionError ( <NUM_LIT> ) , ExecutionTimeout ( <NUM_LIT> ) , BrokerGatherError ( <NUM_LIT> ) , PartitionCallError ( <NUM_LIT> ) , BrokerTimeout ( <NUM_LIT> ) , InternalError ( <NUM_LIT> ) , MergePartitionError ( <NUM_LIT> ) , FederatedBrokerUnavailable ( <NUM_LIT> ) , UnknownError ( <NUM_LIT:1000> ) ; private final int defaultErrorCode ; private ErrorType ( int defaultErrorCode ) { this . defaultErrorCode = defaultErrorCode ; } public int getDefaultErrorCode ( ) { return defaultErrorCode ; } } </s>
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; import java . nio . charset . Charset ; public class SenseiQuery implements Serializable { private static final long serialVersionUID = <NUM_LIT:1L> ; private byte [ ] _bytes ; public static Charset utf8Charset = Charset . forName ( "<STR_LIT:UTF-8>" ) ; public SenseiQuery ( byte [ ] bytes ) { _bytes = bytes ; } final public byte [ ] toBytes ( ) { return _bytes ; } @ Override public String toString ( ) { return new String ( _bytes , utf8Charset ) ; } } </s>
|
<s> package com . senseidb . search . req ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import org . apache . lucene . document . Document ; import org . apache . lucene . search . Explanation ; import com . browseengine . bobo . api . BrowseFacet ; import com . browseengine . bobo . api . BrowseHit ; import com . browseengine . bobo . api . BrowseResult ; import com . browseengine . bobo . api . FacetAccessible ; public class SenseiResult extends BrowseResult implements AbstractSenseiResult { private static final long serialVersionUID = <NUM_LIT:1L> ; private String _parsedQuery = null ; private List < SenseiError > errors ; public SenseiHit [ ] getSenseiHits ( ) { BrowseHit [ ] hits = getHits ( ) ; if ( hits == null || hits . length == <NUM_LIT:0> ) { return new SenseiHit [ <NUM_LIT:0> ] ; } return ( SenseiHit [ ] ) hits ; } public void setParsedQuery ( String query ) { _parsedQuery = query ; } public String getParsedQuery ( ) { return _parsedQuery ; } @ Override public boolean equals ( Object o ) { if ( ! ( o instanceof SenseiResult ) ) return false ; SenseiResult b = ( SenseiResult ) o ; if ( ! getParsedQuery ( ) . equals ( b . getParsedQuery ( ) ) ) return false ; if ( ! senseiHitsAreEqual ( getSenseiHits ( ) , b . getSenseiHits ( ) ) ) return false ; if ( getTid ( ) != b . getTid ( ) ) return false ; if ( getTime ( ) != b . getTime ( ) ) return false ; if ( getNumHits ( ) != b . getNumHits ( ) ) return false ; if ( getNumGroups ( ) != b . getNumGroups ( ) ) return false ; if ( getTotalDocs ( ) != b . getTotalDocs ( ) ) return false ; if ( ! facetMapsAreEqual ( getFacetMap ( ) , b . getFacetMap ( ) ) ) return false ; return true ; } private boolean senseiHitsAreEqual ( SenseiHit [ ] a , SenseiHit [ ] b ) { if ( a == null ) return b == null ; if ( a . length != b . length ) return false ; for ( int i = <NUM_LIT:0> ; i < a . length ; i ++ ) { if ( a [ i ] . getUID ( ) != b [ i ] . getUID ( ) ) return false ; if ( a [ i ] . getDocid ( ) != b [ i ] . getDocid ( ) ) return false ; if ( a [ i ] . getScore ( ) != b [ i ] . getScore ( ) ) return false ; if ( a [ i ] . getGroupValue ( ) == null ) { if ( b [ i ] . getGroupValue ( ) != null ) return false ; } else { if ( ! a [ i ] . getGroupValue ( ) . equals ( b [ i ] . getGroupValue ( ) ) ) return false ; } if ( a [ i ] . getGroupHitsCount ( ) != b [ i ] . getGroupHitsCount ( ) ) return false ; if ( ! senseiHitsAreEqual ( a [ i ] . getSenseiGroupHits ( ) , b [ i ] . getSenseiGroupHits ( ) ) ) return false ; if ( ! explanationsAreEqual ( a [ i ] . getExplanation ( ) , b [ i ] . getExplanation ( ) ) ) return false ; if ( ! storedFieldsAreEqual ( a [ i ] . getStoredFields ( ) , b [ i ] . getStoredFields ( ) ) ) return false ; } return true ; } private boolean storedFieldsAreEqual ( Document a , Document b ) { if ( a == null ) return b == null ; return a . toString ( ) . equals ( b . toString ( ) ) ; } private boolean explanationsAreEqual ( Explanation a , Explanation b ) { return a . toString ( ) . equals ( b . toString ( ) ) ; } private boolean facetMapsAreEqual ( Map < String , FacetAccessible > a , Map < String , FacetAccessible > b ) { if ( a == null ) return b == null ; if ( a . size ( ) != b . size ( ) ) return false ; for ( Entry < String , FacetAccessible > entry : a . entrySet ( ) ) { String fieldName = entry . getKey ( ) ; if ( ! b . containsKey ( fieldName ) ) return false ; if ( ! facetAccessibleAreEqual ( entry . getValue ( ) , b . 
get ( fieldName ) ) ) return false ; } return true ; } private boolean facetAccessibleAreEqual ( FacetAccessible a , FacetAccessible b ) { if ( a == null ) return b == null ; if ( a . getFacets ( ) . size ( ) != b . getFacets ( ) . size ( ) ) return false ; List < BrowseFacet > al = a . getFacets ( ) ; List < BrowseFacet > bl = b . getFacets ( ) ; if ( ! Arrays . equals ( al . toArray ( new BrowseFacet [ al . size ( ) ] ) , bl . toArray ( new BrowseFacet [ bl . size ( ) ] ) ) ) return false ; return true ; } public List < SenseiError > getErrors ( ) { if ( errors == null ) errors = new ArrayList < SenseiError > ( ) ; return errors ; } public void addError ( SenseiError error ) { if ( errors == null ) errors = new ArrayList < SenseiError > ( ) ; errors . add ( error ) ; } } </s>
|
<s> package com . senseidb . search . req ; final public class StringQuery extends SenseiQuery { public StringQuery ( String q ) { super ( q . getBytes ( utf8Charset ) ) ; } } </s>
|
<s> package com . senseidb . search . req ; import java . io . Serializable ; import java . util . Set ; public interface AbstractSenseiRequest extends Serializable { public void setPartitions ( Set < Integer > partitions ) ; public Set < Integer > getPartitions ( ) ; public String getRouteParam ( ) ; public void addError ( SenseiError error ) ; } </s>
|
<s> package com . senseidb . search . req ; import com . browseengine . bobo . api . BrowseHit ; public class SenseiHit extends BrowseHit { private static final long serialVersionUID = <NUM_LIT:1L> ; private long _uid = Long . MIN_VALUE ; private String _srcData = "<STR_LIT>" ; private byte [ ] _storedValue = null ; public SenseiHit [ ] getSenseiGroupHits ( ) { BrowseHit [ ] hits = getGroupHits ( ) ; if ( hits == null || hits . length == <NUM_LIT:0> ) { return new SenseiHit [ <NUM_LIT:0> ] ; } return ( SenseiHit [ ] ) hits ; } public void setUID ( long uid ) { _uid = uid ; } public long getUID ( ) { return _uid ; } public void setSrcData ( String data ) { _srcData = data ; } public String getSrcData ( ) { return _srcData ; } public void setStoredValue ( byte [ ] value ) { _storedValue = value ; } public byte [ ] getStoredValue ( ) { return _storedValue ; } } </s>
|
<s> package com . senseidb . servlet ; import java . util . Comparator ; import javax . servlet . ServletConfig ; import javax . servlet . ServletContext ; import javax . servlet . ServletException ; import javax . servlet . http . HttpServlet ; import com . linkedin . norbert . javacompat . network . PartitionedLoadBalancer ; import com . linkedin . norbert . javacompat . network . PartitionedLoadBalancerFactory ; import org . apache . commons . configuration . Configuration ; import com . senseidb . conf . SenseiConfParams ; import com . senseidb . plugin . SenseiPluginRegistry ; public class ZookeeperConfigurableServlet extends HttpServlet { private static final long serialVersionUID = <NUM_LIT:1L> ; protected PartitionedLoadBalancerFactory < String > loadBalancerFactory ; protected Comparator < String > versionComparator ; protected boolean allowPartialMerge ; protected Configuration senseiConf ; protected SenseiPluginRegistry pluginRegistry ; @ Override public void init ( ServletConfig config ) throws ServletException { super . init ( config ) ; ServletContext ctx = config . getServletContext ( ) ; senseiConf = ( Configuration ) ctx . getAttribute ( SenseiConfigServletContextListener . SENSEI_CONF_OBJ ) ; versionComparator = ( Comparator < String > ) ctx . getAttribute ( SenseiConfigServletContextListener . SENSEI_CONF_VERSION_COMPARATOR ) ; loadBalancerFactory = ( PartitionedLoadBalancerFactory < String > ) ctx . getAttribute ( SenseiConfigServletContextListener . SENSEI_CONF_ROUTER_FACTORY ) ; pluginRegistry = ( SenseiPluginRegistry ) ctx . getAttribute ( SenseiConfigServletContextListener . SENSEI_CONF_PLUGIN_REGISTRY ) ; allowPartialMerge = senseiConf . getBoolean ( SenseiConfParams . ALLOW_PARTIAL_MERGE , true ) ; } } </s>
|
<s> package com . senseidb . servlet ; public interface SenseiSearchServletParams { public static final String PARAM_OFFSET = "<STR_LIT:start>" ; public static final String PARAM_COUNT = "<STR_LIT>" ; public static final String PARAM_QUERY = "<STR_LIT:q>" ; public static final String PARAM_QUERY_PARAM = "<STR_LIT>" ; public static final String PARAM_SORT = "<STR_LIT>" ; public static final String PARAM_SORT_ASC = "<STR_LIT>" ; public static final String PARAM_SORT_DESC = "<STR_LIT>" ; public static final String PARAM_SORT_SCORE = "<STR_LIT>" ; public static final String PARAM_SORT_SCORE_REVERSE = "<STR_LIT>" ; public static final String PARAM_SORT_DOC = "<STR_LIT>" ; public static final String PARAM_SORT_DOC_REVERSE = "<STR_LIT>" ; public static final String PARAM_FETCH_STORED = "<STR_LIT>" ; public static final String PARAM_FETCH_STORED_VALUE = "<STR_LIT>" ; public static final String PARAM_FETCH_TERMVECTOR = "<STR_LIT>" ; public static final String PARAM_SHOW_EXPLAIN = "<STR_LIT>" ; public static final String PARAM_ROUTE_PARAM = "<STR_LIT>" ; public static final String PARAM_GROUP_BY = "<STR_LIT>" ; public static final String PARAM_MAX_PER_GROUP = "<STR_LIT>" ; public static final String PARAM_SELECT = "<STR_LIT>" ; public static final String PARAM_SELECT_VAL = "<STR_LIT>" ; public static final String PARAM_SELECT_NOT = "<STR_LIT>" ; public static final String PARAM_SELECT_OP = "<STR_LIT>" ; public static final String PARAM_SELECT_OP_AND = "<STR_LIT>" ; public static final String PARAM_SELECT_OP_OR = "<STR_LIT>" ; public static final String PARAM_SELECT_PROP = "<STR_LIT>" ; public static final String PARAM_FACET = "<STR_LIT>" ; public static final String PARAM_DYNAMIC_INIT = "<STR_LIT>" ; public static final String PARAM_PARTITIONS = "<STR_LIT>" ; public static final String PARAM_FACET_EXPAND = "<STR_LIT>" ; public static final String PARAM_FACET_MAX = "<STR_LIT>" ; public static final String PARAM_FACET_MINHIT = "<STR_LIT>" ; public static final String PARAM_FACET_ORDER = "<STR_LIT>" ; public static final String PARAM_FACET_ORDER_HITS = "<STR_LIT>" ; public static final String PARAM_FACET_ORDER_VAL = "<STR_LIT>" ; public static final String PARAM_DYNAMIC_TYPE = "<STR_LIT:type>" ; public static final String PARAM_DYNAMIC_TYPE_STRING = "<STR_LIT:string>" ; public static final String PARAM_DYNAMIC_TYPE_BYTEARRAY = "<STR_LIT>" ; public static final String PARAM_DYNAMIC_TYPE_BOOL = "<STR_LIT:boolean>" ; public static final String PARAM_DYNAMIC_TYPE_INT = "<STR_LIT:int>" ; public static final String PARAM_DYNAMIC_TYPE_LONG = "<STR_LIT:long>" ; public static final String PARAM_DYNAMIC_TYPE_DOUBLE = "<STR_LIT:double>" ; public static final String PARAM_DYNAMIC_VAL = "<STR_LIT>" ; public static final String PARAM_RESULT_PARSEDQUERY = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_STORED_FIELDS = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_TERMVECTORS = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_STORED_FIELDS_NAME = "<STR_LIT:name>" ; public static final String PARAM_RESULT_HIT_STORED_FIELDS_VALUE = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_EXPLANATION = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_GROUPFIELD = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_GROUPVALUE = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_GROUPHITSCOUNT = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_GROUPHITS = "<STR_LIT>" ; public static final String PARAM_RESULT_FACETS = "<STR_LIT>" ; public static final String 
PARAM_RESULT_ERRORS = "<STR_LIT>" ; public static final String PARAM_RESULT_ERROR_CODE = "<STR_LIT>" ; public static final String PARAM_RESULT_ERROR_MESSAGE = "<STR_LIT:message>" ; public static final String PARAM_RESULT_ERROR_TYPE = "<STR_LIT>" ; public static final String PARAM_RESULT_TID = "<STR_LIT>" ; public static final String PARAM_RESULT_TOTALDOCS = "<STR_LIT>" ; public static final String PARAM_RESULT_TOTALGROUPS = "<STR_LIT>" ; public static final String PARAM_RESULT_NUMHITS = "<STR_LIT>" ; public static final String PARAM_RESULT_NUMGROUPS = "<STR_LIT>" ; public static final String PARAM_RESULT_HITS = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_UID = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_DOCID = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_SCORE = "<STR_LIT>" ; public static final String PARAM_RESULT_HIT_SRC_DATA = "<STR_LIT>" ; public static final String PARAM_RESULT_TIME = "<STR_LIT>" ; public static final String PARAM_RESULT_SELECT_LIST = "<STR_LIT>" ; public static final String PARAM_SYSINFO_NUMDOCS = "<STR_LIT>" ; public static final String PARAM_SYSINFO_LASTMODIFIED = "<STR_LIT>" ; public static final String PARAM_SYSINFO_VERSION = "<STR_LIT:version>" ; public static final String PARAM_SYSINFO_SCHEMA = "<STR_LIT>" ; public static final String PARAM_SYSINFO_FACETS = "<STR_LIT>" ; public static final String PARAM_SYSINFO_FACETS_NAME = "<STR_LIT:name>" ; public static final String PARAM_SYSINFO_FACETS_RUNTIME = "<STR_LIT>" ; public static final String PARAM_SYSINFO_FACETS_PROPS = "<STR_LIT>" ; public static final String PARAM_SYSINFO_CLUSTERINFO = "<STR_LIT>" ; public static final String PARAM_SYSINFO_CLUSTERINFO_ID = "<STR_LIT:id>" ; public static final String PARAM_SYSINFO_CLUSTERINFO_PARTITIONS = "<STR_LIT>" ; public static final String PARAM_SYSINFO_CLUSTERINFO_NODELINK = "<STR_LIT>" ; public static final String PARAM_SYSINFO_CLUSTERINFO_ADMINLINK = "<STR_LIT>" ; public static final String PARAM_RESULT_HITS_EXPL_VALUE = "<STR_LIT:value>" ; public static final String PARAM_RESULT_HITS_EXPL_DESC = "<STR_LIT:description>" ; public static final String PARAM_RESULT_HITS_EXPL_DETAILS = "<STR_LIT>" ; public static final String PARAM_RESULT_FACET_INFO_VALUE = "<STR_LIT:value>" ; public static final String PARAM_RESULT_FACET_INFO_COUNT = "<STR_LIT:count>" ; public static final String PARAM_RESULT_FACET_INFO_SELECTED = "<STR_LIT>" ; } </s>
|
<s> package com . senseidb . servlet ; import java . io . File ; import javax . servlet . ServletContext ; import javax . servlet . ServletContextEvent ; import javax . servlet . ServletContextListener ; import org . apache . commons . configuration . ConfigurationException ; import org . apache . commons . configuration . PropertiesConfiguration ; import org . apache . log4j . Logger ; public class SenseiConfigServletContextListener implements ServletContextListener { private static final Logger logger = Logger . getLogger ( SenseiConfigServletContextListener . class ) ; public static final String SENSEI_CONF_FILE_PARAM = "<STR_LIT>" ; public static final String SENSEI_CONF_DIR_PARAM = "<STR_LIT>" ; public static final String SENSEI_CONF_ZKURL = "<STR_LIT>" ; public static final String SENSEI_CONF_CLUSTER_CLIENT_NAME = "<STR_LIT>" ; public static final String SENSEI_CONF_CLUSTER_NAME = "<STR_LIT>" ; public static final String SENSEI_CONF_ZKTIMEOUT = "<STR_LIT>" ; public static final String SENSEI_CONF_NC_CONN_TIMEOUT = "<STR_LIT>" ; public static final String SENSEI_CONF_NC_WRITE_TIMEOUT = "<STR_LIT>" ; public static final String SENSEI_CONF_NC_MAX_CONN_PER_NODE = "<STR_LIT>" ; public static final String SENSEI_CONF_NC_STALE_TIMEOUT_MINS = "<STR_LIT>" ; public static final String SENSEI_CONF_NC_STALE_CLEANUP_FREQ_MINS = "<STR_LIT>" ; public static final String SENSEI_CONF_VERSION_COMPARATOR = "<STR_LIT>" ; public static final String SENSEI_CONF_ROUTER_FACTORY = "<STR_LIT>" ; public static final String SENSEI_CONF_OBJ = "<STR_LIT>" ; public static final String SENSEI_CONF_PLUGIN_REGISTRY = "<STR_LIT>" ; @ Override public void contextDestroyed ( ServletContextEvent ctx ) { } @ Override public void contextInitialized ( ServletContextEvent ctxEvt ) { ServletContext ctx = ctxEvt . getServletContext ( ) ; String confFileName = ctx . getInitParameter ( SENSEI_CONF_FILE_PARAM ) ; File confFile = null ; if ( confFileName == null ) { String confDirName = ctx . getInitParameter ( SENSEI_CONF_DIR_PARAM ) ; if ( confDirName != null ) { confFile = new File ( confDirName , "<STR_LIT>" ) ; } } else { confFile = new File ( confFileName ) ; } if ( confFile != null ) { try { PropertiesConfiguration conf = new PropertiesConfiguration ( ) ; conf . setDelimiterParsingDisabled ( true ) ; conf . load ( confFile ) ; ctx . setAttribute ( SENSEI_CONF_OBJ , conf ) ; } catch ( ConfigurationException e ) { logger . error ( e . getMessage ( ) , e ) ; } } else { logger . warn ( "<STR_LIT>" ) ; } } } </s>
|
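Editor's note: a minimal sketch, assuming a servlet deployed in the same web application wants the PropertiesConfiguration that the listener above stored on the ServletContext under SENSEI_CONF_OBJ.

import javax.servlet.http.HttpServlet;
import org.apache.commons.configuration.Configuration;

public class ConfLookupServlet extends HttpServlet {
  private Configuration senseiConf;

  @Override
  public void init() {
    // SENSEI_CONF_OBJ is the attribute key used by SenseiConfigServletContextListener
    senseiConf = (Configuration) getServletContext()
        .getAttribute(SenseiConfigServletContextListener.SENSEI_CONF_OBJ);
  }
}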
<s> package com . senseidb . servlet ; import java . io . IOException ; import javax . servlet . ServletConfig ; import javax . servlet . ServletException ; import javax . servlet . http . HttpServletRequest ; import javax . servlet . http . HttpServletResponse ; import org . springframework . remoting . httpinvoker . HttpInvokerServiceExporter ; import org . springframework . util . StringUtils ; import org . springframework . web . HttpRequestMethodNotSupportedException ; import com . senseidb . svc . api . SenseiService ; import com . senseidb . svc . impl . ClusteredSenseiServiceImpl ; public class SenseiHttpInvokerServiceServlet extends ZookeeperConfigurableServlet { private static final long serialVersionUID = <NUM_LIT:1L> ; private ClusteredSenseiServiceImpl innerSvc ; private HttpInvokerServiceExporter target ; @ Override public void init ( ServletConfig config ) throws ServletException { super . init ( config ) ; innerSvc = new ClusteredSenseiServiceImpl ( senseiConf , loadBalancerFactory , versionComparator ) ; innerSvc . start ( ) ; target = new HttpInvokerServiceExporter ( ) ; target . setService ( innerSvc ) ; target . setServiceInterface ( SenseiService . class ) ; target . afterPropertiesSet ( ) ; } protected void service ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { try { this . target . handleRequest ( request , response ) ; } catch ( HttpRequestMethodNotSupportedException ex ) { String [ ] supportedMethods = ( ( HttpRequestMethodNotSupportedException ) ex ) . getSupportedMethods ( ) ; if ( supportedMethods != null ) { response . setHeader ( "<STR_LIT>" , StringUtils . arrayToDelimitedString ( supportedMethods , "<STR_LIT:U+002CU+0020>" ) ) ; } response . sendError ( HttpServletResponse . SC_METHOD_NOT_ALLOWED , ex . getMessage ( ) ) ; } } @ Override public void destroy ( ) { try { innerSvc . shutdown ( ) ; } finally { super . destroy ( ) ; } } } </s>
|
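Editor's note: a minimal client-side sketch (not part of the servlet above) showing how the exported SenseiService can be consumed with Spring's HttpInvokerProxyFactoryBean; the service URL is hypothetical and depends on how the servlet is mapped in web.xml.

import org.springframework.remoting.httpinvoker.HttpInvokerProxyFactoryBean;
import com.senseidb.svc.api.SenseiService;

public class SenseiInvokerClient {
  public static SenseiService connect() {
    HttpInvokerProxyFactoryBean proxy = new HttpInvokerProxyFactoryBean();
    proxy.setServiceUrl("http://localhost:8080/sensei-rpc"); // hypothetical mapping
    proxy.setServiceInterface(SenseiService.class);
    proxy.afterPropertiesSet();
    return (SenseiService) proxy.getObject();
  }
}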
<s> package com . senseidb . servlet ; import java . io . BufferedReader ; import java . io . IOException ; import java . io . InputStream ; import java . io . OutputStream ; import java . net . URL ; import java . net . URLConnection ; import java . net . URLDecoder ; import java . util . HashMap ; import java . util . Iterator ; import java . util . Map ; import javax . servlet . ServletConfig ; import javax . servlet . ServletException ; import javax . servlet . http . HttpServletRequest ; import javax . servlet . http . HttpServletResponse ; import org . antlr . runtime . RecognitionException ; import org . apache . commons . configuration . DataConfiguration ; import org . apache . commons . configuration . MapConfiguration ; import org . apache . http . client . utils . URLEncodedUtils ; import org . apache . log4j . Logger ; import org . json . JSONArray ; import org . json . JSONException ; import org . json . JSONObject ; import com . browseengine . bobo . api . BrowseSelection ; import com . linkedin . norbert . javacompat . cluster . ClusterClient ; import com . linkedin . norbert . javacompat . cluster . ZooKeeperClusterClient ; import com . linkedin . norbert . javacompat . network . NetworkClientConfig ; import com . senseidb . bql . parsers . BQLCompiler ; import com . senseidb . cluster . client . SenseiNetworkClient ; import com . senseidb . conf . SenseiConfParams ; import com . senseidb . conf . SenseiFacetHandlerBuilder ; import com . senseidb . search . node . Broker ; import com . senseidb . search . node . SenseiBroker ; import com . senseidb . search . node . SenseiSysBroker ; import com . senseidb . search . node . broker . BrokerConfig ; import com . senseidb . search . node . broker . LayeredBroker ; import com . senseidb . search . req . ErrorType ; import com . senseidb . search . req . SenseiError ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . svc . impl . HttpRestSenseiServiceImpl ; import com . senseidb . util . JsonTemplateProcessor ; import com . senseidb . util . RequestConverter2 ; public abstract class AbstractSenseiClientServlet extends ZookeeperConfigurableServlet { public static final int JSON_PARSING_ERROR = <NUM_LIT> ; public static final int BQL_EXTRA_FILTER_ERROR = <NUM_LIT> ; public static final int BQL_PARSING_ERROR = <NUM_LIT> ; public static final String BQL_STMT = "<STR_LIT>" ; public static final String BQL_EXTRA_FILTER = "<STR_LIT>" ; private static final long serialVersionUID = <NUM_LIT:1L> ; private static final Logger logger = Logger . getLogger ( AbstractSenseiClientServlet . class ) ; private static final Logger queryLogger = Logger . getLogger ( "<STR_LIT>" ) ; private final NetworkClientConfig _networkClientConfig = new NetworkClientConfig ( ) ; private ClusterClient _clusterClient = null ; private SenseiNetworkClient _networkClient = null ; private SenseiBroker _senseiBroker = null ; private SenseiSysBroker _senseiSysBroker = null ; private Map < String , String [ ] > _facetInfoMap = new HashMap < String , String [ ] > ( ) ; private BQLCompiler _compiler = null ; private LayeredBroker federatedBroker ; public AbstractSenseiClientServlet ( ) { } @ Override public void init ( ServletConfig config ) throws ServletException { super . 
init ( config ) ; BrokerConfig brokerConfig = new BrokerConfig ( senseiConf , loadBalancerFactory ) ; brokerConfig . init ( ) ; _senseiBroker = brokerConfig . buildSenseiBroker ( ) ; _senseiSysBroker = brokerConfig . buildSysSenseiBroker ( versionComparator ) ; _networkClient = brokerConfig . getNetworkClient ( ) ; _clusterClient = brokerConfig . getClusterClient ( ) ; federatedBroker = pluginRegistry . getBeanByFullPrefix ( SenseiConfParams . SENSEI_FEDERATED_BROKER , LayeredBroker . class ) ; if ( federatedBroker != null ) { federatedBroker . warmUp ( ) ; } logger . info ( "<STR_LIT>" + brokerConfig . getClusterName ( ) + "<STR_LIT>" ) ; _clusterClient . awaitConnectionUninterruptibly ( ) ; int count = <NUM_LIT:0> ; while ( true ) { try { count ++ ; logger . info ( "<STR_LIT>" ) ; SenseiSystemInfo sysInfo = _senseiSysBroker . browse ( new SenseiRequest ( ) ) ; _facetInfoMap = extractFacetInfo ( sysInfo ) ; _compiler = new BQLCompiler ( _facetInfoMap ) ; break ; } catch ( Exception e ) { logger . info ( "<STR_LIT>" , e ) ; if ( count > <NUM_LIT:10> ) { logger . error ( "<STR_LIT>" ) ; throw new ServletException ( e . getMessage ( ) , e ) ; } else { try { Thread . sleep ( <NUM_LIT> ) ; } catch ( InterruptedException e2 ) { logger . error ( "<STR_LIT>" , e ) ; } } } } logger . info ( "<STR_LIT>" + brokerConfig . getClusterName ( ) + "<STR_LIT>" ) ; } public static Map < String , String [ ] > extractFacetInfo ( SenseiSystemInfo sysInfo ) { Map < String , String [ ] > facetInfoMap = new HashMap < String , String [ ] > ( ) ; Iterator < SenseiSystemInfo . SenseiFacetInfo > itr = sysInfo . getFacetInfos ( ) . iterator ( ) ; while ( itr . hasNext ( ) ) { SenseiSystemInfo . SenseiFacetInfo facetInfo = itr . next ( ) ; Map < String , String > props = facetInfo . getProps ( ) ; facetInfoMap . put ( facetInfo . getName ( ) , new String [ ] { props . get ( "<STR_LIT:type>" ) , props . get ( "<STR_LIT>" ) } ) ; } return facetInfoMap ; } protected abstract SenseiRequest buildSenseiRequest ( HttpServletRequest req ) throws Exception ; public static Map < String , String > getParameters ( String query ) throws Exception { Map < String , String > params = new HashMap < String , String > ( ) ; for ( String param : query . split ( "<STR_LIT:&>" ) ) { String pair [ ] = param . split ( "<STR_LIT:=>" ) ; String key = URLDecoder . decode ( pair [ <NUM_LIT:0> ] , "<STR_LIT:UTF-8>" ) ; String value = "<STR_LIT>" ; if ( pair . length > <NUM_LIT:1> ) { value = URLDecoder . decode ( pair [ <NUM_LIT:1> ] , "<STR_LIT:UTF-8>" ) ; } params . put ( key , value ) ; } return params ; } private void handleSenseiRequest ( HttpServletRequest req , HttpServletResponse resp , Broker < SenseiRequest , SenseiResult > broker ) throws ServletException , IOException { long time = System . currentTimeMillis ( ) ; int numHits = <NUM_LIT:0> , totalDocs = <NUM_LIT:0> ; String query = null ; SenseiRequest senseiReq = null ; try { JSONObject jsonObj = null ; String content = null ; if ( "<STR_LIT>" . equalsIgnoreCase ( req . getMethod ( ) ) ) { BufferedReader reader = req . getReader ( ) ; content = readContent ( reader ) ; if ( content == null || content . length ( ) == <NUM_LIT:0> ) content = "<STR_LIT:{}>" ; try { jsonObj = new JSONObject ( content ) ; } catch ( JSONException jse ) { String contentType = req . getHeader ( "<STR_LIT:Content-Type>" ) ; if ( contentType != null && contentType . indexOf ( "<STR_LIT>" ) >= <NUM_LIT:0> ) { logger . error ( "<STR_LIT>" , jse ) ; writeEmptyResponse ( req , resp , new SenseiError ( jse . 
getMessage ( ) , ErrorType . JsonParsingError ) ) ; return ; } logger . warn ( "<STR_LIT>" , jse ) ; senseiReq = DefaultSenseiJSONServlet . convertSenseiRequest ( new DataConfiguration ( new MapConfiguration ( getParameters ( content ) ) ) ) ; query = content ; } } else { content = req . getParameter ( "<STR_LIT>" ) ; if ( content != null ) { if ( content . length ( ) == <NUM_LIT:0> ) content = "<STR_LIT:{}>" ; try { jsonObj = new JSONObject ( content ) ; } catch ( JSONException jse ) { logger . error ( "<STR_LIT>" , jse ) ; writeEmptyResponse ( req , resp , new SenseiError ( jse . getMessage ( ) , ErrorType . JsonParsingError ) ) ; return ; } } else { senseiReq = buildSenseiRequest ( req ) ; query = URLEncodedUtils . format ( HttpRestSenseiServiceImpl . convertRequestToQueryParams ( senseiReq ) , "<STR_LIT:UTF-8>" ) ; } } if ( jsonObj != null ) { String bqlStmt = jsonObj . optString ( BQL_STMT ) ; JSONObject templatesJson = jsonObj . optJSONObject ( JsonTemplateProcessor . TEMPLATE_MAPPING_PARAM ) ; JSONObject compiledJson = null ; if ( bqlStmt . length ( ) > <NUM_LIT:0> ) { try { if ( jsonObj . length ( ) == <NUM_LIT:1> ) query = "<STR_LIT>" + bqlStmt ; else query = "<STR_LIT>" + content ; compiledJson = _compiler . compile ( bqlStmt ) ; } catch ( RecognitionException e ) { String errMsg = _compiler . getErrorMessage ( e ) ; if ( errMsg == null ) { errMsg = "<STR_LIT>" ; } logger . error ( "<STR_LIT>" + errMsg + "<STR_LIT>" + bqlStmt ) ; writeEmptyResponse ( req , resp , new SenseiError ( errMsg , ErrorType . BQLParsingError ) ) ; return ; } String extraFilter = jsonObj . optString ( BQL_EXTRA_FILTER ) ; JSONObject predObj = null ; if ( extraFilter . length ( ) > <NUM_LIT:0> ) { String bql2 = "<STR_LIT>" + extraFilter ; try { predObj = _compiler . compile ( bql2 ) ; } catch ( RecognitionException e ) { String errMsg = _compiler . getErrorMessage ( e ) ; if ( errMsg == null ) { errMsg = "<STR_LIT>" ; } logger . error ( "<STR_LIT>" + errMsg + "<STR_LIT>" + bql2 ) ; writeEmptyResponse ( req , resp , new SenseiError ( "<STR_LIT>" + errMsg + "<STR_LIT>" + bql2 , ErrorType . BQLParsingError ) ) ; return ; } JSONArray filter_list = new JSONArray ( ) ; JSONObject currentFilter = compiledJson . optJSONObject ( "<STR_LIT>" ) ; if ( currentFilter != null ) { filter_list . put ( currentFilter ) ; } JSONArray selections = predObj . optJSONArray ( "<STR_LIT>" ) ; if ( selections != null ) { for ( int i = <NUM_LIT:0> ; i < selections . length ( ) ; ++ i ) { JSONObject pred = selections . getJSONObject ( i ) ; if ( pred != null ) { filter_list . put ( pred ) ; } } } JSONObject additionalFilter = predObj . optJSONObject ( "<STR_LIT>" ) ; if ( additionalFilter != null ) { filter_list . put ( additionalFilter ) ; } if ( filter_list . length ( ) > <NUM_LIT:1> ) { compiledJson . put ( "<STR_LIT>" , new JSONObject ( ) . put ( "<STR_LIT>" , filter_list ) ) ; } else if ( filter_list . length ( ) == <NUM_LIT:1> ) { compiledJson . put ( "<STR_LIT>" , filter_list . get ( <NUM_LIT:0> ) ) ; } } JSONObject metaData = compiledJson . optJSONObject ( "<STR_LIT>" ) ; if ( metaData != null ) { JSONArray variables = metaData . optJSONArray ( "<STR_LIT>" ) ; if ( variables != null ) { for ( int i = <NUM_LIT:0> ; i < variables . length ( ) ; ++ i ) { String var = variables . getString ( i ) ; if ( templatesJson == null || templatesJson . opt ( var ) == null ) { writeEmptyResponse ( req , resp , new SenseiError ( "<STR_LIT>" + var + "<STR_LIT>" , ErrorType . 
BQLParsingError ) ) ; return ; } } } } } else { query = "<STR_LIT>" + content ; compiledJson = jsonObj ; } if ( templatesJson != null ) { compiledJson . put ( JsonTemplateProcessor . TEMPLATE_MAPPING_PARAM , templatesJson ) ; } senseiReq = SenseiRequest . fromJSON ( compiledJson , _facetInfoMap ) ; } SenseiResult res = broker . browse ( senseiReq ) ; numHits = res . getNumHits ( ) ; totalDocs = res . getTotalDocs ( ) ; sendResponse ( req , resp , senseiReq , res ) ; } catch ( JSONException e ) { try { writeEmptyResponse ( req , resp , new SenseiError ( e . getMessage ( ) , ErrorType . JsonParsingError ) ) ; } catch ( Exception ex ) { throw new ServletException ( e ) ; } } catch ( Exception e ) { try { logger . error ( e . getMessage ( ) , e ) ; if ( e . getCause ( ) != null && e . getCause ( ) instanceof JSONException ) { writeEmptyResponse ( req , resp , new SenseiError ( e . getMessage ( ) , ErrorType . JsonParsingError ) ) ; } else { writeEmptyResponse ( req , resp , new SenseiError ( e . getMessage ( ) , ErrorType . InternalError ) ) ; } } catch ( Exception ex ) { throw new ServletException ( e ) ; } } finally { if ( queryLogger . isInfoEnabled ( ) && query != null ) { queryLogger . info ( String . format ( "<STR_LIT>" , numHits , totalDocs , System . currentTimeMillis ( ) - time , query ) ) ; } } } private void writeEmptyResponse ( HttpServletRequest req , HttpServletResponse resp , SenseiError senseiError ) throws Exception { SenseiResult res = new SenseiResult ( ) ; res . addError ( senseiError ) ; sendResponse ( req , resp , new SenseiRequest ( ) , res ) ; } private void sendResponse ( HttpServletRequest req , HttpServletResponse resp , SenseiRequest senseiReq , SenseiResult res ) throws Exception { OutputStream ostream = resp . getOutputStream ( ) ; convertResult ( req , senseiReq , res , ostream ) ; ostream . flush ( ) ; } private void handleStoreGetRequest ( HttpServletRequest req , HttpServletResponse resp ) throws ServletException , IOException { long time = System . currentTimeMillis ( ) ; int numHits = <NUM_LIT:0> , totalDocs = <NUM_LIT:0> ; String query = null ; SenseiRequest senseiReq = null ; try { JSONArray ids = null ; if ( "<STR_LIT>" . equalsIgnoreCase ( req . getMethod ( ) ) ) { BufferedReader reader = req . getReader ( ) ; ids = new JSONArray ( readContent ( reader ) ) ; } else { String jsonString = req . getParameter ( "<STR_LIT>" ) ; if ( jsonString != null ) ids = new JSONArray ( jsonString ) ; } query = "<STR_LIT>" + String . valueOf ( ids ) ; String [ ] vals = RequestConverter2 . getStrings ( ids ) ; if ( vals != null && vals . length != <NUM_LIT:0> ) { senseiReq = new SenseiRequest ( ) ; senseiReq . setFetchStoredValue ( true ) ; senseiReq . setCount ( vals . length ) ; BrowseSelection sel = new BrowseSelection ( SenseiFacetHandlerBuilder . UID_FACET_NAME ) ; sel . setValues ( vals ) ; senseiReq . addSelection ( sel ) ; } SenseiResult res = null ; if ( senseiReq != null ) res = _senseiBroker . browse ( senseiReq ) ; if ( res != null ) { numHits = res . getNumHits ( ) ; totalDocs = res . getTotalDocs ( ) ; } JSONObject ret = new JSONObject ( ) ; JSONObject obj = null ; if ( res != null && res . getSenseiHits ( ) != null ) { for ( SenseiHit hit : res . getSenseiHits ( ) ) { try { obj = new JSONObject ( hit . getSrcData ( ) ) ; ret . put ( String . valueOf ( hit . getUID ( ) ) , obj ) ; } catch ( Exception ex ) { logger . warn ( ex . getMessage ( ) , ex ) ; } } } OutputStream ostream = resp . getOutputStream ( ) ; ostream . write ( ret . toString ( ) . 
getBytes ( "<STR_LIT:UTF-8>" ) ) ; ostream . flush ( ) ; } catch ( Exception e ) { throw new ServletException ( e . getMessage ( ) , e ) ; } finally { if ( queryLogger . isInfoEnabled ( ) && query != null ) { queryLogger . info ( String . format ( "<STR_LIT>" , numHits , totalDocs , System . currentTimeMillis ( ) - time , query ) ) ; } } } private void handleSystemInfoRequest ( HttpServletRequest req , HttpServletResponse resp ) throws ServletException , IOException { try { SenseiSystemInfo res = _senseiSysBroker . browse ( new SenseiRequest ( ) ) ; OutputStream ostream = resp . getOutputStream ( ) ; convertResult ( req , res , ostream ) ; ostream . flush ( ) ; } catch ( Exception e ) { throw new ServletException ( e . getMessage ( ) , e ) ; } } private void handleJMXRequest ( HttpServletRequest req , HttpServletResponse resp ) throws ServletException , IOException { InputStream is = null ; OutputStream os = null ; try { String myPath = req . getRequestURI ( ) . substring ( req . getServletPath ( ) . length ( ) + <NUM_LIT:11> ) ; URL adminUrl = null ; if ( myPath . indexOf ( '<CHAR_LIT:/>' ) > <NUM_LIT:0> ) { adminUrl = new URL ( new StringBuilder ( URLDecoder . decode ( myPath . substring ( <NUM_LIT:0> , myPath . indexOf ( '<CHAR_LIT:/>' ) ) , "<STR_LIT:UTF-8>" ) ) . append ( "<STR_LIT>" ) . append ( myPath . substring ( myPath . indexOf ( '<CHAR_LIT:/>' ) ) ) . toString ( ) ) ; } else { adminUrl = new URL ( new StringBuilder ( URLDecoder . decode ( myPath , "<STR_LIT:UTF-8>" ) ) . append ( "<STR_LIT>" ) . toString ( ) ) ; } URLConnection conn = adminUrl . openConnection ( ) ; byte [ ] buffer = new byte [ <NUM_LIT> ] ; int len = <NUM_LIT:0> ; InputStream ris = req . getInputStream ( ) ; while ( ( len = ris . read ( buffer ) ) > <NUM_LIT:0> ) { if ( ! conn . getDoOutput ( ) ) { conn . setDoOutput ( true ) ; os = conn . getOutputStream ( ) ; } os . write ( buffer , <NUM_LIT:0> , len ) ; } if ( os != null ) os . flush ( ) ; is = conn . getInputStream ( ) ; OutputStream ros = resp . getOutputStream ( ) ; while ( ( len = is . read ( buffer ) ) > <NUM_LIT:0> ) { ros . write ( buffer , <NUM_LIT:0> , len ) ; } ros . flush ( ) ; } catch ( Exception e ) { throw new ServletException ( e . getMessage ( ) , e ) ; } finally { if ( is != null ) is . close ( ) ; if ( os != null ) os . close ( ) ; } } private static String readContent ( BufferedReader reader ) throws IOException { StringBuilder jb = new StringBuilder ( ) ; String line = null ; while ( ( line = reader . readLine ( ) ) != null ) jb . append ( line ) ; return jb . toString ( ) ; } @ Override protected void doGet ( HttpServletRequest req , HttpServletResponse resp ) throws ServletException , IOException { if ( req . getCharacterEncoding ( ) == null ) req . setCharacterEncoding ( "<STR_LIT:UTF-8>" ) ; resp . setContentType ( "<STR_LIT>" ) ; resp . setCharacterEncoding ( "<STR_LIT:UTF-8>" ) ; resp . setHeader ( "<STR_LIT>" , "<STR_LIT:*>" ) ; resp . setHeader ( "<STR_LIT>" , "<STR_LIT>" ) ; resp . setHeader ( "<STR_LIT>" , "<STR_LIT>" ) ; if ( null == req . getPathInfo ( ) || "<STR_LIT:/>" . equalsIgnoreCase ( req . getPathInfo ( ) ) ) { handleSenseiRequest ( req , resp , _senseiBroker ) ; } else if ( "<STR_LIT>" . equalsIgnoreCase ( req . getPathInfo ( ) ) ) { handleStoreGetRequest ( req , resp ) ; } else if ( "<STR_LIT>" . equalsIgnoreCase ( req . getPathInfo ( ) ) ) { handleSystemInfoRequest ( req , resp ) ; } else if ( req . getPathInfo ( ) . startsWith ( "<STR_LIT>" ) ) { handleJMXRequest ( req , resp ) ; } else if ( req . 
getPathInfo ( ) . startsWith ( "<STR_LIT>" ) ) { if ( federatedBroker == null ) { try { writeEmptyResponse ( req , resp , new SenseiError ( "<STR_LIT>" , ErrorType . FederatedBrokerUnavailable ) ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } return ; } handleSenseiRequest ( req , resp , federatedBroker ) ; } else { handleSenseiRequest ( req , resp , _senseiBroker ) ; } } @ Override protected void doPost ( HttpServletRequest req , HttpServletResponse resp ) throws ServletException , IOException { doGet ( req , resp ) ; } @ Override protected void doOptions ( HttpServletRequest req , HttpServletResponse resp ) throws ServletException , IOException { resp . setHeader ( "<STR_LIT>" , "<STR_LIT:*>" ) ; resp . setHeader ( "<STR_LIT>" , "<STR_LIT>" ) ; resp . setHeader ( "<STR_LIT>" , "<STR_LIT>" ) ; } protected abstract void convertResult ( HttpServletRequest httpReq , SenseiSystemInfo info , OutputStream ostream ) throws Exception ; protected abstract void convertResult ( HttpServletRequest httpReq , SenseiRequest req , SenseiResult res , OutputStream ostream ) throws Exception ; @ Override public void destroy ( ) { try { try { if ( _senseiBroker != null ) { _senseiBroker . shutdown ( ) ; _senseiBroker = null ; } } finally { try { if ( _senseiSysBroker != null ) { _senseiSysBroker . shutdown ( ) ; _senseiSysBroker = null ; } } finally { try { if ( _networkClient != null ) { _networkClient . shutdown ( ) ; _networkClient = null ; } } finally { if ( _clusterClient != null ) { _clusterClient . shutdown ( ) ; _clusterClient = null ; } } } } } finally { super . destroy ( ) ; } } } </s>
|
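Editor's note: a small usage sketch of the public static helper above; getParameters splits a raw URL-encoded query string into a key/value map (a later value overwrites an earlier one for a repeated key). The query string below is just illustrative.

import java.util.Map;

public class QueryStringExample {
  public static void main(String[] args) throws Exception {
    Map<String, String> params =
        AbstractSenseiClientServlet.getParameters("q=cars&count=10&fetch_stored=true");
    System.out.println(params.get("count")); // prints "10"
  }
}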
<s> package com . senseidb . servlet ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_COUNT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_INIT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_BOOL ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_BYTEARRAY ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_DOUBLE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_INT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_LONG ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_STRING ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_DYNAMIC_VAL ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET_EXPAND ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET_MAX ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET_MINHIT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET_ORDER ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET_ORDER_HITS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FACET_ORDER_VAL ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FETCH_STORED ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FETCH_STORED_VALUE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_FETCH_TERMVECTOR ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_GROUP_BY ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_MAX_PER_GROUP ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_OFFSET ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_PARTITIONS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_QUERY ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_QUERY_PARAM ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_ERRORS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_ERROR_CODE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_ERROR_MESSAGE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_ERROR_TYPE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_FACETS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_FACET_INFO_COUNT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_FACET_INFO_SELECTED ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_FACET_INFO_VALUE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HITS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HITS_EXPL_DESC ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HITS_EXPL_DETAILS ; import static com . senseidb . servlet . SenseiSearchServletParams . 
PARAM_RESULT_HITS_EXPL_VALUE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_DOCID ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_EXPLANATION ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPHITS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPHITSCOUNT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPFIELD ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPVALUE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_SCORE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_SRC_DATA ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_STORED_FIELDS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_STORED_FIELDS_NAME ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_STORED_FIELDS_VALUE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_TERMVECTORS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_UID ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_NUMGROUPS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_NUMHITS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_PARSEDQUERY ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_SELECT_LIST ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_TID ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_TIME ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_TOTALDOCS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_ROUTE_PARAM ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT_NOT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT_OP ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT_OP_AND ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT_OP_OR ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT_PROP ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SELECT_VAL ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SHOW_EXPLAIN ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SORT ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SORT_DESC ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SORT_DOC ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SORT_DOC_REVERSE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SORT_SCORE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SORT_SCORE_REVERSE ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_ADMINLINK ; import static com . senseidb . servlet . 
SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_ID ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_NODELINK ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_PARTITIONS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_FACETS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_FACETS_NAME ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_FACETS_PROPS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_FACETS_RUNTIME ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_LASTMODIFIED ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_NUMDOCS ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_SCHEMA ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_SYSINFO_VERSION ; import java . io . UnsupportedEncodingException ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . Collections ; import java . util . Comparator ; import java . util . HashMap ; import java . util . HashSet ; import java . util . Iterator ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . Set ; import javax . servlet . http . HttpServletRequest ; import org . apache . commons . configuration . Configuration ; import org . apache . commons . configuration . DataConfiguration ; import org . apache . commons . lang . StringUtils ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Document ; import org . apache . lucene . document . Fieldable ; import org . apache . lucene . search . Explanation ; import org . apache . lucene . search . SortField ; import org . json . JSONArray ; import org . json . JSONException ; import org . json . JSONObject ; import org . springframework . util . Assert ; import com . browseengine . bobo . api . BrowseFacet ; import com . browseengine . bobo . api . BrowseHit ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . api . BrowseSelection . ValueOperation ; import com . browseengine . bobo . api . FacetAccessible ; import com . browseengine . bobo . api . FacetSpec ; import com . browseengine . bobo . api . FacetSpec . FacetSortSpec ; import com . browseengine . bobo . facets . DefaultFacetHandlerInitializerParam ; import com . senseidb . search . req . SenseiError ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . SenseiJSONQuery ; import com . senseidb . search . req . SenseiQuery ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . util . RequestConverter ; public class DefaultSenseiJSONServlet extends AbstractSenseiRestServlet { private static final String PARAM_RESULT_MAP_REDUCE = "<STR_LIT>" ; private static final long serialVersionUID = <NUM_LIT:1L> ; private static Logger logger = Logger . getLogger ( DefaultSenseiJSONServlet . class ) ; public static JSONObject convertExpl ( Explanation expl ) throws JSONException { JSONObject jsonObject = null ; if ( expl != null ) { jsonObject = new JSONObject ( ) ; jsonObject . put ( PARAM_RESULT_HITS_EXPL_VALUE , expl . 
getValue ( ) ) ; String descr = expl . getDescription ( ) ; jsonObject . put ( PARAM_RESULT_HITS_EXPL_DESC , descr == null ? "<STR_LIT>" : descr ) ; Explanation [ ] details = expl . getDetails ( ) ; if ( details != null ) { JSONArray detailArray = new JSONArray ( ) ; for ( Explanation detail : details ) { JSONObject subObj = convertExpl ( detail ) ; if ( subObj != null ) { detailArray . put ( subObj ) ; } } jsonObject . put ( PARAM_RESULT_HITS_EXPL_DETAILS , detailArray ) ; } } return jsonObject ; } public static JSONObject convert ( Map < String , FacetAccessible > facetValueMap , SenseiRequest req ) throws JSONException { JSONObject resMap = new JSONObject ( ) ; if ( facetValueMap != null ) { Set < Entry < String , FacetAccessible > > entrySet = facetValueMap . entrySet ( ) ; for ( Entry < String , FacetAccessible > entry : entrySet ) { String fieldname = entry . getKey ( ) ; BrowseSelection sel = req . getSelection ( fieldname ) ; HashSet < String > selectedVals = new HashSet < String > ( ) ; if ( sel != null ) { String [ ] vals = sel . getValues ( ) ; if ( vals != null && vals . length > <NUM_LIT:0> ) { selectedVals . addAll ( Arrays . asList ( vals ) ) ; } } FacetAccessible facetAccessible = entry . getValue ( ) ; List < BrowseFacet > facetList = facetAccessible . getFacets ( ) ; ArrayList < JSONObject > facets = new ArrayList < JSONObject > ( ) ; for ( BrowseFacet f : facetList ) { String fval = f . getValue ( ) ; if ( fval != null && fval . length ( ) > <NUM_LIT:0> ) { JSONObject fv = new JSONObject ( ) ; fv . put ( PARAM_RESULT_FACET_INFO_COUNT , f . getFacetValueHitCount ( ) ) ; fv . put ( PARAM_RESULT_FACET_INFO_VALUE , fval ) ; fv . put ( PARAM_RESULT_FACET_INFO_SELECTED , selectedVals . remove ( fval ) ) ; facets . add ( fv ) ; } } if ( selectedVals . size ( ) > <NUM_LIT:0> ) { for ( String selectedVal : selectedVals ) { if ( selectedVal != null && selectedVal . length ( ) > <NUM_LIT:0> ) { BrowseFacet selectedFacetVal = facetAccessible . getFacet ( selectedVal ) ; JSONObject fv = new JSONObject ( ) ; fv . put ( PARAM_RESULT_FACET_INFO_COUNT , selectedFacetVal == null ? <NUM_LIT:0> : selectedFacetVal . getFacetValueHitCount ( ) ) ; String fval = selectedFacetVal == null ? selectedVal : selectedFacetVal . getValue ( ) ; fv . put ( PARAM_RESULT_FACET_INFO_VALUE , fval ) ; fv . put ( PARAM_RESULT_FACET_INFO_SELECTED , true ) ; facets . add ( fv ) ; } } FacetSpec fspec = req . getFacetSpec ( fieldname ) ; assert fspec != null ; sortFacets ( fieldname , facets , fspec ) ; } resMap . put ( fieldname , facets ) ; } } return resMap ; } private static void sortFacets ( String fieldName , ArrayList < JSONObject > facets , FacetSpec fspec ) { FacetSortSpec sortSpec = fspec . getOrderBy ( ) ; if ( FacetSortSpec . OrderHitsDesc . equals ( sortSpec ) ) { Collections . sort ( facets , new Comparator < JSONObject > ( ) { @ Override public int compare ( JSONObject o1 , JSONObject o2 ) { try { int c1 = o1 . getInt ( PARAM_RESULT_FACET_INFO_COUNT ) ; int c2 = o2 . getInt ( PARAM_RESULT_FACET_INFO_COUNT ) ; int val = c2 - c1 ; if ( val == <NUM_LIT:0> ) { String s1 = o1 . getString ( PARAM_RESULT_FACET_INFO_VALUE ) ; String s2 = o2 . getString ( PARAM_RESULT_FACET_INFO_VALUE ) ; val = s1 . compareTo ( s2 ) ; } return val ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return <NUM_LIT:0> ; } } } ) ; } else if ( FacetSortSpec . OrderValueAsc . equals ( sortSpec ) ) { Collections .
sort ( facets , new Comparator < JSONObject > ( ) { @ Override public int compare ( JSONObject o1 , JSONObject o2 ) { try { String s1 = o1 . getString ( PARAM_RESULT_FACET_INFO_VALUE ) ; String s2 = o2 . getString ( PARAM_RESULT_FACET_INFO_VALUE ) ; return s1 . compareTo ( s2 ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return <NUM_LIT:0> ; } } } ) ; } else { throw new IllegalStateException ( fieldName + "<STR_LIT>" ) ; } } @ Override protected String buildResultString ( HttpServletRequest httpReq , SenseiRequest req , SenseiResult res ) throws Exception { return supportJsonp ( httpReq , buildJSONResultString ( req , res ) ) ; } private String supportJsonp ( HttpServletRequest httpReq , String jsonString ) { String callback = httpReq . getParameter ( "<STR_LIT>" ) ; if ( callback != null ) { return callback + "<STR_LIT:(>" + jsonString + "<STR_LIT>" ; } else { return jsonString ; } } public static String buildJSONResultString ( SenseiRequest req , SenseiResult res ) throws Exception { JSONObject jsonObj = buildJSONResult ( req , res ) ; return jsonObj . toString ( ) ; } public static JSONArray buildJSONHits ( SenseiRequest req , SenseiHit [ ] hits ) throws Exception { Set < String > selectSet = null ; List < String > selectList = req . getSelectList ( ) ; if ( selectList != null && ! ( selectList . size ( ) == <NUM_LIT:1> && "<STR_LIT:*>" . equals ( selectList . get ( <NUM_LIT:0> ) ) ) ) { selectSet = new HashSet < String > ( selectList ) ; } JSONArray hitArray = new JSONArray ( ) ; for ( SenseiHit hit : hits ) { Map < String , String [ ] > fieldMap = hit . getFieldValues ( ) ; JSONObject hitObj = new JSONObject ( ) ; if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_UID ) ) { hitObj . put ( PARAM_RESULT_HIT_UID , hit . getUID ( ) ) ; } if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_DOCID ) ) { hitObj . put ( PARAM_RESULT_HIT_DOCID , hit . getDocid ( ) ) ; } if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_SCORE ) ) { hitObj . put ( PARAM_RESULT_HIT_SCORE , hit . getScore ( ) ) ; } if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_GROUPFIELD ) ) { hitObj . put ( PARAM_RESULT_HIT_GROUPFIELD , hit . getGroupField ( ) ) ; } if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_GROUPVALUE ) ) { hitObj . put ( PARAM_RESULT_HIT_GROUPVALUE , hit . getGroupValue ( ) ) ; } if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_GROUPHITSCOUNT ) ) { hitObj . put ( PARAM_RESULT_HIT_GROUPHITSCOUNT , hit . getGroupHitsCount ( ) ) ; } if ( hit . getGroupHits ( ) != null && hit . getGroupHits ( ) . length > <NUM_LIT:0> ) hitObj . put ( PARAM_RESULT_HIT_GROUPHITS , buildJSONHits ( req , hit . getSenseiGroupHits ( ) ) ) ; if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_SRC_DATA ) || req . isFetchStoredFields ( ) ) { hitObj . put ( PARAM_RESULT_HIT_SRC_DATA , hit . getSrcData ( ) ) ; } if ( fieldMap != null ) { Set < Entry < String , String [ ] > > entries = fieldMap . entrySet ( ) ; for ( Entry < String , String [ ] > entry : entries ) { String key = entry . getKey ( ) ; if ( key . equals ( PARAM_RESULT_HIT_UID ) ) { continue ; } String [ ] vals = entry . getValue ( ) ; JSONArray valArray = new JSONArray ( ) ; if ( vals != null ) { for ( String val : vals ) { valArray . put ( val ) ; } } if ( selectSet == null || selectSet . contains ( key ) ) { hitObj . put ( key , valArray ) ; } } } Document doc = hit .
getStoredFields ( ) ; if ( doc != null ) { List < JSONObject > storedData = new ArrayList < JSONObject > ( ) ; List < Fieldable > fields = doc . getFields ( ) ; for ( Fieldable field : fields ) { JSONObject data = new JSONObject ( ) ; data . put ( PARAM_RESULT_HIT_STORED_FIELDS_NAME , field . name ( ) ) ; data . put ( PARAM_RESULT_HIT_STORED_FIELDS_VALUE , field . stringValue ( ) ) ; storedData . add ( data ) ; } if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_STORED_FIELDS ) ) { hitObj . put ( PARAM_RESULT_HIT_STORED_FIELDS , new JSONArray ( storedData ) ) ; } } Map < String , BrowseHit . TermFrequencyVector > tvMap = hit . getTermFreqMap ( ) ; if ( tvMap != null && tvMap . size ( ) > <NUM_LIT:0> ) { JSONObject tvObj = new JSONObject ( ) ; if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_TERMVECTORS ) ) { hitObj . put ( PARAM_RESULT_HIT_TERMVECTORS , tvObj ) ; } Set < Entry < String , BrowseHit . TermFrequencyVector > > entries = tvMap . entrySet ( ) ; for ( Entry < String , BrowseHit . TermFrequencyVector > entry : entries ) { String field = entry . getKey ( ) ; JSONArray tvArray = new JSONArray ( ) ; tvObj . put ( field , tvArray ) ; String [ ] terms = entry . getValue ( ) . terms ; int [ ] freqs = entry . getValue ( ) . freqs ; for ( int i = <NUM_LIT:0> ; i < terms . length ; ++ i ) { JSONObject tv = new JSONObject ( ) ; tv . put ( "<STR_LIT>" , terms [ i ] ) ; tv . put ( "<STR_LIT>" , freqs [ i ] ) ; tvArray . put ( tv ) ; } } } Explanation expl = hit . getExplanation ( ) ; if ( expl != null ) { if ( selectSet == null || selectSet . contains ( PARAM_RESULT_HIT_EXPLANATION ) ) { hitObj . put ( PARAM_RESULT_HIT_EXPLANATION , convertExpl ( expl ) ) ; } } hitArray . put ( hitObj ) ; } return hitArray ; } public static JSONObject buildJSONResult ( SenseiRequest req , SenseiResult res ) throws Exception { JSONObject jsonObj = new JSONObject ( ) ; jsonObj . put ( PARAM_RESULT_TID , res . getTid ( ) ) ; jsonObj . put ( PARAM_RESULT_TOTALDOCS , res . getTotalDocs ( ) ) ; jsonObj . put ( PARAM_RESULT_NUMHITS , res . getNumHits ( ) ) ; jsonObj . put ( PARAM_RESULT_NUMGROUPS , res . getNumGroups ( ) ) ; jsonObj . put ( PARAM_RESULT_PARSEDQUERY , res . getParsedQuery ( ) ) ; addErrors ( jsonObj , res ) ; SenseiHit [ ] hits = res . getSenseiHits ( ) ; JSONArray hitArray = buildJSONHits ( req , hits ) ; jsonObj . put ( PARAM_RESULT_HITS , hitArray ) ; List < String > selectList = req . getSelectList ( ) ; if ( selectList != null ) { JSONArray jsonSelectList = new JSONArray ( ) ; for ( String col : selectList ) { jsonSelectList . put ( col ) ; } jsonObj . put ( PARAM_RESULT_SELECT_LIST , jsonSelectList ) ; } jsonObj . put ( PARAM_RESULT_TIME , res . getTime ( ) ) ; jsonObj . put ( PARAM_RESULT_FACETS , convert ( res . getFacetMap ( ) , req ) ) ; if ( req . getMapReduceFunction ( ) != null && res . getMapReduceResult ( ) != null ) { jsonObj . put ( PARAM_RESULT_MAP_REDUCE , req . getMapReduceFunction ( ) . render ( res . getMapReduceResult ( ) . getReduceResult ( ) ) ) ; } return jsonObj ; } private static void addErrors ( JSONObject jsonResult , SenseiResult res ) throws JSONException { JSONArray errorsJson = new JSONArray ( ) ; for ( SenseiError error : res . getErrors ( ) ) { errorsJson . put ( new JSONObject ( ) . put ( PARAM_RESULT_ERROR_MESSAGE , error . getMessage ( ) ) . put ( PARAM_RESULT_ERROR_TYPE , error . getErrorType ( ) . name ( ) ) . put ( PARAM_RESULT_ERROR_CODE , error . getErrorCode ( ) ) ) ; } jsonResult . 
put ( PARAM_RESULT_ERRORS , errorsJson ) ; if ( res . getErrors ( ) . size ( ) > <NUM_LIT:0> ) { jsonResult . put ( PARAM_RESULT_ERROR_CODE , res . getErrors ( ) . get ( <NUM_LIT:0> ) . getErrorCode ( ) ) ; } else { jsonResult . put ( PARAM_RESULT_ERROR_CODE , <NUM_LIT:0> ) ; } } private static SenseiQuery buildSenseiQuery ( DataConfiguration params ) { SenseiQuery sq ; String query = params . getString ( PARAM_QUERY , null ) ; JSONObject qjson = new JSONObject ( ) ; if ( query != null && query . length ( ) > <NUM_LIT:0> ) { try { qjson . put ( "<STR_LIT:query>" , query ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } try { String [ ] qparams = params . getStringArray ( PARAM_QUERY_PARAM ) ; for ( String qparam : qparams ) { qparam = qparam . trim ( ) ; if ( qparam . length ( ) == <NUM_LIT:0> ) continue ; String [ ] parts = qparam . split ( "<STR_LIT::>" , <NUM_LIT:2> ) ; if ( parts . length == <NUM_LIT:2> ) { qjson . put ( parts [ <NUM_LIT:0> ] , parts [ <NUM_LIT:1> ] ) ; } } } catch ( JSONException jse ) { logger . error ( jse . getMessage ( ) , jse ) ; } sq = new SenseiJSONQuery ( qjson ) ; return sq ; } @ Override protected SenseiRequest buildSenseiRequest ( DataConfiguration params ) throws Exception { return convertSenseiRequest ( params ) ; } public static SenseiRequest convertSenseiRequest ( DataConfiguration params ) { SenseiRequest senseiReq = new SenseiRequest ( ) ; convertScalarParams ( senseiReq , params ) ; convertSenseiQuery ( senseiReq , params ) ; convertSortParam ( senseiReq , params ) ; convertSelectParam ( senseiReq , params ) ; convertFacetParam ( senseiReq , params ) ; convertInitParams ( senseiReq , params ) ; convertPartitionParams ( senseiReq , params ) ; return senseiReq ; } public static void convertSenseiQuery ( SenseiRequest senseiReq , DataConfiguration params ) { senseiReq . setQuery ( buildSenseiQuery ( params ) ) ; } public static void convertScalarParams ( SenseiRequest senseiReq , DataConfiguration params ) { senseiReq . setOffset ( params . getInt ( PARAM_OFFSET , <NUM_LIT:0> ) ) ; senseiReq . setCount ( params . getInt ( PARAM_COUNT , <NUM_LIT:10> ) ) ; senseiReq . setShowExplanation ( params . getBoolean ( PARAM_SHOW_EXPLAIN , false ) ) ; senseiReq . setFetchStoredFields ( params . getBoolean ( PARAM_FETCH_STORED , false ) ) ; senseiReq . setFetchStoredValue ( params . getBoolean ( PARAM_FETCH_STORED_VALUE , false ) ) ; String [ ] fetchTVs = params . getStringArray ( PARAM_FETCH_TERMVECTOR ) ; if ( fetchTVs != null && fetchTVs . length > <NUM_LIT:0> ) { HashSet < String > tvsToFetch = new HashSet < String > ( Arrays . asList ( fetchTVs ) ) ; tvsToFetch . remove ( "<STR_LIT>" ) ; if ( tvsToFetch . size ( ) > <NUM_LIT:0> ) senseiReq . setTermVectorsToFetch ( tvsToFetch ) ; } String groupBy = params . getString ( PARAM_GROUP_BY , null ) ; if ( groupBy != null && groupBy . length ( ) != <NUM_LIT:0> ) senseiReq . setGroupBy ( StringUtils . split ( groupBy , '<CHAR_LIT:U+002C>' ) ) ; senseiReq . setMaxPerGroup ( params . getInt ( PARAM_MAX_PER_GROUP , <NUM_LIT:0> ) ) ; String routeParam = params . getString ( PARAM_ROUTE_PARAM ) ; if ( routeParam != null && routeParam . length ( ) != <NUM_LIT:0> ) senseiReq . setRouteParam ( routeParam ) ; } public static void convertPartitionParams ( SenseiRequest senseiReq , DataConfiguration params ) { if ( params . containsKey ( PARAM_PARTITIONS ) ) { List < Integer > partitions = params . getList ( Integer . class , PARAM_PARTITIONS ) ; senseiReq . 
setPartitions ( new HashSet < Integer > ( partitions ) ) ; } } public static void convertInitParams ( SenseiRequest senseiReq , DataConfiguration params ) { Map < String , Configuration > facetParamMap = RequestConverter . parseParamConf ( params , PARAM_DYNAMIC_INIT ) ; Set < Entry < String , Configuration > > facetEntries = facetParamMap . entrySet ( ) ; for ( Entry < String , Configuration > facetEntry : facetEntries ) { String facetName = facetEntry . getKey ( ) ; Configuration facetConf = facetEntry . getValue ( ) ; DefaultFacetHandlerInitializerParam facetParams = new DefaultFacetHandlerInitializerParam ( ) ; Iterator paramsIter = facetConf . getKeys ( ) ; while ( paramsIter . hasNext ( ) ) { String paramName = ( String ) paramsIter . next ( ) ; Configuration paramConf = ( Configuration ) facetConf . getProperty ( paramName ) ; String type = paramConf . getString ( PARAM_DYNAMIC_TYPE ) ; List < String > vals = paramConf . getList ( PARAM_DYNAMIC_VAL ) ; try { String [ ] attrVals = vals . toArray ( new String [ <NUM_LIT:0> ] ) ; if ( attrVals . length == <NUM_LIT:0> || attrVals [ <NUM_LIT:0> ] . length ( ) == <NUM_LIT:0> ) { logger . warn ( String . format ( "<STR_LIT>" , facetName , type ) ) ; continue ; } if ( type . equalsIgnoreCase ( PARAM_DYNAMIC_TYPE_BOOL ) ) { createBooleanInitParam ( facetParams , paramName , attrVals ) ; } else if ( type . equalsIgnoreCase ( PARAM_DYNAMIC_TYPE_STRING ) ) { createStringInitParam ( facetParams , paramName , attrVals ) ; } else if ( type . equalsIgnoreCase ( PARAM_DYNAMIC_TYPE_INT ) ) { createIntInitParam ( facetParams , paramName , attrVals ) ; } else if ( type . equalsIgnoreCase ( PARAM_DYNAMIC_TYPE_BYTEARRAY ) ) { createByteArrayInitParam ( facetParams , paramName , paramConf . getString ( PARAM_DYNAMIC_VAL ) ) ; } else if ( type . equalsIgnoreCase ( PARAM_DYNAMIC_TYPE_LONG ) ) { createLongInitParam ( facetParams , paramName , attrVals ) ; } else if ( type . equalsIgnoreCase ( PARAM_DYNAMIC_TYPE_DOUBLE ) ) { createDoubleInitParam ( facetParams , paramName , attrVals ) ; } else { logger . warn ( String . format ( "<STR_LIT>" , paramName , type , facetName ) ) ; continue ; } } catch ( Exception e ) { logger . warn ( String . format ( "<STR_LIT>" , paramName , type , facetName ) ) ; } } senseiReq . setFacetHandlerInitializerParam ( facetName , facetParams ) ; } } private static void createBooleanInitParam ( DefaultFacetHandlerInitializerParam facetParams , String name , String [ ] paramVals ) { boolean [ ] vals = new boolean [ paramVals . length ] ; int i = <NUM_LIT:0> ; for ( String paramVal : paramVals ) { vals [ i ++ ] = Boolean . parseBoolean ( paramVal ) ; } facetParams . putBooleanParam ( name , vals ) ; } private static void createStringInitParam ( DefaultFacetHandlerInitializerParam facetParams , String name , String [ ] paramVals ) { facetParams . putStringParam ( name , Arrays . asList ( paramVals ) ) ; } private static void createIntInitParam ( DefaultFacetHandlerInitializerParam facetParams , String name , String [ ] paramVals ) { int [ ] vals = new int [ paramVals . length ] ; int i = <NUM_LIT:0> ; for ( String paramVal : paramVals ) { vals [ i ++ ] = Integer . parseInt ( paramVal ) ; } facetParams . putIntParam ( name , vals ) ; } private static void createByteArrayInitParam ( DefaultFacetHandlerInitializerParam facetParams , String name , String paramVal ) throws UnsupportedEncodingException { byte [ ] val = paramVal . getBytes ( "<STR_LIT:UTF-8>" ) ; facetParams . 
putByteArrayParam ( name , val ) ; } private static void createLongInitParam ( DefaultFacetHandlerInitializerParam facetParams , String name , String [ ] paramVals ) { long [ ] vals = new long [ paramVals . length ] ; int i = <NUM_LIT:0> ; for ( String paramVal : paramVals ) { vals [ i ++ ] = Long . parseLong ( paramVal ) ; } facetParams . putLongParam ( name , vals ) ; } private static void createDoubleInitParam ( DefaultFacetHandlerInitializerParam facetParams , String name , String [ ] paramVals ) { double [ ] vals = new double [ paramVals . length ] ; int i = <NUM_LIT:0> ; for ( String paramVal : paramVals ) { vals [ i ++ ] = Double . parseDouble ( paramVal ) ; } facetParams . putDoubleParam ( name , vals ) ; } public static void convertSortParam ( SenseiRequest senseiReq , DataConfiguration params ) { String [ ] sortStrings = params . getStringArray ( PARAM_SORT ) ; if ( sortStrings != null && sortStrings . length > <NUM_LIT:0> ) { ArrayList < SortField > sortFieldList = new ArrayList < SortField > ( sortStrings . length ) ; for ( String sortString : sortStrings ) { sortString = sortString . trim ( ) ; if ( sortString . length ( ) == <NUM_LIT:0> ) continue ; SortField sf ; String [ ] parts = sortString . split ( "<STR_LIT::>" ) ; if ( parts . length == <NUM_LIT:2> ) { boolean reverse = PARAM_SORT_DESC . equals ( parts [ <NUM_LIT:1> ] ) ; sf = new SortField ( parts [ <NUM_LIT:0> ] , SortField . CUSTOM , reverse ) ; } else if ( parts . length == <NUM_LIT:1> ) { if ( PARAM_SORT_SCORE . equals ( parts [ <NUM_LIT:0> ] ) ) { sf = SenseiRequest . FIELD_SCORE ; } else if ( PARAM_SORT_SCORE_REVERSE . equals ( parts [ <NUM_LIT:0> ] ) ) { sf = SenseiRequest . FIELD_SCORE_REVERSE ; } else if ( PARAM_SORT_DOC . equals ( parts [ <NUM_LIT:0> ] ) ) { sf = SenseiRequest . FIELD_DOC ; } else if ( PARAM_SORT_DOC_REVERSE . equals ( parts [ <NUM_LIT:0> ] ) ) { sf = SenseiRequest . FIELD_DOC_REVERSE ; } else { sf = new SortField ( parts [ <NUM_LIT:0> ] , SortField . CUSTOM , false ) ; } } else { throw new IllegalArgumentException ( "<STR_LIT>" + sortString ) ; } if ( sf . getType ( ) != SortField . DOC && sf . getType ( ) != SortField . SCORE && ( sf . getField ( ) == null || sf . getField ( ) . isEmpty ( ) ) ) continue ; sortFieldList . add ( sf ) ; } senseiReq . setSort ( sortFieldList . toArray ( new SortField [ sortFieldList . size ( ) ] ) ) ; } } public static void convertFacetParam ( SenseiRequest senseiReq , DataConfiguration params ) { Map < String , Configuration > facetParamMap = RequestConverter . parseParamConf ( params , PARAM_FACET ) ; Set < Entry < String , Configuration > > entries = facetParamMap . entrySet ( ) ; for ( Entry < String , Configuration > entry : entries ) { String name = entry . getKey ( ) ; Configuration conf = entry . getValue ( ) ; FacetSpec fspec = new FacetSpec ( ) ; fspec . setExpandSelection ( conf . getBoolean ( PARAM_FACET_EXPAND , false ) ) ; fspec . setMaxCount ( conf . getInt ( PARAM_FACET_MAX , <NUM_LIT:10> ) ) ; fspec . setMinHitCount ( conf . getInt ( PARAM_FACET_MINHIT , <NUM_LIT:1> ) ) ; FacetSpec . FacetSortSpec orderBy ; String orderString = conf . getString ( PARAM_FACET_ORDER , PARAM_FACET_ORDER_HITS ) ; if ( PARAM_FACET_ORDER_HITS . equals ( orderString ) ) { orderBy = FacetSpec . FacetSortSpec . OrderHitsDesc ; } else if ( PARAM_FACET_ORDER_VAL . equals ( orderString ) ) { orderBy = FacetSpec . FacetSortSpec . OrderValueAsc ; } else { throw new IllegalArgumentException ( "<STR_LIT>" + orderString ) ; } fspec . setOrderBy ( orderBy ) ; senseiReq . 
setFacetSpec ( name , fspec ) ; } } public static void convertSelectParam ( SenseiRequest senseiReq , DataConfiguration params ) { Map < String , Configuration > selectParamMap = RequestConverter . parseParamConf ( params , PARAM_SELECT ) ; Set < Entry < String , Configuration > > entries = selectParamMap . entrySet ( ) ; for ( Entry < String , Configuration > entry : entries ) { String name = entry . getKey ( ) ; Configuration conf = entry . getValue ( ) ; BrowseSelection sel = new BrowseSelection ( name ) ; String [ ] vals = conf . getStringArray ( PARAM_SELECT_VAL ) ; for ( String val : vals ) { if ( val . trim ( ) . length ( ) > <NUM_LIT:0> ) { sel . addValue ( val ) ; } } vals = conf . getStringArray ( PARAM_SELECT_NOT ) ; for ( String val : vals ) { if ( val . trim ( ) . length ( ) > <NUM_LIT:0> ) { sel . addNotValue ( val ) ; } } String op = conf . getString ( PARAM_SELECT_OP , PARAM_SELECT_OP_OR ) ; ValueOperation valOp ; if ( PARAM_SELECT_OP_OR . equals ( op ) ) { valOp = ValueOperation . ValueOperationOr ; } else if ( PARAM_SELECT_OP_AND . equals ( op ) ) { valOp = ValueOperation . ValueOperationAnd ; } else { throw new IllegalArgumentException ( "<STR_LIT>" + op ) ; } sel . setSelectionOperation ( valOp ) ; String [ ] selectPropStrings = conf . getStringArray ( PARAM_SELECT_PROP ) ; if ( selectPropStrings != null && selectPropStrings . length > <NUM_LIT:0> ) { Map < String , String > prop = new HashMap < String , String > ( ) ; for ( String selProp : selectPropStrings ) { if ( selProp . trim ( ) . length ( ) == <NUM_LIT:0> ) continue ; String [ ] parts = selProp . split ( "<STR_LIT::>" ) ; if ( parts . length == <NUM_LIT:2> ) { prop . put ( parts [ <NUM_LIT:0> ] , parts [ <NUM_LIT:1> ] ) ; } else { throw new IllegalArgumentException ( "<STR_LIT>" + selProp ) ; } } sel . setSelectionProperties ( prop ) ; } senseiReq . addSelection ( sel ) ; } } @ Override protected String buildResultString ( HttpServletRequest httpReq , SenseiSystemInfo info ) throws Exception { JSONObject jsonObj = new JSONObject ( ) ; jsonObj . put ( PARAM_SYSINFO_NUMDOCS , info . getNumDocs ( ) ) ; jsonObj . put ( PARAM_SYSINFO_LASTMODIFIED , info . getLastModified ( ) ) ; jsonObj . put ( PARAM_SYSINFO_VERSION , info . getVersion ( ) ) ; if ( info . getSchema ( ) != null && info . getSchema ( ) . length ( ) != <NUM_LIT:0> ) { jsonObj . put ( PARAM_SYSINFO_SCHEMA , new JSONObject ( info . getSchema ( ) ) ) ; } JSONArray jsonArray = new JSONArray ( ) ; jsonObj . put ( PARAM_SYSINFO_FACETS , jsonArray ) ; Set < SenseiSystemInfo . SenseiFacetInfo > facets = info . getFacetInfos ( ) ; if ( facets != null ) { for ( SenseiSystemInfo . SenseiFacetInfo facet : facets ) { JSONObject facetObj = new JSONObject ( ) ; facetObj . put ( PARAM_SYSINFO_FACETS_NAME , facet . getName ( ) ) ; facetObj . put ( PARAM_SYSINFO_FACETS_RUNTIME , facet . isRunTime ( ) ) ; facetObj . put ( PARAM_SYSINFO_FACETS_PROPS , facet . getProps ( ) ) ; jsonArray . put ( facetObj ) ; } } jsonArray = new JSONArray ( ) ; jsonObj . put ( PARAM_SYSINFO_CLUSTERINFO , jsonArray ) ; List < SenseiSystemInfo . SenseiNodeInfo > clusterInfo = info . getClusterInfo ( ) ; if ( clusterInfo != null ) { for ( SenseiSystemInfo . SenseiNodeInfo nodeInfo : clusterInfo ) { JSONObject nodeObj = new JSONObject ( ) ; nodeObj . put ( PARAM_SYSINFO_CLUSTERINFO_ID , nodeInfo . getId ( ) ) ; nodeObj . put ( PARAM_SYSINFO_CLUSTERINFO_PARTITIONS , new JSONArray ( nodeInfo . getPartitions ( ) ) ) ; nodeObj . put ( PARAM_SYSINFO_CLUSTERINFO_NODELINK , nodeInfo . 
getNodeLink ( ) ) ; nodeObj . put ( PARAM_SYSINFO_CLUSTERINFO_ADMINLINK , nodeInfo . getAdminLink ( ) ) ; jsonArray . put ( nodeObj ) ; } } return supportJsonp ( httpReq , jsonObj . toString ( ) ) ; } } </s>
|
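Editor's note: a small sketch of DefaultSenseiJSONServlet.convertExpl on a hand-built Lucene 3.x Explanation tree (this code base uses Lucene 3.x, as the Fieldable import suggests); the scores and descriptions below are made up for illustration.

import org.apache.lucene.search.Explanation;
import org.json.JSONObject;

public class ExplanationJsonExample {
  public static void main(String[] args) throws Exception {
    Explanation root = new Explanation(1.5f, "product of:");
    root.addDetail(new Explanation(0.5f, "tf"));
    root.addDetail(new Explanation(3.0f, "idf"));
    JSONObject json = DefaultSenseiJSONServlet.convertExpl(root);
    // Emits the value/description/details keys defined in SenseiSearchServletParams.
    System.out.println(json.toString(2));
  }
}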
<s> package com . senseidb . servlet ; import java . io . OutputStream ; import javax . servlet . http . HttpServletRequest ; import org . apache . commons . configuration . DataConfiguration ; import org . apache . commons . configuration . web . ServletRequestConfiguration ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; public abstract class AbstractSenseiRestServlet extends AbstractSenseiClientServlet { private static final long serialVersionUID = <NUM_LIT:1L> ; abstract protected SenseiRequest buildSenseiRequest ( DataConfiguration params ) throws Exception ; @ Override protected SenseiRequest buildSenseiRequest ( HttpServletRequest req ) throws Exception { DataConfiguration params = new DataConfiguration ( new ServletRequestConfiguration ( req ) ) ; return buildSenseiRequest ( params ) ; } abstract protected String buildResultString ( HttpServletRequest httpReq , SenseiRequest req , SenseiResult res ) throws Exception ; abstract protected String buildResultString ( HttpServletRequest httpReq , SenseiSystemInfo info ) throws Exception ; @ Override protected void convertResult ( HttpServletRequest httpReq , SenseiSystemInfo info , OutputStream ostream ) throws Exception { String outString = buildResultString ( httpReq , info ) ; ostream . write ( outString . getBytes ( "<STR_LIT:UTF-8>" ) ) ; } @ Override protected void convertResult ( HttpServletRequest httpReq , SenseiRequest req , SenseiResult res , OutputStream ostream ) throws Exception { String outString = buildResultString ( httpReq , req , res ) ; ostream . write ( outString . getBytes ( "<STR_LIT:UTF-8>" ) ) ; } } </s>
|
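AbstractSenseiRestServlet leaves three hooks to subclasses: building a SenseiRequest from the flattened request parameters, and rendering search results and system info as strings. A minimal sketch of such a subclass follows; the class name and parameter names are hypothetical, and any additional abstract members inherited from AbstractSenseiClientServlet are assumed to be handled elsewhere.

```java
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.configuration.DataConfiguration;
import org.json.JSONObject;

import com.senseidb.search.req.SenseiRequest;
import com.senseidb.search.req.SenseiResult;
import com.senseidb.search.req.SenseiSystemInfo;
import com.senseidb.servlet.AbstractSenseiRestServlet;

public class SimpleJsonRestServlet extends AbstractSenseiRestServlet {  // hypothetical name
  private static final long serialVersionUID = 1L;

  @Override
  protected SenseiRequest buildSenseiRequest(DataConfiguration params) throws Exception {
    // Map two hypothetical query parameters onto the request's paging fields.
    SenseiRequest req = new SenseiRequest();
    req.setOffset(params.getInt("start", 0));
    req.setCount(params.getInt("rows", 10));
    return req;
  }

  @Override
  protected String buildResultString(HttpServletRequest httpReq, SenseiRequest req, SenseiResult res)
      throws Exception {
    // Render only the hit count; a real servlet would serialize the full result.
    return new JSONObject().put("numhits", res.getNumHits()).toString();
  }

  @Override
  protected String buildResultString(HttpServletRequest httpReq, SenseiSystemInfo info) throws Exception {
    return new JSONObject().put("numdocs", info.getNumDocs()).toString();
  }
}
```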
<s> package com . senseidb . svc . impl ; import java . io . IOException ; import java . util . List ; import com . sensei . search . req . protobuf . SenseiSysReqProtoSerializer ; import org . apache . log4j . Logger ; import com . browseengine . bobo . api . BoboBrowser ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . MultiBoboBrowser ; import com . linkedin . norbert . network . JavaSerializer ; import com . linkedin . norbert . network . Serializer ; import com . senseidb . search . node . SenseiCore ; import com . senseidb . search . node . SenseiQueryBuilderFactory ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiSystemInfo ; public class SysSenseiCoreServiceImpl extends AbstractSenseiCoreService < SenseiRequest , SenseiSystemInfo > { public static final Serializer < SenseiRequest , SenseiSystemInfo > JAVA_SERIALIZER = JavaSerializer . apply ( "<STR_LIT>" , SenseiRequest . class , SenseiSystemInfo . class ) ; public static final Serializer < SenseiRequest , SenseiSystemInfo > PROTO_SERIALIZER = new SenseiSysReqProtoSerializer ( ) ; private static final Logger logger = Logger . getLogger ( SysSenseiCoreServiceImpl . class ) ; public SysSenseiCoreServiceImpl ( SenseiCore core ) { super ( core ) ; } @ Override protected String getMetricScope ( ) { return "<STR_LIT>" ; } @ Override public SenseiSystemInfo handlePartitionedRequest ( SenseiRequest request , List < BoboIndexReader > readerList , SenseiQueryBuilderFactory queryBuilderFactory ) throws Exception { SenseiSystemInfo res = new SenseiSystemInfo ( ) ; MultiBoboBrowser browser = null ; try { browser = new MultiBoboBrowser ( BoboBrowser . createBrowsables ( readerList ) ) ; res . setNumDocs ( browser . numDocs ( ) ) ; return res ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; throw e ; } finally { if ( browser != null ) { try { browser . close ( ) ; } catch ( IOException ioe ) { logger . error ( ioe . getMessage ( ) , ioe ) ; } } } } @ Override public SenseiSystemInfo mergePartitionedResults ( SenseiRequest r , List < SenseiSystemInfo > resultList ) { SenseiSystemInfo result = _core . getSystemInfo ( ) ; result . setNumDocs ( <NUM_LIT:0> ) ; for ( SenseiSystemInfo res : resultList ) { result . setNumDocs ( result . getNumDocs ( ) + res . getNumDocs ( ) ) ; } return result ; } @ Override public SenseiSystemInfo getEmptyResultInstance ( Throwable error ) { return new SenseiSystemInfo ( ) ; } @ Override public Serializer < SenseiRequest , SenseiSystemInfo > getSerializer ( ) { return PROTO_SERIALIZER ; } } </s>
|
<s> package com . senseidb . svc . impl ; import java . util . Comparator ; import com . linkedin . norbert . javacompat . network . PartitionedLoadBalancerFactory ; import org . apache . commons . configuration . Configuration ; import org . apache . log4j . Logger ; import com . linkedin . norbert . javacompat . cluster . ClusterClient ; import com . linkedin . norbert . javacompat . cluster . ZooKeeperClusterClient ; import com . linkedin . norbert . javacompat . network . NetworkClientConfig ; import com . senseidb . cluster . client . SenseiNetworkClient ; import com . senseidb . search . node . SenseiBroker ; import com . senseidb . search . node . SenseiSysBroker ; import com . senseidb . search . node . broker . BrokerConfig ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . svc . api . SenseiException ; import com . senseidb . svc . api . SenseiService ; public class ClusteredSenseiServiceImpl implements SenseiService { private static final Logger logger = Logger . getLogger ( ClusteredSenseiServiceImpl . class ) ; private final NetworkClientConfig _networkClientConfig = new NetworkClientConfig ( ) ; private SenseiBroker _senseiBroker ; private SenseiSysBroker _senseiSysBroker ; private SenseiNetworkClient _networkClient = null ; private ClusterClient _clusterClient ; private final String _clusterName ; public ClusteredSenseiServiceImpl ( Configuration senseiConf , PartitionedLoadBalancerFactory < String > loadBalancerFactory , Comparator < String > versionComparator ) { BrokerConfig brokerConfig = new BrokerConfig ( senseiConf , loadBalancerFactory ) ; brokerConfig . init ( ) ; _clusterName = brokerConfig . getClusterName ( ) ; _clusterClient = brokerConfig . getClusterClient ( ) ; _networkClient = brokerConfig . getNetworkClient ( ) ; _senseiBroker = brokerConfig . buildSenseiBroker ( ) ; _senseiSysBroker = brokerConfig . buildSysSenseiBroker ( versionComparator ) ; } public void start ( ) { logger . info ( "<STR_LIT>" + _clusterName + "<STR_LIT>" ) ; _clusterClient . awaitConnectionUninterruptibly ( ) ; logger . info ( "<STR_LIT>" + _clusterName + "<STR_LIT>" ) ; } public SenseiResult doQuery ( SenseiRequest req ) throws SenseiException { return _senseiBroker . browse ( req ) ; } @ Override public SenseiSystemInfo getSystemInfo ( ) throws SenseiException { return _senseiSysBroker . browse ( new SenseiRequest ( ) ) ; } @ Override public void shutdown ( ) { try { if ( _senseiBroker != null ) { _senseiBroker . shutdown ( ) ; _senseiBroker = null ; } } finally { try { if ( _senseiSysBroker != null ) { _senseiSysBroker . shutdown ( ) ; _senseiSysBroker = null ; } } finally { try { if ( _networkClient != null ) { _networkClient . shutdown ( ) ; _networkClient = null ; } } finally { if ( _clusterClient != null ) { _clusterClient . shutdown ( ) ; _clusterClient = null ; } } } } } } </s>
|
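ClusteredSenseiServiceImpl wires the broker-side components (cluster client, network client, search and sys brokers) from a single Configuration via BrokerConfig. A minimal usage sketch of the call sequence, assuming the configuration, load-balancer factory, and version comparator are supplied by the surrounding deployment:

```java
import java.util.Comparator;

import org.apache.commons.configuration.Configuration;

import com.linkedin.norbert.javacompat.network.PartitionedLoadBalancerFactory;
import com.senseidb.search.req.SenseiRequest;
import com.senseidb.search.req.SenseiResult;
import com.senseidb.svc.api.SenseiException;
import com.senseidb.svc.impl.ClusteredSenseiServiceImpl;

public class ClusteredQueryExample {
  // senseiConf, balancerFactory and versionComparator are assumed to be built elsewhere;
  // this only illustrates start / query / shutdown.
  public static int countDocs(Configuration senseiConf,
                              PartitionedLoadBalancerFactory<String> balancerFactory,
                              Comparator<String> versionComparator) throws SenseiException {
    ClusteredSenseiServiceImpl svc =
        new ClusteredSenseiServiceImpl(senseiConf, balancerFactory, versionComparator);
    svc.start();                                  // waits for the ZooKeeper cluster connection
    try {
      SenseiResult res = svc.doQuery(new SenseiRequest());  // default (unconstrained) request
      return res.getTotalDocs();
    } finally {
      svc.shutdown();                             // tears down brokers, network client, cluster client
    }
  }
}
```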
<s> package com . senseidb . svc . impl ; import com . senseidb . metrics . MetricFactory ; import java . util . ArrayList ; import java . util . Collections ; import java . util . HashMap ; import java . util . HashSet ; import java . util . List ; import java . util . Map ; import java . util . Set ; import java . util . concurrent . Callable ; import java . util . concurrent . ExecutorService ; import java . util . concurrent . Executors ; import java . util . concurrent . Future ; import java . util . concurrent . TimeUnit ; import java . util . concurrent . TimeoutException ; import org . apache . log4j . Logger ; import org . apache . lucene . util . NamedThreadFactory ; import proj . zoie . api . IndexReaderFactory ; import proj . zoie . api . ZoieIndexReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . linkedin . norbert . network . Serializer ; import com . senseidb . metrics . MetricsConstants ; import com . senseidb . search . node . SenseiCore ; import com . senseidb . search . node . SenseiQueryBuilderFactory ; import com . senseidb . search . req . AbstractSenseiRequest ; import com . senseidb . search . req . AbstractSenseiResult ; import com . senseidb . search . req . ErrorType ; import com . senseidb . search . req . SenseiError ; import com . yammer . metrics . core . Meter ; import com . yammer . metrics . core . MetricName ; import com . yammer . metrics . core . Timer ; public abstract class AbstractSenseiCoreService < Req extends AbstractSenseiRequest , Res extends AbstractSenseiResult > { private final static Logger logger = Logger . getLogger ( AbstractSenseiCoreService . class ) ; private final Timer _getReaderTimer ; private final Timer _searchTimer ; private final Timer _mergeTimer ; private final Meter _searchCounter ; protected long _timeout = <NUM_LIT> ; protected final SenseiCore _core ; private final NamedThreadFactory threadFactory = new NamedThreadFactory ( "<STR_LIT>" ) ; private final ExecutorService _executorService = Executors . newCachedThreadPool ( threadFactory ) ; private final Map < Integer , Timer > partitionTimerMetricMap = new HashMap < Integer , Timer > ( ) ; public AbstractSenseiCoreService ( SenseiCore core ) { _core = core ; _getReaderTimer = registerTimer ( "<STR_LIT>" ) ; _searchTimer = registerTimer ( "<STR_LIT>" ) ; _mergeTimer = registerTimer ( "<STR_LIT>" ) ; _searchCounter = registerMeter ( "<STR_LIT>" , "<STR_LIT>" ) ; } private Timer buildTimer ( int partition ) { MetricName partitionSearchMetricName = new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" + partition , "<STR_LIT>" ) ; return MetricFactory . newTimer ( partitionSearchMetricName , TimeUnit . MILLISECONDS , TimeUnit . SECONDS ) ; } private Timer getTimer ( int partition ) { Timer timer = partitionTimerMetricMap . get ( partition ) ; if ( timer == null ) { partitionTimerMetricMap . put ( partition , buildTimer ( partition ) ) ; return getTimer ( partition ) ; } return timer ; } public final Res execute ( final Req senseiReq ) { _searchCounter . mark ( ) ; Set < Integer > partitions = senseiReq == null ? null : senseiReq . getPartitions ( ) ; if ( partitions == null ) { partitions = new HashSet < Integer > ( ) ; int [ ] containsPart = _core . getPartitions ( ) ; if ( containsPart != null ) { for ( int part : containsPart ) { partitions . add ( part ) ; } } } Res finalResult ; if ( partitions != null && partitions . size ( ) > <NUM_LIT:0> ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "<STR_LIT>" + partitions . 
toString ( ) ) ; } final ArrayList < Res > resultList = new ArrayList < Res > ( partitions . size ( ) ) ; Future < Res > [ ] futures = new Future [ partitions . size ( ) - <NUM_LIT:1> ] ; int i = <NUM_LIT:0> ; for ( final int partition : partitions ) { final long start = System . currentTimeMillis ( ) ; final IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > readerFactory = _core . getIndexReaderFactory ( partition ) ; if ( i < partitions . size ( ) - <NUM_LIT:1> ) { try { futures [ i ] = ( Future < Res > ) _executorService . submit ( new Callable < Res > ( ) { public Res call ( ) throws Exception { Timer timer = getTimer ( partition ) ; Res res = timer . time ( new Callable < Res > ( ) { @ Override public Res call ( ) throws Exception { return handleRequest ( senseiReq , readerFactory , _core . getQueryBuilderFactory ( ) ) ; } } ) ; long end = System . currentTimeMillis ( ) ; res . setTime ( end - start ) ; logger . info ( "<STR_LIT>" + partition + "<STR_LIT>" + res . getTime ( ) ) ; return res ; } } ) ; } catch ( Exception e ) { senseiReq . addError ( new SenseiError ( e . getMessage ( ) , ErrorType . BoboExecutionError ) ) ; logger . error ( e . getMessage ( ) , e ) ; } } else { try { Timer timer = getTimer ( partition ) ; Res res = timer . time ( new Callable < Res > ( ) { @ Override public Res call ( ) throws Exception { return handleRequest ( senseiReq , readerFactory , _core . getQueryBuilderFactory ( ) ) ; } } ) ; resultList . add ( res ) ; long end = System . currentTimeMillis ( ) ; res . setTime ( end - start ) ; logger . info ( "<STR_LIT>" + partition + "<STR_LIT>" + res . getTime ( ) ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; senseiReq . addError ( new SenseiError ( e . getMessage ( ) , ErrorType . BoboExecutionError ) ) ; resultList . add ( getEmptyResultInstance ( e ) ) ; } } ++ i ; } for ( i = <NUM_LIT:0> ; i < futures . length ; ++ i ) { try { Res res = futures [ i ] . get ( _timeout , TimeUnit . MILLISECONDS ) ; resultList . add ( res ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; if ( e instanceof TimeoutException ) { senseiReq . addError ( new SenseiError ( e . getMessage ( ) , ErrorType . ExecutionTimeout ) ) ; } else { senseiReq . addError ( new SenseiError ( e . getMessage ( ) , ErrorType . BoboExecutionError ) ) ; } resultList . add ( getEmptyResultInstance ( e ) ) ; } } try { finalResult = _mergeTimer . time ( new Callable < Res > ( ) { public Res call ( ) throws Exception { return mergePartitionedResults ( senseiReq , resultList ) ; } } ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; finalResult = getEmptyResultInstance ( null ) ; finalResult . addError ( new SenseiError ( e . getMessage ( ) , ErrorType . MergePartitionError ) ) ; } } else { if ( logger . isInfoEnabled ( ) ) { logger . info ( "<STR_LIT>" ) ; } finalResult = getEmptyResultInstance ( null ) ; finalResult . addError ( new SenseiError ( "<STR_LIT>" , ErrorType . PartitionCallError ) ) ; } if ( logger . isInfoEnabled ( ) ) { logger . info ( "<STR_LIT>" + String . valueOf ( partitions ) + "<STR_LIT>" + finalResult . getTime ( ) ) ; } return finalResult ; } private final Res handleRequest ( final Req senseiReq , final IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > readerFactory , final SenseiQueryBuilderFactory queryBuilderFactory ) throws Exception { List < ZoieIndexReader < BoboIndexReader > > readerList = null ; try { readerList = _getReaderTimer . 
time ( new Callable < List < ZoieIndexReader < BoboIndexReader > > > ( ) { public List < ZoieIndexReader < BoboIndexReader > > call ( ) throws Exception { if ( readerFactory == null ) return Collections . EMPTY_LIST ; return readerFactory . getIndexReaders ( ) ; } } ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "<STR_LIT>" + ( readerList == null ? <NUM_LIT:0> : readerList . size ( ) ) ) ; } final List < BoboIndexReader > boboReaders = ZoieIndexReader . extractDecoratedReaders ( readerList ) ; return _searchTimer . time ( new Callable < Res > ( ) { public Res call ( ) throws Exception { return handlePartitionedRequest ( senseiReq , boboReaders , queryBuilderFactory ) ; } } ) ; } finally { if ( readerFactory != null && readerList != null ) { readerFactory . returnIndexReaders ( readerList ) ; } } } protected final Timer registerTimer ( String name ) { return MetricFactory . newTimer ( new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , name , getMetricScope ( ) ) , TimeUnit . MILLISECONDS , TimeUnit . SECONDS ) ; } protected final Meter registerMeter ( String name , String eventType ) { return MetricFactory . newMeter ( new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , name , getMetricScope ( ) ) , eventType , TimeUnit . SECONDS ) ; } public abstract Res handlePartitionedRequest ( Req r , final List < BoboIndexReader > readerList , SenseiQueryBuilderFactory queryBuilderFactory ) throws Exception ; public abstract Res mergePartitionedResults ( Req r , List < Res > reqList ) ; public abstract Res getEmptyResultInstance ( Throwable error ) ; public abstract Serializer < Req , Res > getSerializer ( ) ; protected abstract String getMetricScope ( ) ; } </s>
|
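AbstractSenseiCoreService owns the scatter/gather mechanics: it fans a request out per partition on an executor, times each partition, and merges the partial results; subclasses supply only the per-partition handler, the merge step, an empty-result factory, a serializer, and a metric scope, as SysSenseiCoreServiceImpl above and CoreSenseiServiceImpl below do. A minimal hypothetical subclass illustrating that contract (it only counts documents):

```java
import java.util.List;

import com.browseengine.bobo.api.BoboIndexReader;
import com.linkedin.norbert.network.Serializer;
import com.senseidb.search.node.SenseiCore;
import com.senseidb.search.node.SenseiQueryBuilderFactory;
import com.senseidb.search.req.SenseiRequest;
import com.senseidb.search.req.SenseiResult;
import com.senseidb.svc.impl.AbstractSenseiCoreService;
import com.senseidb.svc.impl.CoreSenseiServiceImpl;

public class DocCountService extends AbstractSenseiCoreService<SenseiRequest, SenseiResult> {
  public DocCountService(SenseiCore core) { super(core); }

  @Override
  protected String getMetricScope() { return "doccount"; }   // hypothetical scope name

  @Override
  public SenseiResult handlePartitionedRequest(SenseiRequest req,
      List<BoboIndexReader> readerList, SenseiQueryBuilderFactory qbf) {
    // Per-partition work: sum the document counts of this partition's readers.
    SenseiResult res = new SenseiResult();
    int docs = 0;
    for (BoboIndexReader reader : readerList) docs += reader.numDocs();
    res.setTotalDocs(docs);
    return res;
  }

  @Override
  public SenseiResult mergePartitionedResults(SenseiRequest req, List<SenseiResult> partials) {
    // Gather step: combine the per-partition counts into one result.
    SenseiResult merged = new SenseiResult();
    int docs = 0;
    for (SenseiResult partial : partials) docs += partial.getTotalDocs();
    merged.setTotalDocs(docs);
    return merged;
  }

  @Override
  public SenseiResult getEmptyResultInstance(Throwable error) { return new SenseiResult(); }

  @Override
  public Serializer<SenseiRequest, SenseiResult> getSerializer() {
    return CoreSenseiServiceImpl.JAVA_SERIALIZER;  // reuse the serializer defined below
  }
}
```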
<s> package com . senseidb . svc . impl ; import java . io . IOException ; import java . util . ArrayList ; import java . util . List ; import java . util . concurrent . Callable ; import java . util . concurrent . atomic . AtomicInteger ; import com . sensei . search . req . protobuf . SenseiReqProtoSerializer ; import org . apache . log4j . Logger ; import org . apache . lucene . search . Query ; import proj . zoie . api . ZoieIndexReader ; import proj . zoie . api . ZoieIndexReader . SubReaderAccessor ; import proj . zoie . api . ZoieIndexReader . SubReaderInfo ; import com . browseengine . bobo . api . BoboBrowser ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . BrowseException ; import com . browseengine . bobo . api . BrowseHit ; import com . browseengine . bobo . api . BrowseRequest ; import com . browseengine . bobo . api . BrowseResult ; import com . browseengine . bobo . api . MultiBoboBrowser ; import com . linkedin . norbert . network . JavaSerializer ; import com . linkedin . norbert . network . Serializer ; import com . senseidb . indexing . SenseiIndexPruner ; import com . senseidb . indexing . SenseiIndexPruner . IndexReaderSelector ; import com . senseidb . search . node . ResultMerger ; import com . senseidb . search . node . SenseiCore ; import com . senseidb . search . node . SenseiQueryBuilderFactory ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . mapred . impl . SenseiMapFunctionWrapper ; import com . senseidb . util . RequestConverter ; import com . yammer . metrics . core . Timer ; import static com . senseidb . servlet . SenseiSearchServletParams . PARAM_RESULT_HIT_UID ; public class CoreSenseiServiceImpl extends AbstractSenseiCoreService < SenseiRequest , SenseiResult > { public static final Serializer < SenseiRequest , SenseiResult > JAVA_SERIALIZER = JavaSerializer . apply ( "<STR_LIT>" , SenseiRequest . class , SenseiResult . class ) ; public static final Serializer < SenseiRequest , SenseiResult > PROTO_SERIALIZER = new SenseiReqProtoSerializer ( ) ; private static final Logger logger = Logger . getLogger ( CoreSenseiServiceImpl . class ) ; private final Timer _timerMetric ; public CoreSenseiServiceImpl ( SenseiCore core ) { super ( core ) ; _timerMetric = registerTimer ( "<STR_LIT>" ) ; } @ Override protected String getMetricScope ( ) { return "<STR_LIT>" ; } private SenseiResult browse ( MultiBoboBrowser browser , BrowseRequest req , SubReaderAccessor < BoboIndexReader > subReaderAccessor ) throws BrowseException { final SenseiResult result = new SenseiResult ( ) ; long start = System . currentTimeMillis ( ) ; int offset = req . getOffset ( ) ; int count = req . getCount ( ) ; if ( offset < <NUM_LIT:0> || count < <NUM_LIT:0> ) { throw new IllegalArgumentException ( "<STR_LIT>" + offset + "<STR_LIT:/>" + count ) ; } BrowseResult res = browser . browse ( req ) ; BrowseHit [ ] hits = res . getHits ( ) ; if ( req . getMapReduceWrapper ( ) != null ) { result . setMapReduceResult ( req . getMapReduceWrapper ( ) . getResult ( ) ) ; } SenseiHit [ ] senseiHits = new SenseiHit [ hits . length ] ; for ( int i = <NUM_LIT:0> ; i < hits . length ; i ++ ) { BrowseHit hit = hits [ i ] ; SenseiHit senseiHit = new SenseiHit ( ) ; int docid = hit . getDocid ( ) ; SubReaderInfo < BoboIndexReader > readerInfo = subReaderAccessor . 
getSubReaderInfo ( docid ) ; Long uid = ( Long ) hit . getRawField ( PARAM_RESULT_HIT_UID ) ; if ( uid == null ) uid = ( ( ZoieIndexReader < BoboIndexReader > ) readerInfo . subreader . getInnerReader ( ) ) . getUID ( readerInfo . subdocid ) ; senseiHit . setUID ( uid ) ; senseiHit . setDocid ( docid ) ; senseiHit . setScore ( hit . getScore ( ) ) ; senseiHit . setComparable ( hit . getComparable ( ) ) ; senseiHit . setFieldValues ( hit . getFieldValues ( ) ) ; senseiHit . setRawFieldValues ( hit . getRawFieldValues ( ) ) ; senseiHit . setStoredFields ( hit . getStoredFields ( ) ) ; senseiHit . setExplanation ( hit . getExplanation ( ) ) ; senseiHit . setGroupValue ( hit . getGroupValue ( ) ) ; senseiHit . setRawGroupValue ( hit . getRawGroupValue ( ) ) ; senseiHit . setGroupHitsCount ( hit . getGroupHitsCount ( ) ) ; senseiHit . setTermFreqMap ( hit . getTermFreqMap ( ) ) ; senseiHits [ i ] = senseiHit ; } result . setHits ( senseiHits ) ; result . setNumHits ( res . getNumHits ( ) ) ; result . setNumGroups ( res . getNumGroups ( ) ) ; result . setGroupAccessibles ( res . getGroupAccessibles ( ) ) ; result . setSortCollector ( res . getSortCollector ( ) ) ; result . setTotalDocs ( browser . numDocs ( ) ) ; result . addAll ( res . getFacetMap ( ) ) ; long end = System . currentTimeMillis ( ) ; result . setTime ( end - start ) ; result . setTid ( req . getTid ( ) ) ; Query parsedQ = req . getQuery ( ) ; if ( parsedQ != null ) { result . setParsedQuery ( parsedQ . toString ( ) ) ; } else { result . setParsedQuery ( "<STR_LIT>" ) ; } return result ; } @ Override public SenseiResult handlePartitionedRequest ( final SenseiRequest request , List < BoboIndexReader > readerList , SenseiQueryBuilderFactory queryBuilderFactory ) throws Exception { MultiBoboBrowser browser = null ; try { final List < BoboIndexReader > segmentReaders = BoboBrowser . gatherSubReaders ( readerList ) ; if ( segmentReaders != null && segmentReaders . size ( ) > <NUM_LIT:0> ) { final AtomicInteger skipDocs = new AtomicInteger ( <NUM_LIT:0> ) ; List < BoboIndexReader > validatedSegmentReaders = _timerMetric . time ( new Callable < List < BoboIndexReader > > ( ) { @ Override public List < BoboIndexReader > call ( ) throws Exception { SenseiIndexPruner pruner = _core . getIndexPruner ( ) ; IndexReaderSelector readerSelector = pruner . getReaderSelector ( request ) ; List < BoboIndexReader > validatedReaders = new ArrayList < BoboIndexReader > ( segmentReaders . size ( ) ) ; for ( BoboIndexReader segmentReader : segmentReaders ) { if ( readerSelector . isSelected ( segmentReader ) ) { validatedReaders . add ( segmentReader ) ; } else { skipDocs . addAndGet ( segmentReader . numDocs ( ) ) ; } } return validatedReaders ; } } ) ; browser = new MultiBoboBrowser ( BoboBrowser . createBrowsables ( validatedSegmentReaders ) ) ; BrowseRequest breq = RequestConverter . convert ( request , queryBuilderFactory ) ; if ( request . getMapReduceFunction ( ) != null ) { SenseiMapFunctionWrapper mapWrapper = new SenseiMapFunctionWrapper ( request . getMapReduceFunction ( ) , _core . getSystemInfo ( ) . getFacetInfos ( ) ) ; breq . setMapReduceWrapper ( mapWrapper ) ; } SubReaderAccessor < BoboIndexReader > subReaderAccessor = ZoieIndexReader . getSubReaderAccessor ( validatedSegmentReaders ) ; SenseiResult res = browse ( browser , breq , subReaderAccessor ) ; int totalDocs = res . getTotalDocs ( ) + skipDocs . get ( ) ; res . setTotalDocs ( totalDocs ) ; return res ; } else { return new SenseiResult ( ) ; } } catch ( Exception e ) { logger . 
error ( e . getMessage ( ) , e ) ; throw e ; } finally { if ( browser != null ) { try { browser . close ( ) ; } catch ( IOException ioe ) { logger . error ( ioe . getMessage ( ) , ioe ) ; } } } } @ Override public SenseiResult mergePartitionedResults ( SenseiRequest r , List < SenseiResult > resultList ) { return ResultMerger . merge ( r , resultList , true ) ; } @ Override public SenseiResult getEmptyResultInstance ( Throwable error ) { return new SenseiResult ( ) ; } @ Override public Serializer < SenseiRequest , SenseiResult > getSerializer ( ) { return PROTO_SERIALIZER ; } } </s>
|
<s> package com . senseidb . svc . impl ; import java . io . File ; import org . apache . lucene . analysis . standard . StandardAnalyzer ; import org . apache . lucene . queryParser . QueryParser ; import org . apache . lucene . util . Version ; import org . json . JSONObject ; import proj . zoie . api . indexing . AbstractZoieIndexableInterpreter ; import proj . zoie . api . indexing . ZoieIndexable ; import proj . zoie . impl . indexing . SimpleReaderCache ; import proj . zoie . impl . indexing . ZoieConfig ; import com . senseidb . search . node . SenseiCore ; import com . senseidb . search . node . impl . DefaultJsonQueryBuilderFactory ; import com . senseidb . search . node . impl . DemoZoieSystemFactory ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . svc . api . SenseiException ; import com . senseidb . svc . api . SenseiService ; public class LocalQueryOnlySenseiServiceImpl implements SenseiService { private CoreSenseiServiceImpl _coreService ; private final SenseiCore _core ; public LocalQueryOnlySenseiServiceImpl ( File idxDir ) throws Exception { ZoieConfig zoieConfig = new ZoieConfig ( ) ; zoieConfig . setReadercachefactory ( SimpleReaderCache . FACTORY ) ; DemoZoieSystemFactory zoieFactory = new DemoZoieSystemFactory ( idxDir , new AbstractZoieIndexableInterpreter < JSONObject > ( ) { @ Override public ZoieIndexable convertAndInterpret ( JSONObject src ) { return null ; } } , zoieConfig ) ; QueryParser queryParser = new QueryParser ( Version . LUCENE_35 , "<STR_LIT>" , new StandardAnalyzer ( Version . LUCENE_35 ) ) ; DefaultJsonQueryBuilderFactory queryBuilderFactory = new DefaultJsonQueryBuilderFactory ( queryParser ) ; _core = new SenseiCore ( <NUM_LIT:1> , new int [ ] { <NUM_LIT:0> } , zoieFactory , null , queryBuilderFactory ) ; _coreService = new CoreSenseiServiceImpl ( _core ) ; _core . start ( ) ; } @ Override public SenseiResult doQuery ( SenseiRequest req ) throws SenseiException { return _coreService . execute ( req ) ; } @ Override public SenseiSystemInfo getSystemInfo ( ) throws SenseiException { return null ; } @ Override public void shutdown ( ) { _core . shutdown ( ) ; } public static void main ( String [ ] args ) throws Exception { File idxDir = new File ( "<STR_LIT>" ) ; SenseiService svc = new LocalQueryOnlySenseiServiceImpl ( idxDir ) ; SenseiResult res = svc . doQuery ( new SenseiRequest ( ) ) ; System . out . println ( res . getTotalDocs ( ) ) ; svc . shutdown ( ) ; } } </s>
|
<s> package com . senseidb . svc . impl ; import com . senseidb . metrics . MetricFactory ; import java . util . concurrent . Callable ; import java . util . concurrent . TimeUnit ; import org . apache . log4j . Logger ; import com . linkedin . norbert . javacompat . network . RequestHandler ; import com . senseidb . metrics . MetricsConstants ; import com . senseidb . search . req . AbstractSenseiRequest ; import com . senseidb . search . req . AbstractSenseiResult ; import com . yammer . metrics . core . MetricName ; import com . yammer . metrics . core . Timer ; public final class SenseiCoreServiceMessageHandler < REQUEST extends AbstractSenseiRequest , RESULT extends AbstractSenseiResult > implements RequestHandler < REQUEST , RESULT > { private static final Logger logger = Logger . getLogger ( SenseiCoreServiceMessageHandler . class ) ; private final AbstractSenseiCoreService < REQUEST , RESULT > _svc ; private final Timer _totalSearchTimer ; public SenseiCoreServiceMessageHandler ( AbstractSenseiCoreService < REQUEST , RESULT > svc ) { _svc = svc ; MetricName metricName = new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ; _totalSearchTimer = MetricFactory . newTimer ( metricName , TimeUnit . MILLISECONDS , TimeUnit . SECONDS ) ; } @ Override public RESULT handleRequest ( final REQUEST request ) throws Exception { return _totalSearchTimer . time ( new Callable < RESULT > ( ) { @ Override public RESULT call ( ) throws Exception { return _svc . execute ( request ) ; } } ) ; } } </s>
|
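SenseiCoreServiceMessageHandler adapts an AbstractSenseiCoreService to norbert's RequestHandler interface and wraps each call in a timer. A minimal sketch, assuming an already-built and started SenseiCore, of wiring it to the core search service and invoking it directly (a running node registers the handler with the norbert NetworkServer instead of calling it by hand):

```java
import com.senseidb.search.node.SenseiCore;
import com.senseidb.search.req.SenseiRequest;
import com.senseidb.search.req.SenseiResult;
import com.senseidb.svc.impl.CoreSenseiServiceImpl;
import com.senseidb.svc.impl.SenseiCoreServiceMessageHandler;

public class HandlerExample {
  // 'core' is assumed to be constructed and started elsewhere.
  public static SenseiResult query(SenseiCore core) throws Exception {
    CoreSenseiServiceImpl searchSvc = new CoreSenseiServiceImpl(core);
    SenseiCoreServiceMessageHandler<SenseiRequest, SenseiResult> handler =
        new SenseiCoreServiceMessageHandler<SenseiRequest, SenseiResult>(searchSvc);
    // handleRequest times the call and delegates to searchSvc.execute(request).
    return handler.handleRequest(new SenseiRequest());
  }
}
```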
<s> package com . senseidb . svc . impl ; import java . io . BufferedReader ; import java . io . IOException ; import java . io . InputStream ; import java . io . InputStreamReader ; import java . io . UnsupportedEncodingException ; import java . net . MalformedURLException ; import java . net . URI ; import java . net . URISyntaxException ; import java . net . URL ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . Collection ; import java . util . Collections ; import java . util . HashMap ; import java . util . HashSet ; import java . util . Iterator ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . Properties ; import java . util . Set ; import java . util . zip . GZIPInputStream ; import javax . net . ssl . SSLHandshakeException ; import org . apache . commons . io . IOUtils ; import org . apache . commons . lang . ArrayUtils ; import org . apache . commons . lang . StringUtils ; import org . apache . http . Header ; import org . apache . http . HeaderElement ; import org . apache . http . HeaderElementIterator ; import org . apache . http . HttpEntity ; import org . apache . http . HttpEntityEnclosingRequest ; import org . apache . http . HttpException ; import org . apache . http . HttpRequest ; import org . apache . http . HttpRequestInterceptor ; import org . apache . http . HttpResponse ; import org . apache . http . HttpResponseInterceptor ; import org . apache . http . NameValuePair ; import org . apache . http . NoHttpResponseException ; import org . apache . http . client . HttpRequestRetryHandler ; import org . apache . http . client . methods . HttpGet ; import org . apache . http . client . utils . URIUtils ; import org . apache . http . client . utils . URLEncodedUtils ; import org . apache . http . conn . ClientConnectionManager ; import org . apache . http . conn . scheme . PlainSocketFactory ; import org . apache . http . conn . scheme . Scheme ; import org . apache . http . conn . scheme . SchemeRegistry ; import org . apache . http . entity . HttpEntityWrapper ; import org . apache . http . impl . client . DefaultConnectionKeepAliveStrategy ; import org . apache . http . impl . client . DefaultHttpClient ; import org . apache . http . impl . conn . tsccm . ThreadSafeClientConnManager ; import org . apache . http . message . BasicHeaderElementIterator ; import org . apache . http . message . BasicNameValuePair ; import org . apache . http . params . BasicHttpParams ; import org . apache . http . params . HttpParams ; import org . apache . http . protocol . ExecutionContext ; import org . apache . http . protocol . HTTP ; import org . apache . http . protocol . HttpContext ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Document ; import org . apache . lucene . document . Field ; import org . apache . lucene . search . Explanation ; import org . apache . lucene . search . SortField ; import org . json . JSONArray ; import org . json . JSONException ; import org . json . JSONObject ; import com . browseengine . bobo . api . BrowseFacet ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . api . FacetAccessible ; import com . browseengine . bobo . api . FacetSpec ; import com . browseengine . bobo . api . MappedFacetAccessible ; import com . browseengine . bobo . facets . FacetHandlerInitializerParam ; import com . senseidb . search . req . SenseiHit ; import com . senseidb . search . req . 
SenseiQuery ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; import com . senseidb . servlet . SenseiSearchServletParams ; import com . senseidb . svc . api . SenseiException ; import com . senseidb . svc . api . SenseiService ; public class HttpRestSenseiServiceImpl implements SenseiService { private static final Logger log = Logger . getLogger ( HttpRestSenseiServiceImpl . class ) ; String _scheme ; String _host ; int _port ; String _path ; int _defaultKeepAliveDurationMS ; int _maxRetries ; DefaultHttpClient _httpclient ; public HttpRestSenseiServiceImpl ( String scheme , String host , int port , String path ) { this ( scheme , host , port , path , <NUM_LIT> , <NUM_LIT:5> ) ; } public HttpRestSenseiServiceImpl ( String scheme , String host , int port , String path , int defaultKeepAliveDurationMS , final int maxRetries ) { this ( scheme , host , port , path , defaultKeepAliveDurationMS , maxRetries , null ) ; } public HttpRestSenseiServiceImpl ( String scheme , String host , int port , String path , int defaultKeepAliveDurationMS , final int maxRetries , HttpRequestRetryHandler retryHandler ) { _scheme = scheme ; _host = host ; _port = port ; _path = path ; _defaultKeepAliveDurationMS = defaultKeepAliveDurationMS ; _maxRetries = maxRetries ; _httpclient = createHttpClient ( retryHandler ) ; } public HttpRestSenseiServiceImpl ( String urlString ) throws MalformedURLException { URL url = new URL ( urlString ) ; _scheme = url . getProtocol ( ) ; _host = url . getHost ( ) ; _port = url . getPort ( ) ; _path = url . getPath ( ) ; _defaultKeepAliveDurationMS = <NUM_LIT> ; _maxRetries = <NUM_LIT:5> ; _httpclient = createHttpClient ( null ) ; } private DefaultHttpClient createHttpClient ( HttpRequestRetryHandler retryHandler ) { HttpParams params = new BasicHttpParams ( ) ; SchemeRegistry registry = new SchemeRegistry ( ) ; registry . register ( new Scheme ( _scheme , _port , PlainSocketFactory . getSocketFactory ( ) ) ) ; ClientConnectionManager cm = new ThreadSafeClientConnManager ( registry ) ; DefaultHttpClient client = new DefaultHttpClient ( cm , params ) ; if ( retryHandler == null ) { retryHandler = new HttpRequestRetryHandler ( ) { public boolean retryRequest ( IOException exception , int executionCount , HttpContext context ) { if ( executionCount >= _maxRetries ) { return false ; } if ( exception instanceof NoHttpResponseException ) { return true ; } if ( exception instanceof SSLHandshakeException ) { return false ; } HttpRequest request = ( HttpRequest ) context . getAttribute ( ExecutionContext . HTTP_REQUEST ) ; boolean idempotent = ! ( request instanceof HttpEntityEnclosingRequest ) ; if ( idempotent ) { return true ; } return false ; } } ; } client . setHttpRequestRetryHandler ( retryHandler ) ; client . addRequestInterceptor ( new HttpRequestInterceptor ( ) { public void process ( final HttpRequest request , final HttpContext context ) throws HttpException , IOException { if ( ! request . containsHeader ( "<STR_LIT>" ) ) { request . addHeader ( "<STR_LIT>" , "<STR_LIT>" ) ; } } } ) ; client . addResponseInterceptor ( new HttpResponseInterceptor ( ) { public void process ( final HttpResponse response , final HttpContext context ) throws HttpException , IOException { HttpEntity entity = response . getEntity ( ) ; Header ceheader = entity . getContentEncoding ( ) ; if ( ceheader != null ) { HeaderElement [ ] codecs = ceheader . 
getElements ( ) ; for ( int i = <NUM_LIT:0> ; i < codecs . length ; i ++ ) { if ( codecs [ i ] . getName ( ) . equalsIgnoreCase ( "<STR_LIT>" ) ) { response . setEntity ( new GzipDecompressingEntity ( response . getEntity ( ) ) ) ; return ; } } } } } ) ; client . setKeepAliveStrategy ( new DefaultConnectionKeepAliveStrategy ( ) { @ Override public long getKeepAliveDuration ( HttpResponse response , HttpContext context ) { HeaderElementIterator it = new BasicHeaderElementIterator ( response . headerIterator ( HTTP . CONN_KEEP_ALIVE ) ) ; while ( it . hasNext ( ) ) { HeaderElement he = it . nextElement ( ) ; String param = he . getName ( ) ; String value = he . getValue ( ) ; if ( ( value != null ) && param . equalsIgnoreCase ( "<STR_LIT>" ) ) { try { return Long . parseLong ( value ) * <NUM_LIT:1000> ; } catch ( NumberFormatException ignore ) { } } } long keepAlive = super . getKeepAliveDuration ( response , context ) ; if ( keepAlive == - <NUM_LIT:1> ) { keepAlive = _defaultKeepAliveDurationMS ; } return keepAlive ; } } ) ; return client ; } private static class GzipDecompressingEntity extends HttpEntityWrapper { public GzipDecompressingEntity ( final HttpEntity entity ) { super ( entity ) ; } @ Override public InputStream getContent ( ) throws IOException , IllegalStateException { InputStream wrappedin = wrappedEntity . getContent ( ) ; return new GZIPInputStream ( wrappedin ) ; } @ Override public long getContentLength ( ) { return - <NUM_LIT:1> ; } } @ Override public SenseiResult doQuery ( SenseiRequest req ) throws SenseiException { SenseiResult result ; InputStream is = null ; try { List < NameValuePair > queryParams = convertRequestToQueryParams ( req ) ; URI requestURI = buildRequestURI ( queryParams ) ; is = makeRequest ( requestURI ) ; JSONObject jsonObj = convertStreamToJSONObject ( is ) ; result = buildSenseiResult ( jsonObj ) ; } catch ( URISyntaxException e ) { throw new SenseiException ( e ) ; } catch ( IOException e ) { throw new SenseiException ( e ) ; } catch ( JSONException e ) { throw new SenseiException ( e ) ; } finally { if ( is != null ) { IOUtils . closeQuietly ( is ) ; } } return result ; } @ Override public SenseiSystemInfo getSystemInfo ( ) throws SenseiException { SenseiSystemInfo result ; InputStream is = null ; try { URI requestURI = buildSysInfoRequestURI ( ) ; is = makeRequest ( requestURI ) ; JSONObject jsonObj = convertStreamToJSONObject ( is ) ; result = buildSysInfo ( jsonObj ) ; } catch ( URISyntaxException e ) { throw new SenseiException ( e ) ; } catch ( IOException e ) { throw new SenseiException ( e ) ; } catch ( JSONException e ) { throw new SenseiException ( e ) ; } finally { if ( is != null ) { IOUtils . closeQuietly ( is ) ; } } return result ; } public static List < NameValuePair > convertRequestToQueryParams ( SenseiRequest req ) throws SenseiException , UnsupportedEncodingException { List < NameValuePair > qparams = new ArrayList < NameValuePair > ( ) ; convertScalarParams ( qparams , req ) ; convertSortFieldParams ( qparams , req . getSort ( ) ) ; convertSenseiQuery ( qparams , req . getQuery ( ) ) ; convertSelectionNames ( qparams , req ) ; convertFacetSpecs ( qparams , req . getFacetSpecs ( ) ) ; convertFacetInitParams ( qparams , req . getFacetHandlerInitParamMap ( ) ) ; convertPartitionParams ( qparams , req . 
getPartitions ( ) ) ; return qparams ; } public static void convertSortFieldParams ( List < NameValuePair > qparams , SortField [ ] sortFields ) { List < String > fieldList = new ArrayList < String > ( ) ; for ( SortField field : sortFields ) { fieldList . add ( convertSortField ( field ) ) ; } String paramList = join ( fieldList , "<STR_LIT:U+002C>" ) ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_SORT , paramList ) ) ; } public static void convertPartitionParams ( List < NameValuePair > qparams , Set < Integer > partitions ) { if ( partitions == null || partitions . size ( ) == <NUM_LIT:0> ) return ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_PARTITIONS , join ( partitions , "<STR_LIT:U+002C>" ) ) ) ; } public static void convertScalarParams ( List < NameValuePair > qparams , SenseiRequest req ) { qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_FETCH_STORED , Boolean . toString ( req . isFetchStoredFields ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_FETCH_STORED_VALUE , Boolean . toString ( req . isFetchStoredValue ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_SHOW_EXPLAIN , Boolean . toString ( req . isShowExplanation ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_OFFSET , Integer . toString ( req . getOffset ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_COUNT , Integer . toString ( req . getCount ( ) ) ) ) ; Set < String > tvFetch = req . getTermVectorsToFetch ( ) ; if ( tvFetch != null && tvFetch . size ( ) > <NUM_LIT:0> ) { String fetchString = join ( tvFetch , "<STR_LIT:U+002C>" ) ; qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_FETCH_TERMVECTOR , fetchString ) ) ; } if ( req . getRouteParam ( ) != null ) { qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_ROUTE_PARAM , req . getRouteParam ( ) ) ) ; } if ( req . getGroupBy ( ) != null ) { qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_GROUP_BY , StringUtils . join ( req . getGroupBy ( ) , '<CHAR_LIT:U+002C>' ) ) ) ; } if ( req . getMaxPerGroup ( ) > <NUM_LIT:0> ) { qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_MAX_PER_GROUP , Integer . toString ( req . getMaxPerGroup ( ) ) ) ) ; } } public static void convertFacetInitParams ( List < NameValuePair > qparams , Map < String , FacetHandlerInitializerParam > initParams ) throws UnsupportedEncodingException { final String format = "<STR_LIT>" ; for ( Entry < String , FacetHandlerInitializerParam > entry : initParams . entrySet ( ) ) { String facetName = entry . getKey ( ) ; FacetHandlerInitializerParam param = entry . getValue ( ) ; for ( String paramName : param . getBooleanParamNames ( ) ) { qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ) , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_BOOL ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_VAL ) , join ( param . getBooleanParam ( paramName ) , "<STR_LIT:U+002C>" ) ) ) ; } for ( String paramName : param . getByteArrayParamNames ( ) ) { qparams . add ( new BasicNameValuePair ( String . 
format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ) , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_BYTEARRAY ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_VAL ) , new String ( param . getByteArrayParam ( paramName ) , "<STR_LIT:UTF-8>" ) ) ) ; } for ( String paramName : param . getDoubleParamNames ( ) ) { qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ) , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_DOUBLE ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_VAL ) , join ( param . getDoubleParam ( paramName ) , "<STR_LIT:U+002C>" ) ) ) ; } for ( String paramName : param . getIntParamNames ( ) ) { qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ) , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_INT ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_VAL ) , join ( param . getIntParam ( paramName ) , "<STR_LIT:U+002C>" ) ) ) ; } for ( String paramName : param . getLongParamNames ( ) ) { qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ) , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_LONG ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_VAL ) , join ( param . getLongParam ( paramName ) , "<STR_LIT:U+002C>" ) ) ) ; } for ( String paramName : param . getStringParamNames ( ) ) { qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE ) , SenseiSearchServletParams . PARAM_DYNAMIC_TYPE_STRING ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_DYNAMIC_INIT , facetName , paramName , SenseiSearchServletParams . PARAM_DYNAMIC_VAL ) , join ( param . getStringParam ( paramName ) , "<STR_LIT:U+002C>" ) ) ) ; } } } public static void convertFacetSpecs ( List < NameValuePair > qparams , Map < String , FacetSpec > facetSpecs ) { final String format = "<STR_LIT>" ; for ( Entry < String , FacetSpec > entry : facetSpecs . entrySet ( ) ) { String facetName = entry . getKey ( ) ; FacetSpec spec = entry . getValue ( ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_FACET , facetName , SenseiSearchServletParams . PARAM_FACET_MAX ) , Integer . toString ( spec . getMaxCount ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_FACET , facetName , SenseiSearchServletParams . PARAM_FACET_ORDER ) , convertFacetSortSpec ( spec . getOrderBy ( ) ) ) ) ; qparams . 
add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_FACET , facetName , SenseiSearchServletParams . PARAM_FACET_EXPAND ) , Boolean . toString ( spec . isExpandSelection ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_FACET , facetName , SenseiSearchServletParams . PARAM_FACET_MINHIT ) , Integer . toString ( spec . getMinHitCount ( ) ) ) ) ; } } public static String convertFacetSortSpec ( FacetSpec . FacetSortSpec spec ) { switch ( spec ) { case OrderValueAsc : return SenseiSearchServletParams . PARAM_FACET_ORDER_VAL ; case OrderHitsDesc : return SenseiSearchServletParams . PARAM_FACET_ORDER_HITS ; case OrderByCustom : default : throw new IllegalArgumentException ( "<STR_LIT>" + spec ) ; } } public static String convertSortField ( SortField field ) { String result ; if ( field . equals ( SenseiRequest . FIELD_SCORE ) ) { result = SenseiSearchServletParams . PARAM_SORT_SCORE ; } else if ( field . equals ( SenseiRequest . FIELD_SCORE_REVERSE ) ) { result = SenseiSearchServletParams . PARAM_SORT_SCORE_REVERSE ; } else if ( field . equals ( SenseiRequest . FIELD_DOC ) ) { result = SenseiSearchServletParams . PARAM_SORT_DOC ; } else if ( field . equals ( SenseiRequest . FIELD_DOC_REVERSE ) ) { result = SenseiSearchServletParams . PARAM_SORT_DOC_REVERSE ; } else { result = String . format ( "<STR_LIT>" , field . getField ( ) , field . getReverse ( ) ? SenseiSearchServletParams . PARAM_SORT_DESC : SenseiSearchServletParams . PARAM_SORT_ASC ) ; } return result ; } public static void convertSelectionNames ( List < NameValuePair > qparams , SenseiRequest req ) { Set < String > selectionNames = req . getSelectionNames ( ) ; final String format = "<STR_LIT>" ; for ( String selectionName : selectionNames ) { BrowseSelection selection = req . getSelection ( selectionName ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_SELECT , selectionName , SenseiSearchServletParams . PARAM_SELECT_NOT ) , join ( selection . getNotValues ( ) , "<STR_LIT:U+002C>" ) ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_SELECT , selectionName , SenseiSearchServletParams . PARAM_SELECT_OP ) , convertSelectionOperation ( selection . getSelectionOperation ( ) ) ) ) ; qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_SELECT , selectionName , SenseiSearchServletParams . PARAM_SELECT_VAL ) , join ( selection . getValues ( ) , "<STR_LIT:U+002C>" ) ) ) ; if ( selection . getSelectionProperties ( ) . size ( ) > <NUM_LIT:0> ) { qparams . add ( new BasicNameValuePair ( String . format ( format , SenseiSearchServletParams . PARAM_SELECT , selectionName , SenseiSearchServletParams . PARAM_SELECT_PROP ) , convertSelectionProperties ( selection . getSelectionProperties ( ) ) ) ) ; } } } private static String convertSelectionOperation ( BrowseSelection . ValueOperation operation ) { switch ( operation ) { case ValueOperationOr : return SenseiSearchServletParams . PARAM_SELECT_OP_OR ; case ValueOperationAnd : return SenseiSearchServletParams . PARAM_SELECT_OP_AND ; default : throw new IllegalArgumentException ( "<STR_LIT>" ) ; } } private static String convertSelectionProperties ( Properties props ) { List < String > propList = new ArrayList < String > ( props . size ( ) ) ; final String format = "<STR_LIT>" ; Set < Entry < Object , Object > > entries = props . 
entrySet ( ) ; for ( Entry < Object , Object > entry : entries ) { propList . add ( String . format ( format , entry . getKey ( ) , entry . getValue ( ) ) ) ; } return join ( propList , "<STR_LIT:U+002C>" ) ; } public static void convertSenseiQuery ( List < NameValuePair > qparams , SenseiQuery query ) throws SenseiException { if ( query == null ) return ; try { JSONObject jsonObj = new JSONObject ( query . toString ( ) ) ; Iterator iter = jsonObj . keys ( ) ; final String format = "<STR_LIT>" ; while ( iter . hasNext ( ) ) { String key = ( String ) iter . next ( ) ; if ( key . equals ( "<STR_LIT:query>" ) ) { qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_QUERY , jsonObj . get ( key ) . toString ( ) ) ) ; continue ; } qparams . add ( new BasicNameValuePair ( SenseiSearchServletParams . PARAM_QUERY_PARAM , String . format ( format , key , jsonObj . get ( key ) ) ) ) ; } } catch ( JSONException e ) { throw new SenseiException ( e ) ; } } public URI buildSysInfoRequestURI ( ) throws URISyntaxException { URI uri = URIUtils . createURI ( _scheme , _host , _port , _path + "<STR_LIT>" , null , null ) ; return uri ; } public URI buildRequestURI ( List < NameValuePair > qparams ) throws URISyntaxException { URI uri = URIUtils . createURI ( _scheme , _host , _port , _path , URLEncodedUtils . format ( qparams , "<STR_LIT:UTF-8>" ) , null ) ; return uri ; } public InputStream makeRequest ( URI uri ) throws IOException { if ( log . isDebugEnabled ( ) ) { log . debug ( "<STR_LIT>" + uri ) ; } HttpGet httpget = new HttpGet ( uri ) ; HttpResponse response = _httpclient . execute ( httpget ) ; HttpEntity entity = response . getEntity ( ) ; if ( entity == null ) { throw new IOException ( "<STR_LIT>" ) ; } return entity . getContent ( ) ; } public static String join ( String [ ] arr , String delimiter ) { return join ( Arrays . asList ( arr ) , delimiter ) ; } public static String join ( boolean [ ] arr , String delimiter ) { return join ( Arrays . asList ( ArrayUtils . toObject ( arr ) ) , delimiter ) ; } public static String join ( byte [ ] arr , String delimiter ) { return join ( Arrays . asList ( ArrayUtils . toObject ( arr ) ) , delimiter ) ; } public static String join ( int [ ] arr , String delimiter ) { return join ( Arrays . asList ( ArrayUtils . toObject ( arr ) ) , delimiter ) ; } public static String join ( long [ ] arr , String delimiter ) { return join ( Arrays . asList ( ArrayUtils . toObject ( arr ) ) , delimiter ) ; } public static String join ( double [ ] arr , String delimiter ) { return join ( Arrays . asList ( ArrayUtils . toObject ( arr ) ) , delimiter ) ; } public static String join ( Collection < ? > s , String delimiter ) { StringBuilder builder = new StringBuilder ( ) ; Iterator iter = s . iterator ( ) ; while ( iter . hasNext ( ) ) { builder . append ( iter . next ( ) . toString ( ) ) ; if ( ! iter . hasNext ( ) ) { break ; } builder . append ( delimiter ) ; } return builder . toString ( ) ; } public static String convertStreamToString ( InputStream is ) throws IOException { BufferedReader reader = new BufferedReader ( new InputStreamReader ( is ) ) ; StringBuilder sb = new StringBuilder ( ) ; char [ ] buf = new char [ <NUM_LIT> ] ; try { while ( true ) { int count = reader . read ( buf ) ; if ( count < <NUM_LIT:0> ) break ; sb . append ( buf , <NUM_LIT:0> , count ) ; } } finally { is . close ( ) ; } String json = sb . toString ( ) ; if ( log . isDebugEnabled ( ) ) { log . 
debug ( "<STR_LIT>" + json ) ; } return json ; } public static JSONObject convertStreamToJSONObject ( InputStream is ) throws IOException , JSONException { String rawJSON = convertStreamToString ( is ) ; return new JSONObject ( rawJSON ) ; } public static SenseiResult buildSenseiResult ( JSONObject jsonObj ) throws JSONException { SenseiResult result = new SenseiResult ( ) ; result . setTid ( Long . parseLong ( jsonObj . getString ( SenseiSearchServletParams . PARAM_RESULT_TID ) ) ) ; result . setTotalDocs ( jsonObj . getInt ( SenseiSearchServletParams . PARAM_RESULT_TOTALDOCS ) ) ; result . setParsedQuery ( jsonObj . getString ( SenseiSearchServletParams . PARAM_RESULT_PARSEDQUERY ) ) ; result . setNumHits ( jsonObj . getInt ( SenseiSearchServletParams . PARAM_RESULT_NUMHITS ) ) ; if ( jsonObj . has ( SenseiSearchServletParams . PARAM_RESULT_NUMGROUPS ) ) { result . setNumGroups ( jsonObj . getInt ( SenseiSearchServletParams . PARAM_RESULT_NUMGROUPS ) ) ; } result . setTime ( Long . parseLong ( jsonObj . getString ( SenseiSearchServletParams . PARAM_RESULT_TIME ) ) ) ; result . addAll ( convertFacetMap ( jsonObj . getJSONObject ( SenseiSearchServletParams . PARAM_RESULT_FACETS ) ) ) ; result . setHits ( convertHitsArray ( jsonObj . getJSONArray ( SenseiSearchServletParams . PARAM_RESULT_HITS ) ) ) ; return result ; } public static SenseiSystemInfo buildSysInfo ( JSONObject jsonObj ) throws JSONException { SenseiSystemInfo result = new SenseiSystemInfo ( ) ; result . setNumDocs ( jsonObj . getInt ( SenseiSearchServletParams . PARAM_SYSINFO_NUMDOCS ) ) ; result . setLastModified ( Long . parseLong ( jsonObj . getString ( SenseiSearchServletParams . PARAM_SYSINFO_LASTMODIFIED ) ) ) ; result . setVersion ( jsonObj . getString ( SenseiSearchServletParams . PARAM_SYSINFO_VERSION ) ) ; result . setFacetInfos ( convertFacetInfos ( jsonObj . getJSONArray ( SenseiSearchServletParams . PARAM_SYSINFO_FACETS ) ) ) ; result . setClusterInfo ( convertClusterInfo ( jsonObj . getJSONArray ( SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO ) ) ) ; return result ; } private static Set < SenseiSystemInfo . SenseiFacetInfo > convertFacetInfos ( JSONArray array ) throws JSONException { if ( array == null || array . length ( ) == <NUM_LIT:0> ) return Collections . EMPTY_SET ; Set < SenseiSystemInfo . SenseiFacetInfo > infos = new HashSet < SenseiSystemInfo . SenseiFacetInfo > ( array . length ( ) ) ; for ( int i = <NUM_LIT:0> ; i < array . length ( ) ; ++ i ) { JSONObject info = array . getJSONObject ( i ) ; SenseiSystemInfo . SenseiFacetInfo facetInfo = new SenseiSystemInfo . SenseiFacetInfo ( info . getString ( SenseiSearchServletParams . PARAM_SYSINFO_FACETS_NAME ) ) ; facetInfo . setRunTime ( info . optBoolean ( SenseiSearchServletParams . PARAM_SYSINFO_FACETS_RUNTIME ) ) ; facetInfo . setProps ( convertJsonToStringMap ( info . optJSONObject ( SenseiSearchServletParams . PARAM_SYSINFO_FACETS_PROPS ) ) ) ; infos . add ( facetInfo ) ; } return infos ; } private static Map < String , String > convertJsonToStringMap ( JSONObject jsonObject ) throws JSONException { if ( jsonObject == null ) return Collections . EMPTY_MAP ; @ SuppressWarnings ( "<STR_LIT:unchecked>" ) Iterator < String > nameItr = jsonObject . keys ( ) ; Map < String , String > outMap = new HashMap < String , String > ( ) ; while ( nameItr . hasNext ( ) ) { String name = nameItr . next ( ) ; outMap . put ( name , jsonObject . getString ( name ) ) ; } return outMap ; } private static List < SenseiSystemInfo . 
SenseiNodeInfo > convertClusterInfo ( JSONArray array ) throws JSONException { if ( array == null || array . length ( ) == <NUM_LIT:0> ) return Collections . EMPTY_LIST ; List < SenseiSystemInfo . SenseiNodeInfo > clusterInfo = new ArrayList ( array . length ( ) ) ; for ( int i = <NUM_LIT:0> ; i < array . length ( ) ; ++ i ) { JSONObject node = array . getJSONObject ( i ) ; JSONArray partitionsArray = node . getJSONArray ( SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_PARTITIONS ) ; int [ ] partitions = null ; if ( partitionsArray != null ) { partitions = new int [ partitionsArray . length ( ) ] ; for ( int j = <NUM_LIT:0> ; j < partitionsArray . length ( ) ; ++ j ) { partitions [ j ] = partitionsArray . getInt ( j ) ; } } clusterInfo . add ( new SenseiSystemInfo . SenseiNodeInfo ( node . getInt ( SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_ID ) , partitions , node . getString ( SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_NODELINK ) , node . getString ( SenseiSearchServletParams . PARAM_SYSINFO_CLUSTERINFO_ADMINLINK ) ) ) ; } return clusterInfo ; } private static Map < String , FacetAccessible > convertFacetMap ( JSONObject jsonObject ) throws JSONException { Map < String , FacetAccessible > map = new HashMap < String , FacetAccessible > ( ) ; Iterator iter = jsonObject . sortedKeys ( ) ; while ( iter . hasNext ( ) ) { String fieldName = ( String ) iter . next ( ) ; JSONArray facetArr = ( JSONArray ) jsonObject . get ( fieldName ) ; int length = facetArr . length ( ) ; BrowseFacet [ ] facets = new BrowseFacet [ length ] ; for ( int i = <NUM_LIT:0> ; i < length ; i ++ ) { JSONObject facetObj = ( JSONObject ) facetArr . get ( i ) ; BrowseFacet bf = new BrowseFacet ( ) ; bf . setFacetValueHitCount ( facetObj . getInt ( SenseiSearchServletParams . PARAM_RESULT_FACET_INFO_COUNT ) ) ; bf . setValue ( facetObj . getString ( SenseiSearchServletParams . PARAM_RESULT_FACET_INFO_VALUE ) ) ; facets [ i ] = bf ; } FacetAccessible fa = new MappedFacetAccessible ( facets ) ; map . put ( fieldName , fa ) ; } return map ; } private static SenseiHit [ ] convertHitsArray ( JSONArray hitsArray ) throws JSONException { int hitsArrayLength = hitsArray . length ( ) ; SenseiHit [ ] result = new SenseiHit [ hitsArrayLength ] ; for ( int i = <NUM_LIT:0> ; i < hitsArrayLength ; i ++ ) { JSONObject hitObj = ( JSONObject ) hitsArray . get ( i ) ; SenseiHit hit = new SenseiHit ( ) ; Iterator keys = hitObj . keys ( ) ; Map < String , String [ ] > fieldMap = new HashMap < String , String [ ] > ( ) ; while ( keys . hasNext ( ) ) { String key = ( String ) keys . next ( ) ; if ( SenseiSearchServletParams . PARAM_RESULT_HIT_UID . equals ( key ) ) { hit . setUID ( Long . parseLong ( hitObj . getString ( SenseiSearchServletParams . PARAM_RESULT_HIT_UID ) ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_DOCID . equals ( key ) ) { hit . setDocid ( hitObj . getInt ( SenseiSearchServletParams . PARAM_RESULT_HIT_DOCID ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_SCORE . equals ( key ) ) { hit . setScore ( ( float ) hitObj . getDouble ( SenseiSearchServletParams . PARAM_RESULT_HIT_SCORE ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_SRC_DATA . equals ( key ) ) { hit . setSrcData ( hitObj . getString ( SenseiSearchServletParams . PARAM_RESULT_HIT_SRC_DATA ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_STORED_FIELDS . equals ( key ) ) { hit . setStoredFields ( convertStoredFields ( hitObj . optJSONArray ( SenseiSearchServletParams . 
PARAM_RESULT_HIT_STORED_FIELDS ) ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPFIELD . equals ( key ) ) { hit . setGroupValue ( hitObj . getString ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPFIELD ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPVALUE . equals ( key ) ) { hit . setGroupValue ( hitObj . getString ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPVALUE ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPHITSCOUNT . equals ( key ) ) { hit . setGroupHitsCount ( hitObj . getInt ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPHITSCOUNT ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_EXPLANATION . equals ( key ) ) { hit . setExplanation ( convertToExplanation ( hitObj . optJSONObject ( SenseiSearchServletParams . PARAM_RESULT_HIT_EXPLANATION ) ) ) ; } else if ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPHITS . equals ( key ) ) { hit . setGroupHits ( convertHitsArray ( hitObj . getJSONArray ( SenseiSearchServletParams . PARAM_RESULT_HIT_GROUPHITS ) ) ) ; } else { JSONArray array = hitObj . optJSONArray ( key ) ; if ( array != null ) { String [ ] arr = new String [ array . length ( ) ] ; for ( int k = <NUM_LIT:0> ; k < arr . length ; ++ k ) { arr [ k ] = array . getString ( k ) ; } fieldMap . put ( key , arr ) ; } } } hit . setFieldValues ( fieldMap ) ; result [ i ] = hit ; } return result ; } public static Document convertStoredFields ( JSONArray jsonArray ) throws JSONException { int length = jsonArray . length ( ) ; Document doc = new Document ( ) ; for ( int i = <NUM_LIT:0> ; i < length ; i ++ ) { JSONObject jsonObject = ( JSONObject ) jsonArray . get ( i ) ; String name = jsonObject . getString ( SenseiSearchServletParams . PARAM_RESULT_HIT_STORED_FIELDS_NAME ) ; String value = jsonObject . getString ( SenseiSearchServletParams . PARAM_RESULT_HIT_STORED_FIELDS_VALUE ) ; doc . add ( new org . apache . lucene . document . Field ( name , value , Field . Store . YES , Field . Index . ANALYZED ) ) ; } return doc ; } public static Explanation convertToExplanation ( JSONObject jsonObj ) throws JSONException { if ( jsonObj == null ) return null ; Explanation explanation = new Explanation ( ) ; float value = ( float ) jsonObj . optDouble ( SenseiSearchServletParams . PARAM_RESULT_HITS_EXPL_VALUE ) ; String description = jsonObj . optString ( SenseiSearchServletParams . PARAM_RESULT_HITS_EXPL_DESC ) ; explanation . setDescription ( description ) ; explanation . setValue ( value ) ; if ( jsonObj . has ( SenseiSearchServletParams . PARAM_RESULT_HITS_EXPL_DETAILS ) ) { JSONArray detailsArr = jsonObj . getJSONArray ( SenseiSearchServletParams . PARAM_RESULT_HITS_EXPL_DETAILS ) ; int detailsCnt = detailsArr . length ( ) ; for ( int i = <NUM_LIT:0> ; i < detailsCnt ; i ++ ) { JSONObject detailObj = ( JSONObject ) detailsArr . get ( i ) ; Explanation detailExpl = convertToExplanation ( detailObj ) ; explanation . addDetail ( detailExpl ) ; } } return explanation ; } @ Override public void shutdown ( ) { if ( _httpclient == null ) return ; _httpclient . getConnectionManager ( ) . shutdown ( ) ; _httpclient = null ; } } </s>
|
<s> package com . senseidb . svc . api ; public class SenseiException extends Exception { private static final long serialVersionUID = <NUM_LIT:1L> ; public SenseiException ( ) { } public SenseiException ( String message ) { super ( message ) ; } public SenseiException ( Throwable cause ) { super ( cause ) ; } public SenseiException ( String message , Throwable cause ) { super ( message , cause ) ; } } </s>
|
<s> package com . senseidb . svc . api ; import com . senseidb . search . req . SenseiRequest ; import com . senseidb . search . req . SenseiResult ; import com . senseidb . search . req . SenseiSystemInfo ; public interface SenseiService { SenseiResult doQuery ( SenseiRequest req ) throws SenseiException ; SenseiSystemInfo getSystemInfo ( ) throws SenseiException ; void shutdown ( ) ; } </s>
|
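Illustrative only (not part of the original sources): a minimal sketch of how a caller might drive the SenseiService interface above. The concrete implementation and the query-construction details are assumptions, since SenseiRequest's setters are not visible in this dump; the sketch only uses the three methods the interface declares.

import com.senseidb.search.req.SenseiRequest;
import com.senseidb.search.req.SenseiResult;
import com.senseidb.svc.api.SenseiException;
import com.senseidb.svc.api.SenseiService;

public class SenseiServiceUsageSketch {
  // 'service' is assumed to be any available SenseiService implementation.
  public static void runOnce(SenseiService service, SenseiRequest req) throws SenseiException {
    try {
      SenseiResult result = service.doQuery(req);   // issue the search
      System.out.println(result);                   // inspect the result (getters omitted here)
    } finally {
      service.shutdown();                           // release underlying resources
    }
  }
}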
<s> package com . senseidb . metrics ; import com . yammer . metrics . core . Counter ; import com . yammer . metrics . core . Histogram ; import com . yammer . metrics . core . Meter ; import com . yammer . metrics . core . MetricName ; import com . yammer . metrics . core . MetricsRegistry ; import com . yammer . metrics . core . Timer ; import com . yammer . metrics . reporting . JmxReporter ; import java . util . concurrent . TimeUnit ; import java . util . concurrent . atomic . AtomicReference ; public final class MetricFactory { private static final AtomicReference < MetricFactory > FACTORY = new AtomicReference < MetricFactory > ( ) ; private final MetricsRegistry _registry ; private final JmxReporter _reporter ; public static void start ( ) { MetricFactory oldFactory = FACTORY . getAndSet ( new MetricFactory ( ) . startAll ( ) ) ; if ( oldFactory != null ) { oldFactory . stopAll ( ) ; } } public static void stop ( ) { MetricFactory oldFactory = FACTORY . getAndSet ( null ) ; if ( oldFactory != null ) { oldFactory . stopAll ( ) ; } } public static Timer newTimer ( MetricName metricName , TimeUnit durationUnit , TimeUnit rateUnit ) { return getRegistry ( ) . newTimer ( metricName , durationUnit , rateUnit ) ; } public static Meter newMeter ( MetricName metricName , String eventType , TimeUnit unit ) { return getRegistry ( ) . newMeter ( metricName , eventType , unit ) ; } public static Counter newCounter ( MetricName metricName ) { return getRegistry ( ) . newCounter ( metricName ) ; } public static Histogram newHistogram ( MetricName metricName , boolean biased ) { return getRegistry ( ) . newHistogram ( metricName , biased ) ; } private static MetricsRegistry getRegistry ( ) { MetricFactory factory = FACTORY . get ( ) ; while ( factory == null ) { start ( ) ; factory = FACTORY . get ( ) ; } return factory . _registry ; } private MetricFactory ( ) { _registry = new MetricsRegistry ( ) ; _reporter = new JmxReporter ( _registry ) ; } private MetricFactory startAll ( ) { _reporter . start ( ) ; return this ; } private void stopAll ( ) { _registry . shutdown ( ) ; _reporter . shutdown ( ) ; } } </s>
|
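A short usage sketch (not from the original sources) for the MetricFactory above: start the shared registry and JMX reporter, create a counter, and stop everything on shutdown. The group/type/name strings passed to MetricName are hypothetical; the real ones are elided as string literals in this dump.

import com.senseidb.metrics.MetricFactory;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.MetricName;

public class MetricFactoryUsageSketch {
  public static void main(String[] args) {
    MetricFactory.start();                                                   // creates the registry and JMX reporter
    MetricName name = new MetricName("com.senseidb", "example", "requests"); // group/type/name are illustrative
    Counter requests = MetricFactory.newCounter(name);
    requests.inc();                                                          // metric is now visible through JMX
    MetricFactory.stop();                                                    // shuts the registry and reporter down
  }
}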
<s> package com . senseidb . metrics ; public interface MetricsConstants { public static final String Domain = "<STR_LIT>" ; } </s>
|
<s> package com . senseidb . indexing ; import java . io . ByteArrayInputStream ; import java . io . ByteArrayOutputStream ; import java . nio . charset . Charset ; import java . text . SimpleDateFormat ; import java . util . ArrayList ; import java . util . Date ; import java . util . HashMap ; import java . util . HashSet ; import java . util . LinkedList ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . Set ; import java . util . StringTokenizer ; import java . util . zip . GZIPInputStream ; import java . util . zip . GZIPOutputStream ; import org . apache . commons . configuration . ConfigurationException ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Field ; import org . apache . lucene . document . Field . Index ; import org . apache . lucene . document . Field . Store ; import org . apache . lucene . index . FieldInfo . IndexOptions ; import org . json . JSONException ; import org . json . JSONObject ; import proj . zoie . api . indexing . AbstractZoieIndexable ; import proj . zoie . api . indexing . AbstractZoieIndexableInterpreter ; import proj . zoie . api . indexing . ZoieIndexable ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . conf . SenseiSchema . FieldDefinition ; import com . senseidb . search . plugin . PluggableSearchEngineManager ; public class DefaultJsonSchemaInterpreter extends AbstractZoieIndexableInterpreter < JSONObject > { private static final Logger logger = Logger . getLogger ( DefaultJsonSchemaInterpreter . class ) ; private final SenseiSchema _schema ; private final Set < Entry < String , FieldDefinition > > entries ; private final String _uidField ; private final String _delField ; private final String _skipField ; private final boolean _compressSrcData ; private final Map < String , JsonValExtractor > _dateExtractorMap ; private JsonFilter _jsonFilter = null ; private static Charset UTF8 = Charset . forName ( "<STR_LIT:UTF-8>" ) ; private CustomIndexingPipeline _customIndexingPipeline = null ; private Set < String > nonLuceneFields = new HashSet < String > ( ) ; public DefaultJsonSchemaInterpreter ( SenseiSchema schema ) throws ConfigurationException { this ( schema , null ) ; } public DefaultJsonSchemaInterpreter ( SenseiSchema schema , PluggableSearchEngineManager pluggableSearchEngineManager ) throws ConfigurationException { _schema = schema ; if ( pluggableSearchEngineManager != null ) { nonLuceneFields . addAll ( pluggableSearchEngineManager . getFieldNames ( ) ) ; } entries = _schema . getFieldDefMap ( ) . entrySet ( ) ; _uidField = _schema . getUidField ( ) ; _delField = _schema . getDeleteField ( ) ; _skipField = _schema . getSkipField ( ) ; _compressSrcData = _schema . isCompressSrcData ( ) ; _dateExtractorMap = new HashMap < String , JsonValExtractor > ( ) ; for ( Entry < String , FieldDefinition > entry : entries ) { final FieldDefinition def = entry . getValue ( ) ; if ( Date . class . equals ( def . type ) ) { _dateExtractorMap . put ( entry . getKey ( ) , new JsonValExtractor ( ) { @ Override public Object extract ( String val ) { try { return ( ( SimpleDateFormat ) ( def . formatter ) ) . parse ( val ) ; } catch ( Exception e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } } ) ; } } } private static interface JsonValExtractor { Object extract ( String val ) ; } private final static Map < Class , JsonValExtractor > ExtractorMap = new HashMap < Class , JsonValExtractor > ( ) ; static { ExtractorMap . 
put ( int . class , new JsonValExtractor ( ) { @ Override public Object extract ( String val ) { if ( val == null || val . length ( ) == <NUM_LIT:0> ) { return <NUM_LIT:0> ; } else { int num = Integer . parseInt ( val ) ; return num ; } } } ) ; ExtractorMap . put ( double . class , new JsonValExtractor ( ) { @ Override public Object extract ( String val ) { if ( val == null || val . length ( ) == <NUM_LIT:0> ) { return <NUM_LIT:0.0> ; } else { double num = Double . parseDouble ( val ) ; return num ; } } } ) ; ExtractorMap . put ( long . class , new JsonValExtractor ( ) { @ Override public Object extract ( String val ) { if ( val == null || val . length ( ) == <NUM_LIT:0> ) { return <NUM_LIT:0.0> ; } else { long num = Long . parseLong ( val ) ; return num ; } } } ) ; ExtractorMap . put ( String . class , new JsonValExtractor ( ) { @ Override public Object extract ( String val ) { return val ; } } ) ; } public static byte [ ] compress ( byte [ ] src ) throws Exception { byte [ ] data = null ; if ( src != null ) { ByteArrayOutputStream bout = new ByteArrayOutputStream ( ) ; GZIPOutputStream gzipStream = new GZIPOutputStream ( bout ) ; gzipStream . write ( src ) ; gzipStream . flush ( ) ; gzipStream . close ( ) ; bout . flush ( ) ; data = bout . toByteArray ( ) ; } return data ; } public static byte [ ] decompress ( byte [ ] src ) throws Exception { byte [ ] data = null ; if ( src != null ) { ByteArrayOutputStream bout = new ByteArrayOutputStream ( ) ; byte [ ] buf = new byte [ <NUM_LIT> ] ; ByteArrayInputStream bin = new ByteArrayInputStream ( src ) ; GZIPInputStream gzipStream = new GZIPInputStream ( bin ) ; int len ; while ( ( len = gzipStream . read ( buf ) ) > <NUM_LIT:0> ) { bout . write ( buf , <NUM_LIT:0> , len ) ; } bout . flush ( ) ; data = bout . toByteArray ( ) ; } return data ; } public void setCustomIndexingPipeline ( CustomIndexingPipeline customIndexingPipeline ) { _customIndexingPipeline = customIndexingPipeline ; } public CustomIndexingPipeline getCustomIndexingPipeline ( ) { return _customIndexingPipeline ; } public void setJsonFilter ( JsonFilter jsonFilter ) { _jsonFilter = jsonFilter ; } public static List < String > tokenize ( String val , String delim ) { List < String > result = new ArrayList < String > ( ) ; if ( val == null || val . length ( ) == <NUM_LIT:0> ) return result ; if ( delim == null || delim . length ( ) == <NUM_LIT:0> ) result . add ( val ) ; else if ( delim . length ( ) == <NUM_LIT:1> ) { char de = delim . charAt ( <NUM_LIT:0> ) ; StringBuilder sb = new StringBuilder ( ) ; boolean escape = false ; for ( char c : val . toCharArray ( ) ) { if ( escape ) { if ( c == '<STR_LIT:\\>' || c == de ) sb . append ( c ) ; else sb . append ( '<STR_LIT:\\>' ) . append ( c ) ; escape = false ; } else { if ( c == '<STR_LIT:\\>' ) { escape = true ; continue ; } else if ( c == de ) { if ( sb . length ( ) > <NUM_LIT:0> ) { result . add ( sb . toString ( ) ) ; sb . setLength ( <NUM_LIT:0> ) ; } } else sb . append ( c ) ; } } if ( escape ) sb . append ( '<STR_LIT:\\>' ) ; if ( sb . length ( ) > <NUM_LIT:0> ) result . add ( sb . toString ( ) ) ; } else { StringTokenizer strtok = new StringTokenizer ( val , delim ) ; while ( strtok . hasMoreTokens ( ) ) { result . add ( strtok . nextToken ( ) ) ; } } return result ; } @ Override public ZoieIndexable convertAndInterpret ( JSONObject obj ) { final JSONObject src = obj ; final JSONObject filtered ; if ( _jsonFilter != null ) { try { filtered = _jsonFilter . 
filter ( src ) ; } catch ( Exception e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } else { filtered = src ; } return new AbstractZoieIndexable ( ) { @ Override public IndexingReq [ ] buildIndexingReqs ( ) { org . apache . lucene . document . Document luceneDoc = new org . apache . lucene . document . Document ( ) ; for ( Entry < String , FieldDefinition > entry : entries ) { String name = entry . getKey ( ) ; try { final FieldDefinition fldDef = entry . getValue ( ) ; if ( nonLuceneFields . contains ( entry . getKey ( ) ) ) { continue ; } if ( fldDef . isMeta ) { JsonValExtractor extractor = ExtractorMap . get ( fldDef . type ) ; if ( extractor == null ) { if ( Date . class . equals ( fldDef . type ) ) { extractor = _dateExtractorMap . get ( name ) ; } else { extractor = ExtractorMap . get ( String . class ) ; } } if ( filtered . has ( fldDef . fromField ) ) { List < Object > vals = new LinkedList < Object > ( ) ; if ( filtered . isNull ( fldDef . fromField ) ) continue ; if ( fldDef . isMulti ) { String val = filtered . optString ( fldDef . fromField ) ; for ( String token : tokenize ( val , fldDef . delim ) ) { Object obj = extractor . extract ( token ) ; if ( obj != null ) { vals . add ( obj ) ; } } } else { String val = filtered . optString ( fldDef . fromField ) ; if ( val == null ) continue ; Object obj = extractor . extract ( filtered . optString ( fldDef . fromField ) ) ; if ( obj != null ) { vals . add ( obj ) ; } } for ( Object val : vals ) { if ( val == null ) continue ; String strVal = null ; if ( fldDef . formatter != null ) { strVal = fldDef . formatter . format ( val ) ; } else { strVal = String . valueOf ( val ) ; } Field metaField = new Field ( name , strVal , Store . NO , Index . NOT_ANALYZED_NO_NORMS ) ; metaField . setOmitNorms ( true ) ; metaField . setIndexOptions ( IndexOptions . DOCS_ONLY ) ; luceneDoc . add ( metaField ) ; } } } else { Field textField = new Field ( name , filtered . optString ( fldDef . fromField ) , fldDef . textIndexSpec . store , fldDef . textIndexSpec . index , fldDef . textIndexSpec . tv ) ; luceneDoc . add ( textField ) ; } } catch ( Exception e ) { logger . error ( "<STR_LIT>" + name , e ) ; throw new RuntimeException ( e ) ; } } if ( _customIndexingPipeline != null ) { _customIndexingPipeline . applyCustomization ( luceneDoc , _schema , filtered ) ; } return new IndexingReq [ ] { new IndexingReq ( luceneDoc ) } ; } @ Override public long getUID ( ) { try { return Long . parseLong ( filtered . getString ( _uidField ) ) ; } catch ( JSONException e ) { throw new IllegalStateException ( e . getMessage ( ) , e ) ; } } @ Override public boolean isDeleted ( ) { try { String type = filtered . optString ( SenseiSchema . EVENT_TYPE_FIELD , null ) ; if ( type == null ) return filtered . optBoolean ( _delField ) ; else return SenseiSchema . EVENT_TYPE_DELETE . equalsIgnoreCase ( type ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return false ; } } @ Override public boolean isSkip ( ) { try { String type = filtered . optString ( SenseiSchema . EVENT_TYPE_FIELD , null ) ; if ( type == null ) return filtered . optBoolean ( _skipField ) ; else return SenseiSchema . EVENT_TYPE_SKIP . equalsIgnoreCase ( type ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return false ; } } @ Override public byte [ ] getStoreValue ( ) { byte [ ] data = null ; if ( src != null ) { Object type = src . remove ( SenseiSchema . EVENT_TYPE_FIELD ) ; try { if ( _compressSrcData ) data = compress ( src . 
toString ( ) . getBytes ( "<STR_LIT:UTF-8>" ) ) ; else data = src . toString ( ) . getBytes ( "<STR_LIT:UTF-8>" ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } if ( type != null ) { try { src . put ( SenseiSchema . EVENT_TYPE_FIELD , type ) ; } catch ( Exception e ) { logger . error ( "<STR_LIT>" , e ) ; } } } return data ; } @ Override public boolean isStorable ( ) { return true ; } } ; } } </s>
|
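The escaping rules in tokenize(...) and the gzip round trip in compress(...)/decompress(...) above are easy to misread, so here is a small illustrative driver (not part of the original sources) exercising both static helpers. The expected outputs in the comments are inferred from the code above.

import java.util.List;
import com.senseidb.indexing.DefaultJsonSchemaInterpreter;

public class JsonSchemaHelperSketch {
  public static void main(String[] args) throws Exception {
    // Single-character delimiter path: '\' escapes the delimiter, empty tokens are dropped.
    List<String> tokens = DefaultJsonSchemaInterpreter.tokenize("red,green\\,blue,,", ",");
    System.out.println(tokens);   // expected per the code above: [red, green,blue]

    // Multi-character delimiter path falls back to StringTokenizer semantics.
    System.out.println(DefaultJsonSchemaInterpreter.tokenize("a;b|c", ";|"));  // expected: [a, b, c]

    // compress/decompress are a plain gzip round trip over the source bytes.
    byte[] packed = DefaultJsonSchemaInterpreter.compress("{\"id\":1}".getBytes("UTF-8"));
    byte[] unpacked = DefaultJsonSchemaInterpreter.decompress(packed);
    System.out.println(new String(unpacked, "UTF-8"));   // expected: {"id":1}
  }
}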
<s> package com . senseidb . indexing ; import org . json . JSONObject ; public abstract class JsonFilter extends DataSourceFilter < JSONObject > { } </s>
|
<s> package com . senseidb . indexing ; import java . lang . annotation . ElementType ; import java . lang . annotation . Retention ; import java . lang . annotation . RetentionPolicy ; import java . lang . annotation . Target ; @ Target ( ElementType . METHOD ) @ Retention ( RetentionPolicy . RUNTIME ) public @ interface SkipChecker { } </s>
|
<s> package com . senseidb . indexing ; public interface DataSourceFilterable < D > { void setFilter ( DataSourceFilter < D > filter ) ; } </s>
|
<s> package com . senseidb . indexing ; import java . io . IOException ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . filter . EmptyFilter ; import com . browseengine . bobo . facets . filter . RandomAccessFilter ; import com . senseidb . search . req . SenseiRequest ; public interface SenseiIndexPruner { IndexReaderSelector getReaderSelector ( SenseiRequest req ) ; public interface IndexReaderSelector { boolean isSelected ( BoboIndexReader reader ) throws IOException ; } public static class DefaultSenseiIndexPruner implements SenseiIndexPruner { @ Override public IndexReaderSelector getReaderSelector ( SenseiRequest req ) { return new IndexReaderSelector ( ) { @ Override public boolean isSelected ( BoboIndexReader reader ) throws IOException { return true ; } } ; } } public static class BoboSelectionSenseiIndexPruner implements SenseiIndexPruner { @ Override public IndexReaderSelector getReaderSelector ( final SenseiRequest req ) { return new IndexReaderSelector ( ) { @ Override public boolean isSelected ( BoboIndexReader reader ) throws IOException { BrowseSelection [ ] selections = req . getSelections ( ) ; boolean valid = true ; if ( selections != null ) { for ( BrowseSelection sel : selections ) { String name = sel . getFieldName ( ) ; FacetHandler < ? > handler = reader . getFacetHandler ( name ) ; if ( handler != null ) { RandomAccessFilter filter = handler . buildFilter ( sel ) ; if ( EmptyFilter . getInstance ( ) == filter ) { valid = false ; break ; } } } } return valid ; } } ; } } } </s>
|
<s> package com . senseidb . indexing ; import java . lang . reflect . Field ; import java . lang . reflect . Method ; import java . text . DecimalFormat ; import java . text . DecimalFormatSymbols ; import java . text . Format ; import java . text . SimpleDateFormat ; import java . util . Collection ; import java . util . Date ; import java . util . HashMap ; import java . util . Locale ; import java . util . Map ; import java . util . Set ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Field . Index ; import org . apache . lucene . document . Field . Store ; import org . apache . lucene . document . Field . TermVector ; import proj . zoie . api . indexing . AbstractZoieIndexableInterpreter ; import proj . zoie . api . indexing . ZoieIndexable ; import com . browseengine . bobo . facets . data . PredefinedTermListFactory ; import com . browseengine . bobo . facets . data . TermListFactory ; public class DefaultSenseiInterpreter < V > extends AbstractZoieIndexableInterpreter < V > { private static Logger logger = Logger . getLogger ( DefaultSenseiInterpreter . class ) ; public static final Map < MetaType , String > DEFAULT_FORMAT_STRING_MAP = new HashMap < MetaType , String > ( ) ; public static final Map < Class , MetaType > CLASS_METATYPE_MAP = new HashMap < Class , MetaType > ( ) ; public static final Map < String , Index > INDEX_VAL_MAP = new HashMap < String , Index > ( ) ; public static final Map < String , Store > STORE_VAL_MAP = new HashMap < String , Store > ( ) ; public static final Map < String , TermVector > TV_VAL_MAP = new HashMap < String , TermVector > ( ) ; static { DEFAULT_FORMAT_STRING_MAP . put ( MetaType . Integer , "<STR_LIT>" ) ; DEFAULT_FORMAT_STRING_MAP . put ( MetaType . Short , "<STR_LIT>" ) ; DEFAULT_FORMAT_STRING_MAP . put ( MetaType . Long , "<STR_LIT>" ) ; DEFAULT_FORMAT_STRING_MAP . put ( MetaType . Date , "<STR_LIT>" ) ; DEFAULT_FORMAT_STRING_MAP . put ( MetaType . Float , "<STR_LIT>" ) ; DEFAULT_FORMAT_STRING_MAP . put ( MetaType . Double , "<STR_LIT>" ) ; CLASS_METATYPE_MAP . put ( String . class , MetaType . String ) ; CLASS_METATYPE_MAP . put ( int . class , MetaType . Integer ) ; CLASS_METATYPE_MAP . put ( Integer . class , MetaType . Integer ) ; CLASS_METATYPE_MAP . put ( short . class , MetaType . Short ) ; CLASS_METATYPE_MAP . put ( Short . class , MetaType . Short ) ; CLASS_METATYPE_MAP . put ( long . class , MetaType . Long ) ; CLASS_METATYPE_MAP . put ( Long . class , MetaType . Long ) ; CLASS_METATYPE_MAP . put ( float . class , MetaType . Float ) ; CLASS_METATYPE_MAP . put ( Float . class , MetaType . Float ) ; CLASS_METATYPE_MAP . put ( double . class , MetaType . Double ) ; CLASS_METATYPE_MAP . put ( Double . class , MetaType . Double ) ; CLASS_METATYPE_MAP . put ( char . class , MetaType . Char ) ; CLASS_METATYPE_MAP . put ( Character . class , MetaType . Char ) ; CLASS_METATYPE_MAP . put ( boolean . class , MetaType . Boolean ) ; CLASS_METATYPE_MAP . put ( Boolean . class , MetaType . Boolean ) ; CLASS_METATYPE_MAP . put ( Date . class , MetaType . Date ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . NO ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . ANALYZED ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . ANALYZED ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . NOT_ANALYZED ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . NOT_ANALYZED ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . NOT_ANALYZED_NO_NORMS ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . NOT_ANALYZED_NO_NORMS ) ; INDEX_VAL_MAP . 
put ( "<STR_LIT>" , Index . NOT_ANALYZED_NO_NORMS ) ; INDEX_VAL_MAP . put ( "<STR_LIT>" , Index . ANALYZED_NO_NORMS ) ; STORE_VAL_MAP . put ( "<STR_LIT>" , Store . NO ) ; STORE_VAL_MAP . put ( "<STR_LIT>" , Store . YES ) ; TV_VAL_MAP . put ( "<STR_LIT>" , TermVector . NO ) ; TV_VAL_MAP . put ( "<STR_LIT>" , TermVector . YES ) ; TV_VAL_MAP . put ( "<STR_LIT>" , TermVector . WITH_POSITIONS ) ; TV_VAL_MAP . put ( "<STR_LIT>" , TermVector . WITH_OFFSETS ) ; TV_VAL_MAP . put ( "<STR_LIT>" , TermVector . WITH_POSITIONS_OFFSETS ) ; } public static < T > TermListFactory < T > getTermListFactory ( Class < T > cls ) { MetaType metaType = CLASS_METATYPE_MAP . get ( cls ) ; if ( metaType == null ) { throw new IllegalArgumentException ( "<STR_LIT>" + cls . getName ( ) ) ; } return new PredefinedTermListFactory < T > ( cls , DEFAULT_FORMAT_STRING_MAP . get ( metaType ) ) ; } public static class IndexSpec { public Store store ; public Index index ; public TermVector tv ; Field fld ; } static class MetaFormatSpec { Format formatter ; Field fld ; } private Class < V > _cls ; final Map < String , IndexSpec > _textIndexingSpecMap ; final Map < String , MetaFormatSpec > _metaFormatSpecMap ; Field _uidField ; Method _deleteChecker ; Method _skipChecker ; public DefaultSenseiInterpreter ( Class < V > cls ) { _cls = cls ; _metaFormatSpecMap = new HashMap < String , MetaFormatSpec > ( ) ; _textIndexingSpecMap = new HashMap < String , IndexSpec > ( ) ; _uidField = null ; Field [ ] fields = cls . getDeclaredFields ( ) ; for ( Field f : fields ) { if ( f . isAnnotationPresent ( Uid . class ) ) { if ( _uidField != null ) { throw new IllegalStateException ( "<STR_LIT>" + cls ) ; } else { Class fieldType = f . getType ( ) ; if ( fieldType . isPrimitive ( ) ) { if ( int . class . equals ( fieldType ) || short . class . equals ( fieldType ) || long . class . equals ( fieldType ) ) { _uidField = f ; _uidField . setAccessible ( true ) ; } } if ( _uidField == null ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } } } else if ( f . isAnnotationPresent ( Text . class ) ) { f . setAccessible ( true ) ; Text textAnnotation = f . getAnnotation ( Text . class ) ; String name = textAnnotation . name ( ) ; if ( "<STR_LIT>" . equals ( name ) ) { name = f . getName ( ) ; } Index idx = INDEX_VAL_MAP . get ( textAnnotation . index ( ) ) ; Store store = STORE_VAL_MAP . get ( textAnnotation . store ( ) ) ; TermVector tv = TV_VAL_MAP . get ( textAnnotation . termVector ( ) ) ; if ( idx == null || store == null || tv == null ) { throw new RuntimeException ( "<STR_LIT>" ) ; } IndexSpec indexingSpec = new IndexSpec ( ) ; indexingSpec . store = store ; indexingSpec . index = idx ; indexingSpec . tv = tv ; indexingSpec . fld = f ; _textIndexingSpecMap . put ( name , indexingSpec ) ; } else if ( f . isAnnotationPresent ( StoredValue . class ) ) { f . setAccessible ( true ) ; StoredValue storeAnnotation = f . getAnnotation ( StoredValue . class ) ; String name = storeAnnotation . name ( ) ; if ( "<STR_LIT>" . equals ( name ) ) { name = f . getName ( ) ; } IndexSpec indexingSpec = new IndexSpec ( ) ; indexingSpec . store = Store . YES ; indexingSpec . index = Index . NO ; indexingSpec . tv = TermVector . NO ; indexingSpec . fld = f ; _textIndexingSpecMap . put ( name , indexingSpec ) ; } else if ( f . isAnnotationPresent ( Meta . class ) ) { f . setAccessible ( true ) ; Meta metaAnnotation = f . getAnnotation ( Meta . class ) ; String name = metaAnnotation . name ( ) ; if ( "<STR_LIT>" . equals ( name ) ) { name = f . 
getName ( ) ; } MetaType metaType = metaAnnotation . type ( ) ; if ( MetaType . Auto . equals ( metaType ) ) { Class typeClass = f . getType ( ) ; if ( Collection . class . isAssignableFrom ( typeClass ) ) { metaType = MetaType . String ; } else { metaType = CLASS_METATYPE_MAP . get ( typeClass ) ; if ( metaType == null ) { metaType = MetaType . String ; } } } String defaultFormatString = DEFAULT_FORMAT_STRING_MAP . get ( metaType ) ; String formatString = metaAnnotation . format ( ) ; if ( "<STR_LIT>" . equals ( formatString ) ) { formatString = defaultFormatString ; } MetaFormatSpec formatSpec = new MetaFormatSpec ( ) ; _metaFormatSpecMap . put ( name , formatSpec ) ; formatSpec . fld = f ; if ( defaultFormatString != null ) { if ( MetaType . Date == metaType ) { formatSpec . formatter = new SimpleDateFormat ( formatString ) ; } else { formatSpec . formatter = new DecimalFormat ( formatString , new DecimalFormatSymbols ( Locale . US ) ) ; } } } } Method [ ] methods = cls . getDeclaredMethods ( ) ; for ( Method method : methods ) { if ( method . isAnnotationPresent ( DeleteChecker . class ) ) { if ( _deleteChecker == null ) { method . setAccessible ( true ) ; _deleteChecker = method ; } else { throw new IllegalStateException ( "<STR_LIT>" + cls ) ; } } else if ( method . isAnnotationPresent ( SkipChecker . class ) ) { if ( _skipChecker == null ) { method . setAccessible ( true ) ; _skipChecker = method ; } else { throw new IllegalStateException ( "<STR_LIT>" + cls ) ; } } } if ( _uidField == null ) { throw new IllegalStateException ( cls + "<STR_LIT>" ) ; } } @ Override public String toString ( ) { StringBuilder buf = new StringBuilder ( ) ; buf . append ( "<STR_LIT>" ) . append ( _cls . getName ( ) ) ; buf . append ( "<STR_LIT>" ) . append ( _uidField . getName ( ) ) ; buf . append ( "<STR_LIT>" ) . append ( _deleteChecker == null ? "<STR_LIT:none>" : _deleteChecker . getName ( ) ) ; buf . append ( "<STR_LIT>" ) . append ( _skipChecker == null ? "<STR_LIT:none>" : _skipChecker . getName ( ) ) ; buf . append ( "<STR_LIT>" ) ; if ( _textIndexingSpecMap . size ( ) == <NUM_LIT:0> ) { buf . append ( "<STR_LIT:none>" ) ; } else { boolean first = true ; Set < String > tfNames = _textIndexingSpecMap . keySet ( ) ; for ( String name : tfNames ) { if ( ! first ) { buf . append ( "<STR_LIT:U+002C>" ) ; } else { first = false ; } buf . append ( name ) ; } } buf . append ( "<STR_LIT>" ) ; if ( _metaFormatSpecMap . size ( ) == <NUM_LIT:0> ) { buf . append ( "<STR_LIT:none>" ) ; } else { boolean first = true ; Set < String > tfNames = _metaFormatSpecMap . keySet ( ) ; for ( String tf : tfNames ) { if ( ! first ) { buf . append ( "<STR_LIT:U+002C>" ) ; } else { first = false ; } buf . append ( tf ) ; } } return buf . toString ( ) ; } @ Override public ZoieIndexable convertAndInterpret ( V obj ) { return new DefaultSenseiZoieIndexable < V > ( obj , this ) ; } } </s>
|
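A hypothetical POJO (not from the original sources) sketching the annotation-driven path of DefaultSenseiInterpreter above: @Uid must sit on a primitive int/short/long field and @Text marks a full-text field. This assumes the @Uid marker annotation referenced by the interpreter lives alongside @Text in com.senseidb.indexing, and it leaves all annotation attributes at their defaults because those default values are elided in this dump.

import com.senseidb.indexing.DefaultSenseiInterpreter;
import com.senseidb.indexing.Text;
import com.senseidb.indexing.Uid;

public class CarSketch {
  @Uid
  private long id;           // primitive uid, required exactly once per class

  @Text
  private String contents;   // tokenized full-text field, default store/index/termVector

  public CarSketch(long id, String contents) {
    this.id = id;
    this.contents = contents;
  }
}

// Wiring it up (illustrative): the interpreter inspects the annotations reflectively.
// DefaultSenseiInterpreter<CarSketch> interpreter = new DefaultSenseiInterpreter<CarSketch>(CarSketch.class);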
<s> package com . senseidb . indexing ; import com . senseidb . metrics . MetricFactory ; import java . util . Collection ; import java . util . Comparator ; import java . util . Iterator ; import java . util . LinkedHashMap ; import java . util . LinkedList ; import java . util . List ; import java . util . Map ; import java . util . concurrent . TimeUnit ; import javax . management . StandardMBean ; import org . apache . commons . configuration . Configuration ; import org . apache . commons . configuration . ConfigurationException ; import org . apache . log4j . Logger ; import org . json . JSONObject ; import proj . zoie . api . DataConsumer ; import proj . zoie . api . DataConsumer . DataEvent ; import proj . zoie . api . DataProvider ; import proj . zoie . api . Zoie ; import proj . zoie . api . ZoieException ; import proj . zoie . api . ZoieIndexReader ; import proj . zoie . impl . indexing . StreamDataProvider ; import proj . zoie . impl . indexing . ZoieConfig ; import proj . zoie . mbean . DataProviderAdmin ; import proj . zoie . mbean . DataProviderAdminMBean ; import com . browseengine . bobo . api . BoboIndexReader ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . gateway . SenseiGateway ; import com . senseidb . jmx . JmxUtil ; import com . senseidb . metrics . MetricsConstants ; import com . senseidb . plugin . SenseiPluginRegistry ; import com . senseidb . search . node . SenseiIndexingManager ; import com . senseidb . search . plugin . PluggableSearchEngineManager ; import com . yammer . metrics . core . Meter ; import com . yammer . metrics . core . MetricName ; public class DefaultStreamingIndexingManager implements SenseiIndexingManager < JSONObject > { private static final Logger logger = Logger . getLogger ( DefaultStreamingIndexingManager . class ) ; public static final String CONFIG_PREFIX = "<STR_LIT>" ; private static final String MAX_PARTITION_ID = "<STR_LIT>" ; private static final String EVTS_PER_MIN = "<STR_LIT>" ; private static final String BATCH_SIZE = "<STR_LIT>" ; private Meter _providerBatchSizeMeter ; private Meter _eventMeter ; private Meter _updateBatchSizeMeter ; private StreamDataProvider < JSONObject > _dataProvider ; private String _oldestSinceKey ; private final SenseiSchema _senseiSchema ; private final Configuration _myconfig ; private Map < Integer , Zoie < BoboIndexReader , JSONObject > > _zoieSystemMap ; private final LinkedHashMap < Integer , Collection < DataEvent < JSONObject > > > _dataCollectorMap ; private final SenseiGateway < ? > _gateway ; private final ShardingStrategy _shardingStrategy ; private final Comparator < String > _versionComparator ; private final PluggableSearchEngineManager pluggableSearchEngineManager ; private SenseiPluginRegistry pluginRegistry ; public DefaultStreamingIndexingManager ( SenseiSchema schema , Configuration senseiConfig , SenseiPluginRegistry pluginRegistry , SenseiGateway < ? > gateway , ShardingStrategy shardingStrategy , PluggableSearchEngineManager pluggableSearchEngineManager ) { _dataProvider = null ; _myconfig = senseiConfig . subset ( CONFIG_PREFIX ) ; this . pluginRegistry = pluginRegistry ; _oldestSinceKey = null ; _senseiSchema = schema ; _zoieSystemMap = null ; _dataCollectorMap = new LinkedHashMap < Integer , Collection < DataEvent < JSONObject > > > ( ) ; _gateway = gateway ; this . pluggableSearchEngineManager = pluggableSearchEngineManager ; if ( _gateway != null ) { _versionComparator = _gateway . 
getVersionComparator ( ) ; } else { _versionComparator = ZoieConfig . DEFAULT_VERSION_COMPARATOR ; } _shardingStrategy = shardingStrategy ; } public void updateOldestSinceKey ( String sinceKey ) { if ( _oldestSinceKey == null ) { _oldestSinceKey = sinceKey ; if ( _dataProvider != null ) { _dataProvider . setStartingOffset ( _oldestSinceKey ) ; } } else if ( sinceKey != null && _versionComparator . compare ( sinceKey , _oldestSinceKey ) < <NUM_LIT:0> ) { _oldestSinceKey = sinceKey ; if ( _dataProvider != null ) { _dataProvider . setStartingOffset ( _oldestSinceKey ) ; } } } private Meter registerMeter ( String name , String eventType ) { return MetricFactory . newMeter ( new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , name , "<STR_LIT>" ) , eventType , TimeUnit . SECONDS ) ; } @ Override public void initialize ( Map < Integer , Zoie < BoboIndexReader , JSONObject > > zoieSystemMap ) throws Exception { int maxPartitionId = _myconfig . getInt ( MAX_PARTITION_ID ) + <NUM_LIT:1> ; String uidField = _senseiSchema . getUidField ( ) ; DataDispatcher consumer = new DataDispatcher ( maxPartitionId , uidField ) ; _zoieSystemMap = zoieSystemMap ; Iterator < Integer > it = zoieSystemMap . keySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { int part = it . next ( ) ; Zoie < BoboIndexReader , JSONObject > zoie = zoieSystemMap . get ( part ) ; updateOldestSinceKey ( zoie . getVersion ( ) ) ; _dataCollectorMap . put ( part , new LinkedList < DataEvent < JSONObject > > ( ) ) ; } if ( pluggableSearchEngineManager != null && pluggableSearchEngineManager . getOldestVersion ( ) != null && ! ( "<STR_LIT>" . equals ( pluggableSearchEngineManager . getOldestVersion ( ) ) ) ) { updateOldestSinceKey ( pluggableSearchEngineManager . getOldestVersion ( ) ) ; } _dataProvider = buildDataProvider ( ) ; if ( _dataProvider != null ) { _dataProvider . setDataConsumer ( consumer ) ; } } @ Override public DataProvider < JSONObject > getDataProvider ( ) { return _dataProvider ; } private StreamDataProvider < JSONObject > buildDataProvider ( ) throws ConfigurationException { StreamDataProvider < JSONObject > dataProvider = null ; if ( _gateway != null ) { try { dataProvider = _gateway . buildDataProvider ( _senseiSchema , _oldestSinceKey , pluginRegistry , _shardingStrategy , _zoieSystemMap . keySet ( ) ) ; long maxEventsPerMin = _myconfig . getLong ( EVTS_PER_MIN , <NUM_LIT> ) ; dataProvider . setMaxEventsPerMinute ( maxEventsPerMin ) ; int batchSize = _myconfig . getInt ( BATCH_SIZE , <NUM_LIT:1> ) ; dataProvider . setBatchSize ( batchSize ) ; } catch ( Exception e ) { throw new ConfigurationException ( e . getMessage ( ) , e ) ; } try { StandardMBean dataProviderMbean = new StandardMBean ( new DataProviderAdmin ( dataProvider ) , DataProviderAdminMBean . class ) ; JmxUtil . registerMBean ( dataProviderMbean , "<STR_LIT>" , "<STR_LIT>" ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } return dataProvider ; } @ Override public void shutdown ( ) { if ( pluggableSearchEngineManager != null ) { pluggableSearchEngineManager . close ( ) ; } if ( _dataProvider != null ) { _dataProvider . stop ( ) ; } if ( _providerBatchSizeMeter != null ) { _providerBatchSizeMeter . stop ( ) ; } if ( _updateBatchSizeMeter != null ) { _updateBatchSizeMeter . stop ( ) ; } if ( _eventMeter != null ) { _eventMeter . stop ( ) ; } } @ Override public void start ( ) throws Exception { if ( _dataProvider == null ) { logger . 
warn ( "<STR_LIT>" ) ; } else { _providerBatchSizeMeter = registerMeter ( "<STR_LIT>" , "<STR_LIT>" ) ; _updateBatchSizeMeter = registerMeter ( "<STR_LIT>" , "<STR_LIT>" ) ; _eventMeter = registerMeter ( "<STR_LIT>" , "<STR_LIT>" ) ; _dataProvider . start ( ) ; } } @ Override public void syncWithVersion ( long timeToWait , String version ) throws ZoieException { Iterator < Integer > itr = _zoieSystemMap . keySet ( ) . iterator ( ) ; while ( itr . hasNext ( ) ) { int part_num = itr . next ( ) ; Zoie < BoboIndexReader , JSONObject > dataConsumer = _zoieSystemMap . get ( part_num ) ; if ( dataConsumer != null ) { dataConsumer . syncWithVersion ( timeToWait , version ) ; } } } private class DataDispatcher implements DataConsumer < JSONObject > { int _maxPartitionId ; private final String _uidField ; private volatile String _currentVersion ; public DataDispatcher ( int maxPartitionId , String uidField ) { _maxPartitionId = maxPartitionId ; _uidField = uidField ; _currentVersion = null ; } private JSONObject rewriteData ( JSONObject obj , int partNum ) { String type = obj . optString ( SenseiSchema . EVENT_TYPE_FIELD , null ) ; JSONObject event = obj . optJSONObject ( SenseiSchema . EVENT_FIELD ) ; if ( event == null ) event = obj ; else if ( type != null ) { try { event . put ( SenseiSchema . EVENT_TYPE_FIELD , type ) ; } catch ( Exception e ) { logger . error ( "<STR_LIT>" , e ) ; } } if ( SenseiSchema . EVENT_TYPE_UPDATE . equalsIgnoreCase ( type ) ) { Zoie < BoboIndexReader , JSONObject > zoie = _zoieSystemMap . get ( partNum ) ; List < ZoieIndexReader < BoboIndexReader > > readers ; try { readers = zoie . getIndexReaders ( ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return null ; } if ( readers == null ) { logger . error ( "<STR_LIT>" + obj ) ; return null ; } try { byte [ ] src = null ; long uid = Long . parseLong ( event . getString ( _senseiSchema . getUidField ( ) ) ) ; for ( ZoieIndexReader < BoboIndexReader > reader : readers ) { src = reader . getStoredValue ( uid ) ; if ( src != null ) break ; } byte [ ] data = null ; if ( _senseiSchema . isCompressSrcData ( ) ) data = DefaultJsonSchemaInterpreter . decompress ( src ) ; else data = src ; if ( data == null ) { logger . error ( "<STR_LIT>" + obj ) ; return null ; } JSONObject newEvent = new JSONObject ( new String ( data , "<STR_LIT:UTF-8>" ) ) ; Iterator < String > keys = event . keys ( ) ; while ( keys . hasNext ( ) ) { String key = keys . next ( ) ; newEvent . put ( key , event . get ( key ) ) ; } event = newEvent ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return null ; } finally { zoie . returnIndexReaders ( readers ) ; } } return event ; } @ Override public void consume ( Collection < proj . zoie . api . DataConsumer . DataEvent < JSONObject > > data ) throws ZoieException { _updateBatchSizeMeter . mark ( data . size ( ) ) ; _providerBatchSizeMeter . mark ( _dataProvider . getBatchSize ( ) ) ; _eventMeter . mark ( _dataProvider . getEventCount ( ) ) ; try { for ( DataEvent < JSONObject > dataEvt : data ) { JSONObject obj = dataEvt . getData ( ) ; if ( obj == null ) continue ; String version = dataEvt . getVersion ( ) ; _currentVersion = ( _versionComparator . compare ( _currentVersion , version ) < <NUM_LIT:0> ) ? version : _currentVersion ; if ( pluggableSearchEngineManager != null && pluggableSearchEngineManager . acceptEventsForAllPartitions ( ) ) { obj = pluggableSearchEngineManager . update ( obj , _currentVersion ) ; } int routeToPart = _shardingStrategy . 
caculateShard ( _maxPartitionId , obj ) ; Collection < DataEvent < JSONObject > > partDataSet = _dataCollectorMap . get ( routeToPart ) ; if ( partDataSet != null ) { JSONObject rewrited = obj ; if ( pluggableSearchEngineManager != null && ! pluggableSearchEngineManager . acceptEventsForAllPartitions ( ) ) { rewrited = pluggableSearchEngineManager . update ( obj , dataEvt . getVersion ( ) ) ; } rewrited = rewriteData ( obj , routeToPart ) ; if ( rewrited != null ) { if ( rewrited != obj ) dataEvt = new DataEvent < JSONObject > ( rewrited , dataEvt . getVersion ( ) ) ; partDataSet . add ( dataEvt ) ; } } } Iterator < Integer > it = _zoieSystemMap . keySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { int part_num = it . next ( ) ; Zoie < BoboIndexReader , JSONObject > dataConsumer = _zoieSystemMap . get ( part_num ) ; if ( dataConsumer != null ) { LinkedList < DataEvent < JSONObject > > partDataSet = ( LinkedList < DataEvent < JSONObject > > ) _dataCollectorMap . get ( part_num ) ; if ( partDataSet != null ) { if ( partDataSet . size ( ) == <NUM_LIT:0> ) { JSONObject markerObj = new JSONObject ( ) ; markerObj . put ( SenseiSchema . EVENT_TYPE_FIELD , SenseiSchema . EVENT_TYPE_SKIP ) ; markerObj . put ( _uidField , <NUM_LIT> ) ; partDataSet . add ( new DataEvent < JSONObject > ( markerObj , _currentVersion ) ) ; } else if ( _currentVersion != null && ! _currentVersion . equals ( partDataSet . getLast ( ) . getVersion ( ) ) ) { DataEvent < JSONObject > last = partDataSet . pollLast ( ) ; partDataSet . add ( new DataEvent < JSONObject > ( last . getData ( ) , _currentVersion ) ) ; } dataConsumer . consume ( partDataSet ) ; } } _dataCollectorMap . put ( part_num , new LinkedList < DataEvent < JSONObject > > ( ) ) ; } } catch ( Exception e ) { throw new ZoieException ( e . getMessage ( ) , e ) ; } } @ Override public String getVersion ( ) { return _currentVersion ; } @ Override public Comparator < String > getVersionComparator ( ) { return _versionComparator ; } } } </s>
|
<s> package com . senseidb . indexing ; import org . apache . lucene . document . Document ; import org . json . JSONException ; import org . json . JSONObject ; import proj . zoie . api . indexing . AbstractZoieIndexable ; import proj . zoie . api . indexing . AbstractZoieIndexableInterpreter ; import proj . zoie . api . indexing . ZoieIndexable ; public abstract class JSONDataInterpreter extends AbstractZoieIndexableInterpreter < JSONObject > { @ Override public ZoieIndexable convertAndInterpret ( final JSONObject src ) { return new AbstractZoieIndexable ( ) { @ Override public IndexingReq [ ] buildIndexingReqs ( ) { try { return new IndexingReq [ ] { new IndexingReq ( buildDoc ( src ) ) } ; } catch ( JSONException e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } @ Override public long getUID ( ) { try { return extractUID ( src ) ; } catch ( JSONException e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } @ Override public boolean isDeleted ( ) { return extractDeleteFlag ( src ) ; } @ Override public boolean isSkip ( ) { return extractSkipFlag ( src ) ; } } ; } public abstract long extractUID ( JSONObject obj ) throws JSONException ; public abstract Document buildDoc ( JSONObject obj ) throws JSONException ; public boolean extractSkipFlag ( JSONObject obj ) { return false ; } public boolean extractDeleteFlag ( JSONObject obj ) { return false ; } } </s>
|
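To make the extension point of the abstract JSONDataInterpreter above concrete, here is a hypothetical subclass (not from the original sources). The field names "id", "text", and "isDeleted" are assumptions for illustration only; the Lucene 3.x Field constructor matches the one used elsewhere in this dump.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.json.JSONException;
import org.json.JSONObject;
import com.senseidb.indexing.JSONDataInterpreter;

public class SimpleJsonInterpreterSketch extends JSONDataInterpreter {
  @Override
  public long extractUID(JSONObject obj) throws JSONException {
    return obj.getLong("id");                       // uid must be numeric
  }

  @Override
  public Document buildDoc(JSONObject obj) throws JSONException {
    Document doc = new Document();
    doc.add(new Field("text", obj.optString("text"),
        Field.Store.NO, Field.Index.ANALYZED));     // analyzed full-text field, not stored
    return doc;
  }

  @Override
  public boolean extractDeleteFlag(JSONObject obj) {
    return obj.optBoolean("isDeleted");             // optional delete marker, assumed name
  }
}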
<s> package com . senseidb . indexing ; import org . json . JSONObject ; import proj . zoie . api . indexing . AbstractZoieIndexableInterpreter ; import proj . zoie . api . indexing . ZoieIndexable ; public abstract class JSONValueInterpreter < V > extends AbstractZoieIndexableInterpreter < JSONObject > { private final DefaultSenseiInterpreter < V > _innerInterpreter ; public JSONValueInterpreter ( Class < V > cls ) { _innerInterpreter = new DefaultSenseiInterpreter < V > ( cls ) ; } public abstract V buildDataObj ( JSONObject jsonObj ) ; @ Override public ZoieIndexable convertAndInterpret ( JSONObject src ) { V obj = buildDataObj ( src ) ; return _innerInterpreter . convertAndInterpret ( obj ) ; } } </s>
|
<s> package com . senseidb . indexing ; import java . lang . reflect . Method ; import java . text . Format ; import java . util . Collection ; import java . util . LinkedList ; import java . util . Map . Entry ; import java . util . Set ; import org . apache . log4j . Logger ; import org . apache . lucene . document . Document ; import org . apache . lucene . document . Field . Index ; import org . apache . lucene . document . Field . Store ; import org . apache . lucene . document . Field . TermVector ; import proj . zoie . api . indexing . AbstractZoieIndexable ; import proj . zoie . api . indexing . ZoieIndexable . IndexingReq ; import com . senseidb . indexing . DefaultSenseiInterpreter . IndexSpec ; import com . senseidb . indexing . DefaultSenseiInterpreter . MetaFormatSpec ; public class DefaultSenseiZoieIndexable < V > extends AbstractZoieIndexable { private static final Logger logger = Logger . getLogger ( DefaultSenseiZoieIndexable . class ) ; private final V _obj ; private final DefaultSenseiInterpreter < V > _interpreter ; DefaultSenseiZoieIndexable ( V obj , DefaultSenseiInterpreter < V > interpreter ) { _obj = obj ; _interpreter = interpreter ; } @ Override public IndexingReq [ ] buildIndexingReqs ( ) { Document doc = new Document ( ) ; Set < Entry < String , IndexSpec > > entries = _interpreter . _textIndexingSpecMap . entrySet ( ) ; for ( Entry < String , IndexSpec > entry : entries ) { try { String name = entry . getKey ( ) ; IndexSpec idxSpec = entry . getValue ( ) ; String val = String . valueOf ( idxSpec . fld . get ( _obj ) ) ; doc . add ( new org . apache . lucene . document . Field ( name , val , idxSpec . store , idxSpec . index , idxSpec . tv ) ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } Set < Entry < String , MetaFormatSpec > > metaEntries = _interpreter . _metaFormatSpecMap . entrySet ( ) ; for ( Entry < String , MetaFormatSpec > entry : metaEntries ) { String name = entry . getKey ( ) ; try { MetaFormatSpec formatSpec = entry . getValue ( ) ; Object valObj = formatSpec . fld . get ( _obj ) ; if ( valObj == null ) continue ; Format formatter = formatSpec . formatter ; Collection valueCollection = null ; if ( valObj instanceof Collection ) { valueCollection = ( Collection ) valObj ; } else { valueCollection = new LinkedList ( ) ; valueCollection . add ( valObj ) ; } for ( Object obj : valueCollection ) { String val = formatter == null ? String . valueOf ( obj ) : formatter . format ( obj ) ; org . apache . lucene . document . Field fld = new org . apache . lucene . document . Field ( name , val , Store . NO , Index . NOT_ANALYZED_NO_NORMS , TermVector . NO ) ; fld . setOmitTermFreqAndPositions ( true ) ; doc . add ( fld ) ; } } catch ( Exception e ) { logger . error ( "<STR_LIT>" + name + "<STR_LIT>" + e . getMessage ( ) , e ) ; } } return new IndexingReq [ ] { new IndexingReq ( doc ) } ; } @ Override public long getUID ( ) { try { return _interpreter . _uidField . getLong ( _obj ) ; } catch ( Exception e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } private boolean checkViaReflection ( Method m ) { boolean retVal = false ; if ( m != null ) { try { Object retObj = m . invoke ( _obj , new Object [ <NUM_LIT:0> ] ) ; retVal = ( ( Boolean ) retObj ) . booleanValue ( ) ; } catch ( Exception e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } return retVal ; } @ Override public boolean isDeleted ( ) { return checkViaReflection ( _interpreter . 
_deleteChecker ) ; } @ Override public boolean isSkip ( ) { return checkViaReflection ( _interpreter . _skipChecker ) ; } } </s>
|
<s> package com . senseidb . indexing ; import java . util . Map ; import org . json . JSONObject ; import org . json . JSONException ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . plugin . SenseiPluginFactory ; import com . senseidb . plugin . SenseiPluginRegistry ; public interface ShardingStrategy { int caculateShard ( int maxShardId , JSONObject dataObj ) throws JSONException ; public static class FieldModShardingStrategy implements ShardingStrategy { public static class Factory implements SenseiPluginFactory < FieldModShardingStrategy > { @ Override public FieldModShardingStrategy getBean ( Map < String , String > initProperties , String fullPrefix , SenseiPluginRegistry pluginRegistry ) { return new FieldModShardingStrategy ( initProperties . get ( "<STR_LIT:field>" ) ) ; } } protected String _field ; public FieldModShardingStrategy ( String field ) { _field = field ; } @ Override public int caculateShard ( int maxShardId , JSONObject dataObj ) throws JSONException { JSONObject event = dataObj . optJSONObject ( SenseiSchema . EVENT_FIELD ) ; long uid ; if ( event == null ) uid = Long . parseLong ( dataObj . getString ( _field ) ) ; else uid = Long . parseLong ( event . getString ( _field ) ) ; return ( int ) ( uid % maxShardId ) ; } } } </s>
|
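A tiny illustrative driver (not from the original sources) for the FieldModShardingStrategy above: the uid is read from the configured field and routed modulo the partition count; events wrapped under SenseiSchema.EVENT_FIELD are unwrapped first. The field name "id" and the partition count 8 are assumptions.

import org.json.JSONObject;
import com.senseidb.indexing.ShardingStrategy;

public class ShardingSketch {
  public static void main(String[] args) throws Exception {
    // Shard by the "id" field; note the interface method is spelled caculateShard in the sources above.
    ShardingStrategy strategy = new ShardingStrategy.FieldModShardingStrategy("id");
    JSONObject event = new JSONObject().put("id", "1234567");
    int partition = strategy.caculateShard(8, event);   // 1234567 % 8 == 7
    System.out.println("routed to partition " + partition);
  }
}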
<s> package com . senseidb . indexing ; import org . apache . lucene . document . Document ; import org . json . JSONObject ; import com . senseidb . conf . SenseiSchema ; public interface CustomIndexingPipeline { void applyCustomization ( Document luceneDoc , SenseiSchema schema , JSONObject dataSource ) ; } </s>
|
<s> package com . senseidb . indexing ; import org . apache . commons . codec . binary . Base64 ; import org . json . JSONObject ; public abstract class DataSourceFilter < D > { protected String _srcDataStore ; protected String _srcDataField = "<STR_LIT>" ; protected abstract JSONObject doFilter ( D data ) throws Exception ; public JSONObject filter ( D data ) throws Exception { JSONObject obj = doFilter ( data ) ; if ( data != null && obj != null && ! obj . has ( _srcDataField ) && _srcDataStore != null && _srcDataStore . length ( ) != <NUM_LIT:0> && ! "<STR_LIT:none>" . equals ( _srcDataStore ) && _srcDataField != null && _srcDataField . length ( ) != <NUM_LIT:0> ) { if ( data instanceof byte [ ] ) { obj . put ( _srcDataField , Base64 . encodeBase64String ( ( byte [ ] ) data ) ) ; } else { obj . put ( _srcDataField , data . toString ( ) ) ; } } return obj ; } public void setSrcDataStore ( String srcDataStore ) { _srcDataStore = srcDataStore ; } public void setSrcDataField ( String srcDataField ) { _srcDataField = srcDataField ; } } </s>
|
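A hypothetical concrete DataSourceFilter (not from the original sources) showing how the base class above is meant to be extended: doFilter parses a raw String payload into JSON, and the base class's filter(...) then optionally copies the raw payload into the configured source-data field. The "key=value&key=value" payload format is illustrative only.

import org.json.JSONObject;
import com.senseidb.indexing.DataSourceFilter;

public class KeyValueSourceFilterSketch extends DataSourceFilter<String> {
  @Override
  protected JSONObject doFilter(String data) throws Exception {
    JSONObject obj = new JSONObject();
    for (String pair : data.split("&")) {
      int eq = pair.indexOf('=');
      if (eq > 0) {
        obj.put(pair.substring(0, eq), pair.substring(eq + 1));   // flat string fields only
      }
    }
    return obj;
  }
}

Calling new KeyValueSourceFilterSketch().filter("id=1&name=foo") is expected to yield {"id":"1","name":"foo"}; if setSrcDataStore(...) and setSrcDataField(...) are configured, the raw string is also attached to the output per the base-class logic above.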
<s> package com . senseidb . indexing ; import com . senseidb . metrics . MetricFactory ; import java . io . IOException ; import java . util . Map ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . facets . data . FacetDataCache ; import com . browseengine . bobo . facets . data . TermLongList ; import com . browseengine . bobo . facets . filter . FacetRangeFilter ; import com . senseidb . metrics . MetricsConstants ; import com . senseidb . plugin . SenseiPlugin ; import com . senseidb . plugin . SenseiPluginRegistry ; import com . senseidb . search . req . SenseiRequest ; import com . yammer . metrics . core . Counter ; import com . yammer . metrics . core . MetricName ; public class TimeBasedIndexSelector implements SenseiIndexPruner , SenseiPlugin { private static final String TIME_FACET_NAME = "<STR_LIT>" ; private String facetName ; private Counter processedReadersCount ; private Counter filteredReadersCount ; private IndexReaderSelector defaultReaderSelector = new IndexReaderSelector ( ) { @ Override public boolean isSelected ( BoboIndexReader reader ) throws IOException { return true ; } } ; @ Override public IndexReaderSelector getReaderSelector ( SenseiRequest req ) { BrowseSelection selection = req . getSelection ( facetName ) ; if ( selection == null || selection . getValues ( ) == null || selection . getValues ( ) . length == <NUM_LIT:0> ) { return defaultReaderSelector ; } String [ ] rangeStrings = FacetRangeFilter . getRangeStrings ( selection . getValues ( ) [ <NUM_LIT:0> ] ) ; final long start = getStartTime ( rangeStrings ) ; final long end = getEndTime ( rangeStrings ) ; return new IndexReaderSelector ( ) { @ Override public boolean isSelected ( BoboIndexReader reader ) throws IOException { processedReadersCount . inc ( ) ; Object facetDataObj = reader . getFacetData ( facetName ) ; if ( facetDataObj == null || ! ( facetDataObj instanceof FacetDataCache ) ) { throw new IllegalStateException ( "<STR_LIT>" + facetName ) ; } FacetDataCache facetDataCache = ( FacetDataCache ) reader . getFacetData ( facetName ) ; if ( ! ( facetDataCache . valArray instanceof TermLongList ) ) { throw new IllegalStateException ( "<STR_LIT>" + facetName ) ; } long [ ] elements = ( ( TermLongList ) facetDataCache . valArray ) . getElements ( ) ; if ( elements . length < <NUM_LIT:2> ) { filteredReadersCount . inc ( ) ; return false ; } if ( elements [ <NUM_LIT:1> ] > end || elements [ elements . length - <NUM_LIT:1> ] < start ) { filteredReadersCount . inc ( ) ; return false ; } return true ; } } ; } private long getStartTime ( String [ ] rangeStrings ) { long start ; if ( "<STR_LIT:*>" . equals ( rangeStrings [ <NUM_LIT:0> ] ) ) { start = Long . MIN_VALUE ; } else { start = Long . parseLong ( rangeStrings [ <NUM_LIT:0> ] ) ; if ( "<STR_LIT:true>" . equals ( rangeStrings [ <NUM_LIT:2> ] ) ) { start -- ; } } return start ; } private long getEndTime ( String [ ] rangeStrings ) { long end ; if ( "<STR_LIT:*>" . equals ( rangeStrings [ <NUM_LIT:1> ] ) ) { end = Long . MAX_VALUE ; } else { end = Long . parseLong ( rangeStrings [ <NUM_LIT:1> ] ) ; if ( "<STR_LIT:true>" . equals ( rangeStrings [ <NUM_LIT:3> ] ) ) { end ++ ; } } return end ; } @ Override public void init ( Map < String , String > config , SenseiPluginRegistry pluginRegistry ) { facetName = config . get ( TIME_FACET_NAME ) ; } @ Override public void start ( ) { MetricName processedReadersMetric = new MetricName ( MetricsConstants . 
Domain , "<STR_LIT>" , "<STR_LIT>" ) ; processedReadersCount = MetricFactory . newCounter ( processedReadersMetric ) ; MetricName filteredReadersMetric = new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" ) ; filteredReadersCount = MetricFactory . newCounter ( filteredReadersMetric ) ; } @ Override public void stop ( ) { } } </s>
|
<s> package com . senseidb . indexing ; import java . lang . annotation . ElementType ; import java . lang . annotation . Retention ; import java . lang . annotation . RetentionPolicy ; import java . lang . annotation . Target ; @ Target ( ElementType . FIELD ) @ Retention ( RetentionPolicy . RUNTIME ) public @ interface StoredValue { String name ( ) default "<STR_LIT>" ; } </s>
|
<s> package com . senseidb . indexing ; import java . lang . annotation . ElementType ; import java . lang . annotation . Retention ; import java . lang . annotation . RetentionPolicy ; import java . lang . annotation . Target ; @ Target ( ElementType . FIELD ) @ Retention ( RetentionPolicy . RUNTIME ) public @ interface Text { String name ( ) default "<STR_LIT>" ; String store ( ) default "<STR_LIT>" ; String index ( ) default "<STR_LIT>" ; String termVector ( ) default "<STR_LIT>" ; } </s>
|
<s> package com . senseidb . indexing . activity ; import it . unimi . dsi . fastutil . longs . LongIterator ; import java . io . IOException ; import java . lang . management . ManagementFactory ; import java . util . BitSet ; import java . util . List ; import java . util . Set ; import java . util . concurrent . Callable ; import java . util . concurrent . Executors ; import java . util . concurrent . ScheduledExecutorService ; import java . util . concurrent . TimeUnit ; import javax . management . MBeanServer ; import javax . management . ObjectInstance ; import javax . management . ObjectName ; import org . apache . log4j . Logger ; import proj . zoie . api . DocIDMapper ; import proj . zoie . api . IndexReaderFactory ; import proj . zoie . api . ZoieIndexReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . senseidb . conf . SenseiConfParams ; import com . senseidb . plugin . SenseiPluginRegistry ; import com . yammer . metrics . Metrics ; import com . yammer . metrics . core . Counter ; import com . yammer . metrics . core . MetricName ; import com . yammer . metrics . core . Timer ; public class PurgeUnusedActivitiesJob implements Runnable , PurgeUnusedActivitiesJobMBean { private final static Logger logger = Logger . getLogger ( PurgeUnusedActivitiesJob . class ) ; private final CompositeActivityValues compositeActivityValues ; private final Set < IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > > zoieSystems ; private static Timer timer = Metrics . newTimer ( new MetricName ( PurgeUnusedActivitiesJob . class , "<STR_LIT>" ) , TimeUnit . MILLISECONDS , TimeUnit . SECONDS ) ; private static Counter foundActivitiesToPurge = Metrics . newCounter ( new MetricName ( PurgeUnusedActivitiesJob . class , "<STR_LIT>" ) ) ; private static Counter recentUidsSavedFromPurge = Metrics . newCounter ( new MetricName ( PurgeUnusedActivitiesJob . class , "<STR_LIT>" ) ) ; protected ScheduledExecutorService executorService = Executors . newSingleThreadScheduledExecutor ( ) ; private final long frequencyInMillis ; public PurgeUnusedActivitiesJob ( CompositeActivityValues compositeActivityValues , Set < IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > > zoieSystems , long frequencyInMillis ) { this . compositeActivityValues = compositeActivityValues ; this . zoieSystems = zoieSystems ; this . frequencyInMillis = frequencyInMillis ; } public void start ( ) { if ( frequencyInMillis > <NUM_LIT:0> ) { executorService . scheduleAtFixedRate ( this , frequencyInMillis , frequencyInMillis , TimeUnit . MILLISECONDS ) ; } MBeanServer platformMBeanServer = ManagementFactory . getPlatformMBeanServer ( ) ; ObjectName name ; try { name = new ObjectName ( "<STR_LIT>" ) ; Set < ObjectInstance > mbeans = platformMBeanServer . queryMBeans ( name , null ) ; if ( mbeans != null && mbeans . isEmpty ( ) ) { platformMBeanServer . registerMBean ( this , name ) ; } } catch ( Exception e ) { logger . error ( "<STR_LIT>" , e ) ; } } public void stop ( ) { executorService . shutdown ( ) ; } public void run ( ) { try { timer . time ( new Callable < Integer > ( ) { @ Override public Integer call ( ) throws Exception { return purgeUnusedActivityIndexes ( ) ; } } ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } public int purgeUnusedActivityIndexes ( ) { logger . info ( "<STR_LIT>" ) ; long [ ] keys ; try { compositeActivityValues . globalLock . readLock ( ) . lock ( ) ; keys = new long [ compositeActivityValues . uidToArrayIndex . 
size ( ) ] ; LongIterator iterator = compositeActivityValues . uidToArrayIndex . keySet ( ) . iterator ( ) ; int i = <NUM_LIT:0> ; while ( iterator . hasNext ( ) ) { keys [ i ++ ] = iterator . nextLong ( ) ; } } finally { compositeActivityValues . globalLock . readLock ( ) . unlock ( ) ; } int bitSetLength = keys . length ; BitSet foundSet = new BitSet ( keys . length ) ; for ( IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > zoie : zoieSystems ) { List < ZoieIndexReader < BoboIndexReader > > indexReaders = null ; try { indexReaders = zoie . getIndexReaders ( ) ; for ( int i = <NUM_LIT:0> ; i < keys . length ; i ++ ) { if ( foundSet . get ( i ) ) { continue ; } for ( ZoieIndexReader < BoboIndexReader > zoieIndexReader : indexReaders ) { if ( DocIDMapper . NOT_FOUND != zoieIndexReader . getDocIDMaper ( ) . getDocID ( keys [ i ] ) ) { foundSet . set ( i ) ; break ; } } } } catch ( IOException e ) { logger . error ( e . getMessage ( ) , e ) ; } finally { if ( indexReaders != null ) { zoie . returnIndexReaders ( indexReaders ) ; } } } int recovered = compositeActivityValues . recentlyAddedUids . markRecentAsFoundInBitSet ( keys , foundSet , bitSetLength ) ; recentUidsSavedFromPurge . inc ( recovered ) ; int found = foundSet . cardinality ( ) ; if ( found == keys . length ) { logger . info ( "<STR_LIT>" ) ; return <NUM_LIT:0> ; } long [ ] notFound = new long [ keys . length - found ] ; int j = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; i < keys . length ; i ++ ) { if ( ! foundSet . get ( i ) ) { notFound [ j ] = keys [ i ] ; j ++ ; } } compositeActivityValues . delete ( notFound ) ; logger . info ( "<STR_LIT>" + notFound . length + "<STR_LIT>" ) ; foundActivitiesToPurge . inc ( notFound . length ) ; return notFound . length ; } public static long extractFrequency ( SenseiPluginRegistry pluginRegistry ) { int minutes = pluginRegistry . getConfiguration ( ) . getInt ( SenseiConfParams . SENSEI_INDEX_ACTIVITY_PURGE_FREQUENCY_MINUTES , <NUM_LIT:0> ) ; if ( minutes != <NUM_LIT:0> ) { return <NUM_LIT> * <NUM_LIT> * minutes ; } int hours = pluginRegistry . getConfiguration ( ) . getInt ( SenseiConfParams . SENSEI_INDEX_ACTIVITY_PURGE_FREQUENCY_HOURS , <NUM_LIT:6> ) ; return <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * hours ; } } </s>
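The purge above is essentially a mark-and-sweep over the activity UID set: every UID that still resolves to a doc id in one of the Zoie readers is marked in a BitSet, recently added UIDs are spared, and the unmarked remainder is deleted. A minimal standalone sketch of that marking/sweeping step, using a plain lookup set in place of the Zoie DocIDMapper (an assumption made purely for illustration):

    import java.util.BitSet;
    import java.util.Set;

    public class MarkAndSweepSketch {
        /** Returns the UIDs absent from every index reader and therefore eligible for purging. */
        static long[] findPurgeable(long[] keys, Set<Long> uidsStillInIndex) {
            BitSet found = new BitSet(keys.length);
            for (int i = 0; i < keys.length; i++) {
                if (uidsStillInIndex.contains(keys[i])) {
                    found.set(i);            // mark: UID is still referenced by the index
                }
            }
            long[] notFound = new long[keys.length - found.cardinality()];
            int j = 0;
            for (int i = 0; i < keys.length; i++) {
                if (!found.get(i)) {
                    notFound[j++] = keys[i]; // sweep: candidate for deletion
                }
            }
            return notFound;
        }
    }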
|
<s> package com . senseidb . indexing . activity . facet ; import java . io . IOException ; import org . apache . lucene . search . DocIdSetIterator ; public class ActivityRangeFloatFilterIterator extends DocIdSetIterator { private int _doc ; protected final float [ ] fieldValues ; private final int start ; private final int end ; private final int arrLength ; private int [ ] indexes ; public ActivityRangeFloatFilterIterator ( float [ ] fieldValues , int [ ] indexes , int start , int end ) { this . fieldValues = fieldValues ; this . start = start ; this . end = end ; this . indexes = indexes ; arrLength = indexes . length ; _doc = - <NUM_LIT:1> ; } @ Override final public int docID ( ) { return _doc ; } @ Override public int nextDoc ( ) throws IOException { while ( ++ _doc < arrLength ) { if ( indexes [ _doc ] == - <NUM_LIT:1> ) { continue ; } float value = fieldValues [ indexes [ _doc ] ] ; if ( value >= start && value < end && value != Float . MIN_VALUE ) { return _doc ; } } return NO_MORE_DOCS ; } @ Override public int advance ( int id ) throws IOException { _doc = id - <NUM_LIT:1> ; return nextDoc ( ) ; } } </s>
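A small, hedged driver showing how this iterator walks documents: indexes[doc] maps a doc id to a slot in the activity value array (-1 means the doc has no activity record), and only docs whose value falls in the half-open range [start, end) are returned. The arrays below are made up for illustration.

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;
    import com.senseidb.indexing.activity.facet.ActivityRangeFloatFilterIterator;

    public class RangeIteratorDemo {
        public static void main(String[] args) throws IOException {
            float[] fieldValues = {1.0f, 5.0f, 12.0f};   // per-slot activity values (hypothetical)
            int[] indexes = {0, -1, 1, 2};               // doc -> slot; doc 1 has no activity record
            // half-open range [2, 10): only doc 2 (value 5.0) qualifies
            DocIdSetIterator it = new ActivityRangeFloatFilterIterator(fieldValues, indexes, 2, 10);
            for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                System.out.println("matched doc " + doc);
            }
        }
    }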
|
<s> package com . senseidb . indexing . activity . facet ; import java . io . IOException ; import org . apache . lucene . search . DocIdSetIterator ; public class ActivityRangeIntFilterIterator extends DocIdSetIterator { private int _doc ; protected final int [ ] fieldValues ; private final int start ; private final int end ; private final int arrLength ; private int [ ] indexes ; public ActivityRangeIntFilterIterator ( int [ ] fieldValues , int [ ] indexes , int start , int end ) { this . fieldValues = fieldValues ; this . start = start ; this . end = end ; this . indexes = indexes ; arrLength = indexes . length ; _doc = - <NUM_LIT:1> ; } @ Override final public int docID ( ) { return _doc ; } @ Override public int nextDoc ( ) throws IOException { while ( ++ _doc < arrLength ) { if ( indexes [ _doc ] == - <NUM_LIT:1> ) { continue ; } int value = fieldValues [ indexes [ _doc ] ] ; if ( value >= start && value < end && value != Integer . MIN_VALUE ) { return _doc ; } } return NO_MORE_DOCS ; } @ Override public int advance ( int id ) throws IOException { _doc = id - <NUM_LIT:1> ; return nextDoc ( ) ; } } </s>
|
<s> package com . senseidb . indexing . activity . facet ; import java . io . IOException ; import org . apache . lucene . search . DocIdSetIterator ; public class SynchronizedIterator extends DocIdSetIterator { private final DocIdSetIterator inner ; public SynchronizedIterator ( DocIdSetIterator inner ) { this . inner = inner ; } @ Override public int nextDoc ( ) throws IOException { synchronized ( SynchronizedActivityRangeFacetHandler . GLOBAL_ACTIVITY_TEST_LOCK ) { return inner . nextDoc ( ) ; } } @ Override public int advance ( int id ) throws IOException { synchronized ( SynchronizedActivityRangeFacetHandler . GLOBAL_ACTIVITY_TEST_LOCK ) { return inner . advance ( id ) ; } } @ Override public int docID ( ) { return inner . docID ( ) ; } } </s>
|
<s> package com . senseidb . indexing . activity . facet ; import java . io . IOException ; import java . text . DecimalFormat ; import java . util . HashSet ; import java . util . Properties ; import org . apache . lucene . index . IndexReader ; import org . apache . lucene . search . DocIdSet ; import org . apache . lucene . search . DocIdSetIterator ; import org . apache . lucene . search . ScoreDoc ; import proj . zoie . api . ZoieSegmentReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . api . FacetSpec ; import com . browseengine . bobo . docidset . EmptyDocIdSet ; import com . browseengine . bobo . docidset . RandomAccessDocIdSet ; import com . browseengine . bobo . facets . FacetCountCollectorSource ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . filter . FacetRangeFilter ; import com . browseengine . bobo . facets . filter . RandomAccessFilter ; import com . browseengine . bobo . sort . DocComparator ; import com . browseengine . bobo . sort . DocComparatorSource ; import com . senseidb . indexing . activity . CompositeActivityValues ; import com . senseidb . indexing . activity . primitives . ActivityIntValues ; import com . senseidb . indexing . activity . primitives . ActivityPrimitiveValues ; public class SynchronizedActivityRangeFacetHandler extends ActivityRangeFacetHandler { public static final Object GLOBAL_ACTIVITY_TEST_LOCK = new Object ( ) ; public SynchronizedActivityRangeFacetHandler ( String facetName , String fieldName , CompositeActivityValues compositeActivityValues , ActivityPrimitiveValues activityPrimitiveValues ) { super ( facetName , fieldName , compositeActivityValues , activityPrimitiveValues ) ; } @ Override public RandomAccessFilter buildRandomAccessFilter ( final String value , final Properties selectionProperty ) throws IOException { return new RandomAccessFilter ( ) { @ Override public RandomAccessDocIdSet getRandomAccessDocIdSet ( final BoboIndexReader reader ) throws IOException { final RandomAccessDocIdSet docIdSet = ( RandomAccessDocIdSet ) SynchronizedActivityRangeFacetHandler . super . buildRandomAccessFilter ( value , selectionProperty ) . getDocIdSet ( reader ) ; return new RandomAccessDocIdSet ( ) { @ Override public DocIdSetIterator iterator ( ) throws IOException { return new SynchronizedIterator ( docIdSet . iterator ( ) ) ; } @ Override public boolean get ( int docId ) { synchronized ( GLOBAL_ACTIVITY_TEST_LOCK ) { return docIdSet . get ( docId ) ; } } } ; } } ; } @ Override public Object [ ] getRawFieldValues ( BoboIndexReader reader , int id ) { synchronized ( GLOBAL_ACTIVITY_TEST_LOCK ) { return super . getRawFieldValues ( reader , id ) ; } } @ Override public String [ ] getFieldValues ( BoboIndexReader reader , int id ) { synchronized ( GLOBAL_ACTIVITY_TEST_LOCK ) { return super . getFieldValues ( reader , id ) ; } } @ Override public DocComparatorSource getDocComparatorSource ( ) { DocComparatorSource docComparatorSource = SynchronizedActivityRangeFacetHandler . super . getDocComparatorSource ( ) ; return new DocComparatorSource ( ) { @ Override public DocComparator getComparator ( IndexReader reader , int docbase ) throws IOException { final DocComparator comparator = SynchronizedActivityRangeFacetHandler . super . getDocComparatorSource ( ) . 
getComparator ( reader , docbase ) ; return new DocComparator ( ) { @ Override public Comparable < Integer > value ( ScoreDoc doc ) { synchronized ( GLOBAL_ACTIVITY_TEST_LOCK ) { return comparator . value ( doc ) ; } } @ Override public int compare ( ScoreDoc doc1 , ScoreDoc doc2 ) { synchronized ( GLOBAL_ACTIVITY_TEST_LOCK ) { return comparator . compare ( doc1 , doc2 ) ; } } } ; } } ; } } </s>
|
<s> package com . senseidb . indexing . activity . facet ; import java . io . IOException ; import java . text . DecimalFormat ; import java . util . HashSet ; import java . util . Properties ; import org . apache . lucene . index . IndexReader ; import org . apache . lucene . search . DocIdSetIterator ; import org . apache . lucene . search . ScoreDoc ; import proj . zoie . api . ZoieSegmentReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . api . BrowseSelection ; import com . browseengine . bobo . api . FacetSpec ; import com . browseengine . bobo . docidset . EmptyDocIdSet ; import com . browseengine . bobo . docidset . RandomAccessDocIdSet ; import com . browseengine . bobo . facets . FacetCountCollectorSource ; import com . browseengine . bobo . facets . FacetHandler ; import com . browseengine . bobo . facets . filter . FacetRangeFilter ; import com . browseengine . bobo . facets . filter . RandomAccessFilter ; import com . browseengine . bobo . sort . DocComparator ; import com . browseengine . bobo . sort . DocComparatorSource ; import com . senseidb . bql . parsers . BQLParser . instanceof_expression_return ; import com . senseidb . indexing . activity . CompositeActivityValues ; import com . senseidb . indexing . activity . primitives . ActivityFloatValues ; import com . senseidb . indexing . activity . primitives . ActivityIntValues ; import com . senseidb . indexing . activity . primitives . ActivityPrimitiveValues ; public class ActivityRangeFacetHandler extends FacetHandler < int [ ] > { private static final String [ ] EMPTY_STRING_ARR = new String [ <NUM_LIT:0> ] ; private static final Object [ ] EMPTY_OBJ_ARR = new Object [ <NUM_LIT:0> ] ; private static final String DEFAULT_FORMATTING_STRING = "<STR_LIT>" ; public static volatile boolean isSynchronized = false ; protected ThreadLocal < DecimalFormat > formatter = new ThreadLocal < DecimalFormat > ( ) { protected DecimalFormat initialValue ( ) { return new DecimalFormat ( DEFAULT_FORMATTING_STRING ) ; } } ; private final ActivityPrimitiveValues activityValues ; private final CompositeActivityValues compositeActivityValues ; public ActivityRangeFacetHandler ( String facetName , String fieldName , CompositeActivityValues compositeActivityValues , ActivityPrimitiveValues activityValues ) { super ( facetName , new HashSet < String > ( ) ) ; this . compositeActivityValues = compositeActivityValues ; this . activityValues = activityValues ; } public static FacetHandler valueOf ( String facetName , String fieldName , CompositeActivityValues compositeActivityValues , ActivityPrimitiveValues activityValues ) { if ( isSynchronized ) { return new SynchronizedActivityRangeFacetHandler ( facetName , fieldName , compositeActivityValues , activityValues ) ; } return new ActivityRangeFacetHandler ( facetName , fieldName , compositeActivityValues , activityValues ) ; } @ Override public int [ ] load ( BoboIndexReader reader ) throws IOException { ZoieSegmentReader < ? > zoieReader = ( ZoieSegmentReader < ? > ) ( reader . getInnerReader ( ) ) ; long [ ] uidArray = zoieReader . getUIDArray ( ) ; return compositeActivityValues . 
precomputeArrayIndexes ( uidArray ) ; } @ Override public RandomAccessFilter buildRandomAccessFilter ( final String value , Properties selectionProperty ) throws IOException { return new RandomAccessFilter ( ) { @ Override public RandomAccessDocIdSet getRandomAccessDocIdSet ( BoboIndexReader reader ) throws IOException { final int [ ] indexes = ( int [ ] ) ( ( BoboIndexReader ) reader ) . getFacetData ( _name ) ; if ( value == null || value . isEmpty ( ) ) { return EmptyDocIdSet . getInstance ( ) ; } int [ ] range = parseRaw ( value ) ; final int startValue = range [ <NUM_LIT:0> ] ; final int endValue = range [ <NUM_LIT:1> ] ; if ( startValue >= endValue ) { return EmptyDocIdSet . getInstance ( ) ; } final int [ ] intArray = activityValues instanceof ActivityIntValues ? ( ( ActivityIntValues ) activityValues ) . getFieldValues ( ) : null ; final float [ ] floatArray = activityValues instanceof ActivityFloatValues ? ( ( ActivityFloatValues ) activityValues ) . getFieldValues ( ) : null ; return new RandomAccessDocIdSet ( ) { @ Override public DocIdSetIterator iterator ( ) throws IOException { if ( intArray != null ) { return new ActivityRangeIntFilterIterator ( intArray , indexes , startValue , endValue ) ; } else { return new ActivityRangeFloatFilterIterator ( floatArray , indexes , startValue , endValue ) ; } } @ Override public boolean get ( int docId ) { if ( indexes [ docId ] == - <NUM_LIT:1> ) return false ; if ( intArray != null ) { int val = intArray [ indexes [ docId ] ] ; return val >= startValue && val < endValue && val != Integer . MIN_VALUE ; } float val = floatArray [ indexes [ docId ] ] ; return val >= startValue && val < endValue && val != Integer . MIN_VALUE ; } } ; } } ; } @ Override public FacetCountCollectorSource getFacetCountCollectorSource ( BrowseSelection sel , FacetSpec fspec ) { throw new UnsupportedOperationException ( "<STR_LIT>" ) ; } @ Override public Object [ ] getRawFieldValues ( BoboIndexReader reader , int id ) { final int [ ] indexes = ( int [ ] ) ( ( BoboIndexReader ) reader ) . getFacetData ( _name ) ; return indexes [ id ] != - <NUM_LIT:1> ? new Object [ ] { activityValues . getValue ( indexes [ id ] ) } : EMPTY_OBJ_ARR ; } public int getIntActivityValue ( int [ ] facetData , int id ) { if ( id < <NUM_LIT:0> || id >= facetData . length ) { return Integer . MIN_VALUE ; } return facetData [ id ] != - <NUM_LIT:1> ? ( ( ActivityIntValues ) activityValues ) . fieldValues [ facetData [ id ] ] : Integer . MIN_VALUE ; } public float getFloatActivityValue ( int [ ] facetData , int id ) { if ( id < <NUM_LIT:0> || id >= facetData . length ) { return Integer . MIN_VALUE ; } return facetData [ id ] != - <NUM_LIT:1> ? ( ( ActivityFloatValues ) activityValues ) . fieldValues [ facetData [ id ] ] : Integer . MIN_VALUE ; } @ Override public String [ ] getFieldValues ( BoboIndexReader reader , int id ) { final int [ ] indexes = ( int [ ] ) ( ( BoboIndexReader ) reader ) . getFacetData ( _name ) ; if ( indexes [ id ] == - <NUM_LIT:1> ) return EMPTY_STRING_ARR ; Number value = activityValues . getValue ( indexes [ id ] ) ; if ( value . intValue ( ) == Integer . MIN_VALUE || value . floatValue ( ) == Float . MIN_VALUE ) { return EMPTY_STRING_ARR ; } return new String [ ] { formatter . get ( ) . format ( value ) } ; } @ Override public DocComparatorSource getDocComparatorSource ( ) { final int [ ] intArray = activityValues instanceof ActivityIntValues ? ( ( ActivityIntValues ) activityValues ) . 
getFieldValues ( ) : null ; final float [ ] floatArray = activityValues instanceof ActivityFloatValues ? ( ( ActivityFloatValues ) activityValues ) . getFieldValues ( ) : null ; if ( intArray != null ) return new DocComparatorSource ( ) { @ Override public DocComparator getComparator ( IndexReader reader , int docbase ) throws IOException { final int [ ] indexes = ( int [ ] ) ( ( BoboIndexReader ) reader ) . getFacetData ( _name ) ; return new DocComparator ( ) { @ Override public Comparable < Integer > value ( ScoreDoc doc ) { return indexes [ doc . doc ] != - <NUM_LIT:1> ? intArray [ indexes [ doc . doc ] ] : <NUM_LIT:0> ; } @ Override public int compare ( ScoreDoc doc1 , ScoreDoc doc2 ) { int val1 = indexes [ doc1 . doc ] != - <NUM_LIT:1> ? intArray [ indexes [ doc1 . doc ] ] : <NUM_LIT:0> ; int val2 = indexes [ doc2 . doc ] != - <NUM_LIT:1> ? intArray [ indexes [ doc2 . doc ] ] : <NUM_LIT:0> ; return ( val1 < val2 ? - <NUM_LIT:1> : ( val1 == val2 ? <NUM_LIT:0> : <NUM_LIT:1> ) ) ; } } ; } } ; else return new DocComparatorSource ( ) { @ Override public DocComparator getComparator ( IndexReader reader , int docbase ) throws IOException { final int [ ] indexes = ( int [ ] ) ( ( BoboIndexReader ) reader ) . getFacetData ( _name ) ; return new DocComparator ( ) { @ Override public Comparable < Float > value ( ScoreDoc doc ) { return indexes [ doc . doc ] != - <NUM_LIT:1> ? floatArray [ indexes [ doc . doc ] ] : <NUM_LIT:0> ; } @ Override public int compare ( ScoreDoc doc1 , ScoreDoc doc2 ) { float val1 = indexes [ doc1 . doc ] != - <NUM_LIT:1> ? floatArray [ indexes [ doc1 . doc ] ] : <NUM_LIT:0> ; float val2 = indexes [ doc2 . doc ] != - <NUM_LIT:1> ? floatArray [ indexes [ doc2 . doc ] ] : <NUM_LIT:0> ; return ( val1 < val2 ? - <NUM_LIT:1> : ( val1 == val2 ? <NUM_LIT:0> : <NUM_LIT:1> ) ) ; } } ; } } ; } public static int [ ] parseRaw ( String rangeString ) { String [ ] ranges = FacetRangeFilter . getRangeStrings ( rangeString ) ; String lower = ranges [ <NUM_LIT:0> ] ; String upper = ranges [ <NUM_LIT:1> ] ; String includeLower = ranges [ <NUM_LIT:2> ] ; String includeUpper = ranges [ <NUM_LIT:3> ] ; int start = <NUM_LIT:0> ; int end = <NUM_LIT:0> ; if ( "<STR_LIT:*>" . equals ( lower ) ) { start = Integer . MIN_VALUE ; } else { start = Integer . parseInt ( lower ) ; if ( "<STR_LIT:false>" . equals ( includeLower ) ) { start ++ ; } } if ( "<STR_LIT:*>" . equals ( upper ) ) { end = Integer . MAX_VALUE ; } else { end = Integer . parseInt ( upper ) ; if ( "<STR_LIT:true>" . equals ( includeUpper ) ) { end ++ ; } } return new int [ ] { start , end } ; } } </s>
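parseRaw above converts a Bobo range selection into a half-open int interval: "*" maps to Integer.MIN_VALUE or Integer.MAX_VALUE, an exclusive lower bound is shifted up by one, and an inclusive upper bound is shifted up by one, so the filter can always test value >= start && value < end. A standalone sketch of that conversion, taking the four already-parsed strings directly rather than calling Bobo's FacetRangeFilter:

    public class RangeParseSketch {
        /** Mirrors the [start, end) conversion performed by ActivityRangeFacetHandler.parseRaw. */
        static int[] toHalfOpen(String lower, String upper, boolean includeLower, boolean includeUpper) {
            int start = "*".equals(lower) ? Integer.MIN_VALUE : Integer.parseInt(lower);
            if (!"*".equals(lower) && !includeLower) {
                start++;                   // exclusive lower bound -> shift up by one
            }
            int end = "*".equals(upper) ? Integer.MAX_VALUE : Integer.parseInt(upper);
            if (!"*".equals(upper) && includeUpper) {
                end++;                     // inclusive upper bound -> half-open end is value + 1
            }
            return new int[]{start, end};
        }

        public static void main(String[] args) {
            int[] r = toHalfOpen("10", "20", true, true);
            System.out.println(r[0] + " .. " + r[1]);   // prints "10 .. 21"
        }
    }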
|
<s> package com . senseidb . indexing . activity ; import com . senseidb . metrics . MetricFactory ; import it . unimi . dsi . fastutil . ints . IntArrayList ; import it . unimi . dsi . fastutil . ints . IntList ; import it . unimi . dsi . fastutil . longs . Long2IntMap ; import it . unimi . dsi . fastutil . longs . Long2IntOpenHashMap ; import java . util . ArrayList ; import java . util . Collection ; import java . util . Collections ; import java . util . Comparator ; import java . util . HashMap ; import java . util . List ; import java . util . Map ; import java . util . concurrent . ConcurrentHashMap ; import java . util . concurrent . Executors ; import java . util . concurrent . ScheduledExecutorService ; import java . util . concurrent . locks . Lock ; import java . util . concurrent . locks . ReadWriteLock ; import java . util . concurrent . locks . ReentrantReadWriteLock ; import org . apache . log4j . Logger ; import proj . zoie . api . ZoieIndexReader ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . indexing . activity . CompositeActivityManager . TimeAggregateInfo ; import com . senseidb . indexing . activity . CompositeActivityStorage . Update ; import com . senseidb . indexing . activity . primitives . ActivityFloatValues ; import com . senseidb . indexing . activity . primitives . ActivityIntValues ; import com . senseidb . indexing . activity . primitives . ActivityPrimitiveValues ; import com . senseidb . indexing . activity . time . TimeAggregatedActivityValues ; import com . yammer . metrics . core . Counter ; import com . yammer . metrics . core . MetricName ; public class CompositeActivityValues { private static final int DEFAULT_INITIAL_CAPACITY = <NUM_LIT> ; private final static Logger logger = Logger . getLogger ( CompositeActivityValues . class ) ; protected Comparator < String > versionComparator ; private volatile UpdateBatch < Update > pendingDeletes ; protected Map < String , ActivityValues > valuesMap = new ConcurrentHashMap < String , ActivityValues > ( ) ; protected volatile String lastVersion = "<STR_LIT>" ; protected Long2IntMap uidToArrayIndex = new Long2IntOpenHashMap ( ) ; protected ReadWriteLock globalLock = new ReentrantReadWriteLock ( ) ; protected ScheduledExecutorService executor = Executors . newSingleThreadScheduledExecutor ( ) ; protected IntList deletedIndexes = new IntArrayList ( <NUM_LIT> ) ; protected CompositeActivityStorage activityStorage ; protected UpdateBatch < Update > updateBatch ; protected RecentlyAddedUids recentlyAddedUids ; protected volatile Metadata metadata ; private volatile boolean closed ; private ActivityConfig activityConfig ; protected final Counter reclaimedDocumentsCounter ; protected final Counter currentDocumentsCounter ; protected final Counter deletedDocumentsCounter ; protected final Counter insertedDocumentsCounter ; protected final Counter totalUpdatesCounter ; CompositeActivityValues ( ) { reclaimedDocumentsCounter = MetricFactory . newCounter ( new MetricName ( CompositeActivityValues . class , "<STR_LIT>" ) ) ; currentDocumentsCounter = MetricFactory . newCounter ( new MetricName ( CompositeActivityValues . class , "<STR_LIT>" ) ) ; deletedDocumentsCounter = MetricFactory . newCounter ( new MetricName ( CompositeActivityValues . class , "<STR_LIT>" ) ) ; insertedDocumentsCounter = MetricFactory . newCounter ( new MetricName ( CompositeActivityValues . class , "<STR_LIT>" ) ) ; totalUpdatesCounter = MetricFactory . newCounter ( new MetricName ( CompositeActivityValues . 
class , "<STR_LIT>" ) ) ; } public void init ( ) { init ( DEFAULT_INITIAL_CAPACITY ) ; } public void init ( int count ) { uidToArrayIndex = new Long2IntOpenHashMap ( count ) ; } public void updateVersion ( String version ) { if ( versionComparator . compare ( lastVersion , version ) < <NUM_LIT:0> ) { lastVersion = version ; } } public int update ( long uid , final String version , Map < String , Object > map ) { if ( valuesMap . isEmpty ( ) ) { return - <NUM_LIT:1> ; } if ( versionComparator . compare ( lastVersion , version ) > <NUM_LIT:0> ) { return - <NUM_LIT:1> ; } if ( map . isEmpty ( ) ) { lastVersion = version ; return - <NUM_LIT:1> ; } int index = - <NUM_LIT:1> ; Lock writeLock = globalLock . writeLock ( ) ; boolean needToFlush = false ; try { writeLock . lock ( ) ; totalUpdatesCounter . inc ( ) ; if ( uidToArrayIndex . containsKey ( uid ) ) { index = uidToArrayIndex . get ( uid ) ; } else { insertedDocumentsCounter . inc ( ) ; synchronized ( deletedIndexes ) { if ( deletedIndexes . size ( ) > <NUM_LIT:0> ) { index = deletedIndexes . removeInt ( deletedIndexes . size ( ) - <NUM_LIT:1> ) ; } else { index = uidToArrayIndex . size ( ) ; } } uidToArrayIndex . put ( uid , index ) ; recentlyAddedUids . add ( uid ) ; needToFlush = updateBatch . addFieldUpdate ( new Update ( index , uid ) ) ; } boolean currentUpdate = updateActivities ( map , index ) ; needToFlush = needToFlush || currentUpdate ; lastVersion = version ; } finally { writeLock . unlock ( ) ; } if ( needToFlush ) { flush ( ) ; } return index ; } public ActivityPrimitiveValues getActivityValues ( String fieldName ) { ActivityValues activityValues = valuesMap . get ( fieldName ) ; if ( activityValues == null ) { if ( fieldName . contains ( "<STR_LIT::>" ) ) { return ( ( TimeAggregatedActivityValues ) valuesMap . get ( fieldName . substring ( <NUM_LIT:0> , fieldName . indexOf ( "<STR_LIT::>" ) ) ) ) . getValuesMap ( ) . get ( fieldName . substring ( fieldName . indexOf ( "<STR_LIT::>" ) + <NUM_LIT:1> ) ) ; } return null ; } else if ( activityValues instanceof ActivityIntValues ) { return ( ActivityIntValues ) activityValues ; } else if ( activityValues instanceof ActivityFloatValues ) { return ( ActivityFloatValues ) activityValues ; } else { return ( ( TimeAggregatedActivityValues ) activityValues ) . getDefaultIntValues ( ) ; } } private boolean updateActivities ( Map < String , Object > map , int index ) { boolean needToFlush = false ; for ( ActivityValues activityIntValues : valuesMap . values ( ) ) { Object value = map . get ( activityIntValues . getFieldName ( ) ) ; if ( value != null ) { needToFlush = needToFlush | activityIntValues . update ( index , value ) ; } else { needToFlush = needToFlush | activityIntValues . update ( index , <NUM_LIT:0> ) ; } } return needToFlush ; } public void delete ( long ... uids ) { boolean needToFlush = false ; if ( uids . length == <NUM_LIT:0> ) { return ; } for ( long uid : uids ) { if ( uid == Long . MIN_VALUE ) { continue ; } Lock writeLock = globalLock . writeLock ( ) ; try { writeLock . lock ( ) ; if ( ! uidToArrayIndex . containsKey ( uid ) ) { continue ; } deletedDocumentsCounter . inc ( ) ; int index = uidToArrayIndex . remove ( uid ) ; for ( ActivityValues activityIntValues : valuesMap . values ( ) ) { activityIntValues . delete ( index ) ; } needToFlush = needToFlush || pendingDeletes . addFieldUpdate ( new Update ( index , Long . MIN_VALUE ) ) ; } finally { writeLock . 
unlock ( ) ; } } if ( needToFlush ) { flushDeletes ( ) ; } } public void flushDeletes ( ) { if ( pendingDeletes . updates . isEmpty ( ) ) { return ; } final UpdateBatch < Update > deleteBatch = pendingDeletes ; pendingDeletes = new UpdateBatch < Update > ( activityConfig ) ; executor . submit ( new Runnable ( ) { @ Override public void run ( ) { if ( closed ) { return ; } Collections . reverse ( deleteBatch . updates ) ; activityStorage . flush ( deleteBatch . updates ) ; synchronized ( deletedIndexes ) { for ( Update update : deleteBatch . updates ) { deletedIndexes . add ( update . index ) ; } } } } ) ; } public void syncWithPersistentVersion ( String version ) { synchronized ( this ) { while ( versionComparator . compare ( metadata != null ? metadata . version : lastVersion , version ) < <NUM_LIT:0> ) { try { this . wait ( <NUM_LIT> ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } } } public void syncWithVersion ( String version ) { synchronized ( this ) { while ( versionComparator . compare ( lastVersion , version ) < <NUM_LIT:0> ) { try { this . wait ( <NUM_LIT> ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } } } public String getVersion ( ) { return lastVersion ; } public synchronized void flush ( ) { if ( closed ) { return ; } final UpdateBatch < Update > oldBatch = updateBatch ; updateBatch = new UpdateBatch < CompositeActivityStorage . Update > ( activityConfig ) ; final List < Runnable > underlyingFlushes = new ArrayList < Runnable > ( valuesMap . size ( ) ) ; for ( ActivityValues activityIntValues : valuesMap . values ( ) ) { underlyingFlushes . add ( activityIntValues . prepareFlush ( ) ) ; } final String version = lastVersion ; final int count ; globalLock . readLock ( ) . lock ( ) ; try { synchronized ( deletedIndexes ) { count = uidToArrayIndex . size ( ) + deletedIndexes . size ( ) ; currentDocumentsCounter . clear ( ) ; currentDocumentsCounter . inc ( uidToArrayIndex . size ( ) ) ; reclaimedDocumentsCounter . clear ( ) ; reclaimedDocumentsCounter . inc ( deletedIndexes . size ( ) ) ; logger . info ( "<STR_LIT>" + uidToArrayIndex . size ( ) + "<STR_LIT>" + deletedIndexes . size ( ) ) ; } } finally { globalLock . readLock ( ) . unlock ( ) ; } executor . submit ( new Runnable ( ) { @ Override public void run ( ) { if ( closed || activityStorage == null ) { return ; } activityStorage . flush ( oldBatch . updates ) ; for ( Runnable runnable : underlyingFlushes ) { runnable . run ( ) ; } metadata . update ( version , count ) ; } } ) ; flushDeletes ( ) ; } public void close ( ) { closed = true ; if ( activityStorage != null ) { activityStorage . close ( ) ; } for ( ActivityValues activityIntValues : valuesMap . values ( ) ) { activityIntValues . close ( ) ; } } public int [ ] precomputeArrayIndexes ( long [ ] uids ) { int [ ] ret = new int [ uids . length ] ; for ( int i = <NUM_LIT:0> ; i < uids . length ; i ++ ) { long uid = uids [ i ] ; if ( uid == ZoieIndexReader . DELETED_UID ) { ret [ i ] = - <NUM_LIT:1> ; continue ; } Lock lock = globalLock . readLock ( ) ; try { lock . lock ( ) ; if ( ! uidToArrayIndex . containsKey ( uid ) ) { ret [ i ] = - <NUM_LIT:1> ; } else { ret [ i ] = uidToArrayIndex . get ( uid ) ; } } finally { lock . unlock ( ) ; } } return ret ; } public Map < String , ActivityValues > getActivityValuesMap ( ) { return valuesMap ; } public int getIntValueByUID ( long uid , String column ) { Lock lock = globalLock . readLock ( ) ; try { lock . lock ( ) ; if ( ! uidToArrayIndex . 
containsKey ( uid ) ) { return Integer . MIN_VALUE ; } return ( ( ActivityIntValues ) getActivityValues ( column ) ) . getIntValue ( uidToArrayIndex . get ( uid ) ) ; } finally { lock . unlock ( ) ; } } public float getFloatValueByUID ( long uid , String column ) { Lock lock = globalLock . readLock ( ) ; try { lock . lock ( ) ; if ( ! uidToArrayIndex . containsKey ( uid ) ) { return Integer . MIN_VALUE ; } return ( ( ActivityFloatValues ) getActivityValues ( column ) ) . getFloatValue ( uidToArrayIndex . get ( uid ) ) ; } finally { lock . unlock ( ) ; } } public int getIndexByUID ( long uid ) { Lock lock = globalLock . readLock ( ) ; try { lock . lock ( ) ; if ( ! uidToArrayIndex . containsKey ( uid ) ) { return - <NUM_LIT:1> ; } return uidToArrayIndex . get ( uid ) ; } finally { lock . unlock ( ) ; } } public static CompositeActivityValues createCompositeValues ( ActivityPersistenceFactory activityPersistenceFactory , Collection < SenseiSchema . FieldDefinition > fieldNames , List < TimeAggregateInfo > aggregatedActivities , Comparator < String > versionComparator ) { CompositeActivityValues ret = new CompositeActivityValues ( ) ; CompositeActivityStorage persistentColumnManager = activityPersistenceFactory . getCompositeStorage ( ) ; ret . metadata = activityPersistenceFactory . getMetadata ( ) ; ret . activityConfig = activityPersistenceFactory . getActivityConfig ( ) ; ret . updateBatch = new UpdateBatch < Update > ( ret . activityConfig ) ; ret . pendingDeletes = new UpdateBatch < Update > ( ret . activityConfig ) ; ret . recentlyAddedUids = new RecentlyAddedUids ( ret . activityConfig . getUndeletableBufferSize ( ) ) ; int count = <NUM_LIT:0> ; if ( ret . metadata != null ) { ret . metadata . init ( ) ; ret . lastVersion = ret . metadata . version ; count = ret . metadata . count ; } if ( persistentColumnManager != null ) { persistentColumnManager . decorateCompositeActivityValues ( ret , ret . metadata ) ; count = ret . metadata . count ; } logger . info ( "<STR_LIT>" + ret . uidToArrayIndex . size ( ) + "<STR_LIT>" + ret . deletedIndexes . size ( ) ) ; ret . versionComparator = versionComparator ; ret . valuesMap = new HashMap < String , ActivityValues > ( fieldNames . size ( ) ) ; for ( TimeAggregateInfo aggregatedActivity : aggregatedActivities ) { ret . valuesMap . put ( aggregatedActivity . fieldName , TimeAggregatedActivityValues . createTimeAggregatedValues ( aggregatedActivity . fieldName , aggregatedActivity . times , count , activityPersistenceFactory ) ) ; } for ( SenseiSchema . FieldDefinition field : fieldNames ) { if ( field . isActivity && ! ret . valuesMap . containsKey ( field . name ) ) { ActivityPrimitiveValues values = ActivityPrimitiveValues . createActivityPrimitiveValues ( activityPersistenceFactory , field , count ) ; ret . valuesMap . put ( field . name , values ) ; } } return ret ; } } </s>
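The uid-to-index bookkeeping above follows a simple slot-allocation scheme: an update for a new UID reuses the most recently freed slot if one exists, otherwise it appends a new slot, and a delete returns the slot to the free list. A simplified standalone sketch of that scheme only (not the actual CompositeActivityValues API, and without the locking, metrics, and persistence concerns of the real class):

    import it.unimi.dsi.fastutil.ints.IntArrayList;
    import it.unimi.dsi.fastutil.longs.Long2IntOpenHashMap;

    public class SlotAllocatorSketch {
        private final Long2IntOpenHashMap uidToIndex = new Long2IntOpenHashMap();
        private final IntArrayList freeSlots = new IntArrayList();

        /** Returns the array slot for this UID, allocating one if needed. */
        public int indexFor(long uid) {
            if (uidToIndex.containsKey(uid)) {
                return uidToIndex.get(uid);
            }
            // reuse a freed slot when possible, otherwise grow by one
            int slot = freeSlots.isEmpty() ? uidToIndex.size() : freeSlots.removeInt(freeSlots.size() - 1);
            uidToIndex.put(uid, slot);
            return slot;
        }

        /** Frees the slot held by this UID so a later insert can reuse it. */
        public void delete(long uid) {
            if (uidToIndex.containsKey(uid)) {
                freeSlots.add(uidToIndex.remove(uid));
            }
        }
    }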
|
<s> package com . senseidb . indexing . activity ; import java . io . File ; import java . io . IOException ; import java . util . Collection ; import java . util . Comparator ; import java . util . HashMap ; import java . util . List ; import org . apache . commons . io . FileUtils ; import org . apache . log4j . Logger ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . indexing . activity . primitives . ActivityPrimitivesStorage ; import com . senseidb . indexing . activity . time . TimeAggregatedActivityValues ; public class ActivityPersistenceFactory { private static Logger logger = Logger . getLogger ( ActivityPersistenceFactory . class ) ; private static ThreadLocal < ActivityPersistenceFactory > overrideForCurrentThread = new ThreadLocal < ActivityPersistenceFactory > ( ) ; private Metadata metadata ; private static String indexDirPath ; private final ActivityConfig activityConfig ; public static ActivityPersistenceFactory getInstance ( String indexDirPath ) { return getInstance ( indexDirPath , new ActivityConfig ( ) ) ; } public static ActivityPersistenceFactory getInstance ( String indexDirPath , ActivityConfig activityConfig ) { if ( overrideForCurrentThread . get ( ) != null ) { ActivityPersistenceFactory ret = overrideForCurrentThread . get ( ) ; return ret ; } return new ActivityPersistenceFactory ( indexDirPath , activityConfig ) ; } public static ActivityPersistenceFactory getInMemoryInstance ( ) { return new ActivityInMemoryFactory ( ) ; } protected ActivityPersistenceFactory ( String indexDirPath , ActivityConfig activityConfig ) { this . indexDirPath = indexDirPath ; this . activityConfig = activityConfig ; } protected CompositeActivityStorage getCompositeStorage ( ) { CompositeActivityStorage ret = new CompositeActivityStorage ( indexDirPath ) ; ret . init ( ) ; return ret ; } public ActivityPrimitivesStorage getActivivityPrimitivesStorage ( String fieldName ) { ActivityPrimitivesStorage activityPrimitivesStorage = new ActivityPrimitivesStorage ( fieldName , indexDirPath ) ; activityPrimitivesStorage . init ( ) ; return activityPrimitivesStorage ; } public AggregatesMetadata createAggregatesMetadata ( String fieldName ) { AggregatesMetadata ret = new AggregatesMetadata ( ) ; File aggregatesFile = new File ( indexDirPath , fieldName + "<STR_LIT>" ) ; try { if ( ! aggregatesFile . exists ( ) ) { aggregatesFile . createNewFile ( ) ; ret . lastUpdatedTime = <NUM_LIT:0> ; FileUtils . writeStringToFile ( aggregatesFile , String . valueOf ( ret . lastUpdatedTime ) ) ; } else { ret . lastUpdatedTime = Integer . parseInt ( FileUtils . readFileToString ( aggregatesFile ) ) ; } } catch ( IOException e ) { throw new RuntimeException ( e ) ; } ret . aggregatesFile = aggregatesFile ; return ret ; } public static class AggregatesMetadata { protected int lastUpdatedTime ; protected File aggregatesFile ; protected AggregatesMetadata ( ) { } public void updateTime ( int currentTime ) { lastUpdatedTime = currentTime ; try { FileUtils . writeStringToFile ( aggregatesFile , String . valueOf ( currentTime ) ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } public int getLastUpdatedTime ( ) { return lastUpdatedTime ; } } public Metadata getMetadata ( ) { if ( metadata == null ) { metadata = new Metadata ( ActivityPersistenceFactory . this . indexDirPath ) ; } return metadata ; } public static void setOverrideForCurrentThread ( ActivityPersistenceFactory overrideForCurrentThread ) { ActivityPersistenceFactory . overrideForCurrentThread . 
set ( overrideForCurrentThread ) ; } public ActivityConfig getActivityConfig ( ) { return activityConfig ; } } </s>
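AggregatesMetadata above keeps a single "last updated time" value in a small sidecar file next to the activity index, bootstrapping it to zero when the file does not yet exist. A hedged standalone sketch of that persistence pattern using java.nio (the real class uses commons-io FileUtils; the file name and location below are assumptions):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class LastUpdatedFileSketch {
        private final Path file;

        public LastUpdatedFileSketch(String dir, String fieldName) throws IOException {
            this.file = Paths.get(dir, fieldName + ".aggregates");   // hypothetical file name
            if (!Files.exists(file)) {
                Files.write(file, "0".getBytes(StandardCharsets.UTF_8));  // bootstrap with time 0
            }
        }

        public int read() throws IOException {
            return Integer.parseInt(new String(Files.readAllBytes(file), StandardCharsets.UTF_8).trim());
        }

        public void write(int currentTimeInMinutes) throws IOException {
            Files.write(file, String.valueOf(currentTimeInMinutes).getBytes(StandardCharsets.UTF_8));
        }
    }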
|
<s> package com . senseidb . indexing . activity . time ; import java . text . ParseException ; import java . text . SimpleDateFormat ; public class Clock { private static volatile Long predefinedTime ; private static volatile Integer predefinedTimeInMinutes ; private static volatile long startTime ; static { SimpleDateFormat formatter = new SimpleDateFormat ( "<STR_LIT>" ) ; try { startTime = formatter . parse ( "<STR_LIT>" ) . getTime ( ) ; } catch ( ParseException e ) { throw new RuntimeException ( e ) ; } } public static int getCurrentTimeInMinutes ( ) { if ( predefinedTimeInMinutes != null ) { return predefinedTimeInMinutes ; } return ( int ) ( ( getTime ( ) - startTime ) / <NUM_LIT:1000> / <NUM_LIT> ) ; } public static Integer getPredefinedTimeInMinutes ( ) { return predefinedTimeInMinutes ; } public static void setPredefinedTimeInMinutes ( Integer predefinedTimeInMinutes ) { Clock . predefinedTimeInMinutes = predefinedTimeInMinutes ; } public static long getTime ( ) { if ( predefinedTime == null ) { return System . currentTimeMillis ( ) ; } return predefinedTime ; } public static void setPredefinedTime ( Long predefinedTime ) { Clock . predefinedTime = predefinedTime ; } } </s>
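Clock measures time as whole minutes elapsed since a fixed, hard-coded epoch (the literal is elided above), which keeps per-document timestamps small enough to fit in ints. A minimal sketch of the same conversion, with an assumed epoch of 2012-01-01:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class MinutesSinceEpochSketch {
        public static void main(String[] args) throws ParseException {
            // the real epoch string is elided in the source; 2012-01-01 is only an assumption
            long epoch = new SimpleDateFormat("yyyy-MM-dd").parse("2012-01-01").getTime();
            int minutes = (int) ((System.currentTimeMillis() - epoch) / 1000 / 60);
            System.out.println("minutes since epoch: " + minutes);
        }
    }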
|
<s> package com . senseidb . indexing . activity . time ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . HashMap ; import java . util . List ; import java . util . Map ; import com . senseidb . indexing . activity . ActivityPersistenceFactory . AggregatesMetadata ; import com . senseidb . indexing . activity . primitives . ActivityIntValues ; import com . senseidb . indexing . activity . primitives . ActivityPrimitiveValues ; import com . senseidb . indexing . activity . ActivityPersistenceFactory ; import com . senseidb . indexing . activity . ActivityValues ; public class TimeAggregatedActivityValues implements ActivityValues { protected final String fieldName ; protected Map < String , ActivityIntValues > valuesMap = new HashMap < String , ActivityIntValues > ( ) ; protected IntValueHolder [ ] intActivityValues ; protected TimeHitsHolder timeActivities ; public volatile int maxIndex ; private AggregatesMetadata aggregatesMetadata ; private AggregatesUpdateJob aggregatesUpdateJob ; protected ActivityIntValues defaultIntValues ; private TimeAggregatedActivityValues ( String fieldName , List < String > times , int count , ActivityPersistenceFactory activityPersistenceFactory ) { this . fieldName = fieldName ; intActivityValues = new IntValueHolder [ times . size ( ) ] ; int index = <NUM_LIT:0> ; for ( String time : times ) { int timeInMinutes = extractTimeInMinutes ( time ) ; ActivityIntValues activityIntValues = ( ActivityIntValues ) ActivityPrimitiveValues . createActivityPrimitiveValues ( activityPersistenceFactory , int . class , fieldName + "<STR_LIT::>" + time , count ) ; this . valuesMap . put ( time , activityIntValues ) ; intActivityValues [ index ++ ] = new IntValueHolder ( activityIntValues , time , timeInMinutes ) ; } defaultIntValues = ( ActivityIntValues ) ActivityPrimitiveValues . createActivityPrimitiveValues ( activityPersistenceFactory , int . class , fieldName , count ) ; Arrays . sort ( intActivityValues ) ; maxIndex = count ; aggregatesMetadata = activityPersistenceFactory . createAggregatesMetadata ( fieldName ) ; } protected synchronized static void initTimeHits ( TimeHitsHolder timeActivities , IntValueHolder [ ] intActivityValues , int count , int lastUpdatedTime ) { for ( int index = <NUM_LIT:0> ; index < count ; index ++ ) { int activitiesCount = <NUM_LIT:0> ; for ( int j = <NUM_LIT:0> ; j < intActivityValues . length ; j ++ ) { int value = intActivityValues [ j ] . activityIntValues . getIntValue ( index ) ; if ( value == Integer . MIN_VALUE ) { activitiesCount = <NUM_LIT:0> ; break ; } activitiesCount += value ; } if ( activitiesCount == <NUM_LIT:0> ) { continue ; } int length = Math . min ( activitiesCount , intActivityValues [ <NUM_LIT:0> ] . timeInMinutes ) ; IntContainer times = new IntContainer ( length ) ; IntContainer activities = new IntContainer ( length ) ; for ( int j = <NUM_LIT:0> ; j < intActivityValues . length - <NUM_LIT:1> ; j ++ ) { int value = intActivityValues [ j ] . activityIntValues . getIntValue ( index ) ; int time = intActivityValues [ j ] . timeInMinutes ; if ( value == Integer . MIN_VALUE ) { activitiesCount = <NUM_LIT:0> ; break ; } activitiesCount += value ; fillTimeHits ( times , activities , value - intActivityValues [ j + <NUM_LIT:1> ] . activityIntValues . getIntValue ( index ) , lastUpdatedTime - time + <NUM_LIT:1> , time - intActivityValues [ j + <NUM_LIT:1> ] . timeInMinutes ) ; } fillTimeHits ( times , activities , intActivityValues [ intActivityValues . length - <NUM_LIT:1> ] . 
activityIntValues . getIntValue ( index ) , lastUpdatedTime - intActivityValues [ intActivityValues . length - <NUM_LIT:1> ] . timeInMinutes + <NUM_LIT:1> , intActivityValues [ intActivityValues . length - <NUM_LIT:1> ] . timeInMinutes ) ; timeActivities . activities [ index ] = activities ; timeActivities . times [ index ] = times ; } } private static void fillTimeHits ( IntContainer times , IntContainer activities , int activityCount , int startTime , int periodInMinutes ) { int length = java . lang . Math . min ( periodInMinutes , activityCount ) ; if ( length == <NUM_LIT:1> ) { activities . add ( activityCount ) ; times . add ( startTime + periodInMinutes / <NUM_LIT:2> ) ; } else if ( length > <NUM_LIT:1> ) { int activityIncrement = activityCount / length ; int timeIncrement = periodInMinutes / length ; int activityIncrementDelta = activityCount - activityIncrement * length ; int timeOffset = startTime ; for ( int i = <NUM_LIT:0> ; i < length ; i ++ ) { if ( i == <NUM_LIT:0> ) { activities . add ( activityIncrementDelta + activityIncrement ) ; } else { activities . add ( activityIncrement ) ; } times . add ( timeOffset ) ; timeOffset += timeIncrement ; } } } public static Integer extractTimeInMinutes ( String time ) { time = time . trim ( ) ; char identifier = time . charAt ( time . length ( ) - <NUM_LIT:1> ) ; int number = Integer . parseInt ( time . substring ( <NUM_LIT:0> , time . length ( ) - <NUM_LIT:1> ) ) ; switch ( identifier ) { case '<CHAR_LIT>' : return number ; case '<CHAR_LIT>' : return <NUM_LIT> * number ; case '<CHAR_LIT>' : return <NUM_LIT:24> * <NUM_LIT> * number ; case '<CHAR_LIT>' : return <NUM_LIT:7> * <NUM_LIT:24> * <NUM_LIT> * number ; case '<CHAR_LIT>' : return <NUM_LIT:30> * <NUM_LIT:24> * <NUM_LIT> * number ; case '<CHAR_LIT>' : return <NUM_LIT> * <NUM_LIT:24> * <NUM_LIT> * number ; default : throw new UnsupportedOperationException ( "<STR_LIT>" ) ; } } @ Override public void init ( int capacity ) { timeActivities = new TimeHitsHolder ( capacity ) ; initTimeHits ( timeActivities , intActivityValues , capacity , aggregatesMetadata . getLastUpdatedTime ( ) ) ; aggregatesUpdateJob = new AggregatesUpdateJob ( this , aggregatesMetadata ) ; aggregatesUpdateJob . start ( ) ; } @ Override public boolean update ( int index , Object value ) { boolean needToFlush = false ; if ( maxIndex < index ) { maxIndex = index ; } int valueInt = getIntValue ( value ) ; String valueStr = valueInt > <NUM_LIT:0> ? "<STR_LIT:+>" + valueInt : String . valueOf ( valueInt ) ; int currentTime = Clock . getCurrentTimeInMinutes ( ) ; synchronized ( defaultIntValues ) { needToFlush = needToFlush | defaultIntValues . update ( index , value ) ; } timeActivities . ensureCapacity ( index ) ; synchronized ( timeActivities . getLock ( index ) ) { if ( ! timeActivities . isSet ( index ) ) { timeActivities . setActivities ( index , new IntContainer ( <NUM_LIT:1> ) ) ; timeActivities . setTime ( index , new IntContainer ( <NUM_LIT:1> ) ) ; } if ( timeActivities . getTimes ( index ) . getSize ( ) > <NUM_LIT:0> && timeActivities . getTimes ( index ) . peekLast ( ) == currentTime ) { timeActivities . getActivities ( index ) . add ( timeActivities . getActivities ( index ) . removeLast ( ) + valueInt ) ; } else { timeActivities . getTimes ( index ) . add ( currentTime ) ; timeActivities . getActivities ( index ) . add ( valueInt ) ; } } for ( IntValueHolder intValueHolder : intActivityValues ) { synchronized ( intValueHolder . activityIntValues ) { needToFlush = needToFlush | intValueHolder . 
activityIntValues . update ( index , valueStr ) ; } } return needToFlush ; } private int getIntValue ( Object value ) { int valueInt ; if ( value instanceof Number ) { valueInt = ( ( Number ) value ) . intValue ( ) ; } else if ( value instanceof String ) { if ( value . toString ( ) . startsWith ( "<STR_LIT:+>" ) ) { valueInt = Integer . parseInt ( value . toString ( ) . substring ( <NUM_LIT:1> ) ) ; } else { valueInt = Integer . parseInt ( value . toString ( ) ) ; } } else { throw new UnsupportedOperationException ( ) ; } return valueInt ; } @ Override public void delete ( int index ) { synchronized ( defaultIntValues ) { defaultIntValues . delete ( index ) ; } for ( IntValueHolder intValueHolder : intActivityValues ) { synchronized ( intValueHolder . activityIntValues ) { intValueHolder . activityIntValues . delete ( index ) ; } } synchronized ( timeActivities . getLock ( index ) ) { timeActivities . reset ( index ) ; } } @ Override public Runnable prepareFlush ( ) { final List < Runnable > flushes = new ArrayList < Runnable > ( intActivityValues . length ) ; flushes . add ( defaultIntValues . prepareFlush ( ) ) ; for ( IntValueHolder intValueHolder : intActivityValues ) { flushes . add ( intValueHolder . activityIntValues . prepareFlush ( ) ) ; } return new Runnable ( ) { public void run ( ) { for ( Runnable runnable : flushes ) { runnable . run ( ) ; } } } ; } @ Override public String getFieldName ( ) { return fieldName ; } @ Override public void close ( ) { defaultIntValues . close ( ) ; aggregatesUpdateJob . stop ( ) ; for ( IntValueHolder intValueHolder : intActivityValues ) { intValueHolder . activityIntValues . close ( ) ; } } public ActivityIntValues getDefaultIntValues ( ) { return defaultIntValues ; } public AggregatesUpdateJob getAggregatesUpdateJob ( ) { return aggregatesUpdateJob ; } public static class IntValueHolder implements Comparable < IntValueHolder > { public ActivityIntValues activityIntValues ; public final String time ; public final Integer timeInMinutes ; public IntValueHolder ( ActivityIntValues activityIntValues , String time , Integer timeInMinutes ) { this . activityIntValues = activityIntValues ; this . time = time ; this . timeInMinutes = timeInMinutes ; } @ Override public int compareTo ( IntValueHolder obj ) { return obj . timeInMinutes - timeInMinutes ; } } public static class TimeHitsHolder { private IntContainer [ ] times ; private IntContainer [ ] activities ; public TimeHitsHolder ( int capacity ) { times = new IntContainer [ capacity ] ; activities = new IntContainer [ capacity ] ; } public IntContainer getTimes ( int index ) { return times [ index ] ; } public IntContainer getActivities ( int index ) { return activities [ index ] ; } public boolean isSet ( int index ) { return activities [ index ] != null ; } public void reset ( int index ) { if ( activities . length <= index ) { return ; } activities [ index ] = null ; times [ index ] = null ; } public void setTime ( int index , IntContainer time ) { ensureCapacity ( index ) ; times [ index ] = time ; } public void setActivities ( int index , IntContainer activity ) { ensureCapacity ( index ) ; activities [ index ] = activity ; } public Object getLock ( int index ) { return activities [ index ] != null ? activities [ index ] : this ; } public void ensureCapacity ( int currentArraySize ) { if ( times . length == <NUM_LIT:0> ) { times = new IntContainer [ <NUM_LIT> ] ; activities = new IntContainer [ <NUM_LIT> ] ; return ; } if ( times . 
length - currentArraySize < <NUM_LIT:2> ) { int newSize = times . length < <NUM_LIT> ? times . length * <NUM_LIT:2> : ( int ) ( times . length * <NUM_LIT> ) ; IntContainer [ ] newFieldValues = new IntContainer [ newSize ] ; System . arraycopy ( times , <NUM_LIT:0> , newFieldValues , <NUM_LIT:0> , times . length ) ; times = newFieldValues ; newFieldValues = new IntContainer [ newSize ] ; System . arraycopy ( activities , <NUM_LIT:0> , newFieldValues , <NUM_LIT:0> , activities . length ) ; activities = newFieldValues ; } } } public Map < String , ActivityIntValues > getValuesMap ( ) { return valuesMap ; } public TimeHitsHolder getTimeActivities ( ) { return timeActivities ; } public static TimeAggregatedActivityValues createTimeAggregatedValues ( String fieldName , List < String > times , int count , ActivityPersistenceFactory activityPersistenceFactory ) { TimeAggregatedActivityValues ret = new TimeAggregatedActivityValues ( fieldName , times , count , activityPersistenceFactory ) ; ret . init ( count > <NUM_LIT:0> ? count : <NUM_LIT> ) ; return ret ; } } </s>
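extractTimeInMinutes above parses a duration spec consisting of a number plus a one-character unit suffix. The actual suffix characters are elided in this dump, so the letters below (m/h/d/w/M/y) are assumptions; the multipliers follow the code, except the year factor, which is also elided and assumed to be 365. A standalone sketch:

    public class DurationParseSketch {
        /** e.g. "15m" -> 15, "2h" -> 120, "1d" -> 1440 (suffix letters are assumed, not confirmed). */
        static int toMinutes(String spec) {
            spec = spec.trim();
            char unit = spec.charAt(spec.length() - 1);
            int n = Integer.parseInt(spec.substring(0, spec.length() - 1));
            switch (unit) {
                case 'm': return n;                     // minutes
                case 'h': return 60 * n;                // hours
                case 'd': return 24 * 60 * n;           // days
                case 'w': return 7 * 24 * 60 * n;       // weeks
                case 'M': return 30 * 24 * 60 * n;      // months (30-day approximation)
                case 'y': return 365 * 24 * 60 * n;     // years (multiplier elided above; 365 assumed)
                default:  throw new UnsupportedOperationException("unknown unit: " + unit);
            }
        }
    }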
|
<s> package com . senseidb . indexing . activity . time ; import com . senseidb . metrics . MetricFactory ; import java . util . concurrent . Callable ; import java . util . concurrent . Executors ; import java . util . concurrent . ScheduledExecutorService ; import java . util . concurrent . TimeUnit ; import org . apache . log4j . Logger ; import com . senseidb . indexing . activity . ActivityPersistenceFactory . AggregatesMetadata ; import com . senseidb . indexing . activity . time . TimeAggregatedActivityValues . IntValueHolder ; import com . senseidb . metrics . MetricsConstants ; import com . yammer . metrics . core . MetricName ; import com . yammer . metrics . core . Timer ; public class AggregatesUpdateJob implements Runnable { private final static Logger logger = Logger . getLogger ( AggregatesUpdateJob . class ) ; protected ScheduledExecutorService executorService = Executors . newSingleThreadScheduledExecutor ( ) ; private final TimeAggregatedActivityValues timeAggregatedActivityValues ; private final AggregatesMetadata aggregatesMetadata ; private int currentCount ; private final Timer timer = MetricFactory . newTimer ( new MetricName ( MetricsConstants . Domain , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , TimeUnit . MILLISECONDS , TimeUnit . SECONDS ) ; public AggregatesUpdateJob ( TimeAggregatedActivityValues timeAggregatedActivityValues , AggregatesMetadata aggregatesMetadata ) { this . timeAggregatedActivityValues = timeAggregatedActivityValues ; this . aggregatesMetadata = aggregatesMetadata ; } public void start ( ) { executorService . scheduleAtFixedRate ( this , <NUM_LIT:30> , <NUM_LIT:30> , TimeUnit . SECONDS ) ; } public void stop ( ) { executorService . shutdown ( ) ; } public void awaitTermination ( ) { try { executorService . awaitTermination ( <NUM_LIT:5> , TimeUnit . SECONDS ) ; } catch ( InterruptedException e ) { throw new RuntimeException ( e ) ; } } @ Override public synchronized void run ( ) { try { timer . time ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { runUpdateJob ( ) ; return null ; } } ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } public void runUpdateJob ( ) { int currentTime = Clock . getCurrentTimeInMinutes ( ) ; if ( currentTime <= aggregatesMetadata . getLastUpdatedTime ( ) ) { return ; } currentCount = <NUM_LIT:0> ; ; for ( int i = <NUM_LIT:0> ; i <= timeAggregatedActivityValues . maxIndex ; i ++ ) { synchronized ( timeAggregatedActivityValues . timeActivities . getLock ( i ) ) { if ( ! timeAggregatedActivityValues . timeActivities . isSet ( i ) ) { continue ; } IntContainer activities = timeAggregatedActivityValues . timeActivities . getActivities ( i ) ; IntContainer times = timeAggregatedActivityValues . timeActivities . getTimes ( i ) ; int [ ] updateTempValues = new int [ timeAggregatedActivityValues . intActivityValues . length ] ; updateActivityValues ( timeAggregatedActivityValues . intActivityValues , activities , times , currentTime , i , updateTempValues ) ; } } aggregatesMetadata . updateTime ( currentTime ) ; logger . info ( "<STR_LIT>" + currentCount + "<STR_LIT>" ) ; } private final void updateActivityValues ( IntValueHolder [ ] intActivityValues , IntContainer activities , IntContainer times , int currentTime , int index , int [ ] updateTempValues ) { int minimumAggregateIndex = <NUM_LIT:0> ; for ( int activityIndex = <NUM_LIT:0> ; activityIndex < activities . getSize ( ) ; activityIndex ++ ) { if ( times . size ( ) != activities . 
size ( ) ) { throw new IllegalStateException ( "<STR_LIT>" + activities . getSize ( ) + "<STR_LIT>" + times . size ( ) ) ; } if ( times . size ( ) == <NUM_LIT:0> ) { continue ; } if ( currentTime - times . get ( activityIndex ) < intActivityValues [ intActivityValues . length - <NUM_LIT:1> ] . timeInMinutes ) { break ; } for ( int aggregateIndex = intActivityValues . length - <NUM_LIT:1> ; aggregateIndex >= minimumAggregateIndex ; aggregateIndex -- ) { IntValueHolder intValueHolder = intActivityValues [ aggregateIndex ] ; int currentElapsedTime = currentTime - times . get ( activityIndex ) ; if ( currentElapsedTime < intValueHolder . timeInMinutes ) { minimumAggregateIndex = aggregateIndex + <NUM_LIT:1> ; break ; } int previousElapsedTime = aggregatesMetadata . getLastUpdatedTime ( ) - times . get ( activityIndex ) ; if ( currentElapsedTime >= intValueHolder . timeInMinutes && previousElapsedTime < intValueHolder . timeInMinutes ) { int activityValue = activities . get ( activityIndex ) ; if ( activityValue != <NUM_LIT:0> ) { updateTempValues [ aggregateIndex ] += activityValue ; currentCount ++ ; } } } } for ( int i = <NUM_LIT:0> ; i < updateTempValues . length ; i ++ ) { int updateValue = updateTempValues [ i ] ; if ( updateValue != <NUM_LIT:0> ) { synchronized ( intActivityValues [ i ] . activityIntValues . getFieldValues ( ) ) { intActivityValues [ i ] . activityIntValues . update ( index , updateValue > <NUM_LIT:0> ? String . valueOf ( - updateValue ) : "<STR_LIT:+>" + String . valueOf ( updateValue ) ) ; } updateTempValues [ i ] = <NUM_LIT:0> ; } } while ( true ) { if ( times . size ( ) == <NUM_LIT:0> ) { break ; } int time = times . peekFirst ( ) ; int elapsedTime = currentTime - time ; if ( elapsedTime >= intActivityValues [ <NUM_LIT:0> ] . timeInMinutes ) { times . removeFirst ( ) ; activities . removeFirst ( ) ; if ( times . size ( ) == <NUM_LIT:0> ) { timeAggregatedActivityValues . timeActivities . reset ( index ) ; } } else { break ; } } } } </s>
|
<s> package com . senseidb . indexing . activity . time ; import java . util . Arrays ; public class IntContainer { private static int [ ] EMPTY_ARR = new int [ <NUM_LIT:0> ] ; private static final int initialGrowthFactor = <NUM_LIT:2> ; private static final int capacityThreshold = <NUM_LIT:10> ; protected int [ ] array ; protected int startIndex = <NUM_LIT:0> ; protected int actualSize = <NUM_LIT:0> ; public IntContainer ( int capacity ) { if ( capacity == <NUM_LIT:0> ) { array = EMPTY_ARR ; } else { array = new int [ capacity ] ; } } public IntContainer ( ) { array = new int [ <NUM_LIT:1> ] ; } public int removeFirst ( ) { ensureCapacityOnStart ( ) ; if ( actualSize == <NUM_LIT:0> ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } startIndex ++ ; actualSize -- ; return array [ startIndex - <NUM_LIT:1> ] ; } public int removeLast ( ) { ensureCapacityOnStart ( ) ; if ( actualSize == <NUM_LIT:0> ) { throw new IllegalStateException ( "<STR_LIT>" ) ; } actualSize -- ; return array [ startIndex + actualSize ] ; } public int getSize ( ) { return actualSize ; } public int peekFirst ( ) { return array [ startIndex ] ; } public int peekLast ( ) { return array [ startIndex + actualSize - <NUM_LIT:1> ] ; } public int get ( int index ) { return array [ startIndex + index ] ; } public IntContainer add ( int number ) { ensureCapacityOnEnd ( ) ; array [ startIndex + actualSize ] = number ; actualSize ++ ; return this ; } private void ensureCapacityOnEnd ( ) { if ( actualSize + startIndex < array . length ) { return ; } double growthFactor = <NUM_LIT> ; int newSize = array . length < capacityThreshold ? array . length + initialGrowthFactor : ( int ) ( array . length * growthFactor ) ; int [ ] oldArr = array ; array = new int [ newSize ] ; System . arraycopy ( oldArr , startIndex , array , <NUM_LIT:0> , actualSize ) ; startIndex = <NUM_LIT:0> ; } private void ensureCapacityOnStart ( ) { int newStartIndex = startIndex ; int newArrayLength = array . length ; int reduceFactor = <NUM_LIT:2> ; if ( actualSize >= capacityThreshold && startIndex > actualSize / ( reduceFactor * reduceFactor ) ) { newStartIndex = <NUM_LIT:0> ; } else if ( startIndex > reduceFactor && actualSize < capacityThreshold ) { newStartIndex = <NUM_LIT:0> ; } if ( array . length > reduceFactor && actualSize < array . length / reduceFactor ) { newArrayLength = array . length / reduceFactor ; } if ( newStartIndex != startIndex || newArrayLength != array . length ) { int [ ] oldArr = array ; if ( newArrayLength != array . length ) { array = new int [ newArrayLength ] ; } System . arraycopy ( oldArr , startIndex , array , <NUM_LIT:0> , actualSize ) ; startIndex = <NUM_LIT:0> ; } } @ Override public String toString ( ) { return Arrays . toString ( array ) ; } public int size ( ) { return actualSize ; } } </s>
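IntContainer is a compact int deque backed by a single growable array with a moving start index, so removeFirst stays O(1) without shifting elements. A brief usage sketch, assuming the class is on the classpath:

    import com.senseidb.indexing.activity.time.IntContainer;

    public class IntContainerDemo {
        public static void main(String[] args) {
            IntContainer c = new IntContainer(2);
            c.add(10).add(20).add(30);              // add() appends and returns this for chaining
            System.out.println(c.peekFirst());      // 10
            System.out.println(c.peekLast());       // 30
            System.out.println(c.removeFirst());    // 10 -- FIFO removal from the front
            System.out.println(c.getSize());        // 2
        }
    }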
|
<s> package com . senseidb . indexing . activity ; import java . io . IOException ; import java . io . RandomAccessFile ; import java . nio . MappedByteBuffer ; public abstract class AtomicFieldUpdate { public int index ; public abstract int getFieldSizeInBytes ( ) ; public abstract void update ( MappedByteBuffer mappedByteBuffer , int offset ) ; public abstract void update ( RandomAccessFile storedFile , int offset ) ; public static class IntFieldUpdate extends AtomicFieldUpdate { public int value ; @ Override public int getFieldSizeInBytes ( ) { return <NUM_LIT:4> ; } @ Override public void update ( MappedByteBuffer mappedByteBuffer , int offset ) { mappedByteBuffer . putInt ( offset , value ) ; } @ Override public void update ( RandomAccessFile storedFile , int offset ) { try { storedFile . seek ( offset ) ; storedFile . writeInt ( value ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } } public static AtomicFieldUpdate valueOf ( int index , int value ) { IntFieldUpdate ret = new IntFieldUpdate ( ) ; ret . index = index ; ret . value = value ; return ret ; } public static AtomicFieldUpdate valueOf ( int index , float value ) { FloatFieldUpdate ret = new FloatFieldUpdate ( ) ; ret . index = index ; ret . value = value ; return ret ; } public static class FloatFieldUpdate extends AtomicFieldUpdate { public float value ; @ Override public int getFieldSizeInBytes ( ) { return <NUM_LIT:4> ; } @ Override public void update ( MappedByteBuffer mappedByteBuffer , int offset ) { mappedByteBuffer . putFloat ( offset , value ) ; } @ Override public void update ( RandomAccessFile storedFile , int offset ) { try { storedFile . seek ( offset ) ; storedFile . writeFloat ( value ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } } } </s>
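Each AtomicFieldUpdate knows how to write its 4-byte payload at a fixed offset, so a caller can compute offset = index * getFieldSizeInBytes() and apply the update against either a memory-mapped buffer or a RandomAccessFile. A hedged sketch against a temporary file (the file name and the flat offset layout are assumptions for illustration):

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import com.senseidb.indexing.activity.AtomicFieldUpdate;

    public class FieldUpdateDemo {
        public static void main(String[] args) throws IOException {
            File tmp = File.createTempFile("activity", ".data");          // throwaway backing file
            tmp.deleteOnExit();
            AtomicFieldUpdate update = AtomicFieldUpdate.valueOf(3, 42);   // slot 3 gets int value 42
            try (RandomAccessFile raf = new RandomAccessFile(tmp, "rw")) {
                update.update(raf, update.index * update.getFieldSizeInBytes());  // write at byte offset 12
                raf.seek(12);
                System.out.println(raf.readInt());   // 42
            }
        }
    }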
|
<s> package com . senseidb . indexing . activity ; public interface PurgeUnusedActivitiesJobMBean { public int purgeUnusedActivityIndexes ( ) ; } </s>
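The MBean interface above declares only purgeUnusedActivityIndexes(). A speculative sketch of registering it with the platform MBean server via StandardMBean (which sidesteps the standard MBean naming convention); the lambda stand-in and the ObjectName are assumptions, not taken from the dataset:

```java
// Hypothetical sketch: exposing PurgeUnusedActivitiesJobMBean over JMX.
import java.lang.management.ManagementFactory;
import javax.management.ObjectName;
import javax.management.StandardMBean;
import com.senseidb.indexing.activity.PurgeUnusedActivitiesJobMBean;

public class PurgeJobJmxSketch {
    public static void main(String[] args) throws Exception {
        PurgeUnusedActivitiesJobMBean job = () -> 0; // stand-in: reports zero purged indexes
        ObjectName name = new ObjectName("com.senseidb:type=PurgeUnusedActivitiesJob");
        ManagementFactory.getPlatformMBeanServer()
            .registerMBean(new StandardMBean(job, PurgeUnusedActivitiesJobMBean.class), name);
    }
}
```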
|
<s> package com . senseidb . indexing . activity ; import com . senseidb . metrics . MetricFactory ; import java . io . File ; import java . io . IOException ; import java . util . ArrayList ; import java . util . Comparator ; import java . util . HashMap ; import java . util . HashSet ; import java . util . Iterator ; import java . util . List ; import java . util . Map ; import java . util . Set ; import org . apache . log4j . Logger ; import org . apache . lucene . index . IndexReader ; import org . jboss . netty . util . internal . ConcurrentHashMap ; import org . json . JSONException ; import org . json . JSONObject ; import proj . zoie . api . IndexReaderFactory ; import proj . zoie . api . Zoie ; import proj . zoie . api . ZoieIndexReader ; import proj . zoie . api . ZoieMultiReader ; import proj . zoie . api . ZoieSegmentReader ; import com . browseengine . bobo . api . BoboIndexReader ; import com . browseengine . bobo . facets . FacetHandler ; import com . senseidb . conf . SenseiConfParams ; import com . senseidb . conf . SenseiSchema ; import com . senseidb . conf . SenseiSchema . FacetDefinition ; import com . senseidb . conf . SenseiSchema . FieldDefinition ; import com . senseidb . indexing . ShardingStrategy ; import com . senseidb . indexing . activity . BaseActivityFilter . ActivityFilteredResult ; import com . senseidb . indexing . activity . facet . ActivityRangeFacetHandler ; import com . senseidb . indexing . activity . primitives . ActivityIntValues ; import com . senseidb . indexing . activity . time . TimeAggregatedActivityValues ; import com . senseidb . plugin . SenseiPluginRegistry ; import com . senseidb . search . node . SenseiCore ; import com . senseidb . search . plugin . PluggableSearchEngine ; import com . senseidb . search . plugin . PluggableSearchEngineManager ; import com . yammer . metrics . core . Counter ; import com . yammer . metrics . core . MetricName ; public class CompositeActivityManager implements PluggableSearchEngine { private final static Logger logger = Logger . getLogger ( PluggableSearchEngineManager . class ) ; protected CompositeActivityValues activityValues ; private SenseiSchema senseiSchema ; public static final String EVENT_TYPE_ONLY_ACTIVITY = "<STR_LIT>" ; private BaseActivityFilter activityFilter ; private ShardingStrategy shardingStrategy ; private SenseiCore senseiCore ; private PurgeUnusedActivitiesJob purgeUnusedActivitiesJob ; private Map < String , Set < String > > columnToFacetMapping = new HashMap < String , Set < String > > ( ) ; private Counter recoveredIndexInBoboFacetDataCache ; private Counter facetMappingMismatch ; private ActivityPersistenceFactory activityPersistenceFactory ; public CompositeActivityManager ( ActivityPersistenceFactory activityPersistenceFactory ) { this . activityPersistenceFactory = activityPersistenceFactory ; } public CompositeActivityManager ( ) { } public String getVersion ( ) { return activityValues . getVersion ( ) ; } public boolean acceptEventsForAllPartitions ( ) { if ( activityFilter == null ) return false ; return activityFilter . acceptEventsForAllPartitions ( ) ; } public final void init ( String indexDirectory , int nodeId , SenseiSchema senseiSchema , Comparator < String > versionComparator , SenseiPluginRegistry pluginRegistry , ShardingStrategy shardingStrategy ) { this . senseiSchema = senseiSchema ; this . 
shardingStrategy = shardingStrategy ; try { if ( activityPersistenceFactory == null ) { if ( indexDirectory == null ) { activityPersistenceFactory = ActivityPersistenceFactory . getInMemoryInstance ( ) ; } else { File dir = new File ( indexDirectory , "<STR_LIT>" + nodeId + "<STR_LIT>" ) ; dir . mkdirs ( ) ; String canonicalPath = dir . getCanonicalPath ( ) ; ActivityConfig activityConfig = new ActivityConfig ( pluginRegistry ) ; activityPersistenceFactory = ActivityPersistenceFactory . getInstance ( canonicalPath , activityConfig ) ; } } activityValues = CompositeActivityValues . createCompositeValues ( activityPersistenceFactory , senseiSchema . getFieldDefMap ( ) . values ( ) , TimeAggregateInfo . valueOf ( senseiSchema ) , versionComparator ) ; activityFilter = pluginRegistry . getBeanByFullPrefix ( SenseiConfParams . SENSEI_INDEX_ACTIVITY_FILTER , BaseActivityFilter . class ) ; if ( activityFilter == null ) { activityFilter = new DefaultActivityFilter ( ) ; } initColumnFacetMapping ( senseiSchema ) ; cachedInstances . put ( nodeId , this ) ; } catch ( IOException ex ) { throw new RuntimeException ( ex ) ; } } private void initColumnFacetMapping ( SenseiSchema senseiSchema ) { Set < String > facetNames = getFacetNames ( ) ; for ( FacetDefinition facet : senseiSchema . getFacets ( ) ) { if ( facet . name == null || facet . column == null || ! facetNames . contains ( facet . name ) ) { continue ; } if ( ! columnToFacetMapping . containsKey ( facet . column ) ) { columnToFacetMapping . put ( facet . column , new HashSet < String > ( ) ) ; } if ( "<STR_LIT>" . equals ( facet . type ) && facet . params . containsKey ( "<STR_LIT>" ) ) { for ( String time : facet . params . get ( "<STR_LIT>" ) ) { String name = facet . name + "<STR_LIT::>" + time ; columnToFacetMapping . get ( facet . column ) . add ( name ) ; } } columnToFacetMapping . get ( facet . column ) . add ( facet . name ) ; } } public boolean isOnlyActivityUpdate ( JSONObject event ) { boolean activityPresent = false ; Iterator keys = event . keys ( ) ; while ( keys . hasNext ( ) ) { String key = ( String ) keys . next ( ) ; FieldDefinition fieldDefinition = senseiSchema . getFieldDefMap ( ) . get ( key ) ; if ( fieldDefinition == null || senseiSchema . getUidField ( ) . equals ( key ) ) { continue ; } if ( fieldDefinition . isActivity ) { activityPresent = true ; } else { return false ; } } return activityPresent && SenseiSchema . EVENT_TYPE_UPDATE . equalsIgnoreCase ( event . optString ( SenseiSchema . EVENT_TYPE_FIELD , null ) ) ; } public JSONObject acceptEvent ( JSONObject event , String version ) { try { if ( event . opt ( SenseiSchema . EVENT_TYPE_SKIP ) != null || SenseiSchema . EVENT_TYPE_SKIP . equalsIgnoreCase ( event . optString ( SenseiSchema . EVENT_TYPE_FIELD ) ) ) { return event ; } boolean onlyActivityUpdate = isOnlyActivityUpdate ( event ) ; if ( onlyActivityUpdate ) { event . put ( SenseiSchema . EVENT_TYPE_FIELD , SenseiSchema . EVENT_TYPE_SKIP ) ; } long defaultUid = event . getLong ( senseiSchema . getUidField ( ) ) ; if ( event . opt ( SenseiSchema . EVENT_TYPE_FIELD ) != null && event . optString ( SenseiSchema . EVENT_TYPE_FIELD ) . equals ( SenseiSchema . EVENT_TYPE_DELETE ) ) { activityValues . delete ( defaultUid ) ; return event ; } ActivityFilteredResult activityFilteredResult = activityFilter . filter ( event , senseiSchema , shardingStrategy , senseiCore ) ; onlyActivityUpdate = onlyActivityUpdate || activityFilteredResult . getFilteredObject ( ) == null || activityFilteredResult . 
getFilteredObject ( ) . length ( ) == <NUM_LIT:0> || SenseiSchema . EVENT_TYPE_SKIP . equals ( activityFilteredResult . getFilteredObject ( ) . opt ( SenseiSchema . EVENT_TYPE_FIELD ) ) ; for ( long uid : activityFilteredResult . getActivityValues ( ) . keySet ( ) ) { if ( activityFilteredResult . getActivityValues ( ) . get ( uid ) == null || activityFilteredResult . getActivityValues ( ) . get ( uid ) . size ( ) == <NUM_LIT:0> ) { continue ; } int previousIndex = activityValues . getIndexByUID ( uid ) ; int index = activityValues . update ( uid , version , activityFilteredResult . getActivityValues ( ) . get ( uid ) ) ; if ( index >= <NUM_LIT:0> && previousIndex < <NUM_LIT:0> && ( onlyActivityUpdate || defaultUid != uid ) ) { updateExistingBoboIndexes ( uid , index , activityFilteredResult . getActivityValues ( ) . get ( uid ) . keySet ( ) ) ; } } return activityFilteredResult . getFilteredObject ( ) ; } catch ( JSONException ex ) { throw new RuntimeException ( ex ) ; } } private void updateExistingBoboIndexes ( long uid , int index , Set < String > columns ) { if ( columns . isEmpty ( ) ) { return ; } Set < String > facets = new HashSet < String > ( ) ; for ( String column : columns ) { if ( columnToFacetMapping . containsKey ( column ) ) { facets . addAll ( columnToFacetMapping . get ( column ) ) ; } } if ( facets . isEmpty ( ) ) { return ; } for ( int partition : senseiCore . getPartitions ( ) ) { IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > indexReaderFactory = senseiCore . getIndexReaderFactory ( partition ) ; if ( indexReaderFactory == null ) { continue ; } List < ZoieIndexReader < BoboIndexReader > > indexReaders = null ; try { indexReaders = indexReaderFactory . getIndexReaders ( ) ; for ( ZoieIndexReader < BoboIndexReader > zoieIndexReader : indexReaders ) { if ( zoieIndexReader . getDocIDMaper ( ) . getDocID ( uid ) < <NUM_LIT:0> ) { continue ; } if ( zoieIndexReader instanceof ZoieMultiReader < ? > ) { for ( ZoieIndexReader < BoboIndexReader > segmentReader : ( ( ZoieMultiReader < BoboIndexReader > ) zoieIndexReader ) . getSequentialSubReaders ( ) ) { if ( ! ( segmentReader instanceof ZoieSegmentReader < ? > ) ) { throw new UnsupportedOperationException ( segmentReader . getClass ( ) . toString ( ) ) ; } updateExistingBoboIndexes ( ( ZoieSegmentReader < BoboIndexReader > ) segmentReader , uid , index , facets ) ; } } else if ( zoieIndexReader instanceof ZoieSegmentReader < ? > ) { updateExistingBoboIndexes ( ( ZoieSegmentReader < BoboIndexReader > ) zoieIndexReader , uid , index , facets ) ; } else { throw new UnsupportedOperationException ( zoieIndexReader . getClass ( ) . toString ( ) ) ; } } } catch ( IOException ex ) { logger . error ( ex . getMessage ( ) , ex ) ; } finally { if ( indexReaders != null ) { indexReaderFactory . returnIndexReaders ( indexReaders ) ; } } } } private void updateExistingBoboIndexes ( ZoieSegmentReader < BoboIndexReader > segmentReader , long uid , int index , Set < String > facets ) { int docId = segmentReader . getDocIDMaper ( ) . getDocID ( uid ) ; if ( docId < <NUM_LIT:0> ) { return ; } BoboIndexReader decoratedReader = segmentReader . getDecoratedReader ( ) ; for ( String facet : facets ) { Object facetData = decoratedReader . getFacetData ( facet ) ; if ( ! ( facetData instanceof int [ ] ) ) { logger . warn ( "<STR_LIT>" + facet + "<STR_LIT>" + facetData . getClass ( ) . toString ( ) ) ; continue ; } int [ ] indexes = ( int [ ] ) facetData ; if ( indexes . length <= docId ) { logger . warn ( String . 
format ( "<STR_LIT>" , facet , uid , docId , indexes . length ) ) ; facetMappingMismatch . inc ( ) ; continue ; } if ( indexes [ docId ] > - <NUM_LIT:1> && indexes [ docId ] != index ) { logger . warn ( String . format ( "<STR_LIT>" , facet , uid , docId , index , indexes [ docId ] ) ) ; facetMappingMismatch . inc ( ) ; continue ; } if ( indexes [ docId ] == - <NUM_LIT:1> ) { indexes [ docId ] = index ; recoveredIndexInBoboFacetDataCache . inc ( ) ; } } } public CompositeActivityValues getActivityValues ( ) { return activityValues ; } protected static Map < Integer , CompositeActivityManager > cachedInstances = new ConcurrentHashMap < Integer , CompositeActivityManager > ( ) ; public static boolean activitiesPresent ( SenseiSchema schema ) { for ( FieldDefinition field : schema . getFieldDefMap ( ) . values ( ) ) { if ( field . isActivity ) { return true ; } } return false ; } @ Override public void onDelete ( IndexReader indexReader , long ... uids ) { activityValues . delete ( uids ) ; } public static class TimeAggregateInfo { public String fieldName ; public List < String > times ; public TimeAggregateInfo ( String fieldName , List < String > times ) { this . fieldName = fieldName ; this . times = times ; } public TimeAggregateInfo ( ) { } public static List < TimeAggregateInfo > valueOf ( SenseiSchema senseiSchema ) { List < TimeAggregateInfo > ret = new ArrayList < CompositeActivityManager . TimeAggregateInfo > ( ) ; for ( FacetDefinition facetDefinition : senseiSchema . getFacets ( ) ) { if ( "<STR_LIT>" . equals ( facetDefinition . type ) ) { TimeAggregateInfo aggregateInfo = new TimeAggregateInfo ( ) ; aggregateInfo . fieldName = facetDefinition . column ; aggregateInfo . times = facetDefinition . params . get ( "<STR_LIT>" ) ; ret . add ( aggregateInfo ) ; } } return ret ; } } public void start ( SenseiCore senseiCore ) { recoveredIndexInBoboFacetDataCache = MetricFactory . newCounter ( new MetricName ( CompositeActivityManager . class , "<STR_LIT>" ) ) ; facetMappingMismatch = MetricFactory . newCounter ( new MetricName ( CompositeActivityManager . class , "<STR_LIT>" ) ) ; this . senseiCore = senseiCore ; Set < IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > > zoieSystems = new HashSet < IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > > ( ) ; for ( int partition : senseiCore . getPartitions ( ) ) { if ( senseiCore . getIndexReaderFactory ( partition ) != null ) { zoieSystems . add ( ( IndexReaderFactory < ZoieIndexReader < BoboIndexReader > > ) senseiCore . getIndexReaderFactory ( partition ) ) ; } } int purgeJobFrequencyInMinutes = activityPersistenceFactory . getActivityConfig ( ) . getPurgeJobFrequencyInMinutes ( ) ; purgeUnusedActivitiesJob = new PurgeUnusedActivitiesJob ( activityValues , zoieSystems , purgeJobFrequencyInMinutes * <NUM_LIT> * <NUM_LIT:1000> ) ; purgeUnusedActivitiesJob . start ( ) ; } public void stop ( ) { purgeUnusedActivitiesJob . stop ( ) ; getActivityValues ( ) . flush ( ) ; activityValues . close ( ) ; } @ Override public Set < String > getFieldNames ( ) { Set < String > ret = new HashSet < String > ( ) ; for ( String field : senseiSchema . getFieldDefMap ( ) . keySet ( ) ) { if ( senseiSchema . getFieldDefMap ( ) . get ( field ) . isActivity ) { ret . add ( field ) ; } } return ret ; } @ Override public Set < String > getFacetNames ( ) { Set < String > ret = new HashSet < String > ( ) ; for ( FacetDefinition facet : senseiSchema . getFacets ( ) ) { boolean isActivity = facet . column != null && senseiSchema . 
getFieldDefMap ( ) . containsKey ( facet . column ) && senseiSchema . getFieldDefMap ( ) . get ( facet . column ) . isActivity ; boolean isAggregatedRange = "<STR_LIT>" . equals ( facet . type ) ; if ( isActivity || isAggregatedRange ) { ret . add ( facet . name ) ; } } return ret ; } @ Override public List < FacetHandler < ? > > createFacetHandlers ( ) { Set < String > facets = getFacetNames ( ) ; List < FacetHandler < ? > > ret = new ArrayList < FacetHandler < ? > > ( ) ; for ( FacetDefinition facet : senseiSchema . getFacets ( ) ) { if ( ! facets . contains ( facet . name ) ) { continue ; } ActivityValues activityValues = getActivityValues ( ) . getActivityValuesMap ( ) . get ( facet . column ) ; if ( "<STR_LIT>" . equals ( facet . type ) ) { if ( ! ( activityValues instanceof TimeAggregatedActivityValues ) ) { throw new IllegalStateException ( "<STR_LIT>" + facet . name + "<STR_LIT>" ) ; } TimeAggregatedActivityValues aggregatedActivityValues = ( TimeAggregatedActivityValues ) activityValues ; for ( String time : facet . params . get ( "<STR_LIT>" ) ) { String name = facet . name + "<STR_LIT::>" + time ; ret . add ( ActivityRangeFacetHandler . valueOf ( name , facet . column , getActivityValues ( ) , ( ActivityIntValues ) aggregatedActivityValues . getValuesMap ( ) . get ( time ) ) ) ; } ret . add ( ActivityRangeFacetHandler . valueOf ( facet . name , facet . column , getActivityValues ( ) , ( ActivityIntValues ) aggregatedActivityValues . getDefaultIntValues ( ) ) ) ; } else if ( "<STR_LIT>" . equals ( facet . type ) ) { ret . add ( ActivityRangeFacetHandler . valueOf ( facet . name , facet . column , getActivityValues ( ) , getActivityValues ( ) . getActivityValues ( facet . column ) ) ) ; } else { throw new UnsupportedOperationException ( "<STR_LIT>" + facet . name + "<STR_LIT>" ) ; } } return ret ; } public PurgeUnusedActivitiesJob getPurgeUnusedActivitiesJob ( ) { return purgeUnusedActivitiesJob ; } } </s>
|