text
stringlengths 30
1.67M
|
|---|
<s> package org . apache . gora . sql . statement ; import java . io . IOException ; import java . sql . Connection ; import java . sql . PreparedStatement ; import java . sql . SQLException ; import java . util . Map . Entry ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . sql . store . Column ; import org . apache . gora . sql . store . SqlMapping ; import org . apache . gora . sql . store . SqlStore ; public class HSqlInsertUpdateStatement < K , T extends Persistent > extends InsertUpdateStatement < K , T > { public HSqlInsertUpdateStatement ( SqlStore < K , T > store , SqlMapping mapping , String tableName ) { super ( store , mapping , tableName ) ; } private String getVariable ( String columnName ) { return "<STR_LIT>" + columnName ; } @ Override public PreparedStatement toStatement ( Connection connection ) throws SQLException { int i ; StringBuilder buf = new StringBuilder ( "<STR_LIT>" ) ; buf . append ( tableName ) . append ( "<STR_LIT>" ) ; i = <NUM_LIT:0> ; for ( Entry < String , ColumnData > e : columnMap . entrySet ( ) ) { Column column = e . getValue ( ) . column ; if ( i != <NUM_LIT:0> ) buf . append ( "<STR_LIT:U+002C>" ) ; buf . append ( "<STR_LIT>" ) ; buf . append ( column . getJdbcType ( ) . toString ( ) ) ; if ( column . getScaleOrLength ( ) > <NUM_LIT:0> ) { buf . append ( "<STR_LIT:(>" ) . append ( column . getScaleOrLength ( ) ) . append ( "<STR_LIT:)>" ) ; } buf . append ( "<STR_LIT:)>" ) ; i ++ ; } buf . append ( "<STR_LIT>" ) ; i = <NUM_LIT:0> ; for ( String columnName : columnMap . keySet ( ) ) { if ( i != <NUM_LIT:0> ) buf . append ( "<STR_LIT:U+002C>" ) ; buf . append ( getVariable ( columnName ) ) ; i ++ ; } buf . append ( "<STR_LIT>" ) . append ( tableName ) . append ( "<STR_LIT:.>" ) . append ( mapping . getPrimaryColumnName ( ) ) . append ( "<STR_LIT>" ) ; buf . append ( getVariable ( mapping . getPrimaryColumnName ( ) ) ) ; buf . 
append ( "<STR_LIT>" ) ; i = <NUM_LIT:0> ; for ( String columnName : columnMap . keySet ( ) ) { if ( columnName . equals ( mapping . getPrimaryColumnName ( ) ) ) { continue ; } if ( i != <NUM_LIT:0> ) { buf . append ( "<STR_LIT:U+002C>" ) ; } buf . append ( tableName ) . append ( "<STR_LIT:.>" ) . append ( columnName ) . append ( "<STR_LIT>" ) ; buf . append ( getVariable ( columnName ) ) ; i ++ ; } buf . append ( "<STR_LIT>" ) ; i = <NUM_LIT:0> ; for ( String columnName : columnMap . keySet ( ) ) { if ( i != <NUM_LIT:0> ) { buf . append ( "<STR_LIT:U+002C>" ) ; } buf . append ( columnName ) ; i ++ ; } i = <NUM_LIT:0> ; buf . append ( "<STR_LIT>" ) ; for ( String columnName : columnMap . keySet ( ) ) { if ( i != <NUM_LIT:0> ) { buf . append ( "<STR_LIT:U+002C>" ) ; } buf . append ( "<STR_LIT>" ) . append ( getVariable ( columnName ) ) ; i ++ ; } Column primaryColumn = mapping . getPrimaryColumn ( ) ; PreparedStatement insert = connection . prepareStatement ( buf . toString ( ) ) ; int psIndex = <NUM_LIT:1> ; for ( Entry < String , ColumnData > e : columnMap . entrySet ( ) ) { ColumnData cd = e . getValue ( ) ; Column column = cd . column ; if ( column . getName ( ) . equals ( primaryColumn . getName ( ) ) ) { Object key = columnMap . get ( primaryColumn . getName ( ) ) . object ; if ( primaryColumn . getScaleOrLength ( ) > <NUM_LIT:0> ) { insert . setObject ( psIndex ++ , key , primaryColumn . getJdbcType ( ) . getOrder ( ) , primaryColumn . getScaleOrLength ( ) ) ; } else { insert . setObject ( psIndex ++ , key , primaryColumn . getJdbcType ( ) . getOrder ( ) ) ; } continue ; } try { store . setObject ( insert , psIndex ++ , cd . object , cd . schema , cd . column ) ; } catch ( IOException ex ) { throw new SQLException ( ex ) ; } } return insert ; } } </s>
|
<s> package org . apache . gora . hbase . store ; import java . io . IOException ; import java . util . List ; import java . util . concurrent . BlockingQueue ; import java . util . concurrent . LinkedBlockingQueue ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . hbase . HRegionLocation ; import org . apache . hadoop . hbase . HTableDescriptor ; import org . apache . hadoop . hbase . client . Delete ; import org . apache . hadoop . hbase . client . Get ; import org . apache . hadoop . hbase . client . HTable ; import org . apache . hadoop . hbase . client . HTableInterface ; import org . apache . hadoop . hbase . client . Increment ; import org . apache . hadoop . hbase . client . Put ; import org . apache . hadoop . hbase . client . Result ; import org . apache . hadoop . hbase . client . ResultScanner ; import org . apache . hadoop . hbase . client . Row ; import org . apache . hadoop . hbase . client . RowLock ; import org . apache . hadoop . hbase . client . Scan ; import org . apache . hadoop . hbase . util . Bytes ; import org . apache . hadoop . hbase . util . Pair ; public class HBaseTableConnection implements HTableInterface { private final Configuration conf ; private final ThreadLocal < HTable > tables ; private final BlockingQueue < HTable > pool = new LinkedBlockingQueue < HTable > ( ) ; private final boolean autoflush ; private final String tableName ; public HBaseTableConnection ( Configuration conf , String tableName , boolean autoflush ) throws IOException { this . conf = conf ; this . tables = new ThreadLocal < HTable > ( ) ; this . tableName = tableName ; this . autoflush = autoflush ; } private HTable getTable ( ) throws IOException { HTable table = tables . get ( ) ; if ( table == null ) { table = new HTable ( conf , tableName ) { @ Override public synchronized void flushCommits ( ) throws IOException { super . flushCommits ( ) ; } } ; table . setAutoFlush ( autoflush ) ; pool . add ( table ) ; tables . 
set ( table ) ; } return table ; } @ Override public void close ( ) throws IOException { for ( HTable table : pool ) { table . flushCommits ( ) ; table . close ( ) ; } } @ Override public byte [ ] getTableName ( ) { return Bytes . toBytes ( tableName ) ; } @ Override public Configuration getConfiguration ( ) { return conf ; } @ Override public boolean isAutoFlush ( ) { return autoflush ; } public Pair < byte [ ] [ ] , byte [ ] [ ] > getStartEndKeys ( ) throws IOException { return getTable ( ) . getStartEndKeys ( ) ; } public HRegionLocation getRegionLocation ( final byte [ ] bs ) throws IOException { return getTable ( ) . getRegionLocation ( bs ) ; } @ Override public HTableDescriptor getTableDescriptor ( ) throws IOException { return getTable ( ) . getTableDescriptor ( ) ; } @ Override public boolean exists ( Get get ) throws IOException { return getTable ( ) . exists ( get ) ; } @ Override public void batch ( List < Row > actions , Object [ ] results ) throws IOException , InterruptedException { getTable ( ) . batch ( actions , results ) ; } @ Override public Object [ ] batch ( List < Row > actions ) throws IOException , InterruptedException { return getTable ( ) . batch ( actions ) ; } @ Override public Result get ( Get get ) throws IOException { return getTable ( ) . get ( get ) ; } @ Override public Result [ ] get ( List < Get > gets ) throws IOException { return getTable ( ) . get ( gets ) ; } @ Override public Result getRowOrBefore ( byte [ ] row , byte [ ] family ) throws IOException { return getTable ( ) . getRowOrBefore ( row , family ) ; } @ Override public ResultScanner getScanner ( Scan scan ) throws IOException { return getTable ( ) . getScanner ( scan ) ; } @ Override public ResultScanner getScanner ( byte [ ] family ) throws IOException { return getTable ( ) . getScanner ( family ) ; } @ Override public ResultScanner getScanner ( byte [ ] family , byte [ ] qualifier ) throws IOException { return getTable ( ) . 
getScanner ( family , qualifier ) ; } @ Override public void put ( Put put ) throws IOException { getTable ( ) . put ( put ) ; } @ Override public void put ( List < Put > puts ) throws IOException { getTable ( ) . put ( puts ) ; } @ Override public boolean checkAndPut ( byte [ ] row , byte [ ] family , byte [ ] qualifier , byte [ ] value , Put put ) throws IOException { return getTable ( ) . checkAndPut ( row , family , qualifier , value , put ) ; } @ Override public void delete ( Delete delete ) throws IOException { getTable ( ) . delete ( delete ) ; } @ Override public void delete ( List < Delete > deletes ) throws IOException { getTable ( ) . delete ( deletes ) ; } @ Override public boolean checkAndDelete ( byte [ ] row , byte [ ] family , byte [ ] qualifier , byte [ ] value , Delete delete ) throws IOException { return getTable ( ) . checkAndDelete ( row , family , qualifier , value , delete ) ; } @ Override public Result increment ( Increment increment ) throws IOException { return getTable ( ) . increment ( increment ) ; } @ Override public long incrementColumnValue ( byte [ ] row , byte [ ] family , byte [ ] qualifier , long amount ) throws IOException { return getTable ( ) . incrementColumnValue ( row , family , qualifier , amount ) ; } @ Override public long incrementColumnValue ( byte [ ] row , byte [ ] family , byte [ ] qualifier , long amount , boolean writeToWAL ) throws IOException { return getTable ( ) . incrementColumnValue ( row , family , qualifier , amount , writeToWAL ) ; } @ Override public void flushCommits ( ) throws IOException { for ( HTable table : pool ) { table . flushCommits ( ) ; } } @ Override public RowLock lockRow ( byte [ ] row ) throws IOException { return getTable ( ) . lockRow ( row ) ; } @ Override public void unlockRow ( RowLock rl ) throws IOException { getTable ( ) . unlockRow ( rl ) ; } } </s>
|
<s> package org . apache . gora . hbase . store ; import java . util . Arrays ; class HBaseColumn { final byte [ ] family ; final byte [ ] qualifier ; public HBaseColumn ( byte [ ] family , byte [ ] qualifier ) { this . family = family == null ? null : Arrays . copyOf ( family , family . length ) ; this . qualifier = qualifier == null ? null : Arrays . copyOf ( qualifier , qualifier . length ) ; } public byte [ ] getFamily ( ) { return family ; } public byte [ ] getQualifier ( ) { return qualifier ; } @ Override public int hashCode ( ) { final int prime = <NUM_LIT:31> ; int result = <NUM_LIT:1> ; result = prime * result + Arrays . hashCode ( family ) ; result = prime * result + Arrays . hashCode ( qualifier ) ; return result ; } @ Override public boolean equals ( Object obj ) { if ( this == obj ) return true ; if ( obj == null ) return false ; if ( getClass ( ) != obj . getClass ( ) ) return false ; HBaseColumn other = ( HBaseColumn ) obj ; if ( ! Arrays . equals ( family , other . family ) ) return false ; if ( ! Arrays . equals ( qualifier , other . qualifier ) ) return false ; return true ; } @ Override public String toString ( ) { return "<STR_LIT>" + Arrays . toString ( family ) + "<STR_LIT>" + Arrays . toString ( qualifier ) + "<STR_LIT:]>" ; } } </s>
|
<s> package org . apache . gora . hbase . store ; import java . util . HashMap ; import java . util . Map ; import org . apache . hadoop . hbase . HColumnDescriptor ; import org . apache . hadoop . hbase . HTableDescriptor ; import org . apache . hadoop . hbase . io . hfile . Compression . Algorithm ; import org . apache . hadoop . hbase . regionserver . StoreFile . BloomType ; import org . apache . hadoop . hbase . util . Bytes ; public class HBaseMapping { private final HTableDescriptor tableDescriptor ; private final Map < String , HBaseColumn > columnMap ; public HBaseMapping ( HTableDescriptor tableDescriptor , Map < String , HBaseColumn > columnMap ) { super ( ) ; this . tableDescriptor = tableDescriptor ; this . columnMap = columnMap ; } public String getTableName ( ) { return tableDescriptor . getNameAsString ( ) ; } public HTableDescriptor getTable ( ) { return tableDescriptor ; } public HBaseColumn getColumn ( String fieldName ) { return columnMap . get ( fieldName ) ; } public static class HBaseMappingBuilder { private Map < String , Map < String , HColumnDescriptor > > tableToFamilies = new HashMap < String , Map < String , HColumnDescriptor > > ( ) ; private Map < String , HBaseColumn > columnMap = new HashMap < String , HBaseColumn > ( ) ; private String tableName ; public String getTableName ( ) { return tableName ; } public void setTableName ( String tableName ) { this . tableName = tableName ; } public void addFamilyProps ( String tableName , String familyName , String compression , String blockCache , String blockSize , String bloomFilter , String maxVersions , String timeToLive , String inMemory ) { Map < String , HColumnDescriptor > families = getOrCreateFamilies ( tableName ) ; ; HColumnDescriptor columnDescriptor = getOrCreateFamily ( familyName , families ) ; if ( compression != null ) columnDescriptor . setCompressionType ( Algorithm . valueOf ( compression ) ) ; if ( blockCache != null ) columnDescriptor . setBlockCacheEnabled ( Boolean . 
parseBoolean ( blockCache ) ) ; if ( blockSize != null ) columnDescriptor . setBlocksize ( Integer . parseInt ( blockSize ) ) ; if ( bloomFilter != null ) columnDescriptor . setBloomFilterType ( BloomType . valueOf ( bloomFilter ) ) ; if ( maxVersions != null ) columnDescriptor . setMaxVersions ( Integer . parseInt ( maxVersions ) ) ; if ( timeToLive != null ) columnDescriptor . setTimeToLive ( Integer . parseInt ( timeToLive ) ) ; if ( inMemory != null ) columnDescriptor . setInMemory ( Boolean . parseBoolean ( inMemory ) ) ; } public void addColumnFamily ( String tableName , String familyName ) { Map < String , HColumnDescriptor > families = getOrCreateFamilies ( tableName ) ; getOrCreateFamily ( familyName , families ) ; } public void addField ( String fieldName , String family , String qualifier ) { byte [ ] familyBytes = Bytes . toBytes ( family ) ; byte [ ] qualifierBytes = qualifier == null ? null : Bytes . toBytes ( qualifier ) ; HBaseColumn column = new HBaseColumn ( familyBytes , qualifierBytes ) ; columnMap . put ( fieldName , column ) ; } private HColumnDescriptor getOrCreateFamily ( String familyName , Map < String , HColumnDescriptor > families ) { HColumnDescriptor columnDescriptor = families . get ( familyName ) ; if ( columnDescriptor == null ) { columnDescriptor = new HColumnDescriptor ( familyName ) ; families . put ( familyName , columnDescriptor ) ; } return columnDescriptor ; } private Map < String , HColumnDescriptor > getOrCreateFamilies ( String tableName ) { Map < String , HColumnDescriptor > families ; families = tableToFamilies . get ( tableName ) ; if ( families == null ) { families = new HashMap < String , HColumnDescriptor > ( ) ; tableToFamilies . put ( tableName , families ) ; } return families ; } public void renameTable ( String oldName , String newName ) { Map < String , HColumnDescriptor > families = tableToFamilies . 
remove ( oldName ) ; if ( families == null ) throw new IllegalArgumentException ( oldName + "<STR_LIT>" ) ; tableToFamilies . put ( newName , families ) ; } public HBaseMapping build ( ) { if ( tableName == null ) throw new IllegalStateException ( "<STR_LIT>" ) ; Map < String , HColumnDescriptor > families = tableToFamilies . get ( tableName ) ; if ( families == null ) throw new IllegalStateException ( "<STR_LIT>" + tableName ) ; HTableDescriptor tableDescriptors = new HTableDescriptor ( tableName ) ; for ( HColumnDescriptor desc : families . values ( ) ) { tableDescriptors . addFamily ( desc ) ; } return new HBaseMapping ( tableDescriptors , columnMap ) ; } } } </s>
|
<s> package org . apache . gora . hbase . store ; import static org . apache . gora . hbase . util . HBaseByteInterface . fromBytes ; import static org . apache . gora . hbase . util . HBaseByteInterface . toBytes ; import java . io . FileNotFoundException ; import java . io . IOException ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . HashMap ; import java . util . Iterator ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . NavigableMap ; import java . util . Properties ; import java . util . Set ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Field ; import org . apache . avro . Schema . Type ; import org . apache . avro . generic . GenericArray ; import org . apache . avro . util . Utf8 ; import org . slf4j . Logger ; import org . slf4j . LoggerFactory ; import org . apache . gora . hbase . query . HBaseGetResult ; import org . apache . gora . hbase . query . HBaseQuery ; import org . apache . gora . hbase . query . HBaseScannerResult ; import org . apache . gora . hbase . store . HBaseMapping . HBaseMappingBuilder ; import org . apache . gora . hbase . util . HBaseByteInterface ; import org . apache . gora . persistency . ListGenericArray ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . persistency . State ; import org . apache . gora . persistency . StateManager ; import org . apache . gora . persistency . StatefulHashMap ; import org . apache . gora . persistency . StatefulMap ; import org . apache . gora . query . PartitionQuery ; import org . apache . gora . query . Query ; import org . apache . gora . query . impl . PartitionQueryImpl ; import org . apache . gora . store . impl . DataStoreBase ; import org . apache . hadoop . conf . Configurable ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . hbase . HBaseConfiguration ; import org . apache . hadoop . 
hbase . HConstants ; import org . apache . hadoop . hbase . HTableDescriptor ; import org . apache . hadoop . hbase . client . Delete ; import org . apache . hadoop . hbase . client . Get ; import org . apache . hadoop . hbase . client . HBaseAdmin ; import org . apache . hadoop . hbase . client . Put ; import org . apache . hadoop . hbase . client . Result ; import org . apache . hadoop . hbase . client . ResultScanner ; import org . apache . hadoop . hbase . client . Scan ; import org . apache . hadoop . hbase . util . Bytes ; import org . apache . hadoop . hbase . util . Pair ; import org . jdom . Document ; import org . jdom . Element ; import org . jdom . input . SAXBuilder ; public class HBaseStore < K , T extends Persistent > extends DataStoreBase < K , T > implements Configurable { public static final Logger LOG = LoggerFactory . getLogger ( HBaseStore . class ) ; public static final String PARSE_MAPPING_FILE_KEY = "<STR_LIT>" ; @ Deprecated private static final String DEPRECATED_MAPPING_FILE = "<STR_LIT>" ; public static final String DEFAULT_MAPPING_FILE = "<STR_LIT>" ; private volatile HBaseAdmin admin ; private volatile HBaseTableConnection table ; private final boolean autoCreateSchema = true ; private volatile HBaseMapping mapping ; public HBaseStore ( ) { } @ Override public void initialize ( Class < K > keyClass , Class < T > persistentClass , Properties properties ) throws IOException { super . initialize ( keyClass , persistentClass , properties ) ; this . conf = HBaseConfiguration . create ( getConf ( ) ) ; admin = new HBaseAdmin ( this . conf ) ; try { mapping = readMapping ( getConf ( ) . get ( PARSE_MAPPING_FILE_KEY , DEFAULT_MAPPING_FILE ) ) ; } catch ( FileNotFoundException ex ) { try { mapping = readMapping ( getConf ( ) . get ( PARSE_MAPPING_FILE_KEY , DEPRECATED_MAPPING_FILE ) ) ; LOG . 
warn ( DEPRECATED_MAPPING_FILE + "<STR_LIT>" + DEFAULT_MAPPING_FILE ) ; } catch ( FileNotFoundException ex1 ) { throw ex ; } catch ( Exception ex1 ) { LOG . warn ( DEPRECATED_MAPPING_FILE + "<STR_LIT>" + DEFAULT_MAPPING_FILE ) ; throw new RuntimeException ( ex1 ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } if ( autoCreateSchema ) { createSchema ( ) ; } boolean autoflush = this . conf . getBoolean ( "<STR_LIT>" , false ) ; table = new HBaseTableConnection ( getConf ( ) , getSchemaName ( ) , autoflush ) ; } @ Override public String getSchemaName ( ) { return mapping . getTableName ( ) ; } @ Override public void createSchema ( ) throws IOException { if ( schemaExists ( ) ) { return ; } HTableDescriptor tableDesc = mapping . getTable ( ) ; admin . createTable ( tableDesc ) ; } @ Override public void deleteSchema ( ) throws IOException { if ( ! schemaExists ( ) ) { return ; } admin . disableTable ( getSchemaName ( ) ) ; admin . deleteTable ( getSchemaName ( ) ) ; } @ Override public boolean schemaExists ( ) throws IOException { return admin . tableExists ( mapping . getTableName ( ) ) ; } @ Override public T get ( K key , String [ ] fields ) throws IOException { fields = getFieldsToQuery ( fields ) ; Get get = new Get ( toBytes ( key ) ) ; addFields ( get , fields ) ; Result result = table . get ( get ) ; return newInstance ( result , fields ) ; } @ SuppressWarnings ( { "<STR_LIT:unchecked>" , "<STR_LIT:rawtypes>" } ) @ Override public void put ( K key , T persistent ) throws IOException { Schema schema = persistent . getSchema ( ) ; StateManager stateManager = persistent . getStateManager ( ) ; byte [ ] keyRaw = toBytes ( key ) ; Put put = new Put ( keyRaw ) ; Delete delete = new Delete ( keyRaw ) ; boolean hasPuts = false ; boolean hasDeletes = false ; Iterator < Field > iter = schema . getFields ( ) . iterator ( ) ; for ( int i = <NUM_LIT:0> ; iter . hasNext ( ) ; i ++ ) { Field field = iter . next ( ) ; if ( ! stateManager . 
isDirty ( persistent , i ) ) { continue ; } Type type = field . schema ( ) . getType ( ) ; Object o = persistent . get ( i ) ; HBaseColumn hcol = mapping . getColumn ( field . name ( ) ) ; switch ( type ) { case MAP : if ( o instanceof StatefulMap ) { StatefulHashMap < Utf8 , ? > map = ( StatefulHashMap < Utf8 , ? > ) o ; for ( Entry < Utf8 , State > e : map . states ( ) . entrySet ( ) ) { Utf8 mapKey = e . getKey ( ) ; switch ( e . getValue ( ) ) { case DIRTY : byte [ ] qual = Bytes . toBytes ( mapKey . toString ( ) ) ; byte [ ] val = toBytes ( map . get ( mapKey ) , field . schema ( ) . getValueType ( ) ) ; put . add ( hcol . getFamily ( ) , qual , val ) ; hasPuts = true ; break ; case DELETED : qual = Bytes . toBytes ( mapKey . toString ( ) ) ; hasDeletes = true ; delete . deleteColumn ( hcol . getFamily ( ) , qual ) ; break ; } } } else { Set < Map . Entry > set = ( ( Map ) o ) . entrySet ( ) ; for ( Entry entry : set ) { byte [ ] qual = toBytes ( entry . getKey ( ) ) ; byte [ ] val = toBytes ( entry . getValue ( ) ) ; put . add ( hcol . getFamily ( ) , qual , val ) ; hasPuts = true ; } } break ; case ARRAY : if ( o instanceof GenericArray ) { GenericArray arr = ( GenericArray ) o ; int j = <NUM_LIT:0> ; for ( Object item : arr ) { byte [ ] val = toBytes ( item ) ; put . add ( hcol . getFamily ( ) , Bytes . toBytes ( j ++ ) , val ) ; hasPuts = true ; } } break ; default : put . add ( hcol . getFamily ( ) , hcol . getQualifier ( ) , toBytes ( o , field . schema ( ) ) ) ; hasPuts = true ; break ; } } if ( hasPuts ) { table . put ( put ) ; } if ( hasDeletes ) { table . delete ( delete ) ; } } public void delete ( T obj ) { throw new RuntimeException ( "<STR_LIT>" ) ; } @ Override public boolean delete ( K key ) throws IOException { table . delete ( new Delete ( toBytes ( key ) ) ) ; return true ; } @ Override public long deleteByQuery ( Query < K , T > query ) throws IOException { String [ ] fields = getFieldsToQuery ( query . 
getFields ( ) ) ; boolean isAllFields = Arrays . equals ( fields , getBeanFactory ( ) . getCachedPersistent ( ) . getFields ( ) ) ; org . apache . gora . query . Result < K , T > result = query . execute ( ) ; ArrayList < Delete > deletes = new ArrayList < Delete > ( ) ; while ( result . next ( ) ) { Delete delete = new Delete ( toBytes ( result . getKey ( ) ) ) ; deletes . add ( delete ) ; if ( ! isAllFields ) { addFields ( delete , query ) ; } } table . delete ( deletes ) ; return deletes . size ( ) ; } @ Override public void flush ( ) throws IOException { table . flushCommits ( ) ; } @ Override public Query < K , T > newQuery ( ) { return new HBaseQuery < K , T > ( this ) ; } @ Override public List < PartitionQuery < K , T > > getPartitions ( Query < K , T > query ) throws IOException { Pair < byte [ ] [ ] , byte [ ] [ ] > keys = table . getStartEndKeys ( ) ; if ( keys == null || keys . getFirst ( ) == null || keys . getFirst ( ) . length == <NUM_LIT:0> ) { throw new IOException ( "<STR_LIT>" ) ; } if ( table == null ) { throw new IOException ( "<STR_LIT>" ) ; } List < PartitionQuery < K , T > > partitions = new ArrayList < PartitionQuery < K , T > > ( keys . getFirst ( ) . length ) ; for ( int i = <NUM_LIT:0> ; i < keys . getFirst ( ) . length ; i ++ ) { String regionLocation = table . getRegionLocation ( keys . getFirst ( ) [ i ] ) . getServerAddress ( ) . getHostname ( ) ; byte [ ] startRow = query . getStartKey ( ) != null ? toBytes ( query . getStartKey ( ) ) : HConstants . EMPTY_START_ROW ; byte [ ] stopRow = query . getEndKey ( ) != null ? toBytes ( query . getEndKey ( ) ) : HConstants . EMPTY_END_ROW ; if ( ( startRow . length == <NUM_LIT:0> || keys . getSecond ( ) [ i ] . length == <NUM_LIT:0> || Bytes . compareTo ( startRow , keys . getSecond ( ) [ i ] ) < <NUM_LIT:0> ) && ( stopRow . length == <NUM_LIT:0> || Bytes . compareTo ( stopRow , keys . getFirst ( ) [ i ] ) > <NUM_LIT:0> ) ) { byte [ ] splitStart = startRow . length == <NUM_LIT:0> || Bytes . 
compareTo ( keys . getFirst ( ) [ i ] , startRow ) >= <NUM_LIT:0> ? keys . getFirst ( ) [ i ] : startRow ; byte [ ] splitStop = ( stopRow . length == <NUM_LIT:0> || Bytes . compareTo ( keys . getSecond ( ) [ i ] , stopRow ) <= <NUM_LIT:0> ) && keys . getSecond ( ) [ i ] . length > <NUM_LIT:0> ? keys . getSecond ( ) [ i ] : stopRow ; K startKey = Arrays . equals ( HConstants . EMPTY_START_ROW , splitStart ) ? null : HBaseByteInterface . fromBytes ( keyClass , splitStart ) ; K endKey = Arrays . equals ( HConstants . EMPTY_END_ROW , splitStop ) ? null : HBaseByteInterface . fromBytes ( keyClass , splitStop ) ; PartitionQuery < K , T > partition = new PartitionQueryImpl < K , T > ( query , startKey , endKey , regionLocation ) ; partitions . add ( partition ) ; } } return partitions ; } @ Override public org . apache . gora . query . Result < K , T > execute ( Query < K , T > query ) throws IOException { query . setFields ( getFieldsToQuery ( query . getFields ( ) ) ) ; if ( query . getStartKey ( ) != null && query . getStartKey ( ) . equals ( query . getEndKey ( ) ) ) { Get get = new Get ( toBytes ( query . getStartKey ( ) ) ) ; addFields ( get , query . getFields ( ) ) ; addTimeRange ( get , query ) ; Result result = table . get ( get ) ; return new HBaseGetResult < K , T > ( this , query , result ) ; } else { ResultScanner scanner = createScanner ( query ) ; org . apache . gora . query . Result < K , T > result = new HBaseScannerResult < K , T > ( this , query , scanner ) ; return result ; } } public ResultScanner createScanner ( Query < K , T > query ) throws IOException { final Scan scan = new Scan ( ) ; if ( query . getStartKey ( ) != null ) { scan . setStartRow ( toBytes ( query . getStartKey ( ) ) ) ; } if ( query . getEndKey ( ) != null ) { scan . setStopRow ( toBytes ( query . getEndKey ( ) ) ) ; } addFields ( scan , query ) ; return table . 
getScanner ( scan ) ; } private void addFields ( Get get , String [ ] fieldNames ) { for ( String f : fieldNames ) { HBaseColumn col = mapping . getColumn ( f ) ; Schema fieldSchema = fieldMap . get ( f ) . schema ( ) ; switch ( fieldSchema . getType ( ) ) { case MAP : case ARRAY : get . addFamily ( col . family ) ; break ; default : get . addColumn ( col . family , col . qualifier ) ; break ; } } } private void addFields ( Scan scan , Query < K , T > query ) throws IOException { String [ ] fields = query . getFields ( ) ; for ( String f : fields ) { HBaseColumn col = mapping . getColumn ( f ) ; Schema fieldSchema = fieldMap . get ( f ) . schema ( ) ; switch ( fieldSchema . getType ( ) ) { case MAP : case ARRAY : scan . addFamily ( col . family ) ; break ; default : scan . addColumn ( col . family , col . qualifier ) ; break ; } } } private void addFields ( Delete delete , Query < K , T > query ) throws IOException { String [ ] fields = query . getFields ( ) ; for ( String f : fields ) { HBaseColumn col = mapping . getColumn ( f ) ; Schema fieldSchema = fieldMap . get ( f ) . schema ( ) ; switch ( fieldSchema . getType ( ) ) { case MAP : case ARRAY : delete . deleteFamily ( col . family ) ; break ; default : delete . deleteColumn ( col . family , col . qualifier ) ; break ; } } } private void addTimeRange ( Get get , Query < K , T > query ) throws IOException { if ( query . getStartTime ( ) > <NUM_LIT:0> || query . getEndTime ( ) > <NUM_LIT:0> ) { if ( query . getStartTime ( ) == query . getEndTime ( ) ) { get . setTimeStamp ( query . getStartTime ( ) ) ; } else { long startTime = query . getStartTime ( ) > <NUM_LIT:0> ? query . getStartTime ( ) : <NUM_LIT:0> ; long endTime = query . getEndTime ( ) > <NUM_LIT:0> ? query . getEndTime ( ) : Long . MAX_VALUE ; get . 
setTimeRange ( startTime , endTime ) ; } } } @ SuppressWarnings ( { "<STR_LIT:unchecked>" , "<STR_LIT:rawtypes>" } ) public T newInstance ( Result result , String [ ] fields ) throws IOException { if ( result == null || result . isEmpty ( ) ) return null ; T persistent = newPersistent ( ) ; StateManager stateManager = persistent . getStateManager ( ) ; for ( String f : fields ) { HBaseColumn col = mapping . getColumn ( f ) ; Field field = fieldMap . get ( f ) ; Schema fieldSchema = field . schema ( ) ; switch ( fieldSchema . getType ( ) ) { case MAP : NavigableMap < byte [ ] , byte [ ] > qualMap = result . getNoVersionMap ( ) . get ( col . getFamily ( ) ) ; if ( qualMap == null ) { continue ; } Schema valueSchema = fieldSchema . getValueType ( ) ; Map map = new HashMap ( ) ; for ( Entry < byte [ ] , byte [ ] > e : qualMap . entrySet ( ) ) { map . put ( new Utf8 ( Bytes . toString ( e . getKey ( ) ) ) , fromBytes ( valueSchema , e . getValue ( ) ) ) ; } setField ( persistent , field , map ) ; break ; case ARRAY : qualMap = result . getFamilyMap ( col . getFamily ( ) ) ; if ( qualMap == null ) { continue ; } valueSchema = fieldSchema . getElementType ( ) ; ArrayList arrayList = new ArrayList ( ) ; for ( Entry < byte [ ] , byte [ ] > e : qualMap . entrySet ( ) ) { arrayList . add ( fromBytes ( valueSchema , e . getValue ( ) ) ) ; } ListGenericArray arr = new ListGenericArray ( fieldSchema , arrayList ) ; setField ( persistent , field , arr ) ; break ; default : byte [ ] val = result . getValue ( col . getFamily ( ) , col . getQualifier ( ) ) ; if ( val == null ) { continue ; } setField ( persistent , field , val ) ; break ; } } stateManager . clearDirty ( persistent ) ; return persistent ; } @ SuppressWarnings ( { "<STR_LIT:unchecked>" , "<STR_LIT:rawtypes>" } ) private void setField ( T persistent , Field field , Map map ) { persistent . put ( field . 
pos ( ) , new StatefulHashMap ( map ) ) ; } private void setField ( T persistent , Field field , byte [ ] val ) throws IOException { persistent . put ( field . pos ( ) , fromBytes ( field . schema ( ) , val ) ) ; } @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) private void setField ( T persistent , Field field , GenericArray list ) { persistent . put ( field . pos ( ) , list ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) private HBaseMapping readMapping ( String filename ) throws IOException { HBaseMappingBuilder mappingBuilder = new HBaseMappingBuilder ( ) ; try { SAXBuilder builder = new SAXBuilder ( ) ; Document doc = builder . build ( getClass ( ) . getClassLoader ( ) . getResourceAsStream ( filename ) ) ; Element root = doc . getRootElement ( ) ; List < Element > tableElements = root . getChildren ( "<STR_LIT>" ) ; for ( Element tableElement : tableElements ) { String tableName = tableElement . getAttributeValue ( "<STR_LIT:name>" ) ; List < Element > fieldElements = tableElement . getChildren ( "<STR_LIT>" ) ; for ( Element fieldElement : fieldElements ) { String familyName = fieldElement . getAttributeValue ( "<STR_LIT:name>" ) ; String compression = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; String blockCache = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; String blockSize = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; String bloomFilter = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; String maxVersions = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; String timeToLive = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; String inMemory = fieldElement . getAttributeValue ( "<STR_LIT>" ) ; mappingBuilder . addFamilyProps ( tableName , familyName , compression , blockCache , blockSize , bloomFilter , maxVersions , timeToLive , inMemory ) ; } } List < Element > classElements = root . getChildren ( "<STR_LIT:class>" ) ; for ( Element classElement : classElements ) { if ( classElement . getAttributeValue ( "<STR_LIT>" ) . 
equals ( keyClass . getCanonicalName ( ) ) && classElement . getAttributeValue ( "<STR_LIT:name>" ) . equals ( persistentClass . getCanonicalName ( ) ) ) { String tableNameFromMapping = classElement . getAttributeValue ( "<STR_LIT>" ) ; String tableName = getSchemaName ( tableNameFromMapping , persistentClass ) ; if ( ! tableName . equals ( tableNameFromMapping ) ) { LOG . info ( "<STR_LIT>" + "<STR_LIT>" + tableNameFromMapping + "<STR_LIT>" + tableName + "<STR_LIT>" ) ; if ( tableNameFromMapping != null ) { mappingBuilder . renameTable ( tableNameFromMapping , tableName ) ; } } mappingBuilder . setTableName ( tableName ) ; List < Element > fields = classElement . getChildren ( "<STR_LIT:field>" ) ; for ( Element field : fields ) { String fieldName = field . getAttributeValue ( "<STR_LIT:name>" ) ; String family = field . getAttributeValue ( "<STR_LIT>" ) ; String qualifier = field . getAttributeValue ( "<STR_LIT>" ) ; mappingBuilder . addField ( fieldName , family , qualifier ) ; mappingBuilder . addColumnFamily ( tableName , family ) ; } break ; } } } catch ( IOException ex ) { throw ex ; } catch ( Exception ex ) { throw new IOException ( ex ) ; } return mappingBuilder . build ( ) ; } @ Override public void close ( ) throws IOException { table . close ( ) ; } @ Override public Configuration getConf ( ) { return conf ; } @ Override public void setConf ( Configuration conf ) { this . conf = conf ; } } </s>
|
<s> package org . apache . gora . hbase . util ; import java . io . ByteArrayOutputStream ; import java . io . IOException ; import java . io . OutputStream ; import java . nio . ByteBuffer ; import java . util . HashMap ; import java . util . Map ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Type ; import org . apache . avro . io . BinaryDecoder ; import org . apache . avro . io . BinaryEncoder ; import org . apache . avro . io . DecoderFactory ; import org . apache . avro . specific . SpecificDatumReader ; import org . apache . avro . specific . SpecificDatumWriter ; import org . apache . avro . util . Utf8 ; import org . apache . gora . util . AvroUtils ; import org . apache . hadoop . hbase . util . Bytes ; public class HBaseByteInterface { public static final ThreadLocal < BinaryDecoder > decoders = new ThreadLocal < BinaryDecoder > ( ) ; public static final ThreadLocal < BinaryEncoderWithStream > encoders = new ThreadLocal < BinaryEncoderWithStream > ( ) ; public static final class BinaryEncoderWithStream extends BinaryEncoder { public BinaryEncoderWithStream ( OutputStream out ) { super ( out ) ; } protected OutputStream getOut ( ) { return out ; } } public static final ThreadLocal < Map < String , SpecificDatumReader < ? > > > readerMaps = new ThreadLocal < Map < String , SpecificDatumReader < ? > > > ( ) { protected Map < String , SpecificDatumReader < ? > > initialValue ( ) { return new HashMap < String , SpecificDatumReader < ? > > ( ) ; } ; } ; public static final ThreadLocal < Map < String , SpecificDatumWriter < ? > > > writerMaps = new ThreadLocal < Map < String , SpecificDatumWriter < ? > > > ( ) { protected Map < String , SpecificDatumWriter < ? > > initialValue ( ) { return new HashMap < String , SpecificDatumWriter < ? > > ( ) ; } ; } ; @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public static Object fromBytes ( Schema schema , byte [ ] val ) throws IOException { Type type = schema . 
getType ( ) ; switch ( type ) { case ENUM : return AvroUtils . getEnumValue ( schema , val [ <NUM_LIT:0> ] ) ; case STRING : return new Utf8 ( Bytes . toString ( val ) ) ; case BYTES : return ByteBuffer . wrap ( val ) ; case INT : return Bytes . toInt ( val ) ; case LONG : return Bytes . toLong ( val ) ; case FLOAT : return Bytes . toFloat ( val ) ; case DOUBLE : return Bytes . toDouble ( val ) ; case BOOLEAN : return val [ <NUM_LIT:0> ] != <NUM_LIT:0> ; case RECORD : Map < String , SpecificDatumReader < ? > > readerMap = readerMaps . get ( ) ; SpecificDatumReader < ? > reader = readerMap . get ( schema . getFullName ( ) ) ; if ( reader == null ) { reader = new SpecificDatumReader ( schema ) ; readerMap . put ( schema . getFullName ( ) , reader ) ; } BinaryDecoder decoderFromCache = decoders . get ( ) ; BinaryDecoder decoder = DecoderFactory . defaultFactory ( ) . createBinaryDecoder ( val , decoderFromCache ) ; if ( decoderFromCache == null ) { decoders . set ( decoder ) ; } return reader . read ( null , decoder ) ; default : throw new RuntimeException ( "<STR_LIT>" + type ) ; } } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < K > K fromBytes ( Class < K > clazz , byte [ ] val ) { if ( clazz . equals ( Byte . TYPE ) || clazz . equals ( Byte . class ) ) { return ( K ) Byte . valueOf ( val [ <NUM_LIT:0> ] ) ; } else if ( clazz . equals ( Boolean . TYPE ) || clazz . equals ( Boolean . class ) ) { return ( K ) Boolean . valueOf ( val [ <NUM_LIT:0> ] == <NUM_LIT:0> ? false : true ) ; } else if ( clazz . equals ( Short . TYPE ) || clazz . equals ( Short . class ) ) { return ( K ) Short . valueOf ( Bytes . toShort ( val ) ) ; } else if ( clazz . equals ( Integer . TYPE ) || clazz . equals ( Integer . class ) ) { return ( K ) Integer . valueOf ( Bytes . toInt ( val ) ) ; } else if ( clazz . equals ( Long . TYPE ) || clazz . equals ( Long . class ) ) { return ( K ) Long . valueOf ( Bytes . toLong ( val ) ) ; } else if ( clazz . equals ( Float . 
TYPE ) || clazz . equals ( Float . class ) ) { return ( K ) Float . valueOf ( Bytes . toFloat ( val ) ) ; } else if ( clazz . equals ( Double . TYPE ) || clazz . equals ( Double . class ) ) { return ( K ) Double . valueOf ( Bytes . toDouble ( val ) ) ; } else if ( clazz . equals ( String . class ) ) { return ( K ) Bytes . toString ( val ) ; } else if ( clazz . equals ( Utf8 . class ) ) { return ( K ) new Utf8 ( Bytes . toString ( val ) ) ; } throw new RuntimeException ( "<STR_LIT>" + clazz ) ; } public static byte [ ] toBytes ( Object o ) { Class < ? > clazz = o . getClass ( ) ; if ( clazz . equals ( Enum . class ) ) { return new byte [ ] { ( byte ) ( ( Enum < ? > ) o ) . ordinal ( ) } ; } else if ( clazz . equals ( Byte . TYPE ) || clazz . equals ( Byte . class ) ) { return new byte [ ] { ( Byte ) o } ; } else if ( clazz . equals ( Boolean . TYPE ) || clazz . equals ( Boolean . class ) ) { return new byte [ ] { ( ( Boolean ) o ? ( byte ) <NUM_LIT:1> : ( byte ) <NUM_LIT:0> ) } ; } else if ( clazz . equals ( Short . TYPE ) || clazz . equals ( Short . class ) ) { return Bytes . toBytes ( ( Short ) o ) ; } else if ( clazz . equals ( Integer . TYPE ) || clazz . equals ( Integer . class ) ) { return Bytes . toBytes ( ( Integer ) o ) ; } else if ( clazz . equals ( Long . TYPE ) || clazz . equals ( Long . class ) ) { return Bytes . toBytes ( ( Long ) o ) ; } else if ( clazz . equals ( Float . TYPE ) || clazz . equals ( Float . class ) ) { return Bytes . toBytes ( ( Float ) o ) ; } else if ( clazz . equals ( Double . TYPE ) || clazz . equals ( Double . class ) ) { return Bytes . toBytes ( ( Double ) o ) ; } else if ( clazz . equals ( String . class ) ) { return Bytes . toBytes ( ( String ) o ) ; } else if ( clazz . equals ( Utf8 . class ) ) { return ( ( Utf8 ) o ) . 
getBytes ( ) ; } throw new RuntimeException ( "<STR_LIT>" + clazz ) ; } @ SuppressWarnings ( { "<STR_LIT:rawtypes>" , "<STR_LIT:unchecked>" } ) public static byte [ ] toBytes ( Object o , Schema schema ) throws IOException { Type type = schema . getType ( ) ; switch ( type ) { case STRING : return Bytes . toBytes ( ( ( Utf8 ) o ) . toString ( ) ) ; case BYTES : return ( ( ByteBuffer ) o ) . array ( ) ; case INT : return Bytes . toBytes ( ( Integer ) o ) ; case LONG : return Bytes . toBytes ( ( Long ) o ) ; case FLOAT : return Bytes . toBytes ( ( Float ) o ) ; case DOUBLE : return Bytes . toBytes ( ( Double ) o ) ; case BOOLEAN : return ( Boolean ) o ? new byte [ ] { <NUM_LIT:1> } : new byte [ ] { <NUM_LIT:0> } ; case ENUM : return new byte [ ] { ( byte ) ( ( Enum < ? > ) o ) . ordinal ( ) } ; case RECORD : Map < String , SpecificDatumWriter < ? > > writerMap = writerMaps . get ( ) ; SpecificDatumWriter writer = writerMap . get ( schema . getFullName ( ) ) ; if ( writer == null ) { writer = new SpecificDatumWriter ( schema ) ; writerMap . put ( schema . getFullName ( ) , writer ) ; } BinaryEncoderWithStream encoder = encoders . get ( ) ; if ( encoder == null ) { encoder = new BinaryEncoderWithStream ( new ByteArrayOutputStream ( ) ) ; encoders . set ( encoder ) ; } ByteArrayOutputStream os = ( ByteArrayOutputStream ) encoder . getOut ( ) ; os . reset ( ) ; writer . write ( o , encoder ) ; encoder . flush ( ) ; return os . toByteArray ( ) ; default : throw new RuntimeException ( "<STR_LIT>" + type ) ; } } } </s>
|
<s> package org . apache . gora . hbase . query ; import java . io . IOException ; import org . apache . gora . hbase . store . HBaseStore ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . hadoop . hbase . client . Result ; import org . apache . hadoop . hbase . client . ResultScanner ; public class HBaseScannerResult < K , T extends Persistent > extends HBaseResult < K , T > { private final ResultScanner scanner ; public HBaseScannerResult ( HBaseStore < K , T > dataStore , Query < K , T > query , ResultScanner scanner ) { super ( dataStore , query ) ; this . scanner = scanner ; } @ Override protected void clear ( ) { } @ Override public boolean nextInner ( ) throws IOException { Result result = scanner . next ( ) ; if ( result == null ) { return false ; } readNext ( result ) ; return true ; } @ Override public void close ( ) throws IOException { scanner . close ( ) ; } @ Override public float getProgress ( ) throws IOException { return <NUM_LIT:0> ; } } </s>
|
<s> package org . apache . gora . hbase . query ; import static org . apache . gora . hbase . util . HBaseByteInterface . fromBytes ; import java . io . IOException ; import org . apache . gora . hbase . store . HBaseStore ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . impl . ResultBase ; import org . apache . hadoop . hbase . client . Result ; public abstract class HBaseResult < K , T extends Persistent > extends ResultBase < K , T > { public HBaseResult ( HBaseStore < K , T > dataStore , Query < K , T > query ) { super ( dataStore , query ) ; } @ Override public HBaseStore < K , T > getDataStore ( ) { return ( HBaseStore < K , T > ) super . getDataStore ( ) ; } protected void readNext ( Result result ) throws IOException { key = fromBytes ( getKeyClass ( ) , result . getRow ( ) ) ; persistent = getDataStore ( ) . newInstance ( result , query . getFields ( ) ) ; } } </s>
|
<s> package org . apache . gora . hbase . query ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . impl . QueryBase ; import org . apache . gora . store . DataStore ; public class HBaseQuery < K , T extends Persistent > extends QueryBase < K , T > { public HBaseQuery ( ) { super ( null ) ; } public HBaseQuery ( DataStore < K , T > dataStore ) { super ( dataStore ) ; } } </s>
|
<s> package org . apache . gora . hbase . query ; import java . io . IOException ; import org . apache . gora . hbase . store . HBaseStore ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . hadoop . hbase . client . Get ; import org . apache . hadoop . hbase . client . Result ; public class HBaseGetResult < K , T extends Persistent > extends HBaseResult < K , T > { private Result result ; public HBaseGetResult ( HBaseStore < K , T > dataStore , Query < K , T > query , Result result ) { super ( dataStore , query ) ; this . result = result ; } @ Override public float getProgress ( ) throws IOException { return key == null ? <NUM_LIT> : <NUM_LIT> ; } @ Override public boolean nextInner ( ) throws IOException { if ( result == null || result . getRow ( ) == null || result . getRow ( ) . length == <NUM_LIT:0> ) { return false ; } if ( key == null ) { readNext ( result ) ; return key != null ; } return false ; } @ Override public void close ( ) throws IOException { } } </s>
|
<s> package org . apache . gora . hbase ; import org . apache . gora . GoraTestDriver ; import org . apache . gora . hbase . store . HBaseStore ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . hbase . HBaseTestingUtility ; import org . apache . hadoop . hbase . HTableDescriptor ; import org . apache . hadoop . hbase . client . HBaseAdmin ; public class GoraHBaseTestDriver extends GoraTestDriver { protected HBaseTestingUtility hbaseUtil ; protected int numServers = <NUM_LIT:1> ; public GoraHBaseTestDriver ( ) { super ( HBaseStore . class ) ; hbaseUtil = new HBaseTestingUtility ( ) ; } public void setNumServers ( int numServers ) { this . numServers = numServers ; } public int getNumServers ( ) { return numServers ; } @ Override public void setUpClass ( ) throws Exception { super . setUpClass ( ) ; log . info ( "<STR_LIT>" ) ; hbaseUtil . startMiniCluster ( numServers ) ; } @ Override public void tearDownClass ( ) throws Exception { super . tearDownClass ( ) ; log . info ( "<STR_LIT>" ) ; hbaseUtil . shutdownMiniCluster ( ) ; } @ Override public void setUp ( ) throws Exception { super . setUp ( ) ; } public void deleteAllTables ( ) throws Exception { HBaseAdmin admin = hbaseUtil . getHBaseAdmin ( ) ; for ( HTableDescriptor table : admin . listTables ( ) ) { admin . disableTable ( table . getName ( ) ) ; admin . deleteTable ( table . getName ( ) ) ; } } public Configuration getConf ( ) { return hbaseUtil . getConfiguration ( ) ; } public HBaseTestingUtility getHbaseUtil ( ) { return hbaseUtil ; } } </s>
|
<s> package org . apache . gora . hbase . util ; import java . util . ArrayList ; import java . util . Collection ; import java . util . List ; import java . util . Random ; import java . util . concurrent . Callable ; import java . util . concurrent . ExecutorService ; import java . util . concurrent . Executors ; import java . util . concurrent . Future ; import org . apache . avro . util . Utf8 ; import org . apache . gora . examples . generated . Employee ; import org . apache . gora . examples . generated . Metadata ; import org . apache . gora . examples . generated . TokenDatum ; import org . junit . Assert ; import org . junit . Test ; public class TestHBaseByteInterface { private static final Random RANDOM = new Random ( ) ; @ Test public void testEncodingDecoding ( ) throws Exception { for ( int i = <NUM_LIT:0> ; i < <NUM_LIT:1000> ; i ++ ) { Utf8 name = new Utf8 ( "<STR_LIT>" ) ; long dateOfBirth = System . currentTimeMillis ( ) ; int salary = <NUM_LIT> ; Utf8 ssn = new Utf8 ( String . valueOf ( RANDOM . nextLong ( ) ) ) ; Employee e = new Employee ( ) ; e . setName ( name ) ; e . setDateOfBirth ( dateOfBirth ) ; e . setSalary ( salary ) ; e . setSsn ( ssn ) ; byte [ ] employerBytes = HBaseByteInterface . toBytes ( e , Employee . _SCHEMA ) ; Employee e2 = ( Employee ) HBaseByteInterface . fromBytes ( Employee . _SCHEMA , employerBytes ) ; Assert . assertEquals ( name , e2 . getName ( ) ) ; Assert . assertEquals ( dateOfBirth , e2 . getDateOfBirth ( ) ) ; Assert . assertEquals ( salary , e2 . getSalary ( ) ) ; Assert . assertEquals ( ssn , e2 . getSsn ( ) ) ; Utf8 key = new Utf8 ( "<STR_LIT>" ) ; Utf8 value = new Utf8 ( "<STR_LIT>" + RANDOM . nextLong ( ) ) ; Metadata m = new Metadata ( ) ; m . putToData ( key , value ) ; byte [ ] datumBytes = HBaseByteInterface . toBytes ( m , Metadata . _SCHEMA ) ; Metadata m2 = ( Metadata ) HBaseByteInterface . fromBytes ( Metadata . _SCHEMA , datumBytes ) ; Assert . assertEquals ( value , m2 . 
getFromData ( key ) ) ; } } @ Test public void testEncodingDecodingMultithreaded ( ) throws Exception { int numThreads = <NUM_LIT:8> ; ExecutorService pool = Executors . newFixedThreadPool ( numThreads ) ; Collection < Callable < Integer > > tasks = new ArrayList < Callable < Integer > > ( ) ; for ( int i = <NUM_LIT:0> ; i < numThreads ; i ++ ) { tasks . add ( new Callable < Integer > ( ) { @ Override public Integer call ( ) { try { testEncodingDecoding ( ) ; return <NUM_LIT:0> ; } catch ( Exception e ) { e . printStackTrace ( ) ; return <NUM_LIT:1> ; } } } ) ; } List < Future < Integer > > results = pool . invokeAll ( tasks ) ; for ( Future < Integer > result : results ) { Assert . assertEquals ( <NUM_LIT:0> , ( int ) result . get ( ) ) ; } } } </s>
|
package org.apache.gora.hbase.store;

import java.io.IOException;
import java.util.Arrays;

import junit.framework.Assert;

import org.apache.gora.examples.generated.Employee;
import org.apache.gora.examples.generated.WebPage;
import org.apache.gora.hbase.GoraHBaseTestDriver;
import org.apache.gora.hbase.store.HBaseStore;
import org.apache.gora.store.DataStore;
import org.apache.gora.store.DataStoreFactory;
import org.apache.gora.store.DataStoreTestBase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Runs the shared {@link DataStoreTestBase} suite against {@link HBaseStore},
 * using the HBase mini cluster provided by {@link GoraHBaseTestDriver}.
 * The assertPut* overrides verify what the store wrote by reading the raw
 * HBase rows back directly through an {@link HTable}.
 */
public class TestHBaseStore extends DataStoreTestBase {

  // mini-cluster configuration, refreshed in setUp()
  private Configuration conf;

  static {
    // register the HBase mini-cluster driver with the shared test base
    setTestDriver(new GoraHBaseTestDriver());
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    conf = getTestDriver().getHbaseUtil().getConfiguration();
  }

  @SuppressWarnings("<STR_LIT:unchecked>")
  @Override
  protected DataStore<String, Employee> createEmployeeDataStore() throws IOException {
    return DataStoreFactory.createDataStore(HBaseStore.class, String.class,
        Employee.class, conf);
  }

  @SuppressWarnings("<STR_LIT:unchecked>")
  @Override
  protected DataStore<String, WebPage> createWebPageDataStore() throws IOException {
    return DataStoreFactory.createDataStore(HBaseStore.class, String.class,
        WebPage.class, conf);
  }

  /** @return the driver narrowed to the HBase-specific type. */
  public GoraHBaseTestDriver getTestDriver() {
    return (GoraHBaseTestDriver) testDriver;
  }

  /** Checks schema creation by asking the HBase admin whether the table exists. */
  @Override
  public void assertSchemaExists(String schemaName) throws Exception {
    HBaseAdmin admin = getTestDriver().getHbaseUtil().getHBaseAdmin();
    Assert.assertTrue(admin.tableExists(schemaName));
  }

  /**
   * Verifies an array field put by reading the raw row: the array's family
   * should hold one qualifier per element, keyed by element index.
   */
  @Override
  public void assertPutArray() throws IOException {
    HTable table = new HTable("<STR_LIT>");
    Get get = new Get(Bytes.toBytes("<STR_LIT>"));
    org.apache.hadoop.hbase.client.Result result = table.get(get);
    Assert.assertEquals(result.getFamilyMap(Bytes.toBytes("<STR_LIT>")).size(), <NUM_LIT:4>);
    Assert.assertTrue(Arrays.equals(
        result.getValue(Bytes.toBytes("<STR_LIT>"), Bytes.toBytes(<NUM_LIT:0>)),
        Bytes.toBytes("<STR_LIT>")));
    Assert.assertTrue(Arrays.equals(
        result.getValue(Bytes.toBytes("<STR_LIT>"), Bytes.toBytes(<NUM_LIT:3>)),
        Bytes.toBytes("<STR_LIT>")));
    table.close();
  }

  /**
   * Verifies a raw-bytes field put: the stored cell must be byte-identical
   * to what was written.
   */
  @Override
  public void assertPutBytes(byte[] contentBytes) throws IOException {
    HTable table = new HTable("<STR_LIT>");
    Get get = new Get(Bytes.toBytes("<STR_LIT>"));
    org.apache.hadoop.hbase.client.Result result = table.get(get);
    // the content column uses a null qualifier
    byte[] actualBytes = result.getValue(Bytes.toBytes("<STR_LIT:content>"), null);
    Assert.assertNotNull(actualBytes);
    Assert.assertTrue(Arrays.equals(contentBytes, actualBytes));
    table.close();
  }

  /**
   * Verifies a map field put: each map entry becomes a qualifier/value pair
   * in the map's column family.
   */
  @Override
  public void assertPutMap() throws IOException {
    HTable table = new HTable("<STR_LIT>");
    Get get = new Get(Bytes.toBytes("<STR_LIT>"));
    org.apache.hadoop.hbase.client.Result result = table.get(get);
    byte[] anchor2Raw = result.getValue(Bytes.toBytes("<STR_LIT>"),
        Bytes.toBytes("<STR_LIT>"));
    Assert.assertNotNull(anchor2Raw);
    String anchor2 = Bytes.toString(anchor2Raw);
    Assert.assertEquals("<STR_LIT>", anchor2);
    table.close();
  }

  // The following inherited tests are intentionally disabled for HBase
  // (empty overrides) — presumably unsupported or unstable here; TODO confirm.
  @Override
  public void testQueryEndKey() throws IOException {
  }

  @Override
  public void testQueryKeyRange() throws IOException {
  }

  @Override
  public void testDeleteByQuery() throws IOException {
  }

  /** Convenience entry point for running a single test outside a harness. */
  public static void main(String[] args) throws Exception {
    TestHBaseStore test = new TestHBaseStore();
    test.setUpClass();
    test.setUp();
    test.testQuery();
    test.tearDown();
    test.tearDownClass();
  }
}
|
<s> package org . apache . gora . hbase . mapreduce ; import org . apache . gora . examples . generated . TokenDatum ; import org . apache . gora . examples . generated . WebPage ; import org . apache . gora . hbase . store . HBaseStore ; import org . apache . gora . mapreduce . MapReduceTestUtils ; import org . apache . gora . store . DataStoreFactory ; import org . apache . hadoop . hbase . HBaseClusterTestCase ; import org . junit . After ; import org . junit . Before ; import org . junit . Test ; public class TestHBaseStoreWordCount extends HBaseClusterTestCase { private HBaseStore < String , WebPage > webPageStore ; private HBaseStore < String , TokenDatum > tokenStore ; @ Before @ Override public void setUp ( ) throws Exception { super . setUp ( ) ; webPageStore = DataStoreFactory . getDataStore ( HBaseStore . class , String . class , WebPage . class , conf ) ; tokenStore = DataStoreFactory . getDataStore ( HBaseStore . class , String . class , TokenDatum . class , conf ) ; } @ After @ Override public void tearDown ( ) throws Exception { webPageStore . close ( ) ; tokenStore . close ( ) ; super . tearDown ( ) ; } @ Test public void testWordCount ( ) throws Exception { MapReduceTestUtils . testWordCount ( conf , webPageStore , tokenStore ) ; } public static void main ( String [ ] args ) throws Exception { TestHBaseStoreWordCount test = new TestHBaseStoreWordCount ( ) ; test . setUp ( ) ; test . testWordCount ( ) ; } } </s>
|
<s> package org . apache . gora . hbase . mapreduce ; import org . apache . gora . examples . generated . TokenDatum ; import org . apache . gora . examples . generated . WebPage ; import org . apache . gora . hbase . store . HBaseStore ; import org . apache . gora . mapreduce . MapReduceTestUtils ; import org . apache . gora . store . DataStoreFactory ; import org . apache . hadoop . hbase . HBaseClusterTestCase ; import org . junit . After ; import org . junit . Before ; import org . junit . Test ; public class TestHBaseStoreCountQuery extends HBaseClusterTestCase { private HBaseStore < String , WebPage > webPageStore ; @ Before @ Override public void setUp ( ) throws Exception { super . setUp ( ) ; webPageStore = DataStoreFactory . getDataStore ( HBaseStore . class , String . class , WebPage . class , conf ) ; } @ After @ Override public void tearDown ( ) throws Exception { webPageStore . close ( ) ; super . tearDown ( ) ; } @ Test public void testCountQuery ( ) throws Exception { MapReduceTestUtils . testCountQuery ( webPageStore , conf ) ; } public static void main ( String [ ] args ) throws Exception { TestHBaseStoreCountQuery test = new TestHBaseStoreCountQuery ( ) ; test . setUp ( ) ; test . testCountQuery ( ) ; } } </s>
|
<s> package org . apache . gora . accumulo . util ; import org . apache . gora . accumulo . encoders . HexEncoder ; import org . junit . Assert ; import org . junit . Test ; public class HexEncoderTest { @ Test public void testByte ( ) { HexEncoder encoder = new HexEncoder ( ) ; Assert . assertEquals ( "<STR_LIT>" , new String ( encoder . encodeByte ( ( byte ) <NUM_LIT> ) ) ) ; Assert . assertEquals ( "<STR_LIT>" , new String ( encoder . encodeByte ( ( byte ) <NUM_LIT> ) ) ) ; byte b = Byte . MIN_VALUE ; while ( b != Byte . MAX_VALUE ) { Assert . assertEquals ( b , encoder . decodeByte ( encoder . encodeByte ( b ) ) ) ; b ++ ; } } @ Test public void testShort ( ) { HexEncoder encoder = new HexEncoder ( ) ; Assert . assertEquals ( "<STR_LIT>" , new String ( encoder . encodeShort ( ( short ) <NUM_LIT> ) ) ) ; Assert . assertEquals ( "<STR_LIT>" , new String ( encoder . encodeShort ( ( short ) <NUM_LIT> ) ) ) ; short s = Short . MIN_VALUE ; while ( s != Short . MAX_VALUE ) { Assert . assertEquals ( s , encoder . decodeShort ( encoder . encodeShort ( s ) ) ) ; s ++ ; } } } </s>
|
<s> package org . apache . gora . accumulo . util ; import java . util . ArrayList ; import java . util . Collections ; import junit . framework . Assert ; import org . apache . gora . accumulo . encoders . SignedBinaryEncoder ; import org . apache . hadoop . io . Text ; import org . junit . Test ; public class SignedBinaryEncoderTest { @ Test public void testShort ( ) { short s = Short . MIN_VALUE ; Text prev = null ; SignedBinaryEncoder encoder = new SignedBinaryEncoder ( ) ; while ( true ) { byte [ ] enc = encoder . encodeShort ( s ) ; Assert . assertEquals ( s , encoder . decodeShort ( enc ) ) ; Text current = new Text ( enc ) ; if ( prev != null ) Assert . assertTrue ( prev . compareTo ( current ) < <NUM_LIT:0> ) ; prev = current ; s ++ ; if ( s == Short . MAX_VALUE ) break ; } } private void testInt ( int start , int finish ) { int i = start ; Text prev = null ; SignedBinaryEncoder encoder = new SignedBinaryEncoder ( ) ; while ( true ) { byte [ ] enc = encoder . encodeInt ( i ) ; Assert . assertEquals ( i , encoder . decodeInt ( enc ) ) ; Text current = new Text ( enc ) ; if ( prev != null ) Assert . assertTrue ( prev . compareTo ( current ) < <NUM_LIT:0> ) ; prev = current ; i ++ ; if ( i == finish ) break ; } } @ Test public void testInt ( ) { testInt ( Integer . MIN_VALUE , Integer . MIN_VALUE + ( <NUM_LIT:1> << <NUM_LIT:16> ) ) ; testInt ( - ( <NUM_LIT:1> << <NUM_LIT:15> ) , ( <NUM_LIT:1> << <NUM_LIT:15> ) ) ; testInt ( Integer . MAX_VALUE - ( <NUM_LIT:1> << <NUM_LIT:16> ) , Integer . MAX_VALUE ) ; } private void testLong ( long start , long finish ) { long l = start ; Text prev = null ; SignedBinaryEncoder encoder = new SignedBinaryEncoder ( ) ; while ( true ) { byte [ ] enc = encoder . encodeLong ( l ) ; Assert . assertEquals ( l , encoder . decodeLong ( enc ) ) ; Text current = new Text ( enc ) ; if ( prev != null ) Assert . assertTrue ( prev . 
compareTo ( current ) < <NUM_LIT:0> ) ; prev = current ; l ++ ; if ( l == finish ) break ; } } @ Test public void testLong ( ) { testLong ( Long . MIN_VALUE , Long . MIN_VALUE + ( <NUM_LIT:1> << <NUM_LIT:16> ) ) ; testLong ( - ( <NUM_LIT:1> << <NUM_LIT:15> ) , ( <NUM_LIT:1> << <NUM_LIT:15> ) ) ; testLong ( Long . MAX_VALUE - ( <NUM_LIT:1> << <NUM_LIT:16> ) , Long . MAX_VALUE ) ; } @ Test public void testDouble ( ) { ArrayList < Double > testData = new ArrayList < Double > ( ) ; testData . add ( Double . NEGATIVE_INFINITY ) ; testData . add ( Double . MIN_VALUE ) ; testData . add ( Math . nextUp ( Double . NEGATIVE_INFINITY ) ) ; testData . add ( Math . pow ( <NUM_LIT> , <NUM_LIT> ) * - <NUM_LIT:1.0> ) ; testData . add ( Math . pow ( <NUM_LIT> , <NUM_LIT> ) ) ; testData . add ( Math . pow ( <NUM_LIT> , - <NUM_LIT> ) * - <NUM_LIT:1.0> ) ; testData . add ( Math . pow ( <NUM_LIT> , - <NUM_LIT> ) ) ; testData . add ( Math . nextAfter ( <NUM_LIT:0.0> , Double . NEGATIVE_INFINITY ) ) ; testData . add ( <NUM_LIT:0.0> ) ; testData . add ( Math . nextAfter ( Double . MAX_VALUE , Double . NEGATIVE_INFINITY ) ) ; testData . add ( Double . MAX_VALUE ) ; testData . add ( Double . POSITIVE_INFINITY ) ; Collections . sort ( testData ) ; SignedBinaryEncoder encoder = new SignedBinaryEncoder ( ) ; for ( int i = <NUM_LIT:0> ; i < testData . size ( ) ; i ++ ) { byte [ ] enc = encoder . encodeDouble ( testData . get ( i ) ) ; Assert . assertEquals ( testData . get ( i ) , encoder . decodeDouble ( enc ) ) ; if ( i > <NUM_LIT:1> ) { Assert . assertTrue ( "<STR_LIT>" + testData . get ( i ) + "<STR_LIT>" + testData . get ( i - <NUM_LIT:1> ) , new Text ( enc ) . compareTo ( new Text ( encoder . encodeDouble ( testData . get ( i - <NUM_LIT:1> ) ) ) ) > <NUM_LIT:0> ) ; } } } @ Test public void testFloat ( ) { ArrayList < Float > testData = new ArrayList < Float > ( ) ; testData . add ( Float . NEGATIVE_INFINITY ) ; testData . add ( Float . MIN_VALUE ) ; testData . add ( Math . 
nextUp ( Float . NEGATIVE_INFINITY ) ) ; testData . add ( ( float ) Math . pow ( <NUM_LIT> , <NUM_LIT> ) * - <NUM_LIT:1.0f> ) ; testData . add ( ( float ) Math . pow ( <NUM_LIT> , <NUM_LIT> ) ) ; testData . add ( ( float ) Math . pow ( <NUM_LIT> , - <NUM_LIT> ) * - <NUM_LIT:1.0f> ) ; testData . add ( ( float ) Math . pow ( <NUM_LIT> , - <NUM_LIT> ) ) ; testData . add ( Math . nextAfter ( <NUM_LIT:0.0f> , Float . NEGATIVE_INFINITY ) ) ; testData . add ( <NUM_LIT:0.0f> ) ; testData . add ( Math . nextAfter ( Float . MAX_VALUE , Float . NEGATIVE_INFINITY ) ) ; testData . add ( Float . MAX_VALUE ) ; testData . add ( Float . POSITIVE_INFINITY ) ; Collections . sort ( testData ) ; SignedBinaryEncoder encoder = new SignedBinaryEncoder ( ) ; for ( int i = <NUM_LIT:0> ; i < testData . size ( ) ; i ++ ) { byte [ ] enc = encoder . encodeFloat ( testData . get ( i ) ) ; Assert . assertEquals ( testData . get ( i ) , encoder . decodeFloat ( enc ) ) ; if ( i > <NUM_LIT:1> ) { Assert . assertTrue ( "<STR_LIT>" + testData . get ( i ) + "<STR_LIT>" + testData . get ( i - <NUM_LIT:1> ) , new Text ( enc ) . compareTo ( new Text ( encoder . encodeFloat ( testData . get ( i - <NUM_LIT:1> ) ) ) ) > <NUM_LIT:0> ) ; } } } } </s>
|
<s> package org . apache . gora . accumulo . store ; import java . io . IOException ; import org . apache . gora . examples . generated . Employee ; import org . apache . gora . examples . generated . WebPage ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . DataStoreTestBase ; import org . apache . hadoop . conf . Configuration ; public class AccumuloStoreTest extends DataStoreTestBase { @ Override protected DataStore < String , Employee > createEmployeeDataStore ( ) throws IOException { return DataStoreFactory . getDataStore ( String . class , Employee . class , new Configuration ( ) ) ; } @ Override protected DataStore < String , WebPage > createWebPageDataStore ( ) throws IOException { return DataStoreFactory . getDataStore ( String . class , WebPage . class , new Configuration ( ) ) ; } @ Override public void testDeleteByQueryFields ( ) throws IOException { return ; } } </s>
|
package org.apache.gora.accumulo.store;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import junit.framework.Assert;

import org.apache.gora.accumulo.encoders.Encoder;
import org.apache.gora.accumulo.encoders.SignedBinaryEncoder;
import org.junit.Test;

/**
 * Tests {@link AccumuloStore}'s partition-boundary helpers
 * (followingKey / lastPossibleKey) against {@link SignedBinaryEncoder}-encoded
 * Long keys, including the error cases where no such key exists.
 */
public class PartitionTest {

  private static Encoder encoder = new SignedBinaryEncoder();

  /**
   * Interprets a long's big-endian byte form through the signed binary
   * decoder — maps a raw bit pattern to the long the encoder would yield.
   */
  static long encl(long l) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    try {
      dos.writeLong(l);
    } catch (IOException e) {
      // ByteArrayOutputStream cannot actually fail; rethrow unchecked
      throw new RuntimeException(e);
    }
    return encoder.decodeLong(baos.toByteArray());
  }

  /** Checks followingKey: smallest full-length key after a (possibly short) prefix. */
  @Test
  public void test1() {
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.followingKey(encoder,
        Long.class, new byte[] { <NUM_LIT:0x00>, <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.followingKey(encoder,
        Long.class,
        new byte[] { <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.followingKey(encoder,
        Long.class,
        new byte[] { <NUM_LIT>, <NUM_LIT>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0> }));
    Assert.assertEquals(encl(-<NUM_LIT>), (long) AccumuloStore.followingKey(encoder,
        Long.class,
        new byte[] { (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>,
            (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.followingKey(encoder,
        Long.class,
        new byte[] { (byte) <NUM_LIT>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.followingKey(encoder,
        Long.class,
        new byte[] { (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>,
            (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT> }));
    // a key consisting entirely of max bytes has no following key
    try {
      AccumuloStore.followingKey(encoder, Long.class,
          new byte[] { (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>,
              (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT>, (byte) <NUM_LIT> });
      Assert.assertTrue(false); // should have thrown
    } catch (IllegalArgumentException iea) {
      // expected: no following key exists
    }
  }

  /** Checks lastPossibleKey: largest full-length key with a given prefix. */
  @Test
  public void test2() {
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { <NUM_LIT:0x00>, <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { (byte) <NUM_LIT>, <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { (byte) <NUM_LIT>, (byte) <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class,
        new byte[] { (byte) <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { (byte) <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { (byte) <NUM_LIT> }));
    Assert.assertEquals(encl(<NUM_LIT>), (long) AccumuloStore.lastPossibleKey(encoder,
        Long.class, new byte[] { (byte) <NUM_LIT> }));
    // an all-min prefix of invalid length has no last possible key
    try {
      AccumuloStore.lastPossibleKey(encoder, Long.class,
          new byte[] { (byte) <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0> });
      Assert.assertTrue(false); // should have thrown
    } catch (IllegalArgumentException iea) {
      // expected
    }
  }
}
|
<s> package org . apache . gora . accumulo . store ; import java . util . HashMap ; import java . util . Map ; import org . apache . accumulo . core . util . Pair ; import org . apache . hadoop . io . Text ; public class AccumuloMapping { Map < String , Pair < Text , Text > > fieldMap = new HashMap < String , Pair < Text , Text > > ( ) ; Map < Pair < Text , Text > , String > columnMap = new HashMap < Pair < Text , Text > , String > ( ) ; Map < String , String > tableConfig = new HashMap < String , String > ( ) ; String tableName ; String encoder ; } </s>
|
<s> package org . apache . gora . accumulo . store ; import java . io . ByteArrayOutputStream ; import java . io . IOException ; import java . net . InetAddress ; import java . nio . ByteBuffer ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . Collections ; import java . util . HashMap ; import java . util . Iterator ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . Properties ; import java . util . Set ; import javax . xml . parsers . DocumentBuilder ; import javax . xml . parsers . DocumentBuilderFactory ; import org . apache . accumulo . core . Constants ; import org . apache . accumulo . core . client . AccumuloException ; import org . apache . accumulo . core . client . AccumuloSecurityException ; import org . apache . accumulo . core . client . BatchWriter ; import org . apache . accumulo . core . client . Connector ; import org . apache . accumulo . core . client . IsolatedScanner ; import org . apache . accumulo . core . client . IteratorSetting ; import org . apache . accumulo . core . client . MutationsRejectedException ; import org . apache . accumulo . core . client . RowIterator ; import org . apache . accumulo . core . client . Scanner ; import org . apache . accumulo . core . client . TableDeletedException ; import org . apache . accumulo . core . client . TableExistsException ; import org . apache . accumulo . core . client . TableNotFoundException ; import org . apache . accumulo . core . client . TableOfflineException ; import org . apache . accumulo . core . client . ZooKeeperInstance ; import org . apache . accumulo . core . client . impl . Tables ; import org . apache . accumulo . core . client . impl . TabletLocator ; import org . apache . accumulo . core . client . mock . MockConnector ; import org . apache . accumulo . core . client . mock . MockInstance ; import org . apache . accumulo . core . client . mock . MockTabletLocator ; import org . 
apache . accumulo . core . data . ByteSequence ; import org . apache . accumulo . core . data . Key ; import org . apache . accumulo . core . data . KeyExtent ; import org . apache . accumulo . core . data . Mutation ; import org . apache . accumulo . core . data . Range ; import org . apache . accumulo . core . data . Value ; import org . apache . accumulo . core . iterators . SortedKeyIterator ; import org . apache . accumulo . core . iterators . user . TimestampFilter ; import org . apache . accumulo . core . master . state . tables . TableState ; import org . apache . accumulo . core . security . ColumnVisibility ; import org . apache . accumulo . core . security . thrift . AuthInfo ; import org . apache . accumulo . core . util . Pair ; import org . apache . accumulo . core . util . TextUtil ; import org . apache . accumulo . core . util . UtilWaitThread ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Field ; import org . apache . avro . generic . GenericArray ; import org . apache . avro . io . BinaryDecoder ; import org . apache . avro . io . BinaryEncoder ; import org . apache . avro . io . DecoderFactory ; import org . apache . avro . specific . SpecificDatumReader ; import org . apache . avro . specific . SpecificDatumWriter ; import org . apache . avro . util . Utf8 ; import org . apache . gora . accumulo . encoders . Encoder ; import org . apache . gora . accumulo . query . AccumuloQuery ; import org . apache . gora . accumulo . query . AccumuloResult ; import org . apache . gora . persistency . ListGenericArray ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . persistency . State ; import org . apache . gora . persistency . StateManager ; import org . apache . gora . persistency . StatefulHashMap ; import org . apache . gora . persistency . StatefulMap ; import org . apache . gora . query . PartitionQuery ; import org . apache . gora . query . Query ; import org . apache . gora . 
query . Result ; import org . apache . gora . query . impl . PartitionQueryImpl ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . impl . DataStoreBase ; import org . apache . gora . util . AvroUtils ; import org . apache . hadoop . io . Text ; import org . w3c . dom . Document ; import org . w3c . dom . Element ; import org . w3c . dom . NodeList ; public class AccumuloStore < K , T extends Persistent > extends DataStoreBase < K , T > { protected static final String MOCK_PROPERTY = "<STR_LIT>" ; protected static final String INSTANCE_NAME_PROPERTY = "<STR_LIT>" ; protected static final String ZOOKEEPERS_NAME_PROPERTY = "<STR_LIT>" ; protected static final String USERNAME_PROPERTY = "<STR_LIT>" ; protected static final String PASSWORD_PROPERTY = "<STR_LIT>" ; protected static final String DEFAULT_MAPPING_FILE = "<STR_LIT>" ; private Connector conn ; private BatchWriter batchWriter ; private AccumuloMapping mapping ; private AuthInfo authInfo ; private Encoder encoder ; public Object fromBytes ( Schema schema , byte data [ ] ) { return fromBytes ( encoder , schema , data ) ; } public static Object fromBytes ( Encoder encoder , Schema schema , byte data [ ] ) { switch ( schema . getType ( ) ) { case BOOLEAN : return encoder . decodeBoolean ( data ) ; case DOUBLE : return encoder . decodeDouble ( data ) ; case FLOAT : return encoder . decodeFloat ( data ) ; case INT : return encoder . decodeInt ( data ) ; case LONG : return encoder . decodeLong ( data ) ; case STRING : return new Utf8 ( data ) ; case BYTES : return ByteBuffer . wrap ( data ) ; case ENUM : return AvroUtils . getEnumValue ( schema , encoder . decodeInt ( data ) ) ; } throw new IllegalArgumentException ( "<STR_LIT>" + schema . 
getType ( ) ) ; } public K fromBytes ( Class < K > clazz , byte [ ] val ) { return fromBytes ( encoder , clazz , val ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < K > K fromBytes ( Encoder encoder , Class < K > clazz , byte [ ] val ) { try { if ( clazz . equals ( Byte . TYPE ) || clazz . equals ( Byte . class ) ) { return ( K ) Byte . valueOf ( encoder . decodeByte ( val ) ) ; } else if ( clazz . equals ( Boolean . TYPE ) || clazz . equals ( Boolean . class ) ) { return ( K ) Boolean . valueOf ( encoder . decodeBoolean ( val ) ) ; } else if ( clazz . equals ( Short . TYPE ) || clazz . equals ( Short . class ) ) { return ( K ) Short . valueOf ( encoder . decodeShort ( val ) ) ; } else if ( clazz . equals ( Integer . TYPE ) || clazz . equals ( Integer . class ) ) { return ( K ) Integer . valueOf ( encoder . decodeInt ( val ) ) ; } else if ( clazz . equals ( Long . TYPE ) || clazz . equals ( Long . class ) ) { return ( K ) Long . valueOf ( encoder . decodeLong ( val ) ) ; } else if ( clazz . equals ( Float . TYPE ) || clazz . equals ( Float . class ) ) { return ( K ) Float . valueOf ( encoder . decodeFloat ( val ) ) ; } else if ( clazz . equals ( Double . TYPE ) || clazz . equals ( Double . class ) ) { return ( K ) Double . valueOf ( encoder . decodeDouble ( val ) ) ; } else if ( clazz . equals ( String . class ) ) { return ( K ) new String ( val , "<STR_LIT:UTF-8>" ) ; } else if ( clazz . equals ( Utf8 . class ) ) { return ( K ) new Utf8 ( val ) ; } throw new IllegalArgumentException ( "<STR_LIT>" + clazz . getName ( ) ) ; } catch ( IOException ioe ) { throw new RuntimeException ( ioe ) ; } } private static byte [ ] copyIfNeeded ( byte b [ ] , int offset , int len ) { if ( len != b . length || offset != <NUM_LIT:0> ) { byte copy [ ] = new byte [ len ] ; System . arraycopy ( b , offset , copy , <NUM_LIT:0> , copy . 
length ) ; b = copy ; } return b ; } public byte [ ] toBytes ( Object o ) { return toBytes ( encoder , o ) ; } public static byte [ ] toBytes ( Encoder encoder , Object o ) { try { if ( o instanceof String ) { return ( ( String ) o ) . getBytes ( "<STR_LIT:UTF-8>" ) ; } else if ( o instanceof Utf8 ) { return copyIfNeeded ( ( ( Utf8 ) o ) . getBytes ( ) , <NUM_LIT:0> , ( ( Utf8 ) o ) . getLength ( ) ) ; } else if ( o instanceof ByteBuffer ) { return copyIfNeeded ( ( ( ByteBuffer ) o ) . array ( ) , ( ( ByteBuffer ) o ) . arrayOffset ( ) + ( ( ByteBuffer ) o ) . position ( ) , ( ( ByteBuffer ) o ) . remaining ( ) ) ; } else if ( o instanceof Long ) { return encoder . encodeLong ( ( Long ) o ) ; } else if ( o instanceof Integer ) { return encoder . encodeInt ( ( Integer ) o ) ; } else if ( o instanceof Short ) { return encoder . encodeShort ( ( Short ) o ) ; } else if ( o instanceof Byte ) { return encoder . encodeByte ( ( Byte ) o ) ; } else if ( o instanceof Boolean ) { return encoder . encodeBoolean ( ( Boolean ) o ) ; } else if ( o instanceof Float ) { return encoder . encodeFloat ( ( Float ) o ) ; } else if ( o instanceof Double ) { return encoder . encodeDouble ( ( Double ) o ) ; } else if ( o instanceof Enum ) { return encoder . encodeInt ( ( ( Enum ) o ) . ordinal ( ) ) ; } } catch ( IOException ioe ) { throw new RuntimeException ( ioe ) ; } throw new IllegalArgumentException ( "<STR_LIT>" + o . getClass ( ) . getName ( ) ) ; } private BatchWriter getBatchWriter ( ) throws IOException { if ( batchWriter == null ) try { batchWriter = conn . createBatchWriter ( mapping . tableName , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:4> ) ; } catch ( TableNotFoundException e ) { throw new IOException ( e ) ; } return batchWriter ; } @ Override public void initialize ( Class < K > keyClass , Class < T > persistentClass , Properties properties ) throws IOException { super . initialize ( keyClass , persistentClass , properties ) ; String mock = DataStoreFactory . 
findProperty ( properties , this , MOCK_PROPERTY , null ) ; String mappingFile = DataStoreFactory . getMappingFile ( properties , this , DEFAULT_MAPPING_FILE ) ; String user = DataStoreFactory . findProperty ( properties , this , USERNAME_PROPERTY , null ) ; String password = DataStoreFactory . findProperty ( properties , this , PASSWORD_PROPERTY , null ) ; mapping = readMapping ( mappingFile ) ; if ( mapping . encoder == null || mapping . encoder . equals ( "<STR_LIT>" ) ) { encoder = new org . apache . gora . accumulo . encoders . BinaryEncoder ( ) ; } else { try { encoder = ( Encoder ) getClass ( ) . getClassLoader ( ) . loadClass ( mapping . encoder ) . newInstance ( ) ; } catch ( InstantiationException e ) { throw new IOException ( e ) ; } catch ( IllegalAccessException e ) { throw new IOException ( e ) ; } catch ( ClassNotFoundException e ) { throw new IOException ( e ) ; } } try { if ( mock == null || ! mock . equals ( "<STR_LIT:true>" ) ) { String instance = DataStoreFactory . findProperty ( properties , this , INSTANCE_NAME_PROPERTY , null ) ; String zookeepers = DataStoreFactory . findProperty ( properties , this , ZOOKEEPERS_NAME_PROPERTY , null ) ; conn = new ZooKeeperInstance ( instance , zookeepers ) . getConnector ( user , password ) ; authInfo = new AuthInfo ( user , ByteBuffer . wrap ( password . getBytes ( ) ) , conn . getInstance ( ) . getInstanceID ( ) ) ; } else { conn = new MockInstance ( ) . getConnector ( user , password ) ; } if ( autoCreateSchema ) createSchema ( ) ; } catch ( AccumuloException e ) { throw new IOException ( e ) ; } catch ( AccumuloSecurityException e ) { throw new IOException ( e ) ; } } protected AccumuloMapping readMapping ( String filename ) throws IOException { try { AccumuloMapping mapping = new AccumuloMapping ( ) ; DocumentBuilder db = DocumentBuilderFactory . newInstance ( ) . newDocumentBuilder ( ) ; Document dom = db . parse ( getClass ( ) . getClassLoader ( ) . 
getResourceAsStream ( filename ) ) ; Element root = dom . getDocumentElement ( ) ; NodeList nl = root . getElementsByTagName ( "<STR_LIT:class>" ) ; for ( int i = <NUM_LIT:0> ; i < nl . getLength ( ) ; i ++ ) { Element classElement = ( Element ) nl . item ( i ) ; if ( classElement . getAttribute ( "<STR_LIT>" ) . equals ( keyClass . getCanonicalName ( ) ) && classElement . getAttribute ( "<STR_LIT:name>" ) . equals ( persistentClass . getCanonicalName ( ) ) ) { mapping . tableName = getSchemaName ( classElement . getAttribute ( "<STR_LIT>" ) , persistentClass ) ; mapping . encoder = classElement . getAttribute ( "<STR_LIT>" ) ; NodeList fields = classElement . getElementsByTagName ( "<STR_LIT:field>" ) ; for ( int j = <NUM_LIT:0> ; j < fields . getLength ( ) ; j ++ ) { Element fieldElement = ( Element ) fields . item ( j ) ; String name = fieldElement . getAttribute ( "<STR_LIT:name>" ) ; String family = fieldElement . getAttribute ( "<STR_LIT>" ) ; String qualifier = fieldElement . getAttribute ( "<STR_LIT>" ) ; if ( qualifier . equals ( "<STR_LIT>" ) ) qualifier = null ; Pair < Text , Text > col = new Pair < Text , Text > ( new Text ( family ) , qualifier == null ? null : new Text ( qualifier ) ) ; mapping . fieldMap . put ( name , col ) ; mapping . columnMap . put ( col , name ) ; } } } nl = root . getElementsByTagName ( "<STR_LIT>" ) ; for ( int i = <NUM_LIT:0> ; i < nl . getLength ( ) ; i ++ ) { Element tableElement = ( Element ) nl . item ( i ) ; if ( tableElement . getAttribute ( "<STR_LIT:name>" ) . equals ( mapping . tableName ) ) { NodeList configs = tableElement . getElementsByTagName ( "<STR_LIT>" ) ; for ( int j = <NUM_LIT:0> ; j < configs . getLength ( ) ; j ++ ) { Element configElement = ( Element ) configs . item ( j ) ; String key = configElement . getAttribute ( "<STR_LIT:key>" ) ; String val = configElement . getAttribute ( "<STR_LIT:value>" ) ; mapping . tableConfig . 
put ( key , val ) ; } } } return mapping ; } catch ( Exception ex ) { throw new IOException ( ex ) ; } } @ Override public String getSchemaName ( ) { return mapping . tableName ; } @ Override public void createSchema ( ) throws IOException { try { conn . tableOperations ( ) . create ( mapping . tableName ) ; Set < Entry < String , String > > es = mapping . tableConfig . entrySet ( ) ; for ( Entry < String , String > entry : es ) { conn . tableOperations ( ) . setProperty ( mapping . tableName , entry . getKey ( ) , entry . getValue ( ) ) ; } } catch ( AccumuloException e ) { throw new IOException ( e ) ; } catch ( AccumuloSecurityException e ) { throw new IOException ( e ) ; } catch ( TableExistsException e ) { return ; } } @ Override public void deleteSchema ( ) throws IOException { try { if ( batchWriter != null ) batchWriter . close ( ) ; batchWriter = null ; conn . tableOperations ( ) . delete ( mapping . tableName ) ; } catch ( AccumuloException e ) { throw new IOException ( e ) ; } catch ( AccumuloSecurityException e ) { throw new IOException ( e ) ; } catch ( TableNotFoundException e ) { return ; } } @ Override public boolean schemaExists ( ) throws IOException { return conn . tableOperations ( ) . exists ( mapping . tableName ) ; } public ByteSequence populate ( Iterator < Entry < Key , Value > > iter , T persistent ) throws IOException { ByteSequence row = null ; Map currentMap = null ; ArrayList currentArray = null ; Text currentFam = null ; int currentPos = <NUM_LIT:0> ; Schema currentSchema = null ; Field currentField = null ; while ( iter . hasNext ( ) ) { Entry < Key , Value > entry = iter . next ( ) ; if ( currentMap != null ) { if ( currentFam . equals ( entry . getKey ( ) . getColumnFamily ( ) ) ) { currentMap . put ( new Utf8 ( entry . getKey ( ) . getColumnQualifierData ( ) . toArray ( ) ) , fromBytes ( currentSchema , entry . getValue ( ) . get ( ) ) ) ; continue ; } else { persistent . 
put ( currentPos , currentMap ) ; currentMap = null ; } } else if ( currentArray != null ) { if ( currentFam . equals ( entry . getKey ( ) . getColumnFamily ( ) ) ) { currentArray . add ( fromBytes ( currentSchema , entry . getValue ( ) . get ( ) ) ) ; continue ; } else { persistent . put ( currentPos , new ListGenericArray < T > ( currentField . schema ( ) , currentArray ) ) ; currentArray = null ; } } if ( row == null ) row = entry . getKey ( ) . getRowData ( ) ; String fieldName = mapping . columnMap . get ( new Pair < Text , Text > ( entry . getKey ( ) . getColumnFamily ( ) , entry . getKey ( ) . getColumnQualifier ( ) ) ) ; if ( fieldName == null ) fieldName = mapping . columnMap . get ( new Pair < Text , Text > ( entry . getKey ( ) . getColumnFamily ( ) , null ) ) ; Field field = fieldMap . get ( fieldName ) ; switch ( field . schema ( ) . getType ( ) ) { case MAP : currentMap = new StatefulHashMap ( ) ; currentPos = field . pos ( ) ; currentFam = entry . getKey ( ) . getColumnFamily ( ) ; currentSchema = field . schema ( ) . getValueType ( ) ; currentMap . put ( new Utf8 ( entry . getKey ( ) . getColumnQualifierData ( ) . toArray ( ) ) , fromBytes ( currentSchema , entry . getValue ( ) . get ( ) ) ) ; break ; case ARRAY : currentArray = new ArrayList ( ) ; currentPos = field . pos ( ) ; currentFam = entry . getKey ( ) . getColumnFamily ( ) ; currentSchema = field . schema ( ) . getElementType ( ) ; currentField = field ; currentArray . add ( fromBytes ( currentSchema , entry . getValue ( ) . get ( ) ) ) ; break ; case RECORD : SpecificDatumReader reader = new SpecificDatumReader ( field . schema ( ) ) ; byte [ ] val = entry . getValue ( ) . get ( ) ; BinaryDecoder decoder = DecoderFactory . defaultFactory ( ) . createBinaryDecoder ( val , null ) ; persistent . put ( field . pos ( ) , reader . read ( null , decoder ) ) ; break ; default : persistent . put ( field . pos ( ) , fromBytes ( field . schema ( ) , entry . getValue ( ) . 
get ( ) ) ) ; } } if ( currentMap != null ) { persistent . put ( currentPos , currentMap ) ; } else if ( currentArray != null ) { persistent . put ( currentPos , new ListGenericArray < T > ( currentField . schema ( ) , currentArray ) ) ; } persistent . clearDirty ( ) ; return row ; } private void setFetchColumns ( Scanner scanner , String fields [ ] ) { fields = getFieldsToQuery ( fields ) ; for ( String field : fields ) { Pair < Text , Text > col = mapping . fieldMap . get ( field ) ; if ( col . getSecond ( ) == null ) { scanner . fetchColumnFamily ( col . getFirst ( ) ) ; } else { scanner . fetchColumn ( col . getFirst ( ) , col . getSecond ( ) ) ; } } } @ Override public T get ( K key , String [ ] fields ) throws IOException { try { Scanner scanner = new IsolatedScanner ( conn . createScanner ( mapping . tableName , Constants . NO_AUTHS ) ) ; Range rowRange = new Range ( new Text ( toBytes ( key ) ) ) ; scanner . setRange ( rowRange ) ; setFetchColumns ( scanner , fields ) ; T persistent = newPersistent ( ) ; ByteSequence row = populate ( scanner . iterator ( ) , persistent ) ; if ( row == null ) return null ; return persistent ; } catch ( TableNotFoundException e ) { return null ; } } @ Override public void put ( K key , T val ) throws IOException { Mutation m = new Mutation ( new Text ( toBytes ( key ) ) ) ; Schema schema = val . getSchema ( ) ; StateManager stateManager = val . getStateManager ( ) ; Iterator < Field > iter = schema . getFields ( ) . iterator ( ) ; int count = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; iter . hasNext ( ) ; i ++ ) { Field field = iter . next ( ) ; if ( ! stateManager . isDirty ( val , i ) ) { continue ; } Object o = val . get ( i ) ; Pair < Text , Text > col = mapping . fieldMap . get ( field . name ( ) ) ; switch ( field . schema ( ) . getType ( ) ) { case MAP : if ( o instanceof StatefulMap ) { StatefulMap map = ( StatefulMap ) o ; Set < ? > es = map . states ( ) . 
entrySet ( ) ; for ( Object entry : es ) { Object mapKey = ( ( Entry ) entry ) . getKey ( ) ; State state = ( State ) ( ( Entry ) entry ) . getValue ( ) ; switch ( state ) { case NEW : case DIRTY : m . put ( col . getFirst ( ) , new Text ( toBytes ( mapKey ) ) , new Value ( toBytes ( map . get ( mapKey ) ) ) ) ; count ++ ; break ; case DELETED : m . putDelete ( col . getFirst ( ) , new Text ( toBytes ( mapKey ) ) ) ; count ++ ; break ; } } } else { Map map = ( Map ) o ; Set < ? > es = map . entrySet ( ) ; for ( Object entry : es ) { Object mapKey = ( ( Entry ) entry ) . getKey ( ) ; Object mapVal = ( ( Entry ) entry ) . getValue ( ) ; m . put ( col . getFirst ( ) , new Text ( toBytes ( mapKey ) ) , new Value ( toBytes ( mapVal ) ) ) ; count ++ ; } } break ; case ARRAY : GenericArray array = ( GenericArray ) o ; int j = <NUM_LIT:0> ; for ( Object item : array ) { m . put ( col . getFirst ( ) , new Text ( toBytes ( j ++ ) ) , new Value ( toBytes ( item ) ) ) ; count ++ ; } break ; case RECORD : SpecificDatumWriter writer = new SpecificDatumWriter ( field . schema ( ) ) ; ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; BinaryEncoder encoder = new BinaryEncoder ( os ) ; writer . write ( o , encoder ) ; encoder . flush ( ) ; m . put ( col . getFirst ( ) , col . getSecond ( ) , new Value ( os . toByteArray ( ) ) ) ; break ; default : m . put ( col . getFirst ( ) , col . getSecond ( ) , new Value ( toBytes ( o ) ) ) ; count ++ ; } } if ( count > <NUM_LIT:0> ) try { getBatchWriter ( ) . addMutation ( m ) ; } catch ( MutationsRejectedException e ) { throw new IOException ( e ) ; } } @ Override public boolean delete ( K key ) throws IOException { Query < K , T > q = newQuery ( ) ; q . setKey ( key ) ; return deleteByQuery ( q ) > <NUM_LIT:0> ; } @ Override public long deleteByQuery ( Query < K , T > query ) throws IOException { try { Scanner scanner = createScanner ( query ) ; scanner . addScanIterator ( new IteratorSetting ( Integer . 
MAX_VALUE , SortedKeyIterator . class ) ) ; RowIterator iterator = new RowIterator ( scanner . iterator ( ) ) ; long count = <NUM_LIT:0> ; while ( iterator . hasNext ( ) ) { Iterator < Entry < Key , Value > > row = iterator . next ( ) ; Mutation m = null ; while ( row . hasNext ( ) ) { Entry < Key , Value > entry = row . next ( ) ; Key key = entry . getKey ( ) ; if ( m == null ) m = new Mutation ( key . getRow ( ) ) ; m . putDelete ( key . getColumnFamily ( ) , key . getColumnQualifier ( ) , new ColumnVisibility ( key . getColumnVisibility ( ) ) , key . getTimestamp ( ) ) ; } getBatchWriter ( ) . addMutation ( m ) ; count ++ ; } return count ; } catch ( TableNotFoundException e ) { throw new IOException ( e ) ; } catch ( MutationsRejectedException e ) { throw new IOException ( e ) ; } } private Range createRange ( Query < K , T > query ) { Text startRow = null ; Text endRow = null ; if ( query . getStartKey ( ) != null ) startRow = new Text ( toBytes ( query . getStartKey ( ) ) ) ; if ( query . getEndKey ( ) != null ) endRow = new Text ( toBytes ( query . getEndKey ( ) ) ) ; return new Range ( startRow , true , endRow , true ) ; } private Scanner createScanner ( Query < K , T > query ) throws TableNotFoundException { Scanner scanner = new IsolatedScanner ( conn . createScanner ( mapping . tableName , Constants . NO_AUTHS ) ) ; setFetchColumns ( scanner , query . getFields ( ) ) ; scanner . setRange ( createRange ( query ) ) ; if ( query . getStartTime ( ) != - <NUM_LIT:1> || query . getEndTime ( ) != - <NUM_LIT:1> ) { IteratorSetting is = new IteratorSetting ( <NUM_LIT:30> , TimestampFilter . class ) ; if ( query . getStartTime ( ) != - <NUM_LIT:1> ) TimestampFilter . setStart ( is , query . getStartTime ( ) , true ) ; if ( query . getEndTime ( ) != - <NUM_LIT:1> ) TimestampFilter . setEnd ( is , query . getEndTime ( ) , true ) ; scanner . 
addScanIterator ( is ) ; } return scanner ; } @ Override public Result < K , T > execute ( Query < K , T > query ) throws IOException { try { Scanner scanner = createScanner ( query ) ; return new AccumuloResult < K , T > ( this , query , scanner ) ; } catch ( TableNotFoundException e ) { throw new IOException ( e ) ; } } @ Override public Query < K , T > newQuery ( ) { return new AccumuloQuery < K , T > ( this ) ; } Text pad ( Text key , int bytes ) { if ( key . getLength ( ) < bytes ) key = new Text ( key ) ; while ( key . getLength ( ) < bytes ) { key . append ( new byte [ ] { <NUM_LIT:0> } , <NUM_LIT:0> , <NUM_LIT:1> ) ; } return key ; } @ Override public List < PartitionQuery < K , T > > getPartitions ( Query < K , T > query ) throws IOException { try { TabletLocator tl ; if ( conn instanceof MockConnector ) tl = new MockTabletLocator ( ) ; else tl = TabletLocator . getInstance ( conn . getInstance ( ) , authInfo , new Text ( Tables . getTableId ( conn . getInstance ( ) , mapping . tableName ) ) ) ; Map < String , Map < KeyExtent , List < Range > > > binnedRanges = new HashMap < String , Map < KeyExtent , List < Range > > > ( ) ; tl . invalidateCache ( ) ; while ( tl . binRanges ( Collections . singletonList ( createRange ( query ) ) , binnedRanges ) . size ( ) > <NUM_LIT:0> ) { if ( ! Tables . exists ( conn . getInstance ( ) , Tables . getTableId ( conn . getInstance ( ) , mapping . tableName ) ) ) throw new TableDeletedException ( Tables . getTableId ( conn . getInstance ( ) , mapping . tableName ) ) ; else if ( Tables . getTableState ( conn . getInstance ( ) , Tables . getTableId ( conn . getInstance ( ) , mapping . tableName ) ) == TableState . OFFLINE ) throw new TableOfflineException ( conn . getInstance ( ) , Tables . getTableId ( conn . getInstance ( ) , mapping . tableName ) ) ; UtilWaitThread . sleep ( <NUM_LIT:100> ) ; tl . 
invalidateCache ( ) ; } List < PartitionQuery < K , T > > ret = new ArrayList < PartitionQuery < K , T > > ( ) ; Text startRow = null ; Text endRow = null ; if ( query . getStartKey ( ) != null ) startRow = new Text ( toBytes ( query . getStartKey ( ) ) ) ; if ( query . getEndKey ( ) != null ) endRow = new Text ( toBytes ( query . getEndKey ( ) ) ) ; HashMap < String , String > hostNameCache = new HashMap < String , String > ( ) ; for ( Entry < String , Map < KeyExtent , List < Range > > > entry : binnedRanges . entrySet ( ) ) { String ip = entry . getKey ( ) . split ( "<STR_LIT::>" , <NUM_LIT:2> ) [ <NUM_LIT:0> ] ; String location = hostNameCache . get ( ip ) ; if ( location == null ) { InetAddress inetAddress = InetAddress . getByName ( ip ) ; location = inetAddress . getHostName ( ) ; hostNameCache . put ( ip , location ) ; } Map < KeyExtent , List < Range > > tablets = entry . getValue ( ) ; for ( KeyExtent ke : tablets . keySet ( ) ) { K startKey = null ; if ( startRow == null || ! ke . contains ( startRow ) ) { if ( ke . getPrevEndRow ( ) != null ) { startKey = followingKey ( encoder , getKeyClass ( ) , TextUtil . getBytes ( ke . getPrevEndRow ( ) ) ) ; } } else { startKey = fromBytes ( getKeyClass ( ) , TextUtil . getBytes ( startRow ) ) ; } K endKey = null ; if ( endRow == null || ! ke . contains ( endRow ) ) { if ( ke . getEndRow ( ) != null ) endKey = lastPossibleKey ( encoder , getKeyClass ( ) , TextUtil . getBytes ( ke . getEndRow ( ) ) ) ; } else { endKey = fromBytes ( getKeyClass ( ) , TextUtil . getBytes ( endRow ) ) ; } PartitionQueryImpl pqi = new PartitionQueryImpl < K , T > ( query , startKey , endKey , new String [ ] { location } ) ; ret . 
add ( pqi ) ; } } return ret ; } catch ( TableNotFoundException e ) { throw new IOException ( e ) ; } catch ( AccumuloException e ) { throw new IOException ( e ) ; } catch ( AccumuloSecurityException e ) { throw new IOException ( e ) ; } } static < K > K lastPossibleKey ( Encoder encoder , Class < K > clazz , byte [ ] er ) { if ( clazz . equals ( Byte . TYPE ) || clazz . equals ( Byte . class ) ) { throw new UnsupportedOperationException ( ) ; } else if ( clazz . equals ( Boolean . TYPE ) || clazz . equals ( Boolean . class ) ) { throw new UnsupportedOperationException ( ) ; } else if ( clazz . equals ( Short . TYPE ) || clazz . equals ( Short . class ) ) { return fromBytes ( encoder , clazz , encoder . lastPossibleKey ( <NUM_LIT:2> , er ) ) ; } else if ( clazz . equals ( Integer . TYPE ) || clazz . equals ( Integer . class ) ) { return fromBytes ( encoder , clazz , encoder . lastPossibleKey ( <NUM_LIT:4> , er ) ) ; } else if ( clazz . equals ( Long . TYPE ) || clazz . equals ( Long . class ) ) { return fromBytes ( encoder , clazz , encoder . lastPossibleKey ( <NUM_LIT:8> , er ) ) ; } else if ( clazz . equals ( Float . TYPE ) || clazz . equals ( Float . class ) ) { return fromBytes ( encoder , clazz , encoder . lastPossibleKey ( <NUM_LIT:4> , er ) ) ; } else if ( clazz . equals ( Double . TYPE ) || clazz . equals ( Double . class ) ) { return fromBytes ( encoder , clazz , encoder . lastPossibleKey ( <NUM_LIT:8> , er ) ) ; } else if ( clazz . equals ( String . class ) ) { throw new UnsupportedOperationException ( ) ; } else if ( clazz . equals ( Utf8 . class ) ) { return fromBytes ( encoder , clazz , er ) ; } throw new IllegalArgumentException ( "<STR_LIT>" + clazz . getName ( ) ) ; } static < K > K followingKey ( Encoder encoder , Class < K > clazz , byte [ ] per ) { if ( clazz . equals ( Byte . TYPE ) || clazz . equals ( Byte . class ) ) { return ( K ) Byte . valueOf ( encoder . followingKey ( <NUM_LIT:1> , per ) [ <NUM_LIT:0> ] ) ; } else if ( clazz . 
equals ( Boolean . TYPE ) || clazz . equals ( Boolean . class ) ) { throw new UnsupportedOperationException ( ) ; } else if ( clazz . equals ( Short . TYPE ) || clazz . equals ( Short . class ) ) { return fromBytes ( encoder , clazz , encoder . followingKey ( <NUM_LIT:2> , per ) ) ; } else if ( clazz . equals ( Integer . TYPE ) || clazz . equals ( Integer . class ) ) { return fromBytes ( encoder , clazz , encoder . followingKey ( <NUM_LIT:4> , per ) ) ; } else if ( clazz . equals ( Long . TYPE ) || clazz . equals ( Long . class ) ) { return fromBytes ( encoder , clazz , encoder . followingKey ( <NUM_LIT:8> , per ) ) ; } else if ( clazz . equals ( Float . TYPE ) || clazz . equals ( Float . class ) ) { return fromBytes ( encoder , clazz , encoder . followingKey ( <NUM_LIT:4> , per ) ) ; } else if ( clazz . equals ( Double . TYPE ) || clazz . equals ( Double . class ) ) { return fromBytes ( encoder , clazz , encoder . followingKey ( <NUM_LIT:8> , per ) ) ; } else if ( clazz . equals ( String . class ) ) { throw new UnsupportedOperationException ( ) ; } else if ( clazz . equals ( Utf8 . class ) ) { return fromBytes ( encoder , clazz , Arrays . copyOf ( per , per . length + <NUM_LIT:1> ) ) ; } throw new IllegalArgumentException ( "<STR_LIT>" + clazz . getName ( ) ) ; } @ Override public void flush ( ) throws IOException { try { if ( batchWriter != null ) { batchWriter . flush ( ) ; } } catch ( MutationsRejectedException e ) { throw new IOException ( e ) ; } } @ Override public void close ( ) throws IOException { try { if ( batchWriter != null ) { batchWriter . close ( ) ; batchWriter = null ; } } catch ( MutationsRejectedException e ) { throw new IOException ( e ) ; } } } </s>
|
<s> package org . apache . gora . accumulo . util ; import java . io . IOException ; import java . io . OutputStream ; public class FixedByteArrayOutputStream extends OutputStream { private int i ; byte out [ ] ; public FixedByteArrayOutputStream ( byte out [ ] ) { this . out = out ; } @ Override public void write ( int b ) throws IOException { out [ i ++ ] = ( byte ) b ; } @ Override public void write ( byte b [ ] , int off , int len ) throws IOException { System . arraycopy ( b , off , out , i , len ) ; i += len ; } } </s>
|
<s> package org . apache . gora . accumulo . query ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . impl . QueryBase ; import org . apache . gora . store . DataStore ; public class AccumuloQuery < K , T extends Persistent > extends QueryBase < K , T > { public AccumuloQuery ( ) { super ( null ) ; } public AccumuloQuery ( DataStore < K , T > dataStore ) { super ( dataStore ) ; } } </s>
|
<s>
package org.apache.gora.accumulo.query;

import java.io.IOException;
import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.accumulo.core.client.RowIterator;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.gora.accumulo.store.AccumuloStore;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.query.Query;
import org.apache.gora.query.impl.ResultBase;
import org.apache.gora.store.DataStore;

/**
 * Result set over an Accumulo {@link Scanner}: iterates row by row, populating
 * one persistent object per Accumulo row and decoding the row id into the key.
 */
public class AccumuloResult<K, T extends Persistent> extends ResultBase<K, T> {

  private RowIterator iterator;

  /** Narrowing accessor: the backing data store is always an AccumuloStore here. */
  public AccumuloStore<K, T> getDataStore() {
    return (AccumuloStore<K, T>) super.getDataStore();
  }

  public AccumuloResult(DataStore<K, T> dataStore, Query<K, T> query, Scanner scanner) {
    super(dataStore, query);
    // RowIterator groups the flat key/value scan into one iterator per row
    iterator = new RowIterator(scanner.iterator());
  }

  /** Progress is not tracked for Accumulo scans; always reports zero. */
  @Override
  public float getProgress() throws IOException {
    return <NUM_LIT:0>;
  }

  /** The Scanner is owned by the caller/store, so there is nothing to close here. */
  @Override
  public void close() throws IOException {
  }

  /**
   * Advances to the next row: populates {@code persistent} from the row's
   * entries via the store, then decodes the row id back into {@code key}.
   */
  @Override
  protected boolean nextInner() throws IOException {
    if (!iterator.hasNext())
      return false;
    key = null;
    Iterator<Entry<Key, Value>> nextRow = iterator.next();
    ByteSequence row = getDataStore().populate(nextRow, persistent);
    key = (K) ((AccumuloStore) dataStore).fromBytes(getKeyClass(), row.toArray());
    return true;
  }
}
</s>
|
<s>
package org.apache.gora.accumulo.encoders;

import java.math.BigInteger;
import java.util.Arrays;

/**
 * BigInteger-based helpers for computing range-boundary keys over fixed-size,
 * unsigned big-endian byte encodings.
 */
public class Utils {

  /** Interprets {@code er} as an unsigned big-endian value by prepending a zero sign byte. */
  private static BigInteger newPositiveBigInteger(byte[] er) {
    byte[] copy = new byte[er.length + <NUM_LIT:1>];
    System.arraycopy(er, <NUM_LIT:0>, copy, <NUM_LIT:1>, er.length);
    BigInteger bi = new BigInteger(copy);
    return bi;
  }

  /**
   * Returns the last possible key of length {@code size} that sorts strictly
   * before prefix {@code er}: decrement the prefix by one and pad the tail
   * with the maximal byte value.
   *
   * @throws IllegalArgumentException if {@code er} is longer than {@code size},
   *         or the prefix is all zeros (nothing sorts before it)
   */
  public static byte[] lastPossibleKey(int size, byte[] er) {
    if (size == er.length)
      return er;
    if (er.length > size)
      throw new IllegalArgumentException();
    BigInteger bi = newPositiveBigInteger(er);
    if (bi.equals(BigInteger.ZERO))
      throw new IllegalArgumentException("<STR_LIT>");
    bi = bi.subtract(BigInteger.ONE);
    byte ret[] = new byte[size];
    // pad suffix with the masked fill byte -- presumably 0xff (maximal) -- TODO confirm
    Arrays.fill(ret, (byte) <NUM_LIT>);
    System.arraycopy(getBytes(bi, er.length), <NUM_LIT:0>, ret, <NUM_LIT:0>, er.length);
    return ret;
  }

  /**
   * Converts {@code bi} to exactly {@code minLen} big-endian bytes: strips the
   * extra sign byte that toByteArray() may add, then left-pads to the requested
   * length (with the sign-extension byte for negative values).
   */
  private static byte[] getBytes(BigInteger bi, int minLen) {
    byte[] ret = bi.toByteArray();
    if (ret[<NUM_LIT:0>] == <NUM_LIT:0>) {
      // drop the leading zero sign byte so the magnitude fits minLen
      byte copy[] = new byte[ret.length - <NUM_LIT:1>];
      System.arraycopy(ret, <NUM_LIT:1>, copy, <NUM_LIT:0>, copy.length);
      ret = copy;
    }
    byte copy[] = new byte[minLen];
    if (bi.compareTo(BigInteger.ZERO) < <NUM_LIT:0>) {
      // negative values left-padded with the masked sign byte -- presumably 0xff -- TODO confirm
      Arrays.fill(copy, (byte) <NUM_LIT>);
    }
    System.arraycopy(ret, <NUM_LIT:0>, copy, minLen - ret.length, ret.length);
    return copy;
  }

  /**
   * Returns the smallest key of length {@code size} that sorts strictly after
   * every key starting with prefix {@code per}: increment a full-length key by
   * one, or zero-pad a shorter prefix out to {@code size}.
   *
   * @throws IllegalArgumentException if {@code per} is longer than {@code size},
   *         or incrementing wraps around (all bytes were maximal)
   */
  public static byte[] followingKey(int size, byte[] per) {
    if (per.length > size)
      throw new IllegalArgumentException();
    if (size == per.length) {
      BigInteger bi = new BigInteger(per);
      bi = bi.add(BigInteger.ONE);
      if (bi.equals(BigInteger.ZERO)) {
        // signed -1 (all 0xff) incremented to zero: per was the maximum key
        throw new IllegalArgumentException("<STR_LIT>");
      }
      return getBytes(bi, size);
    } else {
      return Arrays.copyOf(per, size);
    }
  }
}
</s>
|
<s>
package org.apache.gora.accumulo.encoders;

/**
 * Encoder that renders each primitive as lowercase hexadecimal ASCII digits,
 * two hex characters per byte, so encoded keys remain printable and preserve
 * unsigned ordering.
 */
public class HexEncoder implements Encoder {

  // the 16 hex digit characters, indexed by nibble value
  private byte chars[] = new byte[] { '<CHAR_LIT:0>', '<CHAR_LIT:1>', '<CHAR_LIT>', '<CHAR_LIT>', '<CHAR_LIT>', '<CHAR_LIT>', '<CHAR_LIT>', '<CHAR_LIT>', '<CHAR_LIT>', '<CHAR_LIT:9>', '<CHAR_LIT:a>', '<CHAR_LIT:b>', '<CHAR_LIT:c>', '<CHAR_LIT>', '<CHAR_LIT:e>', '<CHAR_LIT>' };

  /** Fills {@code a} with the hex digits of {@code l}, least significant nibble at the end. */
  private void encode(byte[] a, long l) {
    for (int i = a.length - <NUM_LIT:1>; i >= <NUM_LIT:0>; i--) {
      a[i] = chars[(int) (l & <NUM_LIT>)];
      l = l >>> <NUM_LIT:4>;
    }
  }

  /**
   * Maps one hex digit character back to its nibble value.
   * NOTE(review): only digits and lowercase letters are accepted -- confirm
   * uppercase hex can never reach this decoder.
   */
  private int fromChar(byte b) {
    if (b >= '<CHAR_LIT:0>' && b <= '<CHAR_LIT:9>') {
      return (b - '<CHAR_LIT:0>');
    } else if (b >= '<CHAR_LIT:a>' && b <= '<CHAR_LIT>') {
      return (b - '<CHAR_LIT:a>' + <NUM_LIT:10>);
    }
    throw new IllegalArgumentException("<STR_LIT>" + b);
  }

  /** Folds a run of hex digit characters back into an integer value. */
  private long decode(byte[] a) {
    long b = <NUM_LIT:0>;
    for (int i = <NUM_LIT:0>; i < a.length; i++) {
      b = b << <NUM_LIT:4>;
      b |= fromChar(a[i]);
    }
    return b;
  }

  @Override
  public byte[] encodeByte(byte b, byte[] ret) {
    // mask (presumably 0xff) prevents sign extension polluting the digits -- TODO confirm
    encode(ret, <NUM_LIT> & b);
    return ret;
  }

  @Override
  public byte[] encodeByte(byte b) {
    return encodeByte(b, new byte[<NUM_LIT:2>]);
  }

  @Override
  public byte decodeByte(byte[] a) {
    return (byte) decode(a);
  }

  @Override
  public byte[] encodeShort(short s) {
    return encodeShort(s, new byte[<NUM_LIT:4>]);
  }

  @Override
  public byte[] encodeShort(short s, byte[] ret) {
    encode(ret, <NUM_LIT> & s);
    return ret;
  }

  @Override
  public short decodeShort(byte[] a) {
    return (short) decode(a);
  }

  @Override
  public byte[] encodeInt(int i) {
    return encodeInt(i, new byte[<NUM_LIT:8>]);
  }

  @Override
  public byte[] encodeInt(int i, byte[] ret) {
    encode(ret, i);
    return ret;
  }

  @Override
  public int decodeInt(byte[] a) {
    return (int) decode(a);
  }

  @Override
  public byte[] encodeLong(long l) {
    return encodeLong(l, new byte[<NUM_LIT:16>]);
  }

  @Override
  public byte[] encodeLong(long l, byte[] ret) {
    encode(ret, l);
    return ret;
  }

  @Override
  public long decodeLong(byte[] a) {
    return decode(a);
  }

  @Override
  public byte[] encodeDouble(double d) {
    return encodeDouble(d, new byte[<NUM_LIT:16>]);
  }

  @Override
  public byte[] encodeDouble(double d, byte[] ret) {
    // doubles are hex-encoded via their raw IEEE-754 long bits
    return encodeLong(Double.doubleToRawLongBits(d), ret);
  }

  @Override
  public double decodeDouble(byte[] a) {
    return Double.longBitsToDouble(decodeLong(a));
  }

  @Override
  public byte[] encodeFloat(float d) {
    // NOTE(review): buffer sized like a long (16 digits) although an int only
    // needs 8; round-trips through decodeFloat but looks inconsistent with
    // encodeInt -- confirm the masked literal
    return encodeFloat(d, new byte[<NUM_LIT:16>]);
  }

  @Override
  public byte[] encodeFloat(float d, byte[] ret) {
    return encodeInt(Float.floatToRawIntBits(d), ret);
  }

  @Override
  public float decodeFloat(byte[] a) {
    return Float.intBitsToFloat(decodeInt(a));
  }

  @Override
  public boolean decodeBoolean(byte[] val) {
    if (decodeByte(val) == <NUM_LIT:1>) {
      return true;
    }
    return false;
  }

  @Override
  public byte[] encodeBoolean(boolean b) {
    return encodeBoolean(b, new byte[<NUM_LIT:2>]);
  }

  @Override
  public byte[] encodeBoolean(boolean b, byte[] ret) {
    if (b)
      encode(ret, <NUM_LIT:1>);
    else
      encode(ret, <NUM_LIT:0>);
    return ret;
  }

  /** Packs hex digit characters into raw bytes, two digits per byte. */
  private byte[] toBinary(byte[] hex) {
    byte[] bin = new byte[(hex.length / <NUM_LIT:2>) + (hex.length % <NUM_LIT:2>)];
    int j = <NUM_LIT:0>;
    for (int i = <NUM_LIT:0>; i < bin.length; i++) {
      bin[i] = (byte) (fromChar(hex[j++]) << <NUM_LIT:4>);
      // odd digit count: the final byte keeps only its high nibble
      if (j >= hex.length)
        break;
      bin[i] |= (byte) fromChar(hex[j++]);
    }
    return bin;
  }

  /** Expands raw bytes back into hex digit characters, two per byte. */
  private byte[] fromBinary(byte[] bin) {
    byte[] hex = new byte[bin.length * <NUM_LIT:2>];
    int j = <NUM_LIT:0>;
    for (int i = <NUM_LIT:0>; i < bin.length; i++) {
      hex[j++] = chars[<NUM_LIT> & (bin[i] >>> <NUM_LIT:4>)];
      hex[j++] = chars[<NUM_LIT> & bin[i]];
    }
    return hex;
  }

  /** Range-boundary helpers work on the packed binary form, then re-hex the result. */
  @Override
  public byte[] followingKey(int size, byte[] per) {
    return fromBinary(Utils.followingKey(size, toBinary(per)));
  }

  @Override
  public byte[] lastPossibleKey(int size, byte[] er) {
    return fromBinary(Utils.lastPossibleKey(size, toBinary(er)));
  }
}
</s>
|
<s>
package org.apache.gora.accumulo.encoders;

/**
 * Strategy interface for converting Java primitives to and from the byte
 * representations used in Accumulo keys. Each encode method has a variant that
 * writes into a caller-supplied, correctly sized buffer and one that allocates
 * a fresh buffer.
 */
public interface Encoder {
  public byte[] encodeByte(byte b, byte[] ret);
  public byte[] encodeByte(byte b);
  public byte decodeByte(byte[] a);
  public byte[] encodeShort(short s);
  public byte[] encodeShort(short s, byte ret[]);
  public short decodeShort(byte[] a);
  public byte[] encodeInt(int i);
  public byte[] encodeInt(int i, byte ret[]);
  public int decodeInt(byte[] a);
  public byte[] encodeLong(long l);
  public byte[] encodeLong(long l, byte ret[]);
  public long decodeLong(byte[] a);
  public byte[] encodeDouble(double d);
  public byte[] encodeDouble(double d, byte[] ret);
  public double decodeDouble(byte[] a);
  public byte[] encodeFloat(float d);
  public byte[] encodeFloat(float f, byte[] ret);
  public float decodeFloat(byte[] a);
  public boolean decodeBoolean(byte[] val);
  public byte[] encodeBoolean(boolean b);
  public byte[] encodeBoolean(boolean b, byte[] ret);

  /** Smallest key of length {@code size} sorting after every key with prefix {@code per}. */
  byte[] followingKey(int size, byte[] per);

  /** Last possible key of length {@code size} sorting strictly before prefix {@code er}. */
  byte[] lastPossibleKey(int size, byte[] er);
}
</s>
|
package org.apache.gora.accumulo.encoders;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.gora.accumulo.util.FixedByteArrayOutputStream;

/**
 * Encoder that stores primitives in their natural big-endian binary form
 * (java.io.DataOutput encoding). Destination buffers are caller-supplied or
 * allocated at the primitive's exact width; checked I/O errors are rethrown as
 * RuntimeException because the underlying streams are purely in-memory.
 */
public class BinaryEncoder implements Encoder {

  /** Encodes a short into a fresh 2-byte array. */
  public byte[] encodeShort(short s) {
    return encodeShort(s, new byte[2]);
  }

  /** Writes a big-endian short into {@code ret}. */
  public byte[] encodeShort(short s, byte ret[]) {
    try {
      DataOutputStream dos = new DataOutputStream(new FixedByteArrayOutputStream(ret));
      dos.writeShort(s);
      return ret;
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  public short decodeShort(byte[] a) {
    try {
      DataInputStream dis = new DataInputStream(new ByteArrayInputStream(a));
      return dis.readShort();
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  /** Encodes an int into a fresh 4-byte array. */
  public byte[] encodeInt(int i) {
    return encodeInt(i, new byte[4]);
  }

  public byte[] encodeInt(int i, byte ret[]) {
    try {
      DataOutputStream dos = new DataOutputStream(new FixedByteArrayOutputStream(ret));
      dos.writeInt(i);
      return ret;
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  public int decodeInt(byte[] a) {
    try {
      DataInputStream dis = new DataInputStream(new ByteArrayInputStream(a));
      return dis.readInt();
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  /** Encodes a long into a fresh 8-byte array. */
  public byte[] encodeLong(long l) {
    return encodeLong(l, new byte[8]);
  }

  public byte[] encodeLong(long l, byte ret[]) {
    try {
      DataOutputStream dos = new DataOutputStream(new FixedByteArrayOutputStream(ret));
      dos.writeLong(l);
      return ret;
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  public long decodeLong(byte[] a) {
    try {
      DataInputStream dis = new DataInputStream(new ByteArrayInputStream(a));
      return dis.readLong();
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  /** Doubles are stored as the raw IEEE-754 bit pattern of a long (8 bytes). */
  public byte[] encodeDouble(double d) {
    return encodeDouble(d, new byte[8]);
  }

  public byte[] encodeDouble(double d, byte[] ret) {
    return encodeLong(Double.doubleToRawLongBits(d), ret);
  }

  public double decodeDouble(byte[] a) {
    return Double.longBitsToDouble(decodeLong(a));
  }

  /** Floats are stored as the raw IEEE-754 bit pattern of an int (4 bytes). */
  public byte[] encodeFloat(float d) {
    return encodeFloat(d, new byte[4]);
  }

  public byte[] encodeFloat(float f, byte[] ret) {
    return encodeInt(Float.floatToRawIntBits(f), ret);
  }

  public float decodeFloat(byte[] a) {
    return Float.intBitsToFloat(decodeInt(a));
  }

  /**
   * Writes the byte into the first slot of {@code ret}.
   * BUGFIX: previously stored the constant 0 instead of {@code b}, so every
   * byte encoded identically and could not round-trip through decodeByte.
   */
  public byte[] encodeByte(byte b, byte[] ret) {
    ret[0] = b;
    return ret;
  }

  public byte[] encodeByte(byte b) {
    return encodeByte(b, new byte[1]);
  }

  public byte decodeByte(byte[] a) {
    return a[0];
  }

  public boolean decodeBoolean(byte[] a) {
    try {
      DataInputStream dis = new DataInputStream(new ByteArrayInputStream(a));
      return dis.readBoolean();
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  public byte[] encodeBoolean(boolean b) {
    return encodeBoolean(b, new byte[1]);
  }

  public byte[] encodeBoolean(boolean b, byte[] ret) {
    try {
      DataOutputStream dos = new DataOutputStream(new FixedByteArrayOutputStream(ret));
      dos.writeBoolean(b);
      return ret;
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  /** Range-boundary helpers delegate to the shared BigInteger-based Utils. */
  @Override
  public byte[] lastPossibleKey(int size, byte[] er) {
    return Utils.lastPossibleKey(size, er);
  }

  @Override
  public byte[] followingKey(int size, byte[] per) {
    return Utils.followingKey(size, per);
  }
}
|
<s>
package org.apache.gora.accumulo.encoders;

/**
 * BinaryEncoder variant whose output sorts correctly for signed values when
 * bytes are compared lexicographically as unsigned: integral types have the
 * sign bit flipped on write and restored on read; floating point values use
 * the IEEE-754 total-ordering transform (negatives bit-inverted, non-negatives
 * sign-bit flipped). The exact mask/flip constants are masked in this copy --
 * presumably 0xffff / 0x8000 for short and the MIN_VALUE sign bits for
 * int/long -- TODO confirm masked literals.
 */
public class SignedBinaryEncoder extends BinaryEncoder {

  public byte[] encodeShort(short s, byte ret[]) {
    s = (short) ((s & <NUM_LIT>) ^ <NUM_LIT>);
    return super.encodeShort(s, ret);
  }

  public short decodeShort(byte[] a) {
    short s = super.decodeShort(a);
    // flipping again restores the original sign bit
    s = (short) ((s & <NUM_LIT>) ^ <NUM_LIT>);
    return s;
  }

  public byte[] encodeInt(int i, byte ret[]) {
    i = i ^ <NUM_LIT>;
    return super.encodeInt(i, ret);
  }

  public int decodeInt(byte[] a) {
    int i = super.decodeInt(a);
    i = i ^ <NUM_LIT>;
    return i;
  }

  public byte[] encodeLong(long l, byte ret[]) {
    l = l ^ <NUM_LIT>;
    return super.encodeLong(l, ret);
  }

  public long decodeLong(byte[] a) {
    long l = super.decodeLong(a);
    l = l ^ <NUM_LIT>;
    return l;
  }

  public byte[] encodeDouble(double d, byte[] ret) {
    long l = Double.doubleToRawLongBits(d);
    // total-order transform: negatives get all bits inverted, others flip the sign bit
    if (l < <NUM_LIT:0>)
      l = ~l;
    else
      l = l ^ <NUM_LIT>;
    return super.encodeLong(l, ret);
  }

  public double decodeDouble(byte[] a) {
    long l = super.decodeLong(a);
    // inverse of the encode transform, keyed off the encoded high bit
    if (l < <NUM_LIT:0>)
      l = l ^ <NUM_LIT>;
    else
      l = ~l;
    return Double.longBitsToDouble(l);
  }

  public byte[] encodeFloat(float f, byte[] ret) {
    int i = Float.floatToRawIntBits(f);
    if (i < <NUM_LIT:0>)
      i = ~i;
    else
      i = i ^ <NUM_LIT>;
    return super.encodeInt(i, ret);
  }

  public float decodeFloat(byte[] a) {
    int i = super.decodeInt(a);
    if (i < <NUM_LIT:0>)
      i = i ^ <NUM_LIT>;
    else
      i = ~i;
    return Float.intBitsToFloat(i);
  }
}
</s>
|
<s>
package org.apache.gora.persistency.impl;

import java.lang.reflect.Constructor;

import org.apache.gora.persistency.BeanFactory;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.util.ReflectionUtils;

/**
 * Default {@link BeanFactory}: creates key and persistent bean instances
 * reflectively and keeps one cached instance of each for cheap reuse.
 */
public class BeanFactoryImpl<K, T extends Persistent> implements BeanFactory<K, T> {

  private Class<K> keyClass;
  private Class<T> persistentClass;

  // cached no-arg constructor of the key class, null if the class has none
  private Constructor<K> keyConstructor;

  // cached singleton instances returned by getCachedKey()/getCachedPersistent()
  private K key;
  private T persistent;

  // true when the key class is itself Persistent; keys are then created via newInstance()
  private boolean isKeyPersistent = false;

  public BeanFactoryImpl(Class<K> keyClass, Class<T> persistentClass) {
    this.keyClass = keyClass;
    this.persistentClass = persistentClass;
    try {
      if (ReflectionUtils.hasConstructor(keyClass)) {
        this.keyConstructor = ReflectionUtils.getConstructor(keyClass);
        this.key = keyConstructor.newInstance(ReflectionUtils.EMPTY_OBJECT_ARRAY);
      }
      this.persistent = ReflectionUtils.newInstance(persistentClass);
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
    isKeyPersistent = Persistent.class.isAssignableFrom(keyClass);
  }

  /**
   * Creates a fresh key: via Persistent.newInstance for persistent key types,
   * otherwise through the cached no-arg constructor.
   *
   * @throws RuntimeException if the key class is not persistent and has no
   *         no-arg constructor
   */
  @Override
  @SuppressWarnings("<STR_LIT:unchecked>")
  public K newKey() throws Exception {
    if (isKeyPersistent)
      return (K) ((Persistent) key).newInstance(new StateManagerImpl());
    else if (keyConstructor == null) {
      throw new RuntimeException("<STR_LIT>");
    } else
      return keyConstructor.newInstance(ReflectionUtils.EMPTY_OBJECT_ARRAY);
  }

  /** Creates a fresh persistent bean managed by a new StateManagerImpl. */
  @SuppressWarnings("<STR_LIT:unchecked>")
  @Override
  public T newPersistent() {
    return (T) persistent.newInstance(new StateManagerImpl());
  }

  /** Shared cached key instance -- callers must not assume exclusive ownership. */
  @Override
  public K getCachedKey() {
    return key;
  }

  /** Shared cached bean instance -- callers must not assume exclusive ownership. */
  @Override
  public T getCachedPersistent() {
    return persistent;
  }

  @Override
  public Class<K> getKeyClass() {
    return keyClass;
  }

  @Override
  public Class<T> getPersistentClass() {
    return persistentClass;
  }

  public boolean isKeyPersistent() {
    return isKeyPersistent;
  }
}
</s>
|
package org.apache.gora.persistency.impl;

import java.util.BitSet;

import org.apache.gora.persistency.Persistent;
import org.apache.gora.persistency.StateManager;

/**
 * Default {@link StateManager}: tracks the "new" flag plus per-field dirty and
 * readable bits for a single managed persistent bean. The Persistent argument
 * on each method is ignored; all state is held directly in this instance.
 */
public class StateManagerImpl implements StateManager {

  protected boolean isNew;
  protected BitSet dirtyBits;
  protected BitSet readableBits;

  public StateManagerImpl() {
  }

  public void setManagedPersistent(Persistent persistent) {
    // size both bit sets to the bean's schema field count; one bit per field
    int fieldCount = persistent.getSchema().getFields().size();
    dirtyBits = new BitSet(fieldCount);
    readableBits = new BitSet(fieldCount);
    isNew = true;
  }

  @Override
  public boolean isNew(Persistent persistent) {
    return isNew;
  }

  @Override
  public void setNew(Persistent persistent) {
    isNew = true;
  }

  @Override
  public void clearNew(Persistent persistent) {
    isNew = false;
  }

  public void setDirty(Persistent persistent, int fieldIndex) {
    // a modified field is necessarily readable as well
    dirtyBits.set(fieldIndex);
    readableBits.set(fieldIndex);
  }

  public boolean isDirty(Persistent persistent, int fieldIndex) {
    return dirtyBits.get(fieldIndex);
  }

  public boolean isDirty(Persistent persistent) {
    return dirtyBits.cardinality() > 0;
  }

  @Override
  public void setDirty(Persistent persistent) {
    dirtyBits.set(0, dirtyBits.size());
  }

  @Override
  public void clearDirty(Persistent persistent, int fieldIndex) {
    dirtyBits.clear(fieldIndex);
  }

  public void clearDirty(Persistent persistent) {
    dirtyBits.clear();
  }

  public void setReadable(Persistent persistent, int fieldIndex) {
    readableBits.set(fieldIndex);
  }

  public boolean isReadable(Persistent persistent, int fieldIndex) {
    return readableBits.get(fieldIndex);
  }

  @Override
  public void clearReadable(Persistent persistent, int fieldIndex) {
    readableBits.clear(fieldIndex);
  }

  public void clearReadable(Persistent persistent) {
    readableBits.clear();
  }
}
|
<s>
package org.apache.gora.persistency.impl;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.specific.SpecificRecord;
import org.apache.gora.avro.PersistentDatumReader;
import org.apache.gora.persistency.ListGenericArray;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.persistency.StateManager;
import org.apache.gora.persistency.StatefulHashMap;

/**
 * Base class for generated Persistent beans: field-name/index bookkeeping,
 * state-manager delegation for new/dirty/readable flags, and Avro-schema-aware
 * clear/equals/hashCode/toString implementations.
 */
public abstract class PersistentBase implements Persistent {

  // per-concrete-class mapping of field name -> field index, filled by registerFields()
  protected static Map<Class<?>, Map<String, Integer>> FIELD_MAP = new HashMap<Class<?>, Map<String, Integer>>();

  // per-concrete-class ordered field-name arrays, filled by registerFields()
  protected static Map<Class<?>, String[]> FIELDS = new HashMap<Class<?>, String[]>();

  // shared reader used by clone() for deep copies
  protected static final PersistentDatumReader<Persistent> datumReader = new PersistentDatumReader<Persistent>();

  private StateManager stateManager;

  protected PersistentBase() {
    this(new StateManagerImpl());
  }

  protected PersistentBase(StateManager stateManager) {
    this.stateManager = stateManager;
    stateManager.setManagedPersistent(this);
  }

  /** Called once per generated subclass to register its schema field names in order. */
  protected static void registerFields(Class<?> clazz, String... fields) {
    FIELDS.put(clazz, fields);
    int fieldsLength = fields == null ? <NUM_LIT:0> : fields.length;
    HashMap<String, Integer> map = new HashMap<String, Integer>(fieldsLength);
    for (int i = <NUM_LIT:0>; i < fieldsLength; i++) {
      map.put(fields[i], i);
    }
    FIELD_MAP.put(clazz, map);
  }

  @Override
  public StateManager getStateManager() {
    return stateManager;
  }

  @Override
  public String[] getFields() {
    return FIELDS.get(getClass());
  }

  @Override
  public String getField(int index) {
    return FIELDS.get(getClass())[index];
  }

  @Override
  public int getFieldIndex(String field) {
    return FIELD_MAP.get(getClass()).get(field);
  }

  /**
   * Resets every field to its empty/default value according to its Avro type,
   * reusing existing containers where possible, then clears all dirty and
   * readable flags.
   */
  @Override
  @SuppressWarnings("<STR_LIT:rawtypes>")
  public void clear() {
    List<Field> fields = getSchema().getFields();
    for (int i = <NUM_LIT:0>; i < getFields().length; i++) {
      switch (fields.get(i).schema().getType()) {
      case MAP:
        if (get(i) != null) {
          if (get(i) instanceof StatefulHashMap) {
            // reuse() also resets the per-key state tracking
            ((StatefulHashMap) get(i)).reuse();
          } else {
            ((Map) get(i)).clear();
          }
        }
        break;
      case ARRAY:
        if (get(i) != null) {
          if (get(i) instanceof ListGenericArray) {
            ((ListGenericArray) get(i)).clear();
          } else {
            // non-reusable array implementation: replace with a fresh one
            put(i, new ListGenericArray(fields.get(i).schema()));
          }
        }
        break;
      case RECORD:
        // nested persistent records are cleared recursively
        Persistent field = ((Persistent) get(i));
        if (field != null)
          field.clear();
        break;
      case BOOLEAN:
        put(i, false);
        break;
      case INT:
        put(i, <NUM_LIT:0>);
        break;
      case DOUBLE:
        put(i, <NUM_LIT>);
        break;
      case FLOAT:
        put(i, <NUM_LIT>);
        break;
      case LONG:
        put(i, <NUM_LIT>);
        break;
      case NULL:
        break;
      default:
        put(i, null);
        break;
      }
    }
    clearDirty();
    clearReadable();
  }

  /* new / dirty / readable operations delegate to the state manager; the
   * String-keyed variants first resolve the field name to its index */

  @Override
  public boolean isNew() {
    return getStateManager().isNew(this);
  }

  @Override
  public void setNew() {
    getStateManager().setNew(this);
  }

  @Override
  public void clearNew() {
    getStateManager().clearNew(this);
  }

  @Override
  public boolean isDirty() {
    return getStateManager().isDirty(this);
  }

  @Override
  public boolean isDirty(int fieldIndex) {
    return getStateManager().isDirty(this, fieldIndex);
  }

  @Override
  public boolean isDirty(String field) {
    return isDirty(getFieldIndex(field));
  }

  @Override
  public void setDirty() {
    getStateManager().setDirty(this);
  }

  @Override
  public void setDirty(int fieldIndex) {
    getStateManager().setDirty(this, fieldIndex);
  }

  @Override
  public void setDirty(String field) {
    setDirty(getFieldIndex(field));
  }

  @Override
  public void clearDirty(int fieldIndex) {
    getStateManager().clearDirty(this, fieldIndex);
  }

  @Override
  public void clearDirty(String field) {
    clearDirty(getFieldIndex(field));
  }

  @Override
  public void clearDirty() {
    getStateManager().clearDirty(this);
  }

  @Override
  public boolean isReadable(int fieldIndex) {
    return getStateManager().isReadable(this, fieldIndex);
  }

  @Override
  public boolean isReadable(String field) {
    return isReadable(getFieldIndex(field));
  }

  @Override
  public void setReadable(int fieldIndex) {
    getStateManager().setReadable(this, fieldIndex);
  }

  @Override
  public void setReadable(String field) {
    setReadable(getFieldIndex(field));
  }

  @Override
  public void clearReadable() {
    getStateManager().clearReadable(this);
  }

  @Override
  public void clearReadable(int fieldIndex) {
    getStateManager().clearReadable(this, fieldIndex);
  }

  @Override
  public void clearReadable(String field) {
    clearReadable(getFieldIndex(field));
  }

  /**
   * NOTE(review): equality is decided by schema equality plus hashCode
   * equality, so two records whose hash codes collide compare equal -- confirm
   * this approximation is acceptable to all callers.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o)
      return true;
    if (!(o instanceof SpecificRecord))
      return false;
    SpecificRecord r2 = (SpecificRecord) o;
    if (!this.getSchema().equals(r2.getSchema()))
      return false;
    return this.hashCode() == r2.hashCode();
  }

  /** Standard 31-based accumulation over all schema field values. */
  @Override
  public int hashCode() {
    final int prime = <NUM_LIT:31>;
    int result = <NUM_LIT:1>;
    List<Field> fields = this.getSchema().getFields();
    int end = fields.size();
    for (int i = <NUM_LIT:0>; i < end; i++) {
      result = prime * result + getFieldHashCode(i, fields.get(i));
    }
    return result;
  }

  /** Hash of one field value; BYTES fields hash by content rather than ByteBuffer identity. */
  private int getFieldHashCode(int i, Field field) {
    Object o = get(i);
    if (o == null)
      return <NUM_LIT:0>;
    if (field.schema().getType() == Type.BYTES) {
      return getByteBufferHashCode((ByteBuffer) o);
    }
    return o.hashCode();
  }

  /** Content-based hash over the buffer bytes from arrayOffset() to limit()-1. */
  private int getByteBufferHashCode(ByteBuffer buf) {
    int h = <NUM_LIT:1>;
    int p = buf.arrayOffset();
    for (int j = buf.limit() - <NUM_LIT:1>; j >= p; j--)
      h = <NUM_LIT:31> * h + buf.get(j);
    return h;
  }

  /** Deep copy via the shared Avro datum reader. */
  @Override
  public Persistent clone() {
    return datumReader.clone(this, getSchema());
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    builder.append(super.toString());
    builder.append("<STR_LIT>");
    List<Field> fields = getSchema().getFields();
    for (int i = <NUM_LIT:0>; i < fields.size(); i++) {
      builder.append("<STR_LIT>").append(fields.get(i).name()).append("<STR_LIT>");
      builder.append(get(i)).append("<STR_LIT>");
    }
    builder.append("<STR_LIT:}>");
    return builder.toString();
  }

  /** Null-safe equality between field {@code index}'s current value and {@code value}. */
  protected boolean isFieldEqual(int index, Object value) {
    Object old = get(index);
    if (old == null && value == null)
      return true;
    if (old == null || value == null)
      return false;
    return value.equals(old);
  }
}
</s>
|
<s>
package org.apache.gora.persistency;

import java.util.HashMap;
import java.util.Map;

/**
 * A {@link HashMap} that records a per-key modification {@link State}, so that
 * datastores can persist only the entries that actually changed. A key absent
 * from the state map is considered unchanged (clean).
 */
@SuppressWarnings("serial")
public class StatefulHashMap<K, V> extends HashMap<K, V> implements StatefulMap<K, V> {

  /** Per-key modification state; absence means the mapping is unchanged. */
  private Map<K, State> keyStates = new HashMap<K, State>();

  public StatefulHashMap() {
    this(null);
  }

  /** Copy constructor: entries are inserted via put(), then states cleared so the copy starts clean. */
  public StatefulHashMap(Map<K, V> m) {
    super();
    if (m == null) {
      return;
    }
    for (java.util.Map.Entry<K, V> entry : m.entrySet()) {
      put(entry.getKey(), entry.getValue());
    }
    clearStates();
  }

  /**
   * Stores the mapping and marks the key DIRTY only when the value actually
   * changed.
   * BUGFIX: the previous code called value.equals(old) unconditionally and
   * threw a NullPointerException for null values, which HashMap permits.
   */
  @Override
  public V put(K key, V value) {
    keyStates.remove(key);
    V old = super.put(key, value);
    boolean changed = (value == null) ? (old != null) : !value.equals(old);
    if (changed) {
      keyStates.put(key, State.DIRTY);
    }
    return old;
  }

  /**
   * Marks the key DELETED but intentionally leaves the mapping in the
   * underlying map so consumers of {@link #states()} can still reach the value.
   * NOTE(review): always returns null, deviating from the Map contract of
   * returning the previous value -- confirm no caller relies on the return.
   */
  @SuppressWarnings("unchecked")
  @Override
  public V remove(Object key) {
    keyStates.put((K) key, State.DELETED);
    return null;
  }

  @Override
  public void putAll(Map<? extends K, ? extends V> m) {
    for (Entry<? extends K, ? extends V> e : m.entrySet()) {
      put(e.getKey(), e.getValue());
    }
  }

  /** Marks every key DELETED; the entries themselves are kept (see remove). */
  @Override
  public void clear() {
    for (Entry<K, V> e : entrySet()) {
      keyStates.put(e.getKey(), State.DELETED);
    }
  }

  /** @return the recorded state for the key, or null if unchanged */
  public State getState(K key) {
    return keyStates.get(key);
  }

  /** Forgets all recorded states without touching the entries. */
  public void clearStates() {
    keyStates.clear();
  }

  public void putState(K key, State state) {
    keyStates.put(key, state);
  }

  /** Live view of all tracked key states. */
  public Map<K, State> states() {
    return keyStates;
  }

  /** Drops all entries and all states, returning the map to a fresh condition. */
  public void reuse() {
    super.clear();
    clearStates();
  }
}
</s>
|
<s>
package org.apache.gora.persistency;

import org.apache.avro.specific.SpecificRecord;

/**
 * Base interface for all objects persisted by Gora: an Avro SpecificRecord
 * augmented with per-field dirty/readable state tracking delegated to a
 * {@link StateManager}.
 */
public interface Persistent extends SpecificRecord, Cloneable {

  /** The StateManager tracking this bean's new/dirty/readable state. */
  StateManager getStateManager();

  /** Creates a new empty instance of the same type managed by the given StateManager. */
  Persistent newInstance(StateManager stateManager);

  /* field name / index mapping */
  String[] getFields();
  String getField(int index);
  int getFieldIndex(String field);

  /** Resets all fields to their default values and clears state flags. */
  void clear();

  /* "new" flag: true for instances not yet stored */
  boolean isNew();
  void setNew();
  void clearNew();

  /* dirty flags: fields modified since the last store/clear */
  boolean isDirty();
  boolean isDirty(int fieldIndex);
  boolean isDirty(String field);
  void setDirty();
  void setDirty(int fieldIndex);
  void setDirty(String field);
  void clearDirty(int fieldIndex);
  void clearDirty(String field);
  void clearDirty();

  /* readable flags: fields whose values are populated (see StateManagerImpl,
   * which marks a field readable whenever it is marked dirty) */
  boolean isReadable(int fieldIndex);
  boolean isReadable(String field);
  void setReadable(int fieldIndex);
  void setReadable(String field);
  void clearReadable(int fieldIndex);
  void clearReadable(String field);
  void clearReadable();

  Persistent clone();
}
</s>
|
<s>
package org.apache.gora.persistency;

/**
 * Factory for a datastore's key and persistent bean instances; also exposes
 * cached shared instances for cheap reuse.
 */
public interface BeanFactory<K, T extends Persistent> {

  /** Creates a fresh key instance. */
  K newKey() throws Exception;

  /** Creates a fresh persistent bean instance. */
  T newPersistent();

  /** Shared cached key -- callers must not assume exclusive ownership. */
  K getCachedKey();

  /** Shared cached bean -- callers must not assume exclusive ownership. */
  T getCachedPersistent();

  Class<K> getKeyClass();

  Class<T> getPersistentClass();
}
</s>
|
<s>
package org.apache.gora.persistency;

/**
 * Tracks persistence state (new flag plus per-field dirty and readable bits)
 * on behalf of a {@link Persistent} bean; every operation receives the bean it
 * applies to.
 */
public interface StateManager {

  /** Binds this manager to the given bean and initializes its state. */
  void setManagedPersistent(Persistent persistent);

  boolean isNew(Persistent persistent);
  void setNew(Persistent persistent);
  void clearNew(Persistent persistent);

  boolean isDirty(Persistent persistent);
  boolean isDirty(Persistent persistent, int fieldIndex);
  void setDirty(Persistent persistent);
  void setDirty(Persistent persistent, int fieldIndex);
  void clearDirty(Persistent persistent, int fieldIndex);
  void clearDirty(Persistent persistent);

  boolean isReadable(Persistent persistent, int fieldIndex);
  void setReadable(Persistent persistent, int fieldIndex);
  void clearReadable(Persistent persistent, int fieldIndex);
  void clearReadable(Persistent persistent);
}
</s>
|
<s>
package org.apache.gora.persistency;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;

/**
 * Avro {@link GenericArray} backed by a {@link List}; equality and ordering
 * are delegated to Avro's generic-data comparison over the element schema.
 */
public class ListGenericArray<T> implements GenericArray<T>, Comparable<ListGenericArray<T>> {

  // initial capacity used when no explicit size is given
  private static final int LIST_DEFAULT_SIZE = <NUM_LIT:10>;

  private List<T> list;
  private Schema schema;

  /** Wraps the given list directly (no defensive copy). */
  public ListGenericArray(Schema schema, List<T> list) {
    this.schema = schema;
    this.list = list;
  }

  public ListGenericArray(Schema schema) {
    this(LIST_DEFAULT_SIZE, schema);
  }

  public ListGenericArray(int size, Schema schema) {
    this.schema = schema;
    this.list = new ArrayList<T>(size);
  }

  @Override
  public void add(T element) {
    list.add(element);
  }

  @Override
  public void clear() {
    list.clear();
  }

  /** NOTE(review): buffer-reuse peek is not supported here; always returns null. */
  @Override
  public T peek() {
    return null;
  }

  @Override
  public long size() {
    return list.size();
  }

  @Override
  public Iterator<T> iterator() {
    return list.iterator();
  }

  @Override
  public Schema getSchema() {
    return schema;
  }

  @Override
  public int hashCode() {
    return this.list.hashCode();
  }

  /** Equal iff the schemas match and Avro's element-wise comparison returns 0. */
  @SuppressWarnings({ "<STR_LIT:unchecked>", "<STR_LIT:rawtypes>" })
  @Override
  public boolean equals(Object obj) {
    if (obj == this)
      return true;
    if (!(obj instanceof ListGenericArray))
      return false;
    ListGenericArray that = (ListGenericArray) obj;
    if (!schema.equals(that.schema))
      return false;
    return this.compareTo(that) == <NUM_LIT:0>;
  }

  @Override
  public int compareTo(ListGenericArray<T> o) {
    return GenericData.get().compare(this, o, schema);
  }

  @Override
  public String toString() {
    return list.toString();
  }
}
</s>
|
<s>
package org.apache.gora.persistency;

import java.util.Map;

/**
 * A {@link Map} that remembers a modification {@link State} per key, so stores
 * can persist only the entries that changed.
 */
public interface StatefulMap<K, V> extends Map<K, V> {

  /** State recorded for the key, or null if the mapping is unchanged. */
  State getState(K key);

  void putState(K key, State state);

  /** Live view of all tracked key states. */
  Map<K, State> states();

  /** Forgets all recorded states without touching the entries. */
  void clearStates();

  /** Clears both entries and states so the map can be reused. */
  void reuse();
}
</s>
|
<s> package org . apache . gora . persistency ; public enum State { NEW , CLEAN , DIRTY , DELETED } </s>
|
<s> package org . apache . gora . compiler ; import java . io . File ; import java . io . FileOutputStream ; import java . io . IOException ; import java . io . OutputStreamWriter ; import java . io . Writer ; import java . util . HashSet ; import java . util . List ; import java . util . Map ; import java . util . Set ; import org . apache . avro . Protocol ; import org . apache . avro . Schema ; import org . apache . avro . Protocol . Message ; import org . apache . avro . Schema . Field ; import org . apache . avro . specific . SpecificData ; import org . slf4j . Logger ; import org . slf4j . LoggerFactory ; public class GoraCompiler { private File dest ; private Writer out ; private Set < Schema > queue = new HashSet < Schema > ( ) ; private static final Logger log = LoggerFactory . getLogger ( GoraCompiler . class ) ; private GoraCompiler ( File dest ) { this . dest = dest ; } public static void compileProtocol ( File src , File dest ) throws IOException { GoraCompiler compiler = new GoraCompiler ( dest ) ; Protocol protocol = Protocol . parse ( src ) ; for ( Schema s : protocol . getTypes ( ) ) compiler . enqueue ( s ) ; compiler . compileInterface ( protocol ) ; compiler . compile ( ) ; } public static void compileSchema ( File src , File dest ) throws IOException { log . info ( "<STR_LIT>" + src + "<STR_LIT:U+0020toU+0020>" + dest ) ; GoraCompiler compiler = new GoraCompiler ( dest ) ; compiler . enqueue ( Schema . parse ( src ) ) ; compiler . compile ( ) ; } private static String camelCasify ( String s ) { return s . substring ( <NUM_LIT:0> , <NUM_LIT:1> ) . toUpperCase ( ) + s . substring ( <NUM_LIT:1> ) ; } private static String toUpperCase ( String s ) { StringBuilder builder = new StringBuilder ( ) ; for ( int i = <NUM_LIT:0> ; i < s . length ( ) ; i ++ ) { if ( i > <NUM_LIT:0> ) { if ( Character . isUpperCase ( s . charAt ( i ) ) && Character . isLowerCase ( s . charAt ( i - <NUM_LIT:1> ) ) && Character . isLetter ( s . charAt ( i ) ) ) { builder . 
append ( "<STR_LIT:_>" ) ; } } builder . append ( Character . toUpperCase ( s . charAt ( i ) ) ) ; } return builder . toString ( ) ; } private void enqueue ( Schema schema ) throws IOException { if ( queue . contains ( schema ) ) return ; switch ( schema . getType ( ) ) { case RECORD : queue . add ( schema ) ; for ( Field field : schema . getFields ( ) ) enqueue ( field . schema ( ) ) ; break ; case MAP : enqueue ( schema . getValueType ( ) ) ; break ; case ARRAY : enqueue ( schema . getElementType ( ) ) ; break ; case UNION : for ( Schema s : schema . getTypes ( ) ) enqueue ( s ) ; break ; case ENUM : case FIXED : queue . add ( schema ) ; break ; case STRING : case BYTES : case INT : case LONG : case FLOAT : case DOUBLE : case BOOLEAN : case NULL : break ; default : throw new RuntimeException ( "<STR_LIT>" + schema ) ; } } private void compile ( ) throws IOException { for ( Schema schema : queue ) compile ( schema ) ; } private void compileInterface ( Protocol protocol ) throws IOException { startFile ( protocol . getName ( ) , protocol . getNamespace ( ) ) ; try { line ( <NUM_LIT:0> , "<STR_LIT>" + protocol . getName ( ) + "<STR_LIT>" ) ; out . append ( "<STR_LIT:n>" ) ; for ( Map . Entry < String , Message > e : protocol . getMessages ( ) . entrySet ( ) ) { String name = e . getKey ( ) ; Message message = e . getValue ( ) ; Schema request = message . getRequest ( ) ; Schema response = message . getResponse ( ) ; line ( <NUM_LIT:1> , unbox ( response ) + "<STR_LIT:U+0020>" + name + "<STR_LIT:(>" + params ( request ) + "<STR_LIT:)>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + errors ( message . getErrors ( ) ) + "<STR_LIT:;>" ) ; } line ( <NUM_LIT:0> , "<STR_LIT:}>" ) ; } finally { out . close ( ) ; } } private void startFile ( String name , String space ) throws IOException { File dir = new File ( dest , space . replace ( '<CHAR_LIT:.>' , File . separatorChar ) ) ; if ( ! dir . exists ( ) ) if ( ! dir . 
mkdirs ( ) ) throw new IOException ( "<STR_LIT>" + dir ) ; name = cap ( name ) + "<STR_LIT>" ; out = new OutputStreamWriter ( new FileOutputStream ( new File ( dir , name ) ) ) ; header ( space ) ; } private void header ( String namespace ) throws IOException { if ( namespace != null ) { line ( <NUM_LIT:0> , "<STR_LIT>" + namespace + "<STR_LIT>" ) ; } line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; for ( Schema s : queue ) if ( namespace == null ? ( s . getNamespace ( ) != null ) : ! namespace . equals ( s . getNamespace ( ) ) ) line ( <NUM_LIT:0> , "<STR_LIT>" + SpecificData . get ( ) . getClassName ( s ) + "<STR_LIT:;>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" ) ; } private String params ( Schema request ) throws IOException { StringBuilder b = new StringBuilder ( ) ; int count = <NUM_LIT:0> ; for ( Field field : request . getFields ( ) ) { b . append ( unbox ( field . schema ( ) ) ) ; b . append ( "<STR_LIT:U+0020>" ) ; b . append ( field . name ( ) ) ; if ( ++ count < request . getFields ( ) . size ( ) ) b . append ( "<STR_LIT:U+002CU+0020>" ) ; } return b . toString ( ) ; } private String errors ( Schema errs ) throws IOException { StringBuilder b = new StringBuilder ( ) ; for ( Schema error : errs . getTypes ( ) . subList ( <NUM_LIT:1> , errs . 
getTypes ( ) . size ( ) ) ) { b . append ( "<STR_LIT:U+002CU+0020>" ) ; b . append ( error . getName ( ) ) ; } return b . toString ( ) ; } private void compile ( Schema schema ) throws IOException { startFile ( schema . getName ( ) , schema . getNamespace ( ) ) ; try { switch ( schema . getType ( ) ) { case RECORD : String type = type ( schema ) ; line ( <NUM_LIT:0> , "<STR_LIT>" + type + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + esc ( schema ) + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; int i = <NUM_LIT:0> ; for ( Field field : schema . getFields ( ) ) { line ( <NUM_LIT:2> , toUpperCase ( field . name ( ) ) + "<STR_LIT:(>" + ( i ++ ) + "<STR_LIT>" + field . name ( ) + "<STR_LIT>" ) ; } line ( <NUM_LIT:2> , "<STR_LIT:;>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; StringBuilder builder = new StringBuilder ( "<STR_LIT>" ) ; for ( Field field : schema . getFields ( ) ) { builder . append ( "<STR_LIT:\">" ) . append ( field . name ( ) ) . append ( "<STR_LIT>" ) ; } builder . append ( "<STR_LIT>" ) ; line ( <NUM_LIT:1> , builder . toString ( ) ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + type + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; for ( Field field : schema . getFields ( ) ) { line ( <NUM_LIT:1> , "<STR_LIT>" + unbox ( field . schema ( ) ) + "<STR_LIT:U+0020>" + field . name ( ) + "<STR_LIT:;>" ) ; } line ( <NUM_LIT:1> , "<STR_LIT>" + type + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + type + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; for ( Field field : schema . getFields ( ) ) { Schema fieldSchema = field . schema ( ) ; switch ( fieldSchema . 
getType ( ) ) { case ARRAY : String valueType = type ( fieldSchema . getElementType ( ) ) ; line ( <NUM_LIT:2> , field . name ( ) + "<STR_LIT>" + valueType + "<STR_LIT>" + "<STR_LIT>" + field . name ( ) + "<STR_LIT>" ) ; break ; case MAP : valueType = type ( fieldSchema . getValueType ( ) ) ; line ( <NUM_LIT:2> , field . name ( ) + "<STR_LIT>" + valueType + "<STR_LIT>" ) ; } } line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + type + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + type + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; i = <NUM_LIT:0> ; for ( Field field : schema . getFields ( ) ) { line ( <NUM_LIT:2> , "<STR_LIT>" + ( i ++ ) + "<STR_LIT>" + field . name ( ) + "<STR_LIT:;>" ) ; } line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" ) ; i = <NUM_LIT:0> ; for ( Field field : schema . getFields ( ) ) { line ( <NUM_LIT:2> , "<STR_LIT>" + i + "<STR_LIT::>" + field . name ( ) + "<STR_LIT>" + type ( field . schema ( ) ) + "<STR_LIT>" ) ; i ++ ; } line ( <NUM_LIT:2> , "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; i = <NUM_LIT:0> ; for ( Field field : schema . getFields ( ) ) { String camelKey = camelCasify ( field . name ( ) ) ; Schema fieldSchema = field . schema ( ) ; switch ( fieldSchema . 
getType ( ) ) { case INT : case LONG : case FLOAT : case DOUBLE : case BOOLEAN : case BYTES : case STRING : case ENUM : case RECORD : case FIXED : String unboxed = unbox ( fieldSchema ) ; String fieldType = type ( fieldSchema ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + unboxed + "<STR_LIT>" + camelKey + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + fieldType + "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + camelKey + "<STR_LIT:(>" + unboxed + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; break ; case ARRAY : unboxed = unbox ( fieldSchema . getElementType ( ) ) ; fieldType = type ( fieldSchema . getElementType ( ) ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + fieldType + "<STR_LIT>" + camelKey + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + fieldType + "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + camelKey + "<STR_LIT:(>" + unboxed + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , field . name ( ) + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; break ; case MAP : unboxed = unbox ( fieldSchema . getValueType ( ) ) ; fieldType = type ( fieldSchema . getValueType ( ) ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + fieldType + "<STR_LIT>" + camelKey + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + fieldType + "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + fieldType + "<STR_LIT>" + camelKey + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + field . name ( ) + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + field . name ( ) + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + camelKey + "<STR_LIT>" + unboxed + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , field . 
name ( ) + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; line ( <NUM_LIT:1> , "<STR_LIT>" + fieldType + "<STR_LIT>" + camelKey + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + field . name ( ) + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + i + "<STR_LIT>" ) ; line ( <NUM_LIT:2> , "<STR_LIT>" + field . name ( ) + "<STR_LIT>" ) ; line ( <NUM_LIT:1> , "<STR_LIT:}>" ) ; } i ++ ; } line ( <NUM_LIT:0> , "<STR_LIT:}>" ) ; break ; case ENUM : line ( <NUM_LIT:0> , "<STR_LIT>" + type ( schema ) + "<STR_LIT>" ) ; StringBuilder b = new StringBuilder ( ) ; int count = <NUM_LIT:0> ; for ( String symbol : schema . getEnumSymbols ( ) ) { b . append ( symbol ) ; if ( ++ count < schema . getEnumSymbols ( ) . size ( ) ) b . append ( "<STR_LIT:U+002CU+0020>" ) ; } line ( <NUM_LIT:1> , b . toString ( ) ) ; line ( <NUM_LIT:0> , "<STR_LIT:}>" ) ; break ; case FIXED : line ( <NUM_LIT:0> , "<STR_LIT>" + schema . getFixedSize ( ) + "<STR_LIT:)>" ) ; line ( <NUM_LIT:0> , "<STR_LIT>" + type ( schema ) + "<STR_LIT>" ) ; break ; case MAP : case ARRAY : case UNION : case STRING : case BYTES : case INT : case LONG : case FLOAT : case DOUBLE : case BOOLEAN : case NULL : break ; default : throw new RuntimeException ( "<STR_LIT>" + schema ) ; } } finally { out . close ( ) ; } } private static final Schema NULL_SCHEMA = Schema . create ( Schema . Type . NULL ) ; public static String type ( Schema schema ) { switch ( schema . getType ( ) ) { case RECORD : case ENUM : case FIXED : return schema . getName ( ) ; case ARRAY : return "<STR_LIT>" + type ( schema . getElementType ( ) ) + "<STR_LIT:>>" ; case MAP : return "<STR_LIT>" + type ( schema . getValueType ( ) ) + "<STR_LIT:>>" ; case UNION : List < Schema > types = schema . getTypes ( ) ; if ( ( types . size ( ) == <NUM_LIT:2> ) && types . contains ( NULL_SCHEMA ) ) return type ( types . get ( types . get ( <NUM_LIT:0> ) . equals ( NULL_SCHEMA ) ? 
<NUM_LIT:1> : <NUM_LIT:0> ) ) ; return "<STR_LIT>" ; case STRING : return "<STR_LIT>" ; case BYTES : return "<STR_LIT>" ; case INT : return "<STR_LIT>" ; case LONG : return "<STR_LIT>" ; case FLOAT : return "<STR_LIT>" ; case DOUBLE : return "<STR_LIT>" ; case BOOLEAN : return "<STR_LIT>" ; case NULL : return "<STR_LIT>" ; default : throw new RuntimeException ( "<STR_LIT>" + schema ) ; } } public static String unbox ( Schema schema ) { switch ( schema . getType ( ) ) { case INT : return "<STR_LIT:int>" ; case LONG : return "<STR_LIT:long>" ; case FLOAT : return "<STR_LIT:float>" ; case DOUBLE : return "<STR_LIT:double>" ; case BOOLEAN : return "<STR_LIT:boolean>" ; default : return type ( schema ) ; } } private void line ( int indent , String text ) throws IOException { for ( int i = <NUM_LIT:0> ; i < indent ; i ++ ) { out . append ( "<STR_LIT:U+0020U+0020>" ) ; } out . append ( text ) ; out . append ( "<STR_LIT:n>" ) ; } static String cap ( String name ) { return name . substring ( <NUM_LIT:0> , <NUM_LIT:1> ) . toUpperCase ( ) + name . substring ( <NUM_LIT:1> , name . length ( ) ) ; } private static String esc ( Object o ) { return o . toString ( ) . replace ( "<STR_LIT:\">" , "<STR_LIT>" ) ; } public static void main ( String [ ] args ) throws Exception { if ( args . length < <NUM_LIT:2> ) { System . err . println ( "<STR_LIT>" ) ; System . exit ( <NUM_LIT:1> ) ; } compileSchema ( new File ( args [ <NUM_LIT:0> ] ) , new File ( args [ <NUM_LIT:1> ] ) ) ; } } </s>
|
<s> package org . apache . gora . avro . query ; import java . io . IOException ; import org . apache . avro . file . DataFileReader ; import org . apache . avro . file . SeekableInput ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . impl . ResultBase ; import org . apache . gora . store . DataStore ; public class DataFileAvroResult < K , T extends Persistent > extends ResultBase < K , T > { private SeekableInput in ; private DataFileReader < T > reader ; private long start ; private long end ; public DataFileAvroResult ( DataStore < K , T > dataStore , Query < K , T > query , DataFileReader < T > reader ) throws IOException { this ( dataStore , query , reader , null , <NUM_LIT:0> , <NUM_LIT:0> ) ; } public DataFileAvroResult ( DataStore < K , T > dataStore , Query < K , T > query , DataFileReader < T > reader , SeekableInput in , long start , long length ) throws IOException { super ( dataStore , query ) ; this . reader = reader ; this . start = start ; this . end = start + length ; this . in = in ; if ( start > <NUM_LIT:0> ) { reader . sync ( start ) ; } } @ Override public void close ( ) throws IOException { if ( reader != null ) reader . close ( ) ; reader = null ; } @ Override public float getProgress ( ) throws IOException { if ( end == start ) { return <NUM_LIT:0.0f> ; } else { return Math . min ( <NUM_LIT:1.0f> , ( in . tell ( ) - start ) / ( float ) ( end - start ) ) ; } } @ Override public boolean nextInner ( ) throws IOException { if ( ! reader . hasNext ( ) ) return false ; if ( end > <NUM_LIT:0> && reader . pastSync ( end ) ) return false ; persistent = reader . next ( persistent ) ; return true ; } } </s>
|
<s> package org . apache . gora . avro . query ; import java . io . EOFException ; import java . io . IOException ; import org . apache . avro . AvroTypeException ; import org . apache . avro . io . DatumReader ; import org . apache . avro . io . Decoder ; import org . apache . gora . avro . store . AvroStore ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . impl . ResultBase ; public class AvroResult < K , T extends Persistent > extends ResultBase < K , T > { private DatumReader < T > reader ; private Decoder decoder ; public AvroResult ( AvroStore < K , T > dataStore , AvroQuery < K , T > query , DatumReader < T > reader , Decoder decoder ) { super ( dataStore , query ) ; this . reader = reader ; this . decoder = decoder ; } @ Override public void close ( ) throws IOException { } @ Override public float getProgress ( ) throws IOException { return <NUM_LIT:0> ; } @ Override public boolean nextInner ( ) throws IOException { try { persistent = reader . read ( persistent , decoder ) ; } catch ( AvroTypeException ex ) { return false ; } catch ( EOFException ex ) { return false ; } return persistent != null ; } } </s>
|
<s> package org . apache . gora . avro . query ; import org . apache . gora . avro . store . AvroStore ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . impl . QueryBase ; public class AvroQuery < K , T extends Persistent > extends QueryBase < K , T > { public AvroQuery ( ) { super ( null ) ; } public AvroQuery ( AvroStore < K , T > dataStore ) { super ( dataStore ) ; } } </s>
|
<s> package org . apache . gora . avro ; import java . io . IOException ; import java . util . Map . Entry ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Field ; import org . apache . avro . io . Encoder ; import org . apache . avro . specific . SpecificDatumWriter ; import org . apache . avro . util . Utf8 ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . persistency . State ; import org . apache . gora . persistency . StateManager ; import org . apache . gora . persistency . StatefulMap ; import org . apache . gora . util . IOUtils ; public class PersistentDatumWriter < T extends Persistent > extends SpecificDatumWriter < T > { private T persistent = null ; private boolean writeDirtyBits = true ; public PersistentDatumWriter ( ) { } public PersistentDatumWriter ( Schema schema , boolean writeDirtyBits ) { setSchema ( schema ) ; this . writeDirtyBits = writeDirtyBits ; } public void setPersistent ( T persistent ) { this . persistent = persistent ; } @ Override public void write ( Schema schema , Object datum , Encoder out ) throws IOException { super . write ( schema , datum , out ) ; } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) protected void writeRecord ( Schema schema , Object datum , Encoder out ) throws IOException { if ( persistent == null ) { persistent = ( T ) datum ; } if ( ! writeDirtyBits ) { super . writeRecord ( schema , datum , out ) ; return ; } if ( schema . equals ( persistent . getSchema ( ) ) ) { boolean [ ] dirtyFields = new boolean [ schema . getFields ( ) . size ( ) ] ; boolean [ ] readableFields = new boolean [ schema . getFields ( ) . size ( ) ] ; StateManager manager = persistent . getStateManager ( ) ; int i = <NUM_LIT:0> ; for ( @ SuppressWarnings ( "<STR_LIT:unused>" ) Field field : schema . getFields ( ) ) { dirtyFields [ i ] = manager . isDirty ( persistent , i ) ; readableFields [ i ] = manager . isReadable ( persistent , i ) ; i ++ ; } IOUtils . 
writeBoolArray ( out , dirtyFields ) ; IOUtils . writeBoolArray ( out , readableFields ) ; for ( Field field : schema . getFields ( ) ) { if ( readableFields [ field . pos ( ) ] ) { write ( field . schema ( ) , getField ( datum , field . name ( ) , field . pos ( ) ) , out ) ; } } } else { super . writeRecord ( schema , datum , out ) ; } } @ Override @ SuppressWarnings ( { "<STR_LIT:rawtypes>" , "<STR_LIT:unchecked>" } ) protected void writeMap ( Schema schema , Object datum , Encoder out ) throws IOException { if ( writeDirtyBits ) { StatefulMap < Utf8 , ? > map = ( StatefulMap ) datum ; out . writeInt ( map . states ( ) . size ( ) ) ; for ( Entry < Utf8 , State > e2 : map . states ( ) . entrySet ( ) ) { out . writeString ( e2 . getKey ( ) ) ; out . writeInt ( e2 . getValue ( ) . ordinal ( ) ) ; } } super . writeMap ( schema , datum , out ) ; } } </s>
|
<s> package org . apache . gora . avro ; import java . io . IOException ; import java . util . HashMap ; import java . util . List ; import java . util . Map ; import java . util . Map . Entry ; import java . util . WeakHashMap ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Field ; import org . apache . avro . generic . GenericArray ; import org . apache . avro . io . Decoder ; import org . apache . avro . io . ResolvingDecoder ; import org . apache . avro . specific . SpecificDatumReader ; import org . apache . avro . util . Utf8 ; import org . apache . gora . mapreduce . FakeResolvingDecoder ; import org . apache . gora . persistency . ListGenericArray ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . persistency . State ; import org . apache . gora . persistency . StatefulHashMap ; import org . apache . gora . persistency . StatefulMap ; import org . apache . gora . persistency . impl . StateManagerImpl ; import org . apache . gora . util . IOUtils ; public class PersistentDatumReader < T extends Persistent > extends SpecificDatumReader < T > { private Schema rootSchema ; private T cachedPersistent ; private WeakHashMap < Decoder , ResolvingDecoder > decoderCache = new WeakHashMap < Decoder , ResolvingDecoder > ( ) ; private boolean readDirtyBits = true ; public PersistentDatumReader ( ) { } public PersistentDatumReader ( Schema schema , boolean readDirtyBits ) { this . readDirtyBits = readDirtyBits ; setSchema ( schema ) ; } @ Override public void setSchema ( Schema actual ) { this . rootSchema = actual ; super . setSchema ( actual ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public T newPersistent ( ) { if ( cachedPersistent == null ) { cachedPersistent = ( T ) super . newRecord ( null , rootSchema ) ; return cachedPersistent ; } return ( T ) cachedPersistent . 
newInstance ( new StateManagerImpl ( ) ) ; } @ Override protected Object newRecord ( Object old , Schema schema ) { if ( old != null ) { return old ; } if ( schema . equals ( rootSchema ) ) { return newPersistent ( ) ; } else { return super . newRecord ( old , schema ) ; } } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public T read ( T reuse , Decoder in ) throws IOException { return ( T ) read ( reuse , rootSchema , in ) ; } public Object read ( Object reuse , Schema schema , Decoder decoder ) throws IOException { return super . read ( reuse , schema , getResolvingDecoder ( decoder ) ) ; } protected ResolvingDecoder getResolvingDecoder ( Decoder decoder ) throws IOException { ResolvingDecoder resolvingDecoder = decoderCache . get ( decoder ) ; if ( resolvingDecoder == null ) { resolvingDecoder = new FakeResolvingDecoder ( rootSchema , decoder ) ; decoderCache . put ( decoder , resolvingDecoder ) ; } return resolvingDecoder ; } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) protected Object readRecord ( Object old , Schema expected , ResolvingDecoder in ) throws IOException { Object record = newRecord ( old , expected ) ; if ( expected . equals ( rootSchema ) && readDirtyBits ) { T persistent = ( T ) record ; persistent . clear ( ) ; boolean [ ] dirtyFields = IOUtils . readBoolArray ( in ) ; boolean [ ] readableFields = IOUtils . readBoolArray ( in ) ; int i = <NUM_LIT:0> ; for ( Field f : expected . getFields ( ) ) { if ( readableFields [ f . pos ( ) ] ) { int pos = f . pos ( ) ; String name = f . name ( ) ; Object oldDatum = ( old != null ) ? getField ( record , name , pos ) : null ; setField ( record , name , pos , read ( oldDatum , f . schema ( ) , in ) ) ; } } for ( i = <NUM_LIT:0> ; i < dirtyFields . length ; i ++ ) { if ( dirtyFields [ i ] ) { persistent . setDirty ( i ) ; } else { persistent . clearDirty ( i ) ; } } return record ; } else { for ( Field f : expected . getFields ( ) ) { int pos = f . pos ( ) ; String name = f . 
name ( ) ; Object oldDatum = ( old != null ) ? getField ( record , name , pos ) : null ; setField ( record , name , pos , read ( oldDatum , f . schema ( ) , in ) ) ; } return record ; } } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) protected Object readMap ( Object old , Schema expected , ResolvingDecoder in ) throws IOException { StatefulMap < Utf8 , ? > map = ( StatefulMap < Utf8 , ? > ) newMap ( old , <NUM_LIT:0> ) ; Map < Utf8 , State > tempStates = null ; if ( readDirtyBits ) { tempStates = new HashMap < Utf8 , State > ( ) ; int size = in . readInt ( ) ; for ( int j = <NUM_LIT:0> ; j < size ; j ++ ) { Utf8 key = in . readString ( null ) ; State state = State . values ( ) [ in . readInt ( ) ] ; tempStates . put ( key , state ) ; } } super . readMap ( map , expected , in ) ; map . clearStates ( ) ; if ( readDirtyBits ) { for ( Entry < Utf8 , State > entry : tempStates . entrySet ( ) ) { map . putState ( entry . getKey ( ) , entry . getValue ( ) ) ; } } return map ; } @ Override @ SuppressWarnings ( { "<STR_LIT:rawtypes>" } ) protected Object newMap ( Object old , int size ) { if ( old instanceof StatefulHashMap ) { ( ( StatefulHashMap ) old ) . reuse ( ) ; return old ; } return new StatefulHashMap < Object , Object > ( ) ; } @ Override @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) protected Object newArray ( Object old , int size , Schema schema ) { if ( old instanceof ListGenericArray ) { ( ( GenericArray ) old ) . clear ( ) ; return old ; } else return new ListGenericArray ( size , schema ) ; } public Persistent clone ( Persistent persistent , Schema schema ) { Persistent cloned = persistent . newInstance ( new StateManagerImpl ( ) ) ; List < Field > fields = schema . getFields ( ) ; for ( Field field : fields ) { int pos = field . pos ( ) ; switch ( field . schema ( ) . getType ( ) ) { case MAP : case ARRAY : case RECORD : case STRING : cloned . put ( pos , cloneObject ( field . schema ( ) , persistent . get ( pos ) , cloned . 
get ( pos ) ) ) ; break ; case NULL : break ; default : cloned . put ( pos , persistent . get ( pos ) ) ; break ; } } return cloned ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) protected Object cloneObject ( Schema schema , Object toClone , Object cloned ) { if ( toClone == null ) { return null ; } switch ( schema . getType ( ) ) { case MAP : Map < Utf8 , Object > map = ( Map < Utf8 , Object > ) newMap ( cloned , <NUM_LIT:0> ) ; for ( Map . Entry < Utf8 , Object > entry : ( ( Map < Utf8 , Object > ) toClone ) . entrySet ( ) ) { map . put ( ( Utf8 ) createString ( entry . getKey ( ) . toString ( ) ) , cloneObject ( schema . getValueType ( ) , entry . getValue ( ) , null ) ) ; } return map ; case ARRAY : GenericArray < Object > array = ( GenericArray < Object > ) newArray ( cloned , ( int ) ( ( GenericArray < ? > ) toClone ) . size ( ) , schema ) ; for ( Object element : ( GenericArray < Object > ) toClone ) { array . add ( cloneObject ( schema . getElementType ( ) , element , null ) ) ; } return array ; case RECORD : return clone ( ( Persistent ) toClone , schema ) ; case STRING : return createString ( toClone . toString ( ) ) ; default : return toClone ; } } } </s>
|
<s> package org . apache . gora . avro . mapreduce ; import java . io . Closeable ; import java . io . IOException ; import org . apache . avro . file . SeekableInput ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . fs . FSDataInputStream ; import org . apache . hadoop . fs . Path ; public class FsInput implements Closeable , SeekableInput { private final FSDataInputStream stream ; private final long len ; public FsInput ( Path path , Configuration conf ) throws IOException { this . stream = path . getFileSystem ( conf ) . open ( path ) ; this . len = path . getFileSystem ( conf ) . getFileStatus ( path ) . getLen ( ) ; } public long length ( ) { return len ; } public int read ( byte [ ] b , int off , int len ) throws IOException { return stream . read ( b , off , len ) ; } public void seek ( long p ) throws IOException { stream . seek ( p ) ; } public long tell ( ) throws IOException { return stream . getPos ( ) ; } public void close ( ) throws IOException { stream . close ( ) ; } } </s>
|
<s> package org . apache . gora . avro . store ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import java . util . Properties ; import org . apache . avro . io . BinaryDecoder ; import org . apache . avro . io . BinaryEncoder ; import org . apache . avro . io . DatumReader ; import org . apache . avro . io . DatumWriter ; import org . apache . avro . io . Decoder ; import org . apache . avro . io . Encoder ; import org . apache . avro . io . JsonDecoder ; import org . apache . avro . io . JsonEncoder ; import org . apache . avro . specific . SpecificDatumReader ; import org . apache . avro . specific . SpecificDatumWriter ; import org . apache . gora . avro . query . AvroQuery ; import org . apache . gora . avro . query . AvroResult ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . query . impl . FileSplitPartitionQuery ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . impl . FileBackedDataStoreBase ; import org . apache . gora . util . OperationNotSupportedException ; import org . apache . hadoop . conf . Configurable ; import org . apache . hadoop . conf . Configuration ; public class AvroStore < K , T extends Persistent > extends FileBackedDataStoreBase < K , T > implements Configurable { public static final String CODEC_TYPE_KEY = "<STR_LIT>" ; public static enum CodecType { BINARY , JSON , } private DatumReader < T > datumReader ; private DatumWriter < T > datumWriter ; private Encoder encoder ; private Decoder decoder ; private CodecType codecType = CodecType . JSON ; @ Override public void initialize ( Class < K > keyClass , Class < T > persistentClass , Properties properties ) throws IOException { super . initialize ( keyClass , persistentClass , properties ) ; if ( properties != null ) { if ( this . 
codecType == null ) { String codecType = DataStoreFactory . findProperty ( properties , this , CODEC_TYPE_KEY , "<STR_LIT>" ) ; this . codecType = CodecType . valueOf ( codecType ) ; } } } public void setCodecType ( CodecType codecType ) { this . codecType = codecType ; } public void setEncoder ( Encoder encoder ) { this . encoder = encoder ; } public void setDecoder ( Decoder decoder ) { this . decoder = decoder ; } public void setDatumReader ( DatumReader < T > datumReader ) { this . datumReader = datumReader ; } public void setDatumWriter ( DatumWriter < T > datumWriter ) { this . datumWriter = datumWriter ; } @ Override public void close ( ) throws IOException { super . close ( ) ; if ( encoder != null ) { encoder . flush ( ) ; } encoder = null ; decoder = null ; } @ Override public boolean delete ( K key ) throws IOException { throw new OperationNotSupportedException ( "<STR_LIT>" ) ; } @ Override public long deleteByQuery ( Query < K , T > query ) throws IOException { throw new OperationNotSupportedException ( "<STR_LIT>" ) ; } @ Override protected Result < K , T > executeQuery ( Query < K , T > query ) throws IOException { return new AvroResult < K , T > ( this , ( AvroQuery < K , T > ) query , getDatumReader ( ) , getDecoder ( ) ) ; } @ Override protected Result < K , T > executePartial ( FileSplitPartitionQuery < K , T > query ) throws IOException { throw new OperationNotSupportedException ( "<STR_LIT>" ) ; } @ Override public void flush ( ) throws IOException { super . flush ( ) ; if ( encoder != null ) encoder . flush ( ) ; } @ Override public T get ( K key , String [ ] fields ) throws IOException { throw new OperationNotSupportedException ( ) ; } @ Override public AvroQuery < K , T > newQuery ( ) { return new AvroQuery < K , T > ( this ) ; } @ Override public void put ( K key , T obj ) throws IOException { getDatumWriter ( ) . 
write ( obj , getEncoder ( ) ) ; } public Encoder getEncoder ( ) throws IOException { if ( encoder == null ) { encoder = createEncoder ( ) ; } return encoder ; } public Decoder getDecoder ( ) throws IOException { if ( decoder == null ) { decoder = createDecoder ( ) ; } return decoder ; } public DatumReader < T > getDatumReader ( ) { if ( datumReader == null ) { datumReader = createDatumReader ( ) ; } return datumReader ; } public DatumWriter < T > getDatumWriter ( ) { if ( datumWriter == null ) { datumWriter = createDatumWriter ( ) ; } return datumWriter ; } protected Encoder createEncoder ( ) throws IOException { switch ( codecType ) { case BINARY : return new BinaryEncoder ( getOrCreateOutputStream ( ) ) ; case JSON : return new JsonEncoder ( schema , getOrCreateOutputStream ( ) ) ; } return null ; } @ SuppressWarnings ( "<STR_LIT:deprecation>" ) protected Decoder createDecoder ( ) throws IOException { switch ( codecType ) { case BINARY : return new BinaryDecoder ( getOrCreateInputStream ( ) ) ; case JSON : return new JsonDecoder ( schema , getOrCreateInputStream ( ) ) ; } return null ; } protected DatumWriter < T > createDatumWriter ( ) { return new SpecificDatumWriter < T > ( schema ) ; } protected DatumReader < T > createDatumReader ( ) { return new SpecificDatumReader < T > ( schema ) ; } @ Override public Configuration getConf ( ) { if ( conf == null ) { conf = new Configuration ( ) ; } return conf ; } @ Override public void write ( DataOutput out ) throws IOException { super . write ( out ) ; } @ Override public void readFields ( DataInput in ) throws IOException { super . readFields ( in ) ; } @ Override public String getSchemaName ( ) { return "<STR_LIT:default>" ; } } </s>
|
<s> package org . apache . gora . avro . store ; import java . io . IOException ; import org . apache . avro . file . DataFileReader ; import org . apache . avro . file . DataFileWriter ; import org . apache . gora . avro . mapreduce . FsInput ; import org . apache . gora . avro . query . DataFileAvroResult ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . query . impl . FileSplitPartitionQuery ; import org . apache . gora . util . OperationNotSupportedException ; import org . apache . hadoop . fs . Path ; public class DataFileAvroStore < K , T extends Persistent > extends AvroStore < K , T > { public DataFileAvroStore ( ) { } private DataFileWriter < T > writer ; @ Override public T get ( K key , String [ ] fields ) throws java . io . IOException { throw new OperationNotSupportedException ( "<STR_LIT>" ) ; } ; @ Override public void put ( K key , T obj ) throws java . io . IOException { getWriter ( ) . append ( obj ) ; } ; private DataFileWriter < T > getWriter ( ) throws IOException { if ( writer == null ) { writer = new DataFileWriter < T > ( getDatumWriter ( ) ) ; writer . create ( schema , getOrCreateOutputStream ( ) ) ; } return writer ; } @ Override protected Result < K , T > executeQuery ( Query < K , T > query ) throws IOException { return new DataFileAvroResult < K , T > ( this , query , createReader ( createFsInput ( ) ) ) ; } @ Override protected Result < K , T > executePartial ( FileSplitPartitionQuery < K , T > query ) throws IOException { FsInput fsInput = createFsInput ( ) ; DataFileReader < T > reader = createReader ( fsInput ) ; return new DataFileAvroResult < K , T > ( this , query , reader , fsInput , query . getStart ( ) , query . 
getLength ( ) ) ; } private DataFileReader < T > createReader ( FsInput fsInput ) throws IOException { return new DataFileReader < T > ( fsInput , getDatumReader ( ) ) ; } private FsInput createFsInput ( ) throws IOException { Path path = new Path ( getInputPath ( ) ) ; return new FsInput ( path , getConf ( ) ) ; } @ Override public void flush ( ) throws IOException { super . flush ( ) ; if ( writer != null ) { writer . flush ( ) ; } } @ Override public void close ( ) throws IOException { if ( writer != null ) writer . close ( ) ; writer = null ; super . close ( ) ; } } </s>
|
<s> package org . apache . gora . query . impl ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import java . util . Arrays ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . PartitionQuery ; import org . apache . gora . query . Query ; import org . apache . gora . store . DataStore ; import org . apache . gora . util . IOUtils ; public class PartitionQueryImpl < K , T extends Persistent > extends QueryBase < K , T > implements PartitionQuery < K , T > { protected Query < K , T > baseQuery ; protected String [ ] locations ; public PartitionQueryImpl ( ) { super ( null ) ; } public PartitionQueryImpl ( Query < K , T > baseQuery , String ... locations ) { this ( baseQuery , null , null , locations ) ; } public PartitionQueryImpl ( Query < K , T > baseQuery , K startKey , K endKey , String ... locations ) { super ( baseQuery . getDataStore ( ) ) ; this . baseQuery = baseQuery ; this . locations = locations ; setStartKey ( startKey ) ; setEndKey ( endKey ) ; this . dataStore = baseQuery . getDataStore ( ) ; } @ Override public String [ ] getLocations ( ) { return locations ; } public Query < K , T > getBaseQuery ( ) { return baseQuery ; } @ Override public String [ ] getFields ( ) { return baseQuery . getFields ( ) ; } @ Override public DataStore < K , T > getDataStore ( ) { return baseQuery . getDataStore ( ) ; } @ Override public long getTimestamp ( ) { return baseQuery . getTimestamp ( ) ; } @ Override public long getStartTime ( ) { return baseQuery . getStartTime ( ) ; } @ Override public long getEndTime ( ) { return baseQuery . getEndTime ( ) ; } @ Override public long getLimit ( ) { return baseQuery . getLimit ( ) ; } @ Override public void setFields ( String ... fields ) { baseQuery . setFields ( fields ) ; } @ Override public void setTimestamp ( long timestamp ) { baseQuery . 
setTimestamp ( timestamp ) ; } @ Override public void setStartTime ( long startTime ) { baseQuery . setStartTime ( startTime ) ; } @ Override public void setEndTime ( long endTime ) { baseQuery . setEndTime ( endTime ) ; } @ Override public void setTimeRange ( long startTime , long endTime ) { baseQuery . setTimeRange ( startTime , endTime ) ; } @ Override public void setLimit ( long limit ) { baseQuery . setLimit ( limit ) ; } @ Override public void write ( DataOutput out ) throws IOException { super . write ( out ) ; IOUtils . serialize ( null , out , baseQuery ) ; IOUtils . writeStringArray ( out , locations ) ; } @ Override public void readFields ( DataInput in ) throws IOException { super . readFields ( in ) ; try { baseQuery = IOUtils . deserialize ( null , in , null ) ; } catch ( ClassNotFoundException ex ) { throw new IOException ( ex ) ; } locations = IOUtils . readStringArray ( in ) ; this . dataStore = baseQuery . getDataStore ( ) ; } @ Override @ SuppressWarnings ( { "<STR_LIT:rawtypes>" } ) public boolean equals ( Object obj ) { if ( obj instanceof PartitionQueryImpl ) { PartitionQueryImpl that = ( PartitionQueryImpl ) obj ; return this . baseQuery . equals ( that . baseQuery ) && Arrays . equals ( locations , that . locations ) ; } return false ; } } </s>
|
<s> package org . apache . gora . query . impl ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import org . apache . commons . lang . builder . EqualsBuilder ; import org . apache . commons . lang . builder . HashCodeBuilder ; import org . apache . commons . lang . builder . ToStringBuilder ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . store . DataStore ; import org . apache . gora . util . ClassLoadingUtils ; import org . apache . gora . util . IOUtils ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . io . Text ; import org . apache . hadoop . io . WritableUtils ; import org . apache . hadoop . util . ReflectionUtils ; public abstract class QueryBase < K , T extends Persistent > implements Query < K , T > { protected DataStore < K , T > dataStore ; protected String queryString ; protected String [ ] fields ; protected K startKey ; protected K endKey ; protected long startTime = - <NUM_LIT:1> ; protected long endTime = - <NUM_LIT:1> ; protected String filter ; protected long limit = - <NUM_LIT:1> ; protected boolean isCompiled = false ; private Configuration conf ; public QueryBase ( DataStore < K , T > dataStore ) { this . dataStore = dataStore ; } @ Override public Result < K , T > execute ( ) throws IOException { return dataStore . execute ( this ) ; } @ Override public void setDataStore ( DataStore < K , T > dataStore ) { this . dataStore = dataStore ; } @ Override public DataStore < K , T > getDataStore ( ) { return dataStore ; } @ Override public void setFields ( String ... fields ) { this . fields = fields ; } @ Override public String [ ] getFields ( ) { return fields ; } @ Override public void setKey ( K key ) { setKeyRange ( key , key ) ; } @ Override public void setStartKey ( K startKey ) { this . 
startKey = startKey ; } @ Override public void setEndKey ( K endKey ) { this . endKey = endKey ; } @ Override public void setKeyRange ( K startKey , K endKey ) { this . startKey = startKey ; this . endKey = endKey ; } @ Override public K getKey ( ) { if ( startKey == endKey ) { return startKey ; } return null ; } @ Override public K getStartKey ( ) { return startKey ; } @ Override public K getEndKey ( ) { return endKey ; } @ Override public void setTimestamp ( long timestamp ) { setTimeRange ( timestamp , timestamp ) ; } @ Override public void setStartTime ( long startTime ) { this . startTime = startTime ; } @ Override public void setEndTime ( long endTime ) { this . endTime = endTime ; } @ Override public void setTimeRange ( long startTime , long endTime ) { this . startTime = startTime ; this . endTime = endTime ; } @ Override public long getTimestamp ( ) { return startTime == endTime ? startTime : - <NUM_LIT:1> ; } @ Override public long getStartTime ( ) { return startTime ; } @ Override public long getEndTime ( ) { return endTime ; } @ Override public void setLimit ( long limit ) { this . limit = limit ; } @ Override public long getLimit ( ) { return limit ; } @ Override public Configuration getConf ( ) { return conf ; } @ Override public void setConf ( Configuration conf ) { this . conf = conf ; } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public void readFields ( DataInput in ) throws IOException { String dataStoreClass = Text . readString ( in ) ; try { dataStore = ( DataStore < K , T > ) ReflectionUtils . newInstance ( ClassLoadingUtils . loadClass ( dataStoreClass ) , conf ) ; dataStore . readFields ( in ) ; } catch ( ClassNotFoundException ex ) { throw new IOException ( ex ) ; } boolean [ ] nullFields = IOUtils . readNullFieldsInfo ( in ) ; if ( ! nullFields [ <NUM_LIT:0> ] ) queryString = Text . readString ( in ) ; if ( ! nullFields [ <NUM_LIT:1> ] ) fields = IOUtils . readStringArray ( in ) ; if ( ! 
nullFields [ <NUM_LIT:2> ] ) startKey = IOUtils . deserialize ( null , in , null , dataStore . getKeyClass ( ) ) ; if ( ! nullFields [ <NUM_LIT:3> ] ) endKey = IOUtils . deserialize ( null , in , null , dataStore . getKeyClass ( ) ) ; if ( ! nullFields [ <NUM_LIT:4> ] ) filter = Text . readString ( in ) ; startTime = WritableUtils . readVLong ( in ) ; endTime = WritableUtils . readVLong ( in ) ; limit = WritableUtils . readVLong ( in ) ; } @ Override public void write ( DataOutput out ) throws IOException { Text . writeString ( out , dataStore . getClass ( ) . getCanonicalName ( ) ) ; dataStore . write ( out ) ; IOUtils . writeNullFieldsInfo ( out , queryString , ( fields ) , startKey , endKey , filter ) ; if ( queryString != null ) Text . writeString ( out , queryString ) ; if ( fields != null ) IOUtils . writeStringArray ( out , fields ) ; if ( startKey != null ) IOUtils . serialize ( getConf ( ) , out , startKey , dataStore . getKeyClass ( ) ) ; if ( endKey != null ) IOUtils . serialize ( getConf ( ) , out , endKey , dataStore . getKeyClass ( ) ) ; if ( filter != null ) Text . writeString ( out , filter ) ; WritableUtils . writeVLong ( out , getStartTime ( ) ) ; WritableUtils . writeVLong ( out , getEndTime ( ) ) ; WritableUtils . writeVLong ( out , getLimit ( ) ) ; } @ SuppressWarnings ( { "<STR_LIT:rawtypes>" } ) @ Override public boolean equals ( Object obj ) { if ( obj instanceof QueryBase ) { QueryBase that = ( QueryBase ) obj ; EqualsBuilder builder = new EqualsBuilder ( ) ; builder . append ( dataStore , that . dataStore ) ; builder . append ( queryString , that . queryString ) ; builder . append ( fields , that . fields ) ; builder . append ( startKey , that . startKey ) ; builder . append ( endKey , that . endKey ) ; builder . append ( filter , that . filter ) ; builder . append ( limit , that . limit ) ; return builder . isEquals ( ) ; } return false ; } @ Override public int hashCode ( ) { HashCodeBuilder builder = new HashCodeBuilder ( ) ; builder . 
append ( dataStore ) ; builder . append ( queryString ) ; builder . append ( fields ) ; builder . append ( startKey ) ; builder . append ( endKey ) ; builder . append ( filter ) ; builder . append ( limit ) ; return builder . toHashCode ( ) ; } @ Override public String toString ( ) { ToStringBuilder builder = new ToStringBuilder ( this ) ; builder . append ( "<STR_LIT>" , dataStore ) ; builder . append ( "<STR_LIT>" , fields ) ; builder . append ( "<STR_LIT>" , startKey ) ; builder . append ( "<STR_LIT>" , endKey ) ; builder . append ( "<STR_LIT>" , filter ) ; builder . append ( "<STR_LIT>" , limit ) ; return builder . toString ( ) ; } } </s>
|
<s> package org . apache . gora . query . impl ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . hadoop . mapreduce . InputFormat ; import org . apache . hadoop . mapreduce . lib . input . FileSplit ; public class FileSplitPartitionQuery < K , T extends Persistent > extends PartitionQueryImpl < K , T > { private FileSplit split ; public FileSplitPartitionQuery ( ) { super ( ) ; } public FileSplitPartitionQuery ( Query < K , T > baseQuery , FileSplit split ) throws IOException { super ( baseQuery , split . getLocations ( ) ) ; this . split = split ; } public FileSplit getSplit ( ) { return split ; } public long getLength ( ) { return split . getLength ( ) ; } public long getStart ( ) { return split . getStart ( ) ; } @ Override public void write ( DataOutput out ) throws IOException { super . write ( out ) ; split . write ( out ) ; } @ Override public void readFields ( DataInput in ) throws IOException { super . readFields ( in ) ; if ( split == null ) split = new FileSplit ( null , <NUM_LIT:0> , <NUM_LIT:0> , null ) ; split . readFields ( in ) ; } @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) @ Override public boolean equals ( Object obj ) { if ( obj instanceof FileSplitPartitionQuery ) { return super . equals ( obj ) && this . split . equals ( ( ( FileSplitPartitionQuery ) obj ) . split ) ; } return false ; } } </s>
|
<s> package org . apache . gora . query . impl ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . store . DataStore ; public abstract class ResultBase < K , T extends Persistent > implements Result < K , T > { protected final DataStore < K , T > dataStore ; protected final Query < K , T > query ; protected K key ; protected T persistent ; protected long limit ; protected long offset = <NUM_LIT:0> ; public ResultBase ( DataStore < K , T > dataStore , Query < K , T > query ) { this . dataStore = dataStore ; this . query = query ; this . limit = query . getLimit ( ) ; } @ Override public DataStore < K , T > getDataStore ( ) { return dataStore ; } @ Override public Query < K , T > getQuery ( ) { return query ; } @ Override public T get ( ) { return persistent ; } @ Override public K getKey ( ) { return key ; } @ Override public Class < K > getKeyClass ( ) { return getDataStore ( ) . getKeyClass ( ) ; } @ Override public Class < T > getPersistentClass ( ) { return getDataStore ( ) . getPersistentClass ( ) ; } protected boolean isLimitReached ( ) { if ( limit > <NUM_LIT:0> && offset >= limit ) { return true ; } return false ; } protected void clear ( ) { if ( persistent != null ) { persistent . clear ( ) ; } if ( key != null && key instanceof Persistent ) { ( ( Persistent ) key ) . clear ( ) ; } } @ Override public final boolean next ( ) throws IOException { if ( isLimitReached ( ) ) { return false ; } clear ( ) ; persistent = getOrCreatePersistent ( persistent ) ; boolean ret = nextInner ( ) ; if ( ret ) ++ offset ; return ret ; } @ Override public long getOffset ( ) { return offset ; } protected abstract boolean nextInner ( ) throws IOException ; protected T getOrCreatePersistent ( T persistent ) throws IOException { if ( persistent != null ) { return persistent ; } return dataStore . newPersistent ( ) ; } } </s>
|
<s> package org . apache . gora . query ; import org . apache . gora . persistency . Persistent ; public interface PartitionQuery < K , T extends Persistent > extends Query < K , T > { String [ ] getLocations ( ) ; } </s>
|
<s> package org . apache . gora . query ; import java . io . Closeable ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . store . DataStore ; public interface Result < K , T extends Persistent > extends Closeable { DataStore < K , T > getDataStore ( ) ; Query < K , T > getQuery ( ) ; boolean next ( ) throws IOException ; K getKey ( ) ; T get ( ) ; Class < K > getKeyClass ( ) ; Class < T > getPersistentClass ( ) ; long getOffset ( ) ; float getProgress ( ) throws IOException ; @ Override void close ( ) throws IOException ; } </s>
|
<s> package org . apache . gora . query ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . store . DataStore ; import org . apache . hadoop . conf . Configurable ; import org . apache . hadoop . io . Writable ; public interface Query < K , T extends Persistent > extends Writable , Configurable { void setDataStore ( DataStore < K , T > dataStore ) ; DataStore < K , T > getDataStore ( ) ; Result < K , T > execute ( ) throws IOException ; void setFields ( String ... fieldNames ) ; String [ ] getFields ( ) ; void setKey ( K key ) ; void setStartKey ( K startKey ) ; void setEndKey ( K endKey ) ; void setKeyRange ( K startKey , K endKey ) ; K getKey ( ) ; K getStartKey ( ) ; K getEndKey ( ) ; void setTimestamp ( long timestamp ) ; void setStartTime ( long startTime ) ; void setEndTime ( long endTime ) ; void setTimeRange ( long startTime , long endTime ) ; long getTimestamp ( ) ; long getStartTime ( ) ; long getEndTime ( ) ; void setLimit ( long limit ) ; long getLimit ( ) ; } </s>
|
<s> package org . apache . gora . memory . store ; import java . io . IOException ; import java . util . ArrayList ; import java . util . Arrays ; import java . util . Iterator ; import java . util . List ; import java . util . NavigableMap ; import java . util . TreeMap ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . persistency . impl . StateManagerImpl ; import org . apache . gora . query . PartitionQuery ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . query . impl . PartitionQueryImpl ; import org . apache . gora . query . impl . QueryBase ; import org . apache . gora . query . impl . ResultBase ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . impl . DataStoreBase ; public class MemStore < K , T extends Persistent > extends DataStoreBase < K , T > { public static class MemQuery < K , T extends Persistent > extends QueryBase < K , T > { public MemQuery ( ) { super ( null ) ; } public MemQuery ( DataStore < K , T > dataStore ) { super ( dataStore ) ; } } public static class MemResult < K , T extends Persistent > extends ResultBase < K , T > { private NavigableMap < K , T > map ; private Iterator < K > iterator ; public MemResult ( DataStore < K , T > dataStore , Query < K , T > query , NavigableMap < K , T > map ) { super ( dataStore , query ) ; this . map = map ; iterator = map . navigableKeySet ( ) . iterator ( ) ; } @ Override public void close ( ) throws IOException { } @ Override public float getProgress ( ) throws IOException { return <NUM_LIT:0> ; } @ Override protected void clear ( ) { } @ Override public boolean nextInner ( ) throws IOException { if ( ! iterator . hasNext ( ) ) { return false ; } key = iterator . next ( ) ; persistent = map . 
get ( key ) ; return true ; } } private TreeMap < K , T > map = new TreeMap < K , T > ( ) ; @ Override public String getSchemaName ( ) { return "<STR_LIT:default>" ; } @ Override public boolean delete ( K key ) throws IOException { return map . remove ( key ) != null ; } @ Override public long deleteByQuery ( Query < K , T > query ) throws IOException { long deletedRows = <NUM_LIT:0> ; Result < K , T > result = query . execute ( ) ; while ( result . next ( ) ) { if ( delete ( result . getKey ( ) ) ) deletedRows ++ ; } return <NUM_LIT:0> ; } @ Override public Result < K , T > execute ( Query < K , T > query ) throws IOException { K startKey = query . getStartKey ( ) ; K endKey = query . getEndKey ( ) ; if ( startKey == null ) { startKey = map . firstKey ( ) ; } if ( endKey == null ) { endKey = map . lastKey ( ) ; } query . setFields ( getFieldsToQuery ( query . getFields ( ) ) ) ; NavigableMap < K , T > submap = map . subMap ( startKey , true , endKey , true ) ; return new MemResult < K , T > ( this , query , submap ) ; } @ Override public T get ( K key , String [ ] fields ) throws IOException { T obj = map . get ( key ) ; return getPersistent ( obj , getFieldsToQuery ( fields ) ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) private static < T extends Persistent > T getPersistent ( T obj , String [ ] fields ) { if ( Arrays . equals ( fields , obj . getFields ( ) ) ) { return obj ; } T newObj = ( T ) obj . newInstance ( new StateManagerImpl ( ) ) ; for ( String field : fields ) { int index = newObj . getFieldIndex ( field ) ; newObj . put ( index , obj . get ( index ) ) ; } return newObj ; } @ Override public Query < K , T > newQuery ( ) { return new MemQuery < K , T > ( this ) ; } @ Override public void put ( K key , T obj ) throws IOException { map . 
put ( key , obj ) ; } @ Override public List < PartitionQuery < K , T > > getPartitions ( Query < K , T > query ) throws IOException { List < PartitionQuery < K , T > > list = new ArrayList < PartitionQuery < K , T > > ( ) ; list . add ( new PartitionQueryImpl < K , T > ( query ) ) ; return list ; } @ Override public void close ( ) throws IOException { map . clear ( ) ; } @ Override public void createSchema ( ) throws IOException { } @ Override public void deleteSchema ( ) throws IOException { map . clear ( ) ; } @ Override public boolean schemaExists ( ) throws IOException { return true ; } @ Override public void flush ( ) throws IOException { } } </s>
|
<s> package org . apache . gora . util ; public class ClassLoadingUtils { private ClassLoadingUtils ( ) { } public static Class < ? > loadClass ( String className ) throws ClassNotFoundException { return ClassLoadingUtils . loadClass ( ClassLoadingUtils . class , className ) ; } public static Class < ? > loadClass ( Class < ? > contextClass , String className ) throws ClassNotFoundException { Class < ? > clazz = null ; if ( contextClass . getClassLoader ( ) != null ) { clazz = loadClass ( className , contextClass . getClassLoader ( ) ) ; } if ( clazz == null && Thread . currentThread ( ) . getContextClassLoader ( ) != null ) { clazz = loadClass ( className , Thread . currentThread ( ) . getContextClassLoader ( ) ) ; } if ( clazz == null ) { throw new ClassNotFoundException ( "<STR_LIT>" + className ) ; } return clazz ; } private static Class < ? > loadClass ( String className , ClassLoader classLoader ) { Class < ? > clazz = null ; if ( classLoader != null && className != null ) { try { clazz = classLoader . loadClass ( className ) ; } catch ( ClassNotFoundException e ) { } } return clazz ; } } </s>
|
<s> package org . apache . gora . util ; import java . lang . reflect . Constructor ; import java . lang . reflect . InvocationTargetException ; public class ReflectionUtils { public static Class < ? > [ ] EMPTY_CLASS_ARRAY = new Class < ? > [ <NUM_LIT:0> ] ; public static Object [ ] EMPTY_OBJECT_ARRAY = new Object [ <NUM_LIT:0> ] ; public static < T > Constructor < T > getConstructor ( Class < T > clazz ) throws SecurityException , NoSuchMethodException { if ( clazz == null ) { throw new IllegalArgumentException ( "<STR_LIT>" ) ; } Constructor < T > cons = clazz . getConstructor ( EMPTY_CLASS_ARRAY ) ; cons . setAccessible ( true ) ; return cons ; } public static boolean hasConstructor ( Class < ? > clazz ) throws SecurityException , NoSuchMethodException { if ( clazz == null ) { throw new IllegalArgumentException ( "<STR_LIT>" ) ; } Constructor < ? > [ ] consts = clazz . getConstructors ( ) ; boolean found = false ; for ( Constructor < ? > cons : consts ) { if ( cons . getParameterTypes ( ) . length == <NUM_LIT:0> ) { found = true ; } } return found ; } public static < T > T newInstance ( Class < T > clazz ) throws InstantiationException , IllegalAccessException , SecurityException , NoSuchMethodException , IllegalArgumentException , InvocationTargetException { Constructor < T > cons = getConstructor ( clazz ) ; return cons . newInstance ( EMPTY_OBJECT_ARRAY ) ; } public static Object newInstance ( String classStr ) throws InstantiationException , IllegalAccessException , ClassNotFoundException , SecurityException , IllegalArgumentException , NoSuchMethodException , InvocationTargetException { if ( classStr == null ) { throw new IllegalArgumentException ( "<STR_LIT>" ) ; } Class < ? > clazz = ClassLoadingUtils . loadClass ( classStr ) ; return newInstance ( clazz ) ; } public static Object getStaticField ( Class < ? > clazz , String fieldName ) throws IllegalArgumentException , SecurityException , IllegalAccessException , NoSuchFieldException { return clazz . 
getField ( fieldName ) . get ( null ) ; } } </s>
|
<s> package org . apache . gora . util ; import java . io . ByteArrayOutputStream ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import java . io . InputStream ; import java . io . ObjectInput ; import java . io . ObjectInputStream ; import java . io . ObjectOutput ; import java . io . ObjectOutputStream ; import java . io . OutputStream ; import java . nio . ByteBuffer ; import java . util . ArrayList ; import java . util . List ; import org . apache . avro . Schema ; import org . apache . avro . io . BinaryDecoder ; import org . apache . avro . io . BinaryEncoder ; import org . apache . avro . io . Decoder ; import org . apache . avro . io . DecoderFactory ; import org . apache . avro . io . Encoder ; import org . apache . avro . ipc . ByteBufferInputStream ; import org . apache . avro . ipc . ByteBufferOutputStream ; import org . apache . gora . avro . PersistentDatumReader ; import org . apache . gora . avro . PersistentDatumWriter ; import org . apache . gora . persistency . Persistent ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . io . DataInputBuffer ; import org . apache . hadoop . io . DataOutputBuffer ; import org . apache . hadoop . io . DefaultStringifier ; import org . apache . hadoop . io . Text ; import org . apache . hadoop . io . WritableUtils ; import org . apache . hadoop . io . serializer . Deserializer ; import org . apache . hadoop . io . serializer . SerializationFactory ; import org . apache . hadoop . io . serializer . Serializer ; public class IOUtils { private static SerializationFactory serializationFactory = null ; private static Configuration conf ; public static final int BUFFER_SIZE = <NUM_LIT> ; private static BinaryDecoder decoder ; private static Configuration getOrCreateConf ( Configuration conf ) { if ( conf == null ) { if ( IOUtils . conf == null ) { IOUtils . conf = new Configuration ( ) ; } } return conf != null ? conf : IOUtils . 
conf ; } public static Object readObject ( DataInput in ) throws ClassNotFoundException , IOException { if ( in instanceof ObjectInput ) { return ( ( ObjectInput ) in ) . readObject ( ) ; } else { if ( in instanceof InputStream ) { ObjectInput objIn = new ObjectInputStream ( ( InputStream ) in ) ; Object obj = objIn . readObject ( ) ; return obj ; } } throw new IOException ( "<STR_LIT>" + in . getClass ( ) ) ; } public static void writeObject ( DataOutput out , Object obj ) throws IOException { if ( out instanceof ObjectOutput ) { ( ( ObjectOutput ) out ) . writeObject ( obj ) ; } else { if ( out instanceof OutputStream ) { ObjectOutput objOut = new ObjectOutputStream ( ( OutputStream ) out ) ; objOut . writeObject ( obj ) ; } } throw new IOException ( "<STR_LIT>" + out . getClass ( ) ) ; } public static < T > void serialize ( Configuration conf , DataOutput out , T obj , Class < T > objClass ) throws IOException { if ( serializationFactory == null ) { serializationFactory = new SerializationFactory ( getOrCreateConf ( conf ) ) ; } Serializer < T > serializer = serializationFactory . getSerializer ( objClass ) ; ByteBufferOutputStream os = new ByteBufferOutputStream ( ) ; try { serializer . open ( os ) ; serializer . serialize ( obj ) ; int length = <NUM_LIT:0> ; List < ByteBuffer > buffers = os . getBufferList ( ) ; for ( ByteBuffer buffer : buffers ) { length += buffer . limit ( ) - buffer . arrayOffset ( ) ; } WritableUtils . writeVInt ( out , length ) ; for ( ByteBuffer buffer : buffers ) { byte [ ] arr = buffer . array ( ) ; out . write ( arr , buffer . arrayOffset ( ) , buffer . limit ( ) ) ; } } finally { if ( serializer != null ) serializer . close ( ) ; if ( os != null ) os . close ( ) ; } } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < T > void serialize ( Configuration conf , DataOutput out , T obj ) throws IOException { Text . writeString ( out , obj . getClass ( ) . 
getCanonicalName ( ) ) ; serialize ( conf , out , obj , ( Class < T > ) obj . getClass ( ) ) ; } public static < T > byte [ ] serialize ( Configuration conf , T obj ) throws IOException { DataOutputBuffer buffer = new DataOutputBuffer ( ) ; serialize ( conf , buffer , obj ) ; return buffer . getData ( ) ; } public static < T extends Persistent > void serialize ( OutputStream os , PersistentDatumWriter < T > datumWriter , Schema schema , Object object ) throws IOException { BinaryEncoder encoder = new BinaryEncoder ( os ) ; datumWriter . write ( schema , object , encoder ) ; encoder . flush ( ) ; } public static < T extends Persistent > byte [ ] serialize ( PersistentDatumWriter < T > datumWriter , Schema schema , Object object ) throws IOException { ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; serialize ( os , datumWriter , schema , object ) ; return os . toByteArray ( ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < T > T deserialize ( Configuration conf , DataInput in , T obj , String objClass ) throws IOException , ClassNotFoundException { Class < T > c = ( Class < T > ) ClassLoadingUtils . loadClass ( objClass ) ; return deserialize ( conf , in , obj , c ) ; } public static < T > T deserialize ( Configuration conf , DataInput in , T obj , Class < T > objClass ) throws IOException { if ( serializationFactory == null ) { serializationFactory = new SerializationFactory ( getOrCreateConf ( conf ) ) ; } Deserializer < T > deserializer = serializationFactory . getDeserializer ( objClass ) ; int length = WritableUtils . readVInt ( in ) ; byte [ ] arr = new byte [ length ] ; in . readFully ( arr ) ; List < ByteBuffer > list = new ArrayList < ByteBuffer > ( ) ; list . add ( ByteBuffer . wrap ( arr ) ) ; ByteBufferInputStream is = new ByteBufferInputStream ( list ) ; try { deserializer . open ( is ) ; T newObj = deserializer . deserialize ( obj ) ; return newObj ; } finally { if ( deserializer != null ) deserializer . 
close ( ) ; if ( is != null ) is . close ( ) ; } } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < T > T deserialize ( Configuration conf , DataInput in , T obj ) throws IOException , ClassNotFoundException { String clazz = Text . readString ( in ) ; Class < T > c = ( Class < T > ) ClassLoadingUtils . loadClass ( clazz ) ; return deserialize ( conf , in , obj , c ) ; } public static < T > T deserialize ( Configuration conf , byte [ ] in , T obj ) throws IOException , ClassNotFoundException { DataInputBuffer buffer = new DataInputBuffer ( ) ; buffer . reset ( in , in . length ) ; return deserialize ( conf , buffer , obj ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < K , T extends Persistent > K deserialize ( InputStream is , PersistentDatumReader < T > datumReader , Schema schema , K object ) throws IOException { decoder = DecoderFactory . defaultFactory ( ) . createBinaryDecoder ( is , decoder ) ; return ( K ) datumReader . read ( object , schema , decoder ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < K , T extends Persistent > K deserialize ( byte [ ] bytes , PersistentDatumReader < T > datumReader , Schema schema , K object ) throws IOException { decoder = DecoderFactory . defaultFactory ( ) . createBinaryDecoder ( bytes , decoder ) ; return ( K ) datumReader . read ( object , schema , decoder ) ; } public static < T extends Persistent > byte [ ] deserialize ( PersistentDatumWriter < T > datumWriter , Schema schema , Object object ) throws IOException { ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; serialize ( os , datumWriter , schema , object ) ; return os . toByteArray ( ) ; } public static void writeNullFieldsInfo ( DataOutput out , Object ... fields ) throws IOException { boolean [ ] isNull = new boolean [ fields . length ] ; for ( int i = <NUM_LIT:0> ; i < fields . 
length ; i ++ ) { isNull [ i ] = ( fields [ i ] == null ) ; } writeBoolArray ( out , isNull ) ; } public static boolean [ ] readNullFieldsInfo ( DataInput in ) throws IOException { return readBoolArray ( in ) ; } public static void writeBoolArray ( DataOutput out , boolean [ ] boolArray ) throws IOException { WritableUtils . writeVInt ( out , boolArray . length ) ; byte b = <NUM_LIT:0> ; int i = <NUM_LIT:0> ; for ( i = <NUM_LIT:0> ; i < boolArray . length ; i ++ ) { if ( i % <NUM_LIT:8> == <NUM_LIT:0> && i != <NUM_LIT:0> ) { out . writeByte ( b ) ; b = <NUM_LIT:0> ; } b >>= <NUM_LIT:1> ; if ( boolArray [ i ] ) b |= <NUM_LIT> ; else b &= <NUM_LIT> ; } if ( i % <NUM_LIT:8> != <NUM_LIT:0> ) { for ( int j = <NUM_LIT:0> ; j < <NUM_LIT:8> - ( i % <NUM_LIT:8> ) ; j ++ ) { b >>= <NUM_LIT:1> ; b &= <NUM_LIT> ; } } out . writeByte ( b ) ; } public static boolean [ ] readBoolArray ( DataInput in ) throws IOException { int length = WritableUtils . readVInt ( in ) ; boolean [ ] arr = new boolean [ length ] ; byte b = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; i < length ; i ++ ) { if ( i % <NUM_LIT:8> == <NUM_LIT:0> ) { b = in . readByte ( ) ; } arr [ i ] = ( b & <NUM_LIT> ) > <NUM_LIT:0> ; b >>= <NUM_LIT:1> ; } return arr ; } public static void writeBoolArray ( Encoder out , boolean [ ] boolArray ) throws IOException { out . writeInt ( boolArray . length ) ; int byteArrLength = ( int ) Math . ceil ( boolArray . length / <NUM_LIT> ) ; byte b = <NUM_LIT:0> ; byte [ ] arr = new byte [ byteArrLength ] ; int i = <NUM_LIT:0> ; int arrIndex = <NUM_LIT:0> ; for ( i = <NUM_LIT:0> ; i < boolArray . 
length ; i ++ ) { if ( i % <NUM_LIT:8> == <NUM_LIT:0> && i != <NUM_LIT:0> ) { arr [ arrIndex ++ ] = b ; b = <NUM_LIT:0> ; } b >>= <NUM_LIT:1> ; if ( boolArray [ i ] ) b |= <NUM_LIT> ; else b &= <NUM_LIT> ; } if ( i % <NUM_LIT:8> != <NUM_LIT:0> ) { for ( int j = <NUM_LIT:0> ; j < <NUM_LIT:8> - ( i % <NUM_LIT:8> ) ; j ++ ) { b >>= <NUM_LIT:1> ; b &= <NUM_LIT> ; } } arr [ arrIndex ++ ] = b ; out . writeFixed ( arr ) ; } public static boolean [ ] readBoolArray ( Decoder in ) throws IOException { int length = in . readInt ( ) ; boolean [ ] boolArr = new boolean [ length ] ; int byteArrLength = ( int ) Math . ceil ( length / <NUM_LIT> ) ; byte [ ] byteArr = new byte [ byteArrLength ] ; in . readFixed ( byteArr ) ; int arrIndex = <NUM_LIT:0> ; byte b = <NUM_LIT:0> ; for ( int i = <NUM_LIT:0> ; i < length ; i ++ ) { if ( i % <NUM_LIT:8> == <NUM_LIT:0> ) { b = byteArr [ arrIndex ++ ] ; } boolArr [ i ] = ( b & <NUM_LIT> ) > <NUM_LIT:0> ; b >>= <NUM_LIT:1> ; } return boolArr ; } public static void writeStringArray ( DataOutput out , String [ ] arr ) throws IOException { WritableUtils . writeVInt ( out , arr . length ) ; for ( String str : arr ) { Text . writeString ( out , str ) ; } } public static String [ ] readStringArray ( DataInput in ) throws IOException { int len = WritableUtils . readVInt ( in ) ; String [ ] arr = new String [ len ] ; for ( int i = <NUM_LIT:0> ; i < len ; i ++ ) { arr [ i ] = Text . readString ( in ) ; } return arr ; } public static < T > void storeToConf ( T obj , Configuration conf , String dataKey ) throws IOException { String classKey = dataKey + "<STR_LIT>" ; conf . set ( classKey , obj . getClass ( ) . getCanonicalName ( ) ) ; DefaultStringifier . store ( conf , obj , dataKey ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < T > T loadFromConf ( Configuration conf , String dataKey ) throws IOException { String classKey = dataKey + "<STR_LIT>" ; String className = conf . get ( classKey ) ; try { T obj = ( T ) DefaultStringifier . 
load ( conf , dataKey , ClassLoadingUtils . loadClass ( className ) ) ; return obj ; } catch ( Exception ex ) { throw new IOException ( ex ) ; } } public static byte [ ] getAsBytes ( List < ByteBuffer > buffers ) { int size = <NUM_LIT:0> ; for ( ByteBuffer buffer : buffers ) { size += buffer . remaining ( ) ; } byte [ ] arr = new byte [ size ] ; int offset = <NUM_LIT:0> ; for ( ByteBuffer buffer : buffers ) { int len = buffer . remaining ( ) ; buffer . get ( arr , offset , len ) ; offset += len ; } return arr ; } public static byte [ ] readFully ( InputStream in ) throws IOException { List < ByteBuffer > buffers = new ArrayList < ByteBuffer > ( <NUM_LIT:4> ) ; while ( true ) { ByteBuffer buffer = ByteBuffer . allocate ( BUFFER_SIZE ) ; int count = in . read ( buffer . array ( ) , <NUM_LIT:0> , BUFFER_SIZE ) ; if ( count > <NUM_LIT:0> ) { buffer . limit ( count ) ; buffers . add ( buffer ) ; } if ( count < BUFFER_SIZE ) break ; } return getAsBytes ( buffers ) ; } } </s>
|
<s>
package org.apache.gora.util;

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/** Assorted String helper methods. */
public class StringUtils {

  /**
   * Returns the union of the two arrays, with duplicates removed.
   * Element order of the result is unspecified (backed by a HashSet).
   */
  public static String[] joinStringArrays(String[] arr1, String... arr2) {
    HashSet<String> set = new HashSet<String>();
    // Idiom: Collections.addAll instead of manual element-by-element loops.
    Collections.addAll(set, arr1);
    Collections.addAll(set, arr2);
    return set.toArray(new String[set.size()]);
  }

  /** Joins the strings with a comma separator. */
  public static String join(List<String> strs) {
    return join(new StringBuilder(), strs).toString();
  }

  /** Joins the strings with a comma separator. */
  public static String join(String[] strs) {
    return join(new StringBuilder(), strs).toString();
  }

  /** Appends the comma-joined strings to the builder and returns it. */
  public static StringBuilder join(StringBuilder builder, Collection<String> strs) {
    int i = 0;
    for (String s : strs) {
      if (i != 0)
        builder.append(',');
      builder.append(s);
      i++;
    }
    return builder;
  }

  /** Appends the comma-joined strings to the builder and returns it. */
  public static StringBuilder join(StringBuilder builder, String[] strs) {
    for (int i = 0; i < strs.length; i++) {
      if (i != 0)
        builder.append(",");
      builder.append(strs[i]);
    }
    return builder;
  }

  /** Returns true if the string is non-null and non-empty. */
  public static boolean is(String str) {
    return str != null && str.length() > 0;
  }

  /**
   * Returns the power set of the given elements (2^n subsets, including the
   * empty set), enumerated via the binary representation of 0..2^n-1.
   */
  public static LinkedHashSet<Set<String>> powerset(String[] set) {
    LinkedHashSet<Set<String>> power = new LinkedHashSet<Set<String>>();
    int elements = set.length;
    int powerElements = (int) Math.pow(2, elements);
    for (int i = 0; i < powerElements; i++) {
      String binary = intToBinary(i, elements);
      LinkedHashSet<String> innerSet = new LinkedHashSet<String>();
      for (int j = 0; j < binary.length(); j++) {
        if (binary.charAt(j) == '1')
          innerSet.add(set[j]);
      }
      power.add(innerSet);
    }
    return power;
  }

  /**
   * Returns the binary representation of the value, left-padded with zeros to
   * at least {@code digits} characters.
   */
  private static String intToBinary(int binary, int digits) {
    // FIX(perf/idiom): the original prepended "0" in a loop, building a new
    // String per iteration (O(n^2)); use a StringBuilder instead.
    StringBuilder sb = new StringBuilder(Integer.toBinaryString(binary));
    while (sb.length() < digits) {
      sb.insert(0, '0');
    }
    return sb.toString();
  }

  /** Parses the string as an int, returning defaultValue when str is null. */
  public static int parseInt(String str, int defaultValue) {
    if (str == null) {
      return defaultValue;
    }
    return Integer.parseInt(str);
  }

  /** Returns the simple (unqualified) name of the class. */
  public static String getClassname(Class<?> clazz) {
    return getClassname(clazz.getName());
  }

  /** Returns the last dot-separated segment of a fully qualified class name. */
  public static String getClassname(String classname) {
    String[] parts = classname.split("\\.");
    return parts[parts.length - 1];
  }
}
</s>
|
<s>
package org.apache.gora.util;

/**
 * Marker type standing in for {@code null} values, exposed as a shared
 * singleton via {@link #get()}.
 */
public class Null {

  private static final Null INSTANCE = new Null();

  /** Public for frameworks that require a no-arg constructor; prefer {@link #get()}. */
  public Null() {
  }

  /** Returns the shared singleton instance. */
  public static Null get() {
    return INSTANCE;
  }
}
</s>
|
<s>
package org.apache.gora.util;

import java.io.IOException;

/**
 * Base checked exception for Gora errors; extends {@link IOException} so it
 * propagates through the existing IO-throwing APIs.
 */
public class GoraException extends IOException {

  private static final long serialVersionUID = -<NUM_LIT>;

  /** Constructs an exception with no detail message. */
  public GoraException() {
    super();
  }

  /** Constructs an exception with the given message and cause. */
  public GoraException(String message, Throwable cause) {
    super(message, cause);
  }

  /** Constructs an exception with the given detail message. */
  public GoraException(String message) {
    super(message);
  }

  /** Constructs an exception wrapping the given cause. */
  public GoraException(Throwable cause) {
    super(cause);
  }
}
</s>
|
<s>
package org.apache.gora.util;

/**
 * Unchecked exception thrown when a data store does not support a requested
 * operation.
 */
public class OperationNotSupportedException extends RuntimeException {

  private static final long serialVersionUID = <NUM_LIT>;

  /** Constructs an exception with no detail message. */
  public OperationNotSupportedException() {
    super();
  }

  /** Constructs an exception with the given message and cause. */
  public OperationNotSupportedException(String message, Throwable cause) {
    super(message, cause);
  }

  /** Constructs an exception with the given detail message. */
  public OperationNotSupportedException(String message) {
    super(message);
  }

  /** Constructs an exception wrapping the given cause. */
  public OperationNotSupportedException(Throwable cause) {
    super(cause);
  }
}
</s>
|
<s>
package org.apache.gora.util;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.reflect.ReflectData;
import org.apache.gora.persistency.Persistent;

/** Helpers for working with Avro schemas and enums. */
public class AvroUtils {

  /** Returns a name-to-Field map for all fields of the schema. */
  public static Map<String, Field> getFieldMap(Schema schema) {
    List<Field> schemaFields = schema.getFields();
    HashMap<String, Field> byName =
        new HashMap<String, Field>(schemaFields.size());
    for (Field schemaField : schemaFields) {
      byName.put(schemaField.name(), schemaField);
    }
    return byName;
  }

  /** Resolves the enum constant for the given symbol of an Avro enum schema. */
  @SuppressWarnings("unchecked")
  public static Object getEnumValue(Schema schema, String symbol) {
    return Enum.valueOf(ReflectData.get().getClass(schema), symbol);
  }

  /** Resolves the enum constant at the given ordinal of an Avro enum schema. */
  public static Object getEnumValue(Schema schema, int enumOrdinal) {
    return getEnumValue(schema, schema.getEnumSymbols().get(enumOrdinal));
  }

  /**
   * Returns the static Avro schema declared on the persistent class, read
   * reflectively from its schema field.
   */
  public static Schema getSchema(Class<? extends Persistent> clazz)
      throws SecurityException, NoSuchFieldException, IllegalArgumentException,
      IllegalAccessException {
    java.lang.reflect.Field schemaField = clazz.getDeclaredField("<STR_LIT>");
    return (Schema) schemaField.get(null);
  }
}
</s>
|
<s>
package org.apache.gora.util;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Map.Entry;
import java.util.Properties;

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

/** Helpers for reading and writing {@link Properties} via Hadoop Writables. */
public class WritableUtils {

  private WritableUtils() {
    // static utility class; not instantiable
  }

  /** Serializes the properties as a {@link MapWritable} of Text key/value pairs. */
  public static final void writeProperties(DataOutput out, Properties props)
      throws IOException {
    MapWritable map = new MapWritable();
    for (Entry<Object, Object> entry : props.entrySet()) {
      map.put(new Text(entry.getKey().toString()),
          new Text(entry.getValue().toString()));
    }
    map.write(out);
  }

  /** Reads back properties written by {@link #writeProperties(DataOutput, Properties)}. */
  public static final Properties readProperties(DataInput in)
      throws IOException {
    MapWritable map = new MapWritable();
    map.readFields(in);
    Properties props = new Properties();
    for (Entry<Writable, Writable> entry : map.entrySet()) {
      props.put(entry.getKey().toString(), entry.getValue().toString());
    }
    return props;
  }
}
</s>
|
<s> package org . apache . gora . util ; import java . io . IOException ; import java . io . UnsupportedEncodingException ; import java . nio . ByteBuffer ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Type ; import org . apache . avro . reflect . ReflectData ; import org . apache . avro . util . Utf8 ; import org . apache . gora . avro . PersistentDatumReader ; import org . apache . gora . avro . PersistentDatumWriter ; import org . apache . hadoop . io . WritableUtils ; public class ByteUtils { public static final int SIZEOF_BOOLEAN = Byte . SIZE / Byte . SIZE ; public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN ; public static final int SIZEOF_CHAR = Character . SIZE / Byte . SIZE ; public static final int SIZEOF_DOUBLE = Double . SIZE / Byte . SIZE ; public static final int SIZEOF_FLOAT = Float . SIZE / Byte . SIZE ; public static final int SIZEOF_INT = Integer . SIZE / Byte . SIZE ; public static final int SIZEOF_LONG = Long . SIZE / Byte . SIZE ; public static final int SIZEOF_SHORT = Short . SIZE / Byte . SIZE ; public static int putBytes ( byte [ ] tgtBytes , int tgtOffset , byte [ ] srcBytes , int srcOffset , int srcLength ) { System . arraycopy ( srcBytes , srcOffset , tgtBytes , tgtOffset , srcLength ) ; return tgtOffset + srcLength ; } public static int putByte ( byte [ ] bytes , int offset , byte b ) { bytes [ offset ] = b ; return offset + <NUM_LIT:1> ; } public static byte [ ] toBytes ( ByteBuffer bb ) { int length = bb . limit ( ) ; byte [ ] result = new byte [ length ] ; System . arraycopy ( bb . array ( ) , bb . arrayOffset ( ) , result , <NUM_LIT:0> , length ) ; return result ; } public static String toString ( final byte [ ] b ) { if ( b == null ) { return null ; } return toString ( b , <NUM_LIT:0> , b . length ) ; } public static String toString ( final byte [ ] b1 , String sep , final byte [ ] b2 ) { return toString ( b1 , <NUM_LIT:0> , b1 . length ) + sep + toString ( b2 , <NUM_LIT:0> , b2 . 
length ) ; } public static String toString ( final byte [ ] b , int off , int len ) { if ( b == null ) { return null ; } if ( len == <NUM_LIT:0> ) { return "<STR_LIT>" ; } String result = null ; try { result = new String ( b , off , len , "<STR_LIT:UTF-8>" ) ; } catch ( UnsupportedEncodingException e ) { e . printStackTrace ( ) ; } return result ; } public static byte [ ] toBytes ( String s ) { if ( s == null ) { throw new IllegalArgumentException ( "<STR_LIT>" ) ; } byte [ ] result = null ; try { result = s . getBytes ( "<STR_LIT:UTF-8>" ) ; } catch ( UnsupportedEncodingException e ) { e . printStackTrace ( ) ; } return result ; } public static byte [ ] toBytes ( final boolean b ) { byte [ ] bb = new byte [ <NUM_LIT:1> ] ; bb [ <NUM_LIT:0> ] = b ? ( byte ) - <NUM_LIT:1> : ( byte ) <NUM_LIT:0> ; return bb ; } public static boolean toBoolean ( final byte [ ] b ) { if ( b == null || b . length > <NUM_LIT:1> ) { throw new IllegalArgumentException ( "<STR_LIT>" ) ; } return b [ <NUM_LIT:0> ] != ( byte ) <NUM_LIT:0> ; } public static byte [ ] toBytes ( long val ) { byte [ ] b = new byte [ <NUM_LIT:8> ] ; for ( int i = <NUM_LIT:7> ; i > <NUM_LIT:0> ; i -- ) { b [ i ] = ( byte ) ( val ) ; val >>>= <NUM_LIT:8> ; } b [ <NUM_LIT:0> ] = ( byte ) ( val ) ; return b ; } public static long toLong ( byte [ ] bytes ) { return toLong ( bytes , <NUM_LIT:0> ) ; } public static long toLong ( byte [ ] bytes , int offset ) { return toLong ( bytes , offset , SIZEOF_LONG ) ; } public static long toLong ( byte [ ] bytes , int offset , final int length ) { if ( bytes == null || length != SIZEOF_LONG || ( offset + length > bytes . 
length ) ) { return - <NUM_LIT:1L> ; } long l = <NUM_LIT:0> ; for ( int i = offset ; i < ( offset + length ) ; i ++ ) { l <<= <NUM_LIT:8> ; l ^= ( long ) bytes [ i ] & <NUM_LIT> ; } return l ; } public static float toFloat ( byte [ ] bytes ) { return toFloat ( bytes , <NUM_LIT:0> ) ; } public static float toFloat ( byte [ ] bytes , int offset ) { int i = toInt ( bytes , offset ) ; return Float . intBitsToFloat ( i ) ; } public static byte [ ] toBytes ( final float f ) { int i = Float . floatToRawIntBits ( f ) ; return toBytes ( i ) ; } public static double toDouble ( final byte [ ] bytes ) { return toDouble ( bytes , <NUM_LIT:0> ) ; } public static double toDouble ( final byte [ ] bytes , final int offset ) { long l = toLong ( bytes , offset ) ; return Double . longBitsToDouble ( l ) ; } public static byte [ ] toBytes ( final double d ) { long l = Double . doubleToRawLongBits ( d ) ; return toBytes ( l ) ; } public static byte [ ] toBytes ( int val ) { byte [ ] b = new byte [ <NUM_LIT:4> ] ; for ( int i = <NUM_LIT:3> ; i > <NUM_LIT:0> ; i -- ) { b [ i ] = ( byte ) ( val ) ; val >>>= <NUM_LIT:8> ; } b [ <NUM_LIT:0> ] = ( byte ) ( val ) ; return b ; } public static int toInt ( byte [ ] bytes ) { return toInt ( bytes , <NUM_LIT:0> ) ; } public static int toInt ( byte [ ] bytes , int offset ) { return toInt ( bytes , offset , SIZEOF_INT ) ; } public static int toInt ( byte [ ] bytes , int offset , final int length ) { if ( bytes == null || length != SIZEOF_INT || ( offset + length > bytes . 
length ) ) { return - <NUM_LIT:1> ; } int n = <NUM_LIT:0> ; for ( int i = offset ; i < ( offset + length ) ; i ++ ) { n <<= <NUM_LIT:8> ; n ^= bytes [ i ] & <NUM_LIT> ; } return n ; } public static byte [ ] toBytes ( short val ) { byte [ ] b = new byte [ SIZEOF_SHORT ] ; b [ <NUM_LIT:1> ] = ( byte ) ( val ) ; val >>= <NUM_LIT:8> ; b [ <NUM_LIT:0> ] = ( byte ) ( val ) ; return b ; } public static short toShort ( byte [ ] bytes ) { return toShort ( bytes , <NUM_LIT:0> ) ; } public static short toShort ( byte [ ] bytes , int offset ) { return toShort ( bytes , offset , SIZEOF_SHORT ) ; } public static short toShort ( byte [ ] bytes , int offset , final int length ) { if ( bytes == null || length != SIZEOF_SHORT || ( offset + length > bytes . length ) ) { return - <NUM_LIT:1> ; } short n = <NUM_LIT:0> ; n ^= bytes [ offset ] & <NUM_LIT> ; n <<= <NUM_LIT:8> ; n ^= bytes [ offset + <NUM_LIT:1> ] & <NUM_LIT> ; return n ; } public static byte [ ] toBytes ( char val ) { byte [ ] b = new byte [ SIZEOF_CHAR ] ; b [ <NUM_LIT:1> ] = ( byte ) ( val ) ; val >>= <NUM_LIT:8> ; b [ <NUM_LIT:0> ] = ( byte ) ( val ) ; return b ; } public static char toChar ( byte [ ] bytes ) { return toChar ( bytes , <NUM_LIT:0> ) ; } public static char toChar ( byte [ ] bytes , int offset ) { return toChar ( bytes , offset , SIZEOF_CHAR ) ; } public static char toChar ( byte [ ] bytes , int offset , final int length ) { if ( bytes == null || length != SIZEOF_CHAR || ( offset + length > bytes . length ) ) { return ( char ) - <NUM_LIT:1> ; } char n = <NUM_LIT:0> ; n ^= bytes [ offset ] & <NUM_LIT> ; n <<= <NUM_LIT:8> ; n ^= bytes [ offset + <NUM_LIT:1> ] & <NUM_LIT> ; return n ; } public static char [ ] toChars ( byte [ ] bytes ) { return toChars ( bytes , <NUM_LIT:0> , bytes . length ) ; } public static char [ ] toChars ( byte [ ] bytes , int offset ) { return toChars ( bytes , offset , bytes . 
length - offset ) ; } public static char [ ] toChars ( byte [ ] bytes , int offset , final int length ) { int max = offset + length ; if ( bytes == null || ( max > bytes . length ) || length % <NUM_LIT:2> == <NUM_LIT:1> ) { return null ; } char [ ] chars = new char [ length / <NUM_LIT:2> ] ; for ( int i = <NUM_LIT:0> , j = offset ; i < chars . length && j < max ; i ++ , j += <NUM_LIT:2> ) { char c = <NUM_LIT:0> ; c ^= bytes [ j ] & <NUM_LIT> ; c <<= <NUM_LIT:8> ; c ^= bytes [ j + <NUM_LIT:1> ] & <NUM_LIT> ; chars [ i ] = c ; } return chars ; } public static byte [ ] vintToBytes ( final long vint ) { long i = vint ; int size = WritableUtils . getVIntSize ( i ) ; byte [ ] result = new byte [ size ] ; int offset = <NUM_LIT:0> ; if ( i >= - <NUM_LIT> && i <= <NUM_LIT> ) { result [ offset ] = ( ( byte ) i ) ; return result ; } int len = - <NUM_LIT> ; if ( i < <NUM_LIT:0> ) { i ^= - <NUM_LIT:1L> ; len = - <NUM_LIT> ; } long tmp = i ; while ( tmp != <NUM_LIT:0> ) { tmp = tmp > > <NUM_LIT:8> ; len -- ; } result [ offset ++ ] = ( byte ) len ; len = ( len < - <NUM_LIT> ) ? - ( len + <NUM_LIT> ) : - ( len + <NUM_LIT> ) ; for ( int idx = len ; idx != <NUM_LIT:0> ; idx -- ) { int shiftbits = ( idx - <NUM_LIT:1> ) * <NUM_LIT:8> ; long mask = <NUM_LIT> << shiftbits ; result [ offset ++ ] = ( byte ) ( ( i & mask ) > > shiftbits ) ; } return result ; } public static long bytesToVlong ( final byte [ ] buffer ) { int offset = <NUM_LIT:0> ; byte firstByte = buffer [ offset ++ ] ; int len = WritableUtils . decodeVIntSize ( firstByte ) ; if ( len == <NUM_LIT:1> ) { return firstByte ; } long i = <NUM_LIT:0> ; for ( int idx = <NUM_LIT:0> ; idx < len - <NUM_LIT:1> ; idx ++ ) { byte b = buffer [ offset ++ ] ; i = i << <NUM_LIT:8> ; i = i | ( b & <NUM_LIT> ) ; } return ( WritableUtils . isNegativeVInt ( firstByte ) ? 
( i ^ - <NUM_LIT:1L> ) : i ) ; } public static int bytesToVint ( final byte [ ] buffer ) { int offset = <NUM_LIT:0> ; byte firstByte = buffer [ offset ++ ] ; int len = WritableUtils . decodeVIntSize ( firstByte ) ; if ( len == <NUM_LIT:1> ) { return firstByte ; } long i = <NUM_LIT:0> ; for ( int idx = <NUM_LIT:0> ; idx < len - <NUM_LIT:1> ; idx ++ ) { byte b = buffer [ offset ++ ] ; i = i << <NUM_LIT:8> ; i = i | ( b & <NUM_LIT> ) ; } return ( int ) ( WritableUtils . isNegativeVInt ( firstByte ) ? ( i ^ - <NUM_LIT:1L> ) : i ) ; } public static long readVLong ( final byte [ ] buffer , final int offset ) throws IOException { byte firstByte = buffer [ offset ] ; int len = WritableUtils . decodeVIntSize ( firstByte ) ; if ( len == <NUM_LIT:1> ) { return firstByte ; } long i = <NUM_LIT:0> ; for ( int idx = <NUM_LIT:0> ; idx < len - <NUM_LIT:1> ; idx ++ ) { byte b = buffer [ offset + <NUM_LIT:1> + idx ] ; i = i << <NUM_LIT:8> ; i = i | ( b & <NUM_LIT> ) ; } return ( WritableUtils . isNegativeVInt ( firstByte ) ? ( i ^ - <NUM_LIT:1L> ) : i ) ; } public static int compareTo ( final byte [ ] left , final byte [ ] right ) { return compareTo ( left , <NUM_LIT:0> , left . length , right , <NUM_LIT:0> , right . length ) ; } public static int compareTo ( byte [ ] b1 , int s1 , int l1 , byte [ ] b2 , int s2 , int l2 ) { int end1 = s1 + l1 ; int end2 = s2 + l2 ; for ( int i = s1 , j = s2 ; i < end1 && j < end2 ; i ++ , j ++ ) { int a = ( b1 [ i ] & <NUM_LIT> ) ; int b = ( b2 [ j ] & <NUM_LIT> ) ; if ( a != b ) { return a - b ; } } return l1 - l2 ; } public static boolean equals ( final byte [ ] left , final byte [ ] right ) { return left == null && right == null ? true : ( left == null || right == null || ( left . length != right . length ) ) ? false : compareTo ( left , right ) == <NUM_LIT:0> ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static Object fromBytes ( byte [ ] val , Schema schema , PersistentDatumReader < ? 
> datumReader , Object object ) throws IOException { Type type = schema . getType ( ) ; switch ( type ) { case ENUM : String symbol = schema . getEnumSymbols ( ) . get ( val [ <NUM_LIT:0> ] ) ; return Enum . valueOf ( ReflectData . get ( ) . getClass ( schema ) , symbol ) ; case STRING : return new Utf8 ( toString ( val ) ) ; case BYTES : return ByteBuffer . wrap ( val ) ; case INT : return bytesToVint ( val ) ; case LONG : return bytesToVlong ( val ) ; case FLOAT : return toFloat ( val ) ; case DOUBLE : return toDouble ( val ) ; case BOOLEAN : return val [ <NUM_LIT:0> ] != <NUM_LIT:0> ; case RECORD : case MAP : case ARRAY : return IOUtils . deserialize ( val , datumReader , schema , object ) ; default : throw new RuntimeException ( "<STR_LIT>" + type ) ; } } public static byte [ ] toBytes ( Object o , Schema schema , PersistentDatumWriter < ? > datumWriter ) throws IOException { Type type = schema . getType ( ) ; switch ( type ) { case STRING : return toBytes ( ( ( Utf8 ) o ) . toString ( ) ) ; case BYTES : return ( ( ByteBuffer ) o ) . array ( ) ; case INT : return vintToBytes ( ( Integer ) o ) ; case LONG : return vintToBytes ( ( Long ) o ) ; case FLOAT : return toBytes ( ( Float ) o ) ; case DOUBLE : return toBytes ( ( Double ) o ) ; case BOOLEAN : return ( Boolean ) o ? new byte [ ] { <NUM_LIT:1> } : new byte [ ] { <NUM_LIT:0> } ; case ENUM : return new byte [ ] { ( byte ) ( ( Enum < ? > ) o ) . ordinal ( ) } ; case RECORD : case MAP : case ARRAY : return IOUtils . serialize ( datumWriter , schema , o ) ; default : throw new RuntimeException ( "<STR_LIT>" + type ) ; } } } </s>
|
<s>
package org.apache.gora.util;

import java.util.Stack;

import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * Pre-order (depth-first) iterator over a DOM tree, with support for
 * skipping the children of the node most recently returned.
 */
public class NodeWalker {

  // Node returned by the most recent call to nextNode().
  private Node currentNode;
  // Child list of currentNode, cached for skipChildren().
  private NodeList currentChildren;
  // Pending nodes; the top of the stack is visited next.
  private Stack<Node> nodes;

  /** Starts a walk rooted at the given node. */
  public NodeWalker(Node rootNode) {
    nodes = new Stack<Node>();
    nodes.add(rootNode);
  }

  /** Returns the next node in pre-order, or null when the walk is exhausted. */
  public Node nextNode() {
    if (!hasNext()) {
      return null;
    }
    currentNode = nodes.pop();
    currentChildren = currentNode.getChildNodes();
    int count = (currentChildren != null) ? currentChildren.getLength() : 0;
    // Push children in reverse so item(0) ends up on top and is visited first.
    for (int i = count - 1; i >= 0; i--) {
      nodes.add(currentChildren.item(i));
    }
    return currentNode;
  }

  /** Removes the current node's children from the pending stack. */
  public void skipChildren() {
    int count = (currentChildren != null) ? currentChildren.getLength() : 0;
    for (int i = 0; i < count; i++) {
      Node top = nodes.peek();
      if (top.equals(currentChildren.item(i))) {
        nodes.pop();
      }
    }
  }

  /** Returns true while un-visited nodes remain. */
  public boolean hasNext() {
    return (nodes.size() > 0);
  }
}
</s>
|
<s>
package org.apache.gora.store;

import java.io.Closeable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import java.util.Properties;

import org.apache.gora.persistency.BeanFactory;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.query.PartitionQuery;
import org.apache.gora.query.Query;
import org.apache.gora.query.Result;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;

/**
 * Primary abstraction over a key/value persistence backend.
 *
 * @param <K> the key class
 * @param <T> the persistent bean class stored against each key
 */
public interface DataStore<K, T extends Persistent>
    extends Closeable, Writable, Configurable {

  /** Initializes the store with its key/bean classes and backend properties. */
  void initialize(Class<K> keyClass, Class<T> persistentClass,
      Properties properties) throws IOException;

  /** Sets the class of the keys. */
  void setKeyClass(Class<K> keyClass);

  /** Returns the class of the keys. */
  Class<K> getKeyClass();

  /** Sets the class of the persistent beans. */
  void setPersistentClass(Class<T> persistentClass);

  /** Returns the class of the persistent beans. */
  Class<T> getPersistentClass();

  /** Returns the backend schema (table) name. */
  String getSchemaName();

  /** Creates the backend schema if the backend supports it. */
  void createSchema() throws IOException;

  /** Drops the backend schema if the backend supports it. */
  void deleteSchema() throws IOException;

  /** Removes all data while keeping the schema in place. */
  void truncateSchema() throws IOException;

  /** Returns whether the backend schema exists. */
  boolean schemaExists() throws IOException;

  /** Creates a new key instance. */
  K newKey() throws IOException;

  /** Creates a new persistent bean instance. */
  T newPersistent() throws IOException;

  /** Returns the bean for the key with all fields, or null if absent. */
  T get(K key) throws IOException;

  /** Returns the bean for the key with only the requested fields loaded. */
  T get(K key, String[] fields) throws IOException;

  /** Stores the bean under the key. */
  void put(K key, T obj) throws IOException;

  /** Deletes the entry for the key; returns whether anything was removed. */
  boolean delete(K key) throws IOException;

  /** Deletes all entries matching the query; returns the number removed. */
  long deleteByQuery(Query<K, T> query) throws IOException;

  /** Executes the query and returns its result set. */
  Result<K, T> execute(Query<K, T> query) throws IOException;

  /** Creates a new empty query bound to this store. */
  Query<K, T> newQuery();

  /** Splits the query into partitions suitable for parallel execution. */
  List<PartitionQuery<K, T>> getPartitions(Query<K, T> query)
      throws IOException;

  /** Flushes any buffered writes to the backend. */
  void flush() throws IOException;

  /** Sets the factory used to construct keys and beans. */
  void setBeanFactory(BeanFactory<K, T> beanFactory);

  /** Returns the factory used to construct keys and beans. */
  BeanFactory<K, T> getBeanFactory();

  /** Flushes and releases all resources held by the store. */
  void close() throws IOException;

  /** Returns the Hadoop configuration. */
  Configuration getConf();

  /** Sets the Hadoop configuration. */
  void setConf(Configuration conf);

  /** Restores the store's state from the input. */
  void readFields(DataInput in) throws IOException;

  /** Persists the store's state to the output. */
  void write(DataOutput out) throws IOException;
}
</s>
|
<s> package org . apache . gora . store ; import java . io . IOException ; import java . io . InputStream ; import java . util . Properties ; import org . slf4j . Logger ; import org . slf4j . LoggerFactory ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . store . impl . DataStoreBase ; import org . apache . gora . util . ClassLoadingUtils ; import org . apache . gora . util . GoraException ; import org . apache . gora . util . ReflectionUtils ; import org . apache . hadoop . conf . Configurable ; import org . apache . hadoop . conf . Configuration ; public class DataStoreFactory { public static final Logger log = LoggerFactory . getLogger ( DataStoreFactory . class ) ; public static final String GORA_DEFAULT_PROPERTIES_FILE = "<STR_LIT>" ; public static final String GORA_DEFAULT_DATASTORE_KEY = "<STR_LIT>" ; public static final String GORA = "<STR_LIT>" ; public static final String DATASTORE = "<STR_LIT>" ; private static final String GORA_DATASTORE = GORA + "<STR_LIT:.>" + DATASTORE + "<STR_LIT:.>" ; public static final String AUTO_CREATE_SCHEMA = "<STR_LIT>" ; public static final String INPUT_PATH = "<STR_LIT>" ; public static final String OUTPUT_PATH = "<STR_LIT>" ; public static final String MAPPING_FILE = "<STR_LIT>" ; public static final String SCHEMA_NAME = "<STR_LIT>" ; @ Deprecated ( ) public static final Properties properties = createProps ( ) ; public static Properties createProps ( ) { try { Properties properties = new Properties ( ) ; InputStream stream = DataStoreFactory . class . getClassLoader ( ) . getResourceAsStream ( GORA_DEFAULT_PROPERTIES_FILE ) ; if ( stream != null ) { try { properties . load ( stream ) ; return properties ; } finally { stream . close ( ) ; } } else { log . 
warn ( GORA_DEFAULT_PROPERTIES_FILE + "<STR_LIT>" ) ; } return properties ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } private DataStoreFactory ( ) { } private static < K , T extends Persistent > void initializeDataStore ( DataStore < K , T > dataStore , Class < K > keyClass , Class < T > persistent , Properties properties ) throws IOException { dataStore . initialize ( keyClass , persistent , properties ) ; } public static < D extends DataStore < K , T > , K , T extends Persistent > D createDataStore ( Class < D > dataStoreClass , Class < K > keyClass , Class < T > persistent , Configuration conf ) throws GoraException { return createDataStore ( dataStoreClass , keyClass , persistent , conf , createProps ( ) , null ) ; } public static < D extends DataStore < K , T > , K , T extends Persistent > D createDataStore ( Class < D > dataStoreClass , Class < K > keyClass , Class < T > persistent , Configuration conf , String schemaName ) throws GoraException { return createDataStore ( dataStoreClass , keyClass , persistent , conf , createProps ( ) , schemaName ) ; } public static < D extends DataStore < K , T > , K , T extends Persistent > D createDataStore ( Class < D > dataStoreClass , Class < K > keyClass , Class < T > persistent , Configuration conf , Properties properties , String schemaName ) throws GoraException { try { setDefaultSchemaName ( properties , schemaName ) ; D dataStore = ReflectionUtils . newInstance ( dataStoreClass ) ; if ( ( dataStore instanceof Configurable ) && conf != null ) { ( ( Configurable ) dataStore ) . 
setConf ( conf ) ; } initializeDataStore ( dataStore , keyClass , persistent , properties ) ; return dataStore ; } catch ( GoraException ex ) { throw ex ; } catch ( Exception ex ) { throw new GoraException ( ex ) ; } } public static < D extends DataStore < K , T > , K , T extends Persistent > D createDataStore ( Class < D > dataStoreClass , Class < K > keyClass , Class < T > persistent , Configuration conf , Properties properties ) throws GoraException { return createDataStore ( dataStoreClass , keyClass , persistent , conf , properties , null ) ; } public static < D extends DataStore < K , T > , K , T extends Persistent > D getDataStore ( Class < D > dataStoreClass , Class < K > keyClass , Class < T > persistentClass , Configuration conf ) throws GoraException { return createDataStore ( dataStoreClass , keyClass , persistentClass , conf , createProps ( ) , null ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < K , T extends Persistent > DataStore < K , T > getDataStore ( String dataStoreClass , Class < K > keyClass , Class < T > persistentClass , Configuration conf ) throws GoraException { try { Class < ? extends DataStore < K , T > > c = ( Class < ? extends DataStore < K , T > > ) Class . forName ( dataStoreClass ) ; return createDataStore ( c , keyClass , persistentClass , conf , createProps ( ) , null ) ; } catch ( GoraException ex ) { throw ex ; } catch ( Exception ex ) { throw new GoraException ( ex ) ; } } @ SuppressWarnings ( { "<STR_LIT:unchecked>" } ) public static < K , T extends Persistent > DataStore < K , T > getDataStore ( String dataStoreClass , String keyClass , String persistentClass , Configuration conf ) throws GoraException { try { Class < ? extends DataStore < K , T > > c = ( Class < ? extends DataStore < K , T > > ) Class . forName ( dataStoreClass ) ; Class < K > k = ( Class < K > ) ClassLoadingUtils . loadClass ( keyClass ) ; Class < T > p = ( Class < T > ) ClassLoadingUtils . 
loadClass ( persistentClass ) ; return createDataStore ( c , k , p , conf , createProps ( ) , null ) ; } catch ( GoraException ex ) { throw ex ; } catch ( Exception ex ) { throw new GoraException ( ex ) ; } } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public static < K , T extends Persistent > DataStore < K , T > getDataStore ( Class < K > keyClass , Class < T > persistent , Configuration conf ) throws GoraException { Properties createProps = createProps ( ) ; Class < ? extends DataStore < K , T > > c ; try { c = ( Class < ? extends DataStore < K , T > > ) Class . forName ( getDefaultDataStore ( createProps ) ) ; } catch ( Exception ex ) { throw new GoraException ( ex ) ; } return createDataStore ( c , keyClass , persistent , conf , createProps , null ) ; } public static String findProperty ( Properties properties , DataStore < ? , ? > store , String baseKey , String defaultValue ) { Class < ? > clazz = store . getClass ( ) ; while ( true ) { String fullKey = GORA + "<STR_LIT:.>" + org . apache . gora . util . StringUtils . getClassname ( clazz ) + "<STR_LIT:.>" + baseKey ; String value = getProperty ( properties , fullKey ) ; if ( value != null ) { return value ; } value = getProperty ( properties , fullKey . toLowerCase ( ) ) ; if ( value != null ) { return value ; } if ( clazz . equals ( DataStoreBase . class ) ) { break ; } clazz = clazz . getSuperclass ( ) ; if ( clazz == null ) { break ; } } String fullKey = GORA + "<STR_LIT:.>" + DATASTORE + "<STR_LIT:.>" + baseKey ; String value = getProperty ( properties , fullKey ) ; if ( value != null ) { return value ; } return defaultValue ; } public static String findPropertyOrDie ( Properties properties , DataStore < ? , ? 
> store , String baseKey ) throws IOException { String val = findProperty ( properties , store , baseKey , null ) ; if ( val == null ) { throw new IOException ( "<STR_LIT>" + baseKey + "<STR_LIT>" + "<STR_LIT>" ) ; } return val ; } public static boolean findBooleanProperty ( Properties properties , DataStore < ? , ? > store , String baseKey , String defaultValue ) { return Boolean . parseBoolean ( findProperty ( properties , store , baseKey , defaultValue ) ) ; } public static boolean getAutoCreateSchema ( Properties properties , DataStore < ? , ? > store ) { return findBooleanProperty ( properties , store , AUTO_CREATE_SCHEMA , "<STR_LIT:true>" ) ; } public static String getInputPath ( Properties properties , DataStore < ? , ? > store ) { return findProperty ( properties , store , INPUT_PATH , null ) ; } public static String getOutputPath ( Properties properties , DataStore < ? , ? > store ) { return findProperty ( properties , store , OUTPUT_PATH , null ) ; } public static String getMappingFile ( Properties properties , DataStore < ? , ? > store , String defaultValue ) { return findProperty ( properties , store , MAPPING_FILE , defaultValue ) ; } private static String getDefaultDataStore ( Properties properties ) { return getProperty ( properties , GORA_DEFAULT_DATASTORE_KEY ) ; } private static String getProperty ( Properties properties , String key ) { return getProperty ( properties , key , null ) ; } private static String getProperty ( Properties properties , String key , String defaultValue ) { if ( properties == null ) { return defaultValue ; } String result = properties . getProperty ( key ) ; if ( result == null ) { return defaultValue ; } return result ; } private static void setProperty ( Properties properties , String baseKey , String value ) { if ( value != null ) { properties . 
setProperty ( GORA_DATASTORE + baseKey , value ) ; } } private static < D extends DataStore < K , T > , K , T extends Persistent > void setProperty ( Properties properties , Class < D > dataStoreClass , String baseKey , String value ) { properties . setProperty ( GORA + "<STR_LIT:.>" + org . apache . gora . util . StringUtils . getClassname ( dataStoreClass ) + "<STR_LIT:.>" + baseKey , value ) ; } public static String getDefaultSchemaName ( Properties properties , DataStore < ? , ? > store ) { return findProperty ( properties , store , SCHEMA_NAME , null ) ; } public static void setDefaultSchemaName ( Properties properties , String schemaName ) { if ( schemaName != null ) { setProperty ( properties , SCHEMA_NAME , schemaName ) ; } } public static < D extends DataStore < K , T > , K , T extends Persistent > void setDefaultSchemaName ( Properties properties , Class < D > dataStoreClass , String schemaName ) { setProperty ( properties , dataStoreClass , SCHEMA_NAME , schemaName ) ; } } </s>
|
<s> package org . apache . gora . store ; import java . io . InputStream ; import java . io . OutputStream ; import org . apache . gora . persistency . Persistent ; public interface FileBackedDataStore < K , T extends Persistent > extends DataStore < K , T > { void setInputPath ( String inputPath ) ; void setOutputPath ( String outputPath ) ; String getInputPath ( ) ; String getOutputPath ( ) ; void setInputStream ( InputStream inputStream ) ; void setOutputStream ( OutputStream outputStream ) ; InputStream getInputStream ( ) ; OutputStream getOutputStream ( ) ; } </s>
|
<s> package org . apache . gora . store . impl ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import java . util . Map ; import java . util . Properties ; import org . apache . avro . Schema ; import org . apache . avro . Schema . Field ; import org . apache . commons . lang . builder . EqualsBuilder ; import org . apache . gora . avro . PersistentDatumReader ; import org . apache . gora . avro . PersistentDatumWriter ; import org . apache . gora . persistency . BeanFactory ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . persistency . impl . BeanFactoryImpl ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . util . AvroUtils ; import org . apache . gora . util . ClassLoadingUtils ; import org . apache . gora . util . StringUtils ; import org . apache . gora . util . WritableUtils ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . io . Text ; public abstract class DataStoreBase < K , T extends Persistent > implements DataStore < K , T > { protected BeanFactory < K , T > beanFactory ; protected Class < K > keyClass ; protected Class < T > persistentClass ; protected Schema schema ; protected Map < String , Field > fieldMap ; protected Configuration conf ; protected boolean autoCreateSchema ; protected Properties properties ; protected PersistentDatumReader < T > datumReader ; protected PersistentDatumWriter < T > datumWriter ; public DataStoreBase ( ) { } @ Override public void initialize ( Class < K > keyClass , Class < T > persistentClass , Properties properties ) throws IOException { setKeyClass ( keyClass ) ; setPersistentClass ( persistentClass ) ; if ( this . beanFactory == null ) this . beanFactory = new BeanFactoryImpl < K , T > ( keyClass , persistentClass ) ; schema = this . beanFactory . getCachedPersistent ( ) . 
getSchema ( ) ; fieldMap = AvroUtils . getFieldMap ( schema ) ; autoCreateSchema = DataStoreFactory . getAutoCreateSchema ( properties , this ) ; this . properties = properties ; datumReader = new PersistentDatumReader < T > ( schema , false ) ; datumWriter = new PersistentDatumWriter < T > ( schema , false ) ; } @ Override public void setPersistentClass ( Class < T > persistentClass ) { this . persistentClass = persistentClass ; } @ Override public Class < T > getPersistentClass ( ) { return persistentClass ; } @ Override public Class < K > getKeyClass ( ) { return keyClass ; } @ Override public void setKeyClass ( Class < K > keyClass ) { if ( keyClass != null ) this . keyClass = keyClass ; } @ Override public K newKey ( ) throws IOException { try { return beanFactory . newKey ( ) ; } catch ( Exception ex ) { throw new IOException ( ex ) ; } } @ Override public T newPersistent ( ) throws IOException { try { return beanFactory . newPersistent ( ) ; } catch ( Exception ex ) { throw new IOException ( ex ) ; } } @ Override public void setBeanFactory ( BeanFactory < K , T > beanFactory ) { this . beanFactory = beanFactory ; } @ Override public BeanFactory < K , T > getBeanFactory ( ) { return beanFactory ; } @ Override public T get ( K key ) throws IOException { return get ( key , getFieldsToQuery ( null ) ) ; } ; protected String [ ] getFieldsToQuery ( String [ ] fields ) { if ( fields != null ) { return fields ; } return beanFactory . getCachedPersistent ( ) . getFields ( ) ; } @ Override public Configuration getConf ( ) { return conf ; } @ Override public void setConf ( Configuration conf ) { this . conf = conf ; } protected Configuration getOrCreateConf ( ) { if ( conf == null ) { conf = new Configuration ( ) ; } return conf ; } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public void readFields ( DataInput in ) throws IOException { try { Class < K > keyClass = ( Class < K > ) ClassLoadingUtils . loadClass ( Text . 
readString ( in ) ) ; Class < T > persistentClass = ( Class < T > ) ClassLoadingUtils . loadClass ( Text . readString ( in ) ) ; Properties props = WritableUtils . readProperties ( in ) ; initialize ( keyClass , persistentClass , props ) ; } catch ( ClassNotFoundException ex ) { throw new IOException ( ex ) ; } } @ Override public void write ( DataOutput out ) throws IOException { Text . writeString ( out , getKeyClass ( ) . getCanonicalName ( ) ) ; Text . writeString ( out , getPersistentClass ( ) . getCanonicalName ( ) ) ; WritableUtils . writeProperties ( out , properties ) ; } @ Override public boolean equals ( Object obj ) { if ( obj instanceof DataStoreBase ) { @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) DataStoreBase that = ( DataStoreBase ) obj ; EqualsBuilder builder = new EqualsBuilder ( ) ; builder . append ( this . keyClass , that . keyClass ) ; builder . append ( this . persistentClass , that . persistentClass ) ; return builder . isEquals ( ) ; } return false ; } @ Override public void truncateSchema ( ) throws IOException { deleteSchema ( ) ; createSchema ( ) ; } protected String getSchemaName ( String mappingSchemaName , Class < ? > persistentClass ) { String prefix = getOrCreateConf ( ) . get ( "<STR_LIT>" , "<STR_LIT>" ) ; String schemaName = DataStoreFactory . getDefaultSchemaName ( properties , this ) ; if ( schemaName != null ) { return prefix + schemaName ; } if ( mappingSchemaName != null ) { return prefix + mappingSchemaName ; } return prefix + StringUtils . getClassname ( persistentClass ) ; } } </s>
|
<s> package org . apache . gora . store . impl ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import java . io . InputStream ; import java . io . OutputStream ; import java . util . ArrayList ; import java . util . List ; import java . util . Properties ; import org . apache . gora . mapreduce . GoraMapReduceUtils ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . PartitionQuery ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . query . impl . FileSplitPartitionQuery ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . FileBackedDataStore ; import org . apache . gora . util . OperationNotSupportedException ; import org . apache . hadoop . fs . FileSystem ; import org . apache . hadoop . fs . Path ; import org . apache . hadoop . io . IOUtils ; import org . apache . hadoop . io . Text ; import org . apache . hadoop . mapreduce . InputSplit ; import org . apache . hadoop . mapreduce . lib . input . FileSplit ; public abstract class FileBackedDataStoreBase < K , T extends Persistent > extends DataStoreBase < K , T > implements FileBackedDataStore < K , T > { protected long inputSize ; protected String inputPath ; protected String outputPath ; protected InputStream inputStream ; protected OutputStream outputStream ; @ Override public void initialize ( Class < K > keyClass , Class < T > persistentClass , Properties properties ) throws IOException { super . initialize ( keyClass , persistentClass , properties ) ; if ( properties != null ) { if ( this . inputPath == null ) { this . inputPath = DataStoreFactory . getInputPath ( properties , this ) ; } if ( this . outputPath == null ) { this . outputPath = DataStoreFactory . getOutputPath ( properties , this ) ; } } } @ Override public void setInputPath ( String inputPath ) { this . 
inputPath = inputPath ; } @ Override public void setOutputPath ( String outputPath ) { this . outputPath = outputPath ; } @ Override public String getInputPath ( ) { return inputPath ; } @ Override public String getOutputPath ( ) { return outputPath ; } @ Override public void setInputStream ( InputStream inputStream ) { this . inputStream = inputStream ; } @ Override public void setOutputStream ( OutputStream outputStream ) { this . outputStream = outputStream ; } @ Override public InputStream getInputStream ( ) { return inputStream ; } @ Override public OutputStream getOutputStream ( ) { return outputStream ; } protected InputStream createInputStream ( ) throws IOException { Path path = new Path ( inputPath ) ; FileSystem fs = path . getFileSystem ( getConf ( ) ) ; inputSize = fs . getFileStatus ( path ) . getLen ( ) ; return fs . open ( path ) ; } protected OutputStream createOutputStream ( ) throws IOException { Path path = new Path ( outputPath ) ; FileSystem fs = path . getFileSystem ( getConf ( ) ) ; return fs . create ( path ) ; } protected InputStream getOrCreateInputStream ( ) throws IOException { if ( inputStream == null ) { inputStream = createInputStream ( ) ; } return inputStream ; } protected OutputStream getOrCreateOutputStream ( ) throws IOException { if ( outputStream == null ) { outputStream = createOutputStream ( ) ; } return outputStream ; } @ Override public List < PartitionQuery < K , T > > getPartitions ( Query < K , T > query ) throws IOException { List < InputSplit > splits = GoraMapReduceUtils . getSplits ( getConf ( ) , inputPath ) ; List < PartitionQuery < K , T > > queries = new ArrayList < PartitionQuery < K , T > > ( splits . size ( ) ) ; for ( InputSplit split : splits ) { queries . 
add ( new FileSplitPartitionQuery < K , T > ( query , ( FileSplit ) split ) ) ; } return queries ; } @ Override public Result < K , T > execute ( Query < K , T > query ) throws IOException { if ( query instanceof FileSplitPartitionQuery ) { return executePartial ( ( FileSplitPartitionQuery < K , T > ) query ) ; } else { return executeQuery ( query ) ; } } protected abstract Result < K , T > executeQuery ( Query < K , T > query ) throws IOException ; protected abstract Result < K , T > executePartial ( FileSplitPartitionQuery < K , T > query ) throws IOException ; @ Override public void flush ( ) throws IOException { if ( outputStream != null ) outputStream . flush ( ) ; } @ Override public void createSchema ( ) throws IOException { } @ Override public void deleteSchema ( ) throws IOException { throw new OperationNotSupportedException ( "<STR_LIT>" + "<STR_LIT>" ) ; } @ Override public boolean schemaExists ( ) throws IOException { return true ; } @ Override public void write ( DataOutput out ) throws IOException { super . write ( out ) ; org . apache . gora . util . IOUtils . writeNullFieldsInfo ( out , inputPath , outputPath ) ; if ( inputPath != null ) Text . writeString ( out , inputPath ) ; if ( outputPath != null ) Text . writeString ( out , outputPath ) ; } @ Override public void readFields ( DataInput in ) throws IOException { super . readFields ( in ) ; boolean [ ] nullFields = org . apache . gora . util . IOUtils . readNullFieldsInfo ( in ) ; if ( ! nullFields [ <NUM_LIT:0> ] ) inputPath = Text . readString ( in ) ; if ( ! nullFields [ <NUM_LIT:1> ] ) outputPath = Text . readString ( in ) ; } @ Override public void close ( ) throws IOException { IOUtils . closeStream ( inputStream ) ; IOUtils . closeStream ( outputStream ) ; inputStream = null ; outputStream = null ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . DataInput ; import java . io . DataOutput ; import java . io . IOException ; import org . apache . gora . query . PartitionQuery ; import org . apache . gora . util . IOUtils ; import org . apache . hadoop . conf . Configurable ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . io . Writable ; import org . apache . hadoop . mapreduce . InputSplit ; public class GoraInputSplit extends InputSplit implements Writable , Configurable { protected PartitionQuery < ? , ? > query ; private Configuration conf ; public GoraInputSplit ( ) { } public GoraInputSplit ( Configuration conf , PartitionQuery < ? , ? > query ) { setConf ( conf ) ; this . query = query ; } @ Override public Configuration getConf ( ) { return conf ; } @ Override public void setConf ( Configuration conf ) { this . conf = conf ; } @ Override public long getLength ( ) throws IOException , InterruptedException { return <NUM_LIT:0> ; } @ Override public String [ ] getLocations ( ) { return query . getLocations ( ) ; } public PartitionQuery < ? , ? > getQuery ( ) { return query ; } @ Override public void readFields ( DataInput in ) throws IOException { try { query = ( PartitionQuery < ? , ? > ) IOUtils . deserialize ( conf , in , null ) ; } catch ( ClassNotFoundException ex ) { throw new IOException ( ex ) ; } } @ Override public void write ( DataOutput out ) throws IOException { IOUtils . serialize ( getConf ( ) , out , query ) ; } @ Override public boolean equals ( Object obj ) { if ( obj instanceof GoraInputSplit ) { return this . query . equals ( ( ( GoraInputSplit ) obj ) . query ) ; } return false ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . store . DataStore ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . mapreduce . RecordWriter ; import org . apache . hadoop . mapreduce . TaskAttemptContext ; import org . slf4j . Logger ; import org . slf4j . LoggerFactory ; public class GoraRecordWriter < K , T > extends RecordWriter < K , T > { public static final Logger LOG = LoggerFactory . getLogger ( GoraRecordWriter . class ) ; private static final String BUFFER_LIMIT_WRITE_NAME = "<STR_LIT>" ; private static final int BUFFER_LIMIT_WRITE_VALUE = <NUM_LIT> ; private DataStore < K , Persistent > store ; private GoraRecordCounter counter = new GoraRecordCounter ( ) ; public GoraRecordWriter ( DataStore < K , Persistent > store , TaskAttemptContext context ) { this . store = store ; Configuration configuration = context . getConfiguration ( ) ; int recordsMax = configuration . getInt ( BUFFER_LIMIT_WRITE_NAME , BUFFER_LIMIT_WRITE_VALUE ) ; counter . setRecordsMax ( recordsMax ) ; LOG . info ( "<STR_LIT>" + recordsMax ) ; } @ Override public void close ( TaskAttemptContext context ) throws IOException , InterruptedException { store . close ( ) ; } @ Override public void write ( K key , T value ) throws IOException , InterruptedException { store . put ( key , ( Persistent ) value ) ; counter . increment ( ) ; if ( counter . isModulo ( ) ) { LOG . info ( "<STR_LIT>" + counter . getRecordsNumber ( ) + "<STR_LIT>" ) ; store . flush ( ) ; } } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import java . io . InputStream ; import org . apache . avro . Schema ; import org . apache . avro . io . BinaryDecoder ; import org . apache . avro . io . DecoderFactory ; import org . apache . gora . avro . PersistentDatumReader ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . util . AvroUtils ; import org . apache . hadoop . io . serializer . Deserializer ; public class PersistentDeserializer implements Deserializer < Persistent > { private BinaryDecoder decoder ; private Class < ? extends Persistent > persistentClass ; private boolean reuseObjects ; private PersistentDatumReader < Persistent > datumReader ; public PersistentDeserializer ( Class < ? extends Persistent > c , boolean reuseObjects ) { this . persistentClass = c ; this . reuseObjects = reuseObjects ; try { Schema schema = AvroUtils . getSchema ( persistentClass ) ; datumReader = new PersistentDatumReader < Persistent > ( schema , true ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } } @ Override public void open ( InputStream in ) throws IOException { decoder = new DecoderFactory ( ) . configureDirectDecoder ( true ) . createBinaryDecoder ( in , decoder ) ; } @ Override public void close ( ) throws IOException { } @ Override public Persistent deserialize ( Persistent persistent ) throws IOException { return datumReader . read ( reuseObjects ? persistent : null , decoder ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . store . DataStore ; import org . apache . hadoop . mapreduce . Job ; import org . apache . hadoop . mapreduce . Mapper ; import org . apache . hadoop . mapreduce . Partitioner ; public class GoraMapper < K1 , V1 extends Persistent , K2 , V2 > extends Mapper < K1 , V1 , K2 , V2 > { @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public static < K1 , V1 extends Persistent , K2 , V2 > void initMapperJob ( Job job , Class < ? extends DataStore < K1 , V1 > > dataStoreClass , Class < K1 > inKeyClass , Class < V1 > inValueClass , Class < K2 > outKeyClass , Class < V2 > outValueClass , Class < ? extends GoraMapper > mapperClass , Class < ? extends Partitioner > partitionerClass , boolean reuseObjects ) throws IOException { GoraInputFormat . setInput ( job , dataStoreClass , inKeyClass , inValueClass , reuseObjects ) ; job . setMapperClass ( mapperClass ) ; job . setMapOutputKeyClass ( outKeyClass ) ; job . setMapOutputValueClass ( outValueClass ) ; if ( partitionerClass != null ) { job . setPartitionerClass ( partitionerClass ) ; } } @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public static < K1 , V1 extends Persistent , K2 , V2 > void initMapperJob ( Job job , Class < ? extends DataStore < K1 , V1 > > dataStoreClass , Class < K1 > inKeyClass , Class < V1 > inValueClass , Class < K2 > outKeyClass , Class < V2 > outValueClass , Class < ? 
extends GoraMapper > mapperClass , boolean reuseObjects ) throws IOException { initMapperJob ( job , dataStoreClass , inKeyClass , inValueClass , outKeyClass , outValueClass , mapperClass , null , reuseObjects ) ; } @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public static < K1 , V1 extends Persistent , K2 , V2 > void initMapperJob ( Job job , Query < K1 , V1 > query , DataStore < K1 , V1 > dataStore , Class < K2 > outKeyClass , Class < V2 > outValueClass , Class < ? extends GoraMapper > mapperClass , Class < ? extends Partitioner > partitionerClass , boolean reuseObjects ) throws IOException { GoraInputFormat . setInput ( job , query , dataStore , reuseObjects ) ; job . setMapperClass ( mapperClass ) ; job . setMapOutputKeyClass ( outKeyClass ) ; job . setMapOutputValueClass ( outValueClass ) ; if ( partitionerClass != null ) { job . setPartitionerClass ( partitionerClass ) ; } } @ SuppressWarnings ( { "<STR_LIT:rawtypes>" } ) public static < K1 , V1 extends Persistent , K2 , V2 > void initMapperJob ( Job job , DataStore < K1 , V1 > dataStore , Class < K2 > outKeyClass , Class < V2 > outValueClass , Class < ? extends GoraMapper > mapperClass , boolean reuseObjects ) throws IOException { initMapperJob ( job , dataStore . newQuery ( ) , dataStore , outKeyClass , outValueClass , mapperClass , reuseObjects ) ; } @ SuppressWarnings ( { "<STR_LIT:rawtypes>" } ) public static < K1 , V1 extends Persistent , K2 , V2 > void initMapperJob ( Job job , Query < K1 , V1 > query , DataStore < K1 , V1 > dataStore , Class < K2 > outKeyClass , Class < V2 > outValueClass , Class < ? extends GoraMapper > mapperClass , boolean reuseObjects ) throws IOException { initMapperJob ( job , query , dataStore , outKeyClass , outValueClass , mapperClass , null , reuseObjects ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; public class GoraRecordCounter { private int recordsNumber = <NUM_LIT:0> ; private int recordsMax ; public int getRecordsNumber ( ) { return recordsNumber ; } public int getRecordsMax ( ) { return recordsMax ; } public void setRecordsMax ( int recordsMax ) { this . recordsMax = recordsMax ; } public void increment ( ) { ++ this . recordsNumber ; } public boolean isModulo ( ) { return ( ( this . recordsNumber % this . recordsMax ) == <NUM_LIT:0> ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . mapreduce . InputSplit ; import org . apache . hadoop . mapreduce . RecordReader ; import org . apache . hadoop . mapreduce . TaskAttemptContext ; import org . slf4j . Logger ; import org . slf4j . LoggerFactory ; public class GoraRecordReader < K , T extends Persistent > extends RecordReader < K , T > { public static final Logger LOG = LoggerFactory . getLogger ( GoraRecordReader . class ) ; public static final String BUFFER_LIMIT_READ_NAME = "<STR_LIT>" ; public static final int BUFFER_LIMIT_READ_VALUE = <NUM_LIT> ; protected Query < K , T > query ; protected Result < K , T > result ; private GoraRecordCounter counter = new GoraRecordCounter ( ) ; public GoraRecordReader ( Query < K , T > query , TaskAttemptContext context ) { this . query = query ; Configuration configuration = context . getConfiguration ( ) ; int recordsMax = configuration . getInt ( BUFFER_LIMIT_READ_NAME , BUFFER_LIMIT_READ_VALUE ) ; if ( recordsMax <= <NUM_LIT:1> ) { LOG . info ( "<STR_LIT>" + recordsMax + "<STR_LIT>" + BUFFER_LIMIT_READ_VALUE ) ; recordsMax = BUFFER_LIMIT_READ_VALUE ; } counter . setRecordsMax ( recordsMax ) ; LOG . info ( "<STR_LIT>" + recordsMax ) ; this . query . setLimit ( recordsMax ) ; } public void executeQuery ( ) throws IOException { this . result = query . execute ( ) ; } @ Override public K getCurrentKey ( ) throws IOException , InterruptedException { return result . getKey ( ) ; } @ Override public T getCurrentValue ( ) throws IOException , InterruptedException { return result . get ( ) ; } @ Override public float getProgress ( ) throws IOException , InterruptedException { return result . 
getProgress ( ) ; } @ Override public void initialize ( InputSplit split , TaskAttemptContext context ) throws IOException , InterruptedException { } @ Override public boolean nextKeyValue ( ) throws IOException , InterruptedException { if ( counter . isModulo ( ) ) { boolean firstBatch = ( this . result == null ) ; if ( ! firstBatch ) { this . query . setStartKey ( this . result . getKey ( ) ) ; if ( this . query . getLimit ( ) == counter . getRecordsMax ( ) ) { this . query . setLimit ( counter . getRecordsMax ( ) + <NUM_LIT:1> ) ; } } if ( this . result != null ) { this . result . close ( ) ; } executeQuery ( ) ; if ( ! firstBatch ) { this . result . next ( ) ; } } counter . increment ( ) ; return this . result . next ( ) ; } @ Override public void close ( ) throws IOException { if ( result != null ) { result . close ( ) ; } } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import java . util . List ; import org . apache . gora . util . StringUtils ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . fs . Path ; import org . apache . hadoop . mapreduce . InputSplit ; import org . apache . hadoop . mapreduce . Job ; import org . apache . hadoop . mapreduce . JobContext ; import org . apache . hadoop . mapreduce . RecordReader ; import org . apache . hadoop . mapreduce . TaskAttemptContext ; import org . apache . hadoop . mapreduce . lib . input . FileInputFormat ; public class GoraMapReduceUtils { public static class HelperInputFormat < K , V > extends FileInputFormat < K , V > { @ Override public RecordReader < K , V > createRecordReader ( InputSplit arg0 , TaskAttemptContext arg1 ) throws IOException , InterruptedException { return null ; } } public static void setIOSerializations ( Configuration conf , boolean reuseObjects ) { String serializationClass = PersistentSerialization . class . getCanonicalName ( ) ; if ( ! reuseObjects ) { serializationClass = PersistentNonReusingSerialization . class . getCanonicalName ( ) ; } String [ ] serializations = StringUtils . joinStringArrays ( conf . getStrings ( "<STR_LIT>" ) , "<STR_LIT>" , StringSerialization . class . getCanonicalName ( ) , serializationClass ) ; conf . setStrings ( "<STR_LIT>" , serializations ) ; } public static List < InputSplit > getSplits ( Configuration conf , String inputPath ) throws IOException { JobContext context = createJobContext ( conf , inputPath ) ; HelperInputFormat < ? , ? > inputFormat = new HelperInputFormat < Object , Object > ( ) ; return inputFormat . getSplits ( context ) ; } public static JobContext createJobContext ( Configuration conf , String inputPath ) throws IOException { if ( inputPath != null ) { Job job = new Job ( conf ) ; FileInputFormat . addInputPath ( job , new Path ( inputPath ) ) ; return new JobContext ( job . 
getConfiguration ( ) , null ) ; } return new JobContext ( conf , null ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import org . apache . gora . persistency . Persistent ; import org . apache . hadoop . io . serializer . Deserializer ; import org . apache . hadoop . io . serializer . Serialization ; import org . apache . hadoop . io . serializer . Serializer ; public class PersistentNonReusingSerialization implements Serialization < Persistent > { @ Override public boolean accept ( Class < ? > c ) { return Persistent . class . isAssignableFrom ( c ) ; } @ Override public Deserializer < Persistent > getDeserializer ( Class < Persistent > c ) { return new PersistentDeserializer ( c , false ) ; } @ Override public Serializer < Persistent > getSerializer ( Class < Persistent > c ) { return new PersistentSerializer ( ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . DataInputStream ; import java . io . DataOutputStream ; import java . io . IOException ; import java . io . InputStream ; import java . io . OutputStream ; import org . apache . hadoop . io . Text ; import org . apache . hadoop . io . serializer . Deserializer ; import org . apache . hadoop . io . serializer . Serialization ; import org . apache . hadoop . io . serializer . Serializer ; public class StringSerialization implements Serialization < String > { @ Override public boolean accept ( Class < ? > c ) { return c . equals ( String . class ) ; } @ Override public Deserializer < String > getDeserializer ( Class < String > c ) { return new Deserializer < String > ( ) { private DataInputStream in ; @ Override public void open ( InputStream in ) throws IOException { this . in = new DataInputStream ( in ) ; } @ Override public void close ( ) throws IOException { this . in . close ( ) ; } @ Override public String deserialize ( String t ) throws IOException { return Text . readString ( in ) ; } } ; } @ Override public Serializer < String > getSerializer ( Class < String > c ) { return new Serializer < String > ( ) { private DataOutputStream out ; @ Override public void close ( ) throws IOException { this . out . close ( ) ; } @ Override public void open ( OutputStream out ) throws IOException { this . out = new DataOutputStream ( out ) ; } @ Override public void serialize ( String str ) throws IOException { Text . writeString ( out , str ) ; } } ; } } </s>
|
package org.apache.gora.mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.query.PartitionQuery;
import org.apache.gora.query.Query;
import org.apache.gora.query.impl.FileSplitPartitionQuery;
import org.apache.gora.store.DataStore;
import org.apache.gora.store.DataStoreFactory;
import org.apache.gora.store.FileBackedDataStore;
import org.apache.gora.util.IOUtils;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

/**
 * {@link InputFormat} that feeds a MapReduce job from a Gora
 * {@link DataStore}, by executing a {@link Query} that was serialized into
 * the job configuration (see {@link #setQuery(Job, Query)}). One input split
 * is created per data-store partition of the query.
 *
 * @param <K> key class of the data store
 * @param <T> persistent (value) class of the data store
 */
public class GoraInputFormat<K, T extends Persistent>
    extends InputFormat<K, T> implements Configurable {

  /** Configuration key under which the serialized query is stored. */
  public static final String QUERY_KEY = "<STR_LIT>";

  // Store and query are recovered from the configuration in setConf().
  private DataStore<K, T> dataStore;

  private Configuration conf;

  private Query<K, T> query;

  // If the partition wraps a FileSplit, point the file-backed store at the
  // split's path so the record reader consumes only that file.
  @SuppressWarnings({ "<STR_LIT:rawtypes>" })
  private void setInputPath(PartitionQuery<K, T> partitionQuery,
      TaskAttemptContext context) throws IOException {
    if (partitionQuery instanceof FileSplitPartitionQuery) {
      FileSplit split = ((FileSplitPartitionQuery<K, T>) partitionQuery).getSplit();
      ((FileBackedDataStore) partitionQuery.getDataStore())
          .setInputPath(split.getPath().toString());
    }
  }

  @Override
  @SuppressWarnings("<STR_LIT:unchecked>")
  public RecordReader<K, T> createRecordReader(InputSplit split,
      TaskAttemptContext context) throws IOException, InterruptedException {
    PartitionQuery<K, T> partitionQuery =
        (PartitionQuery<K, T>) ((GoraInputSplit) split).getQuery();
    setInputPath(partitionQuery, context);
    return new GoraRecordReader<K, T>(partitionQuery, context);
  }

  @Override
  public List<InputSplit> getSplits(JobContext context)
      throws IOException, InterruptedException {
    // One split per partition of the configured query.
    List<PartitionQuery<K, T>> queries = dataStore.getPartitions(query);
    List<InputSplit> splits = new ArrayList<InputSplit>(queries.size());
    for (PartitionQuery<K, T> query : queries) {
      splits.add(new GoraInputSplit(context.getConfiguration(), query));
    }
    return splits;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
    try {
      // Deserialize the query from the configuration; the store to read
      // from is the one the query was created against.
      this.query = getQuery(conf);
      this.dataStore = query.getDataStore();
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
  }

  /** Serializes the query into the job's configuration under {@link #QUERY_KEY}. */
  public static <K, T extends Persistent> void setQuery(Job job,
      Query<K, T> query) throws IOException {
    IOUtils.storeToConf(query, job.getConfiguration(), QUERY_KEY);
  }

  /** Deserializes the query previously stored by {@link #setQuery(Job, Query)}. */
  public Query<K, T> getQuery(Configuration conf) throws IOException {
    return IOUtils.loadFromConf(conf, QUERY_KEY);
  }

  /** Sets up the job to read via this input format using the query's own store. */
  public static <K1, V1 extends Persistent> void setInput(Job job,
      Query<K1, V1> query, boolean reuseObjects) throws IOException {
    setInput(job, query, query.getDataStore(), reuseObjects);
  }

  /**
   * Sets up the job to read via this input format.
   * NOTE(review): the {@code dataStore} parameter is currently unused in the
   * body — only the query is serialized; confirm whether that is intentional.
   */
  public static <K1, V1 extends Persistent> void setInput(Job job,
      Query<K1, V1> query, DataStore<K1, V1> dataStore, boolean reuseObjects)
      throws IOException {
    Configuration conf = job.getConfiguration();
    GoraMapReduceUtils.setIOSerializations(conf, reuseObjects);
    job.setInputFormatClass(GoraInputFormat.class);
    GoraInputFormat.setQuery(job, query);
  }

  /** Sets up the job to read the whole content of a newly created store. */
  public static <K1, V1 extends Persistent> void setInput(Job job,
      Class<? extends DataStore<K1, V1>> dataStoreClass,
      Class<K1> inKeyClass, Class<V1> inValueClass, boolean reuseObjects)
      throws IOException {
    DataStore<K1, V1> store = DataStoreFactory.getDataStore(dataStoreClass,
        inKeyClass, inValueClass, job.getConfiguration());
    setInput(job, store.newQuery(), store, reuseObjects);
  }
}
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import org . apache . hadoop . mapreduce . JobContext ; import org . apache . hadoop . mapreduce . OutputCommitter ; import org . apache . hadoop . mapreduce . TaskAttemptContext ; public class NullOutputCommitter extends OutputCommitter { @ Override public void abortTask ( TaskAttemptContext arg0 ) throws IOException { } @ Override public void cleanupJob ( JobContext arg0 ) throws IOException { } @ Override public void commitTask ( TaskAttemptContext arg0 ) throws IOException { } @ Override public boolean needsTaskCommit ( TaskAttemptContext arg0 ) throws IOException { return false ; } @ Override public void setupJob ( JobContext arg0 ) throws IOException { } @ Override public void setupTask ( TaskAttemptContext arg0 ) throws IOException { } } </s>
|
<s> package org . apache . gora . mapreduce ; import org . apache . gora . persistency . Persistent ; import org . apache . hadoop . io . serializer . Deserializer ; import org . apache . hadoop . io . serializer . Serialization ; import org . apache . hadoop . io . serializer . Serializer ; public class PersistentSerialization implements Serialization < Persistent > { @ Override public boolean accept ( Class < ? > c ) { return Persistent . class . isAssignableFrom ( c ) ; } @ Override public Deserializer < Persistent > getDeserializer ( Class < Persistent > c ) { return new PersistentDeserializer ( c , true ) ; } @ Override public Serializer < Persistent > getSerializer ( Class < Persistent > c ) { return new PersistentSerializer ( ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import java . io . IOException ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . FileBackedDataStore ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . fs . Path ; import org . apache . hadoop . mapreduce . Job ; import org . apache . hadoop . mapreduce . JobContext ; import org . apache . hadoop . mapreduce . OutputCommitter ; import org . apache . hadoop . mapreduce . OutputFormat ; import org . apache . hadoop . mapreduce . RecordWriter ; import org . apache . hadoop . mapreduce . TaskAttemptContext ; import org . apache . hadoop . mapreduce . lib . output . FileOutputFormat ; public class GoraOutputFormat < K , T extends Persistent > extends OutputFormat < K , T > { public static final String DATA_STORE_CLASS = "<STR_LIT>" ; public static final String OUTPUT_KEY_CLASS = "<STR_LIT>" ; public static final String OUTPUT_VALUE_CLASS = "<STR_LIT>" ; @ Override public void checkOutputSpecs ( JobContext context ) throws IOException , InterruptedException { } @ Override public OutputCommitter getOutputCommitter ( TaskAttemptContext context ) throws IOException , InterruptedException { return new NullOutputCommitter ( ) ; } private void setOutputPath ( DataStore < K , T > store , TaskAttemptContext context ) { if ( store instanceof FileBackedDataStore ) { FileBackedDataStore < K , T > fileStore = ( FileBackedDataStore < K , T > ) store ; String uniqueName = FileOutputFormat . getUniqueFile ( context , "<STR_LIT>" , "<STR_LIT>" ) ; if ( fileStore . getOutputPath ( ) == null ) { fileStore . setOutputPath ( FileOutputFormat . getOutputPath ( context ) . toString ( ) ) ; } String path = fileStore . getOutputPath ( ) ; fileStore . setOutputPath ( path + Path . 
SEPARATOR + uniqueName ) ; } } @ Override @ SuppressWarnings ( "<STR_LIT:unchecked>" ) public RecordWriter < K , T > getRecordWriter ( TaskAttemptContext context ) throws IOException , InterruptedException { Configuration conf = context . getConfiguration ( ) ; Class < ? extends DataStore < K , T > > dataStoreClass = ( Class < ? extends DataStore < K , T > > ) conf . getClass ( DATA_STORE_CLASS , null ) ; Class < K > keyClass = ( Class < K > ) conf . getClass ( OUTPUT_KEY_CLASS , null ) ; Class < T > rowClass = ( Class < T > ) conf . getClass ( OUTPUT_VALUE_CLASS , null ) ; final DataStore < K , T > store = DataStoreFactory . createDataStore ( dataStoreClass , keyClass , rowClass , context . getConfiguration ( ) ) ; setOutputPath ( store , context ) ; return new GoraRecordWriter ( store , context ) ; } public static < K , V extends Persistent > void setOutput ( Job job , DataStore < K , V > dataStore , boolean reuseObjects ) { setOutput ( job , dataStore . getClass ( ) , dataStore . getKeyClass ( ) , dataStore . getPersistentClass ( ) , reuseObjects ) ; } @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public static < K , V extends Persistent > void setOutput ( Job job , Class < ? extends DataStore > dataStoreClass , Class < K > keyClass , Class < V > persistentClass , boolean reuseObjects ) { Configuration conf = job . getConfiguration ( ) ; GoraMapReduceUtils . setIOSerializations ( conf , reuseObjects ) ; job . setOutputFormatClass ( GoraOutputFormat . class ) ; job . setOutputKeyClass ( keyClass ) ; job . setOutputValueClass ( persistentClass ) ; conf . setClass ( GoraOutputFormat . DATA_STORE_CLASS , dataStoreClass , DataStore . class ) ; conf . setClass ( GoraOutputFormat . OUTPUT_KEY_CLASS , keyClass , Object . class ) ; conf . setClass ( GoraOutputFormat . OUTPUT_VALUE_CLASS , persistentClass , Persistent . class ) ; } } </s>
|
<s> package org . apache . gora . mapreduce ; import org . apache . hadoop . io . RawComparator ; import org . apache . hadoop . io . Text ; public class StringComparator implements RawComparator < String > { @ Override public int compare ( byte [ ] b1 , int s1 , int l1 , byte [ ] b2 , int s2 , int l2 ) { return Text . Comparator . compareBytes ( b1 , s1 , l1 , b2 , s2 , l2 ) ; } @ Override public int compare ( String o1 , String o2 ) { return o1 . compareTo ( o2 ) ; } } </s>
|
package org.apache.gora.mapreduce;

import org.apache.gora.persistency.Persistent;
import org.apache.gora.store.DataStore;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Base class for reducers whose output is written to a Gora
 * {@link DataStore} via {@link GoraOutputFormat}.
 *
 * @param <K1> reduce-input key class
 * @param <V1> reduce-input value class
 * @param <K2> reduce-output key class (the data store key)
 * @param <V2> reduce-output value class (the persistent object)
 */
public class GoraReducer<K1, V1, K2, V2 extends Persistent>
    extends Reducer<K1, V1, K2, V2> {

  /**
   * Configures the job's output for the given store class and installs the
   * reducer class.
   */
  public static <K1, V1, K2, V2 extends Persistent> void initReducerJob(
      Job job, Class<? extends DataStore<K2, V2>> dataStoreClass,
      Class<K2> keyClass, Class<V2> persistentClass,
      Class<? extends GoraReducer<K1, V1, K2, V2>> reducerClass,
      boolean reuseObjects) {
    GoraOutputFormat.setOutput(job, dataStoreClass, keyClass, persistentClass,
        reuseObjects);
    job.setReducerClass(reducerClass);
  }

  /** Convenience overload; object reuse defaults to {@code true}. */
  public static <K1, V1, K2, V2 extends Persistent> void initReducerJob(
      Job job, DataStore<K2, V2> dataStore,
      Class<? extends GoraReducer<K1, V1, K2, V2>> reducerClass) {
    initReducerJob(job, dataStore, reducerClass, true);
  }

  /**
   * Configures the job's output for the given store instance and installs
   * the reducer class.
   */
  public static <K1, V1, K2, V2 extends Persistent> void initReducerJob(
      Job job, DataStore<K2, V2> dataStore,
      Class<? extends GoraReducer<K1, V1, K2, V2>> reducerClass,
      boolean reuseObjects) {
    GoraOutputFormat.setOutput(job, dataStore, reuseObjects);
    job.setReducerClass(reducerClass);
  }
}
|
package org.apache.gora.mapreduce;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.ResolvingDecoder;
import org.apache.avro.io.parsing.Symbol;
import org.apache.avro.util.Utf8;

/**
 * A {@link ResolvingDecoder} that performs no schema resolution: every read
 * is forwarded directly to the underlying {@link Decoder}. Intended for the
 * case where writer and reader schemas are identical, avoiding the overhead
 * of the resolving state machine.
 */
public class FakeResolvingDecoder extends ResolvingDecoder {

  /** Uses the same schema for writer and reader, so no resolution occurs. */
  public FakeResolvingDecoder(Schema schema, Decoder in) throws IOException {
    super(schema, schema, in);
  }

  @Override
  public long arrayNext() throws IOException {
    return in.arrayNext();
  }

  @Override
  public Symbol doAction(Symbol input, Symbol top) throws IOException {
    // No-op: parser actions are unused since no resolution is performed.
    return null;
  }

  @Override
  public void init(InputStream in) throws IOException {
    this.in.init(in);
  }

  @Override
  public long mapNext() throws IOException {
    return in.mapNext();
  }

  @Override
  public double readDouble() throws IOException {
    return in.readDouble();
  }

  @Override
  public int readEnum() throws IOException {
    return in.readEnum();
  }

  @Override
  public int readIndex() throws IOException {
    return in.readIndex();
  }

  @Override
  public long readLong() throws IOException {
    return in.readLong();
  }

  @Override
  public void skipAction() throws IOException {
    // No-op: no resolver state to advance.
  }

  @Override
  public long readArrayStart() throws IOException {
    return in.readArrayStart();
  }

  @Override
  public boolean readBoolean() throws IOException {
    return in.readBoolean();
  }

  @Override
  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
    return in.readBytes(old);
  }

  @Override
  public void readFixed(byte[] bytes, int start, int len) throws IOException {
    in.readFixed(bytes, start, len);
  }

  @Override
  public float readFloat() throws IOException {
    return in.readFloat();
  }

  @Override
  public int readInt() throws IOException {
    return in.readInt();
  }

  @Override
  public long readMapStart() throws IOException {
    return in.readMapStart();
  }

  @Override
  public void readNull() throws IOException {
    in.readNull();
  }

  @Override
  public Utf8 readString(Utf8 old) throws IOException {
    return in.readString(old);
  }

  @Override
  public long skipArray() throws IOException {
    return in.skipArray();
  }

  @Override
  public void skipBytes() throws IOException {
    in.skipBytes();
  }

  @Override
  protected void skipFixed() throws IOException {
    // No-op — NOTE(review): unlike skipFixed(int), this does not delegate
    // to the underlying decoder; confirm this is intentional.
  }

  @Override
  public void skipFixed(int length) throws IOException {
    in.skipFixed(length);
  }

  @Override
  public long skipMap() throws IOException {
    return in.skipMap();
  }

  @Override
  public void skipString() throws IOException {
    // No-op — NOTE(review): the underlying decoder's skipString() is not
    // called here; confirm callers never rely on it.
  }

  @Override
  public void skipTopSymbol() throws IOException {
    // No-op: no parser stack is maintained.
  }

  @Override
  public void readFixed(byte[] bytes) throws IOException {
    in.readFixed(bytes);
  }
}
|
package org.apache.gora.mapreduce;

import java.io.IOException;
import java.io.OutputStream;
import org.apache.avro.io.BinaryEncoder;
import org.apache.gora.avro.PersistentDatumWriter;
import org.apache.gora.persistency.Persistent;
import org.apache.hadoop.io.serializer.Serializer;

/**
 * Hadoop {@link Serializer} that writes Gora {@link Persistent} objects in
 * Avro binary form via {@link PersistentDatumWriter}.
 */
public class PersistentSerializer implements Serializer<Persistent> {

  // Reused across records; its schema is re-set per record in serialize()
  // because different Persistent classes may pass through one stream.
  private PersistentDatumWriter<Persistent> datumWriter;

  private BinaryEncoder encoder;

  public PersistentSerializer() {
    this.datumWriter = new PersistentDatumWriter<Persistent>();
  }

  @Override
  public void close() throws IOException {
    // Flushes buffered bytes. NOTE(review): the underlying stream itself is
    // not closed here — confirm the caller owns and closes it.
    encoder.flush();
  }

  @Override
  public void open(OutputStream out) throws IOException {
    encoder = new BinaryEncoder(out);
  }

  @Override
  public void serialize(Persistent persistent) throws IOException {
    datumWriter.setSchema(persistent.getSchema());
    datumWriter.setPersistent(persistent);
    datumWriter.write(persistent, encoder);
  }
}
|
package org.apache.gora.examples;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashMap;
import org.apache.avro.util.Utf8;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.gora.examples.generated.Metadata;
import org.apache.gora.examples.generated.WebPage;
import org.apache.gora.store.DataStore;
import org.apache.gora.store.DataStoreFactory;
import org.apache.hadoop.conf.Configuration;

/**
 * Creates a small, fixed web-graph data set of {@link WebPage} records for
 * the Gora examples. The URLS / CONTENTS / LINKS / ANCHORS arrays are
 * parallel: entry {@code i} of each describes the same page.
 */
public class WebPageDataCreator {

  private static final Logger log = LoggerFactory.getLogger(WebPageDataCreator.class);

  /** The URLs (and store keys) of the pages in the data set. */
  public static final String[] URLS = {
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
  };

  /** Maps each URL back to its index in {@link #URLS}. */
  public static HashMap<String, Integer> URL_INDEXES = new HashMap<String, Integer>();

  static {
    for (int i = <NUM_LIT:0>; i < URLS.length; i++) {
      URL_INDEXES.put(URLS[i], i);
    }
  }

  /** Page body text, parallel to {@link #URLS}. */
  public static final String[] CONTENTS = {
    "<STR_LIT>",
    "<STR_LIT:foo>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT:1>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
    "<STR_LIT>",
  };

  /** Outlinks per page, as indices into {@link #URLS}; parallel to {@link #ANCHORS}. */
  public static final int[][] LINKS = {
    { <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:9> },
    { <NUM_LIT:3>, <NUM_LIT:9> },
    { },
    { <NUM_LIT:9> },
    { <NUM_LIT:5> },
    { <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:6>, <NUM_LIT:7>, <NUM_LIT:8>, <NUM_LIT:9> },
    { <NUM_LIT:1> },
    { <NUM_LIT:2> },
    { <NUM_LIT:3> },
    { <NUM_LIT:8>, <NUM_LIT:1> },
  };

  /** Anchor text for each outlink in {@link #LINKS}, same shape as LINKS. */
  public static final String[][] ANCHORS = {
    { "<STR_LIT:foo>", "<STR_LIT:foo>", "<STR_LIT:foo>", "<STR_LIT:foo>" },
    { "<STR_LIT>", "<STR_LIT>" },
    { },
    { "<STR_LIT>" },
    { "<STR_LIT:bar>" },
    { "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>" },
    { "<STR_LIT:foo>" },
    { "<STR_LIT:baz>" },
    { "<STR_LIT>" },
    { "<STR_LIT:baz>", "<STR_LIT:bar>" },
  };

  /** {@link #URLS} in natural (sorted) order. */
  public static final String[] SORTED_URLS = new String[URLS.length];

  static {
    for (int i = <NUM_LIT:0>; i < URLS.length; i++) {
      SORTED_URLS[i] = URLS[i];
    }
    Arrays.sort(SORTED_URLS);
  }

  /**
   * Populates the given store with the fixed pages and flushes it.
   * Each page gets its URL, raw content bytes, whitespace-split tokens as
   * parsed content, its outlinks with anchors, and a version-1 metadata
   * record with one data entry.
   */
  public static void createWebPageData(DataStore<String, WebPage> dataStore)
      throws IOException {
    WebPage page;
    log.info("<STR_LIT>");
    for (int i = <NUM_LIT:0>; i < URLS.length; i++) {
      page = new WebPage();
      page.setUrl(new Utf8(URLS[i]));
      // NOTE(review): getBytes() uses the platform default charset; readers
      // elsewhere decode the same way — confirm before changing either side.
      page.setContent(ByteBuffer.wrap(CONTENTS[i].getBytes()));
      for (String token : CONTENTS[i].split("<STR_LIT:U+0020>")) {
        page.addToParsedContent(new Utf8(token));
      }
      for (int j = <NUM_LIT:0>; j < LINKS[i].length; j++) {
        page.putToOutlinks(new Utf8(URLS[LINKS[i][j]]), new Utf8(ANCHORS[i][j]));
      }
      Metadata metadata = new Metadata();
      metadata.setVersion(<NUM_LIT:1>);
      metadata.putToData(new Utf8("<STR_LIT>"), new Utf8("<STR_LIT>"));
      page.setMetadata(metadata);
      dataStore.put(URLS[i], page);
    }
    dataStore.flush();
    log.info("<STR_LIT>");
  }

  /**
   * Creates the data set in the store named by {@code args[0]} (or a
   * default store class when no argument is given). Returns 0 on success.
   */
  public int run(String[] args) throws Exception {
    String dataStoreClass = "<STR_LIT>";
    if (args.length > <NUM_LIT:0>) {
      dataStoreClass = args[<NUM_LIT:0>];
    }
    DataStore<String, WebPage> store = DataStoreFactory.getDataStore(
        dataStoreClass, String.class, WebPage.class, new Configuration());
    createWebPageData(store);
    return <NUM_LIT:0>;
  }

  public static void main(String[] args) throws Exception {
    new WebPageDataCreator().run(args);
  }
}
|
package org.apache.gora.examples.generated;

import java.nio.ByteBuffer;
import java.util.Map;
import java.util.HashMap;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.util.Utf8;
import org.apache.avro.ipc.AvroRemoteException;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificExceptionBase;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.avro.specific.SpecificRecord;
import org.apache.avro.specific.SpecificFixed;
import org.apache.gora.persistency.StateManager;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.persistency.impl.StateManagerImpl;
import org.apache.gora.persistency.StatefulHashMap;
import org.apache.gora.persistency.ListGenericArray;

/**
 * Persistent bean representing a web page, with URL, raw content, parsed
 * tokens, outlinks, and metadata fields.
 * NOTE(review): this class appears to be Avro/Gora machine-generated
 * (positional get/put, _SCHEMA literal) — regenerate rather than hand-edit.
 */
@SuppressWarnings("<STR_LIT:all>")
public class WebPage extends PersistentBase {

  /** Avro schema of this record, parsed from the generated JSON literal. */
  public static final Schema _SCHEMA = Schema.parse("<STR_LIT>");

  /** This record's fields with their positional indexes. */
  public static enum Field {
    URL(<NUM_LIT:0>, "<STR_LIT:url>"),
    CONTENT(<NUM_LIT:1>, "<STR_LIT:content>"),
    PARSED_CONTENT(<NUM_LIT:2>, "<STR_LIT>"),
    OUTLINKS(<NUM_LIT:3>, "<STR_LIT>"),
    METADATA(<NUM_LIT:4>, "<STR_LIT>"),
    ;
    private int index;
    private String name;
    Field(int index, String name) {
      this.index = index;
      this.name = name;
    }
    public int getIndex() { return index; }
    public String getName() { return name; }
    public String toString() { return name; }
  };

  /** Field names, ordered by field index. */
  public static final String[] _ALL_FIELDS = {
    "<STR_LIT:url>", "<STR_LIT:content>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
  };

  static {
    PersistentBase.registerFields(WebPage.class, _ALL_FIELDS);
  }

  private Utf8 url;
  private ByteBuffer content;
  private GenericArray<Utf8> parsedContent;
  private Map<Utf8, Utf8> outlinks;
  private Metadata metadata;

  public WebPage() {
    this(new StateManagerImpl());
  }

  public WebPage(StateManager stateManager) {
    super(stateManager);
    // Collection-valued fields use state-tracking containers so element
    // changes are observed by the dirty-state machinery.
    parsedContent = new ListGenericArray<Utf8>(getSchema().getField("<STR_LIT>").schema());
    outlinks = new StatefulHashMap<Utf8, Utf8>();
  }

  /** Creates a fresh instance bound to the given state manager. */
  public WebPage newInstance(StateManager stateManager) {
    return new WebPage(stateManager);
  }

  public Schema getSchema() {
    return _SCHEMA;
  }

  /** Positional getter used by the Avro machinery. */
  public Object get(int _field) {
    switch (_field) {
    case <NUM_LIT:0>: return url;
    case <NUM_LIT:1>: return content;
    case <NUM_LIT:2>: return parsedContent;
    case <NUM_LIT:3>: return outlinks;
    case <NUM_LIT:4>: return metadata;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  /** Positional setter; marks the field dirty unless the value is unchanged. */
  @SuppressWarnings(value = "<STR_LIT:unchecked>")
  public void put(int _field, Object _value) {
    if (isFieldEqual(_field, _value)) return;
    getStateManager().setDirty(this, _field);
    switch (_field) {
    case <NUM_LIT:0>: url = (Utf8) _value; break;
    case <NUM_LIT:1>: content = (ByteBuffer) _value; break;
    case <NUM_LIT:2>: parsedContent = (GenericArray<Utf8>) _value; break;
    case <NUM_LIT:3>: outlinks = (Map<Utf8, Utf8>) _value; break;
    case <NUM_LIT:4>: metadata = (Metadata) _value; break;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  public Utf8 getUrl() {
    return (Utf8) get(<NUM_LIT:0>);
  }

  public void setUrl(Utf8 value) {
    put(<NUM_LIT:0>, value);
  }

  public ByteBuffer getContent() {
    return (ByteBuffer) get(<NUM_LIT:1>);
  }

  public void setContent(ByteBuffer value) {
    put(<NUM_LIT:1>, value);
  }

  public GenericArray<Utf8> getParsedContent() {
    return (GenericArray<Utf8>) get(<NUM_LIT:2>);
  }

  /** Appends a token and marks the parsed-content field dirty. */
  public void addToParsedContent(Utf8 element) {
    getStateManager().setDirty(this, <NUM_LIT:2>);
    parsedContent.add(element);
  }

  public Map<Utf8, Utf8> getOutlinks() {
    return (Map<Utf8, Utf8>) get(<NUM_LIT:3>);
  }

  /** Returns the anchor for the given outlink URL, or null when absent. */
  public Utf8 getFromOutlinks(Utf8 key) {
    if (outlinks == null) { return null; }
    return outlinks.get(key);
  }

  /** Adds an outlink and marks the outlinks field dirty. */
  public void putToOutlinks(Utf8 key, Utf8 value) {
    getStateManager().setDirty(this, <NUM_LIT:3>);
    outlinks.put(key, value);
  }

  /** Removes an outlink, marking the field dirty; null-safe. */
  public Utf8 removeFromOutlinks(Utf8 key) {
    if (outlinks == null) { return null; }
    getStateManager().setDirty(this, <NUM_LIT:3>);
    return outlinks.remove(key);
  }

  public Metadata getMetadata() {
    return (Metadata) get(<NUM_LIT:4>);
  }

  public void setMetadata(Metadata value) {
    put(<NUM_LIT:4>, value);
  }
}
|
package org.apache.gora.examples.generated;

import java.nio.ByteBuffer;
import java.util.Map;
import java.util.HashMap;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.util.Utf8;
import org.apache.avro.ipc.AvroRemoteException;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificExceptionBase;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.avro.specific.SpecificRecord;
import org.apache.avro.specific.SpecificFixed;
import org.apache.gora.persistency.StateManager;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.persistency.impl.StateManagerImpl;
import org.apache.gora.persistency.StatefulHashMap;
import org.apache.gora.persistency.ListGenericArray;

/**
 * Persistent bean representing an employee, with name, date of birth, ssn,
 * and salary fields.
 * NOTE(review): this class appears to be Avro/Gora machine-generated —
 * regenerate rather than hand-edit.
 */
@SuppressWarnings("<STR_LIT:all>")
public class Employee extends PersistentBase {

  /** Avro schema of this record, parsed from the generated JSON literal. */
  public static final Schema _SCHEMA = Schema.parse("<STR_LIT>");

  /** This record's fields with their positional indexes. */
  public static enum Field {
    NAME(<NUM_LIT:0>, "<STR_LIT:name>"),
    DATE_OF_BIRTH(<NUM_LIT:1>, "<STR_LIT>"),
    SSN(<NUM_LIT:2>, "<STR_LIT>"),
    SALARY(<NUM_LIT:3>, "<STR_LIT>"),
    ;
    private int index;
    private String name;
    Field(int index, String name) {
      this.index = index;
      this.name = name;
    }
    public int getIndex() { return index; }
    public String getName() { return name; }
    public String toString() { return name; }
  };

  /** Field names, ordered by field index. */
  public static final String[] _ALL_FIELDS = {
    "<STR_LIT:name>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
  };

  static {
    PersistentBase.registerFields(Employee.class, _ALL_FIELDS);
  }

  private Utf8 name;
  private long dateOfBirth;
  private Utf8 ssn;
  private int salary;

  public Employee() {
    this(new StateManagerImpl());
  }

  public Employee(StateManager stateManager) {
    super(stateManager);
  }

  /** Creates a fresh instance bound to the given state manager. */
  public Employee newInstance(StateManager stateManager) {
    return new Employee(stateManager);
  }

  public Schema getSchema() {
    return _SCHEMA;
  }

  /** Positional getter used by the Avro machinery; primitives are boxed. */
  public Object get(int _field) {
    switch (_field) {
    case <NUM_LIT:0>: return name;
    case <NUM_LIT:1>: return dateOfBirth;
    case <NUM_LIT:2>: return ssn;
    case <NUM_LIT:3>: return salary;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  /** Positional setter; marks the field dirty unless the value is unchanged. */
  @SuppressWarnings(value = "<STR_LIT:unchecked>")
  public void put(int _field, Object _value) {
    if (isFieldEqual(_field, _value)) return;
    getStateManager().setDirty(this, _field);
    switch (_field) {
    case <NUM_LIT:0>: name = (Utf8) _value; break;
    case <NUM_LIT:1>: dateOfBirth = (Long) _value; break;
    case <NUM_LIT:2>: ssn = (Utf8) _value; break;
    case <NUM_LIT:3>: salary = (Integer) _value; break;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  public Utf8 getName() {
    return (Utf8) get(<NUM_LIT:0>);
  }

  public void setName(Utf8 value) {
    put(<NUM_LIT:0>, value);
  }

  public long getDateOfBirth() {
    return (Long) get(<NUM_LIT:1>);
  }

  public void setDateOfBirth(long value) {
    put(<NUM_LIT:1>, value);
  }

  public Utf8 getSsn() {
    return (Utf8) get(<NUM_LIT:2>);
  }

  public void setSsn(Utf8 value) {
    put(<NUM_LIT:2>, value);
  }

  public int getSalary() {
    return (Integer) get(<NUM_LIT:3>);
  }

  public void setSalary(int value) {
    put(<NUM_LIT:3>, value);
  }
}
|
package org.apache.gora.examples.generated;

import java.nio.ByteBuffer;
import java.util.Map;
import java.util.HashMap;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.util.Utf8;
import org.apache.avro.ipc.AvroRemoteException;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificExceptionBase;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.avro.specific.SpecificRecord;
import org.apache.avro.specific.SpecificFixed;
import org.apache.gora.persistency.StateManager;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.persistency.impl.StateManagerImpl;
import org.apache.gora.persistency.StatefulHashMap;
import org.apache.gora.persistency.ListGenericArray;

/**
 * Persistent bean holding a single integer count (e.g. a token's occurrence
 * count in the word-count example).
 * NOTE(review): this class appears to be Avro/Gora machine-generated —
 * regenerate rather than hand-edit.
 */
@SuppressWarnings("<STR_LIT:all>")
public class TokenDatum extends PersistentBase {

  /** Avro schema of this record, parsed from the generated JSON literal. */
  public static final Schema _SCHEMA = Schema.parse("<STR_LIT>");

  /** This record's single field with its positional index. */
  public static enum Field {
    COUNT(<NUM_LIT:0>, "<STR_LIT:count>"),
    ;
    private int index;
    private String name;
    Field(int index, String name) {
      this.index = index;
      this.name = name;
    }
    public int getIndex() { return index; }
    public String getName() { return name; }
    public String toString() { return name; }
  };

  /** Field names, ordered by field index. */
  public static final String[] _ALL_FIELDS = {
    "<STR_LIT:count>",
  };

  static {
    PersistentBase.registerFields(TokenDatum.class, _ALL_FIELDS);
  }

  private int count;

  public TokenDatum() {
    this(new StateManagerImpl());
  }

  public TokenDatum(StateManager stateManager) {
    super(stateManager);
  }

  /** Creates a fresh instance bound to the given state manager. */
  public TokenDatum newInstance(StateManager stateManager) {
    return new TokenDatum(stateManager);
  }

  public Schema getSchema() {
    return _SCHEMA;
  }

  /** Positional getter used by the Avro machinery; primitives are boxed. */
  public Object get(int _field) {
    switch (_field) {
    case <NUM_LIT:0>: return count;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  /** Positional setter; marks the field dirty unless the value is unchanged. */
  @SuppressWarnings(value = "<STR_LIT:unchecked>")
  public void put(int _field, Object _value) {
    if (isFieldEqual(_field, _value)) return;
    getStateManager().setDirty(this, _field);
    switch (_field) {
    case <NUM_LIT:0>: count = (Integer) _value; break;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  public int getCount() {
    return (Integer) get(<NUM_LIT:0>);
  }

  public void setCount(int value) {
    put(<NUM_LIT:0>, value);
  }
}
|
package org.apache.gora.examples.generated;

import java.nio.ByteBuffer;
import java.util.Map;
import java.util.HashMap;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.util.Utf8;
import org.apache.avro.ipc.AvroRemoteException;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificExceptionBase;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.avro.specific.SpecificRecord;
import org.apache.avro.specific.SpecificFixed;
import org.apache.gora.persistency.StateManager;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.persistency.impl.StateManagerImpl;
import org.apache.gora.persistency.StatefulHashMap;
import org.apache.gora.persistency.ListGenericArray;

/**
 * Persistent bean holding page metadata: an integer version plus a
 * string-to-string data map.
 * NOTE(review): this class appears to be Avro/Gora machine-generated —
 * regenerate rather than hand-edit.
 */
@SuppressWarnings("<STR_LIT:all>")
public class Metadata extends PersistentBase {

  /** Avro schema of this record, parsed from the generated JSON literal. */
  public static final Schema _SCHEMA = Schema.parse("<STR_LIT>");

  /** This record's fields with their positional indexes. */
  public static enum Field {
    VERSION(<NUM_LIT:0>, "<STR_LIT:version>"),
    DATA(<NUM_LIT:1>, "<STR_LIT:data>"),
    ;
    private int index;
    private String name;
    Field(int index, String name) {
      this.index = index;
      this.name = name;
    }
    public int getIndex() { return index; }
    public String getName() { return name; }
    public String toString() { return name; }
  };

  /** Field names, ordered by field index. */
  public static final String[] _ALL_FIELDS = {
    "<STR_LIT:version>", "<STR_LIT:data>",
  };

  static {
    PersistentBase.registerFields(Metadata.class, _ALL_FIELDS);
  }

  private int version;
  private Map<Utf8, Utf8> data;

  public Metadata() {
    this(new StateManagerImpl());
  }

  public Metadata(StateManager stateManager) {
    super(stateManager);
    // State-tracking map so element changes are seen by the dirty machinery.
    data = new StatefulHashMap<Utf8, Utf8>();
  }

  /** Creates a fresh instance bound to the given state manager. */
  public Metadata newInstance(StateManager stateManager) {
    return new Metadata(stateManager);
  }

  public Schema getSchema() {
    return _SCHEMA;
  }

  /** Positional getter used by the Avro machinery; primitives are boxed. */
  public Object get(int _field) {
    switch (_field) {
    case <NUM_LIT:0>: return version;
    case <NUM_LIT:1>: return data;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  /** Positional setter; marks the field dirty unless the value is unchanged. */
  @SuppressWarnings(value = "<STR_LIT:unchecked>")
  public void put(int _field, Object _value) {
    if (isFieldEqual(_field, _value)) return;
    getStateManager().setDirty(this, _field);
    switch (_field) {
    case <NUM_LIT:0>: version = (Integer) _value; break;
    case <NUM_LIT:1>: data = (Map<Utf8, Utf8>) _value; break;
    default: throw new AvroRuntimeException("<STR_LIT>");
    }
  }

  public int getVersion() {
    return (Integer) get(<NUM_LIT:0>);
  }

  public void setVersion(int value) {
    put(<NUM_LIT:0>, value);
  }

  public Map<Utf8, Utf8> getData() {
    return (Map<Utf8, Utf8>) get(<NUM_LIT:1>);
  }

  /** Returns the value for the given key, or null when absent. */
  public Utf8 getFromData(Utf8 key) {
    if (data == null) { return null; }
    return data.get(key);
  }

  /** Adds an entry and marks the data field dirty. */
  public void putToData(Utf8 key, Utf8 value) {
    getStateManager().setDirty(this, <NUM_LIT:1>);
    data.put(key, value);
  }

  /** Removes an entry, marking the field dirty; null-safe. */
  public Utf8 removeFromData(Utf8 key) {
    if (data == null) { return null; }
    getStateManager().setDirty(this, <NUM_LIT:1>);
    return data.remove(key);
  }
}
|
<s> package org . apache . gora . examples . mapreduce ; import java . io . IOException ; import org . apache . gora . mapreduce . GoraMapper ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . conf . Configured ; import org . apache . hadoop . io . NullWritable ; import org . apache . hadoop . mapreduce . Job ; import org . apache . hadoop . mapreduce . lib . output . NullOutputFormat ; import org . apache . hadoop . util . Tool ; import org . apache . hadoop . util . ToolRunner ; import org . apache . gora . util . ClassLoadingUtils ; public class QueryCounter < K , T extends Persistent > extends Configured implements Tool { public static final String COUNTER_GROUP = "<STR_LIT>" ; public static final String ROWS = "<STR_LIT>" ; public QueryCounter ( Configuration conf ) { setConf ( conf ) ; } public static class QueryCounterMapper < K , T extends Persistent > extends GoraMapper < K , T , NullWritable , NullWritable > { @ Override protected void map ( K key , T value , Context context ) throws IOException , InterruptedException { context . getCounter ( COUNTER_GROUP , ROWS ) . increment ( <NUM_LIT:1L> ) ; } ; } public Query < K , T > getQuery ( DataStore < K , T > dataStore ) { Query < K , T > query = dataStore . newQuery ( ) ; return query ; } public Job createJob ( DataStore < K , T > dataStore , Query < K , T > query ) throws IOException { Job job = new Job ( getConf ( ) ) ; job . setJobName ( "<STR_LIT>" ) ; job . setNumReduceTasks ( <NUM_LIT:0> ) ; job . setJarByClass ( getClass ( ) ) ; GoraMapper . initMapperJob ( job , query , dataStore , NullWritable . class , NullWritable . class , QueryCounterMapper . class , true ) ; job . setOutputFormatClass ( NullOutputFormat . 
class ) ; return job ; } public long countQuery ( DataStore < K , T > dataStore , Query < K , T > query ) throws Exception { Job job = createJob ( dataStore , query ) ; job . waitForCompletion ( true ) ; return job . getCounters ( ) . findCounter ( COUNTER_GROUP , ROWS ) . getValue ( ) ; } public long countQuery ( DataStore < K , T > dataStore ) throws Exception { Query < K , T > query = getQuery ( dataStore ) ; Job job = createJob ( dataStore , query ) ; job . waitForCompletion ( true ) ; return job . getCounters ( ) . findCounter ( COUNTER_GROUP , ROWS ) . getValue ( ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) @ Override public int run ( String [ ] args ) throws Exception { if ( args . length < <NUM_LIT:2> ) { System . err . println ( "<STR_LIT>" ) ; return <NUM_LIT:1> ; } Class < K > keyClass = ( Class < K > ) ClassLoadingUtils . loadClass ( args [ <NUM_LIT:0> ] ) ; Class < T > persistentClass = ( Class < T > ) ClassLoadingUtils . loadClass ( args [ <NUM_LIT:1> ] ) ; DataStore < K , T > dataStore ; Configuration conf = new Configuration ( ) ; if ( args . length > <NUM_LIT:2> ) { Class < ? extends DataStore < K , T > > dataStoreClass = ( Class < ? extends DataStore < K , T > > ) Class . forName ( args [ <NUM_LIT:2> ] ) ; dataStore = DataStoreFactory . getDataStore ( dataStoreClass , keyClass , persistentClass , conf ) ; } else { dataStore = DataStoreFactory . getDataStore ( keyClass , persistentClass , conf ) ; } long results = countQuery ( dataStore ) ; System . out . println ( "<STR_LIT>" + results ) ; return <NUM_LIT:0> ; } @ SuppressWarnings ( "<STR_LIT:rawtypes>" ) public static void main ( String [ ] args ) throws Exception { int ret = ToolRunner . run ( new QueryCounter ( new Configuration ( ) ) , args ) ; System . exit ( ret ) ; } } </s>
|
<s> package org . apache . gora . examples . mapreduce ; import java . io . IOException ; import java . util . StringTokenizer ; import org . apache . gora . examples . generated . TokenDatum ; import org . apache . gora . examples . generated . WebPage ; import org . apache . gora . mapreduce . GoraMapper ; import org . apache . gora . mapreduce . GoraReducer ; import org . apache . gora . query . Query ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . conf . Configured ; import org . apache . hadoop . io . IntWritable ; import org . apache . hadoop . io . Text ; import org . apache . hadoop . mapreduce . Job ; import org . apache . hadoop . util . Tool ; import org . apache . hadoop . util . ToolRunner ; public class WordCount extends Configured implements Tool { public WordCount ( ) { } public WordCount ( Configuration conf ) { setConf ( conf ) ; } public static class TokenizerMapper extends GoraMapper < String , WebPage , Text , IntWritable > { private final static IntWritable one = new IntWritable ( <NUM_LIT:1> ) ; private Text word = new Text ( ) ; @ Override protected void map ( String key , WebPage page , Context context ) throws IOException , InterruptedException { String content = new String ( page . getContent ( ) . array ( ) ) ; StringTokenizer itr = new StringTokenizer ( content ) ; while ( itr . hasMoreTokens ( ) ) { word . set ( itr . nextToken ( ) ) ; context . write ( word , one ) ; } } ; } public static class WordCountReducer extends GoraReducer < Text , IntWritable , String , TokenDatum > { TokenDatum result = new TokenDatum ( ) ; @ Override protected void reduce ( Text key , Iterable < IntWritable > values , Context context ) throws IOException , InterruptedException { int sum = <NUM_LIT:0> ; for ( IntWritable val : values ) { sum += val . get ( ) ; } result . setCount ( sum ) ; context . write ( key . 
toString ( ) , result ) ; } ; } public Job createJob ( DataStore < String , WebPage > inStore , Query < String , WebPage > query , DataStore < String , TokenDatum > outStore ) throws IOException { Job job = new Job ( getConf ( ) ) ; job . setJobName ( "<STR_LIT>" ) ; job . setNumReduceTasks ( <NUM_LIT:10> ) ; job . setJarByClass ( getClass ( ) ) ; GoraMapper . initMapperJob ( job , query , inStore , Text . class , IntWritable . class , TokenizerMapper . class , true ) ; GoraReducer . initReducerJob ( job , outStore , WordCountReducer . class ) ; return job ; } public int wordCount ( DataStore < String , WebPage > inStore , DataStore < String , TokenDatum > outStore ) throws IOException , InterruptedException , ClassNotFoundException { Query < String , WebPage > query = inStore . newQuery ( ) ; Job job = createJob ( inStore , query , outStore ) ; return job . waitForCompletion ( true ) ? <NUM_LIT:0> : <NUM_LIT:1> ; } @ Override public int run ( String [ ] args ) throws Exception { DataStore < String , WebPage > inStore ; DataStore < String , TokenDatum > outStore ; Configuration conf = new Configuration ( ) ; if ( args . length > <NUM_LIT:0> ) { String dataStoreClass = args [ <NUM_LIT:0> ] ; inStore = DataStoreFactory . getDataStore ( dataStoreClass , String . class , WebPage . class , conf ) ; if ( args . length > <NUM_LIT:1> ) { dataStoreClass = args [ <NUM_LIT:1> ] ; } outStore = DataStoreFactory . getDataStore ( dataStoreClass , String . class , TokenDatum . class , conf ) ; } else { inStore = DataStoreFactory . getDataStore ( String . class , WebPage . class , conf ) ; outStore = DataStoreFactory . getDataStore ( String . class , TokenDatum . class , conf ) ; } return wordCount ( inStore , outStore ) ; } public static void main ( String [ ] args ) throws Exception { int ret = ToolRunner . run ( new WordCount ( ) , args ) ; System . exit ( ret ) ; } } </s>
|
<s>
package org.apache.gora.avro.mapreduce;

import static org.apache.gora.avro.store.TestAvroStore.WEBPAGE_OUTPUT;

import java.io.IOException;

import org.apache.gora.avro.store.DataFileAvroStore;
import org.apache.gora.examples.generated.WebPage;
import org.apache.gora.mapreduce.DataStoreMapReduceTestBase;
import org.apache.gora.store.DataStore;
import org.apache.gora.store.DataStoreFactory;

/**
 * Runs the shared map-reduce test suite against a WebPage store backed by
 * {@link DataFileAvroStore}.
 */
public class TestDataFileAvroStoreMapReduce extends DataStoreMapReduceTestBase {

  public TestDataFileAvroStoreMapReduce() throws IOException {
    super();
  }

  @Override
  protected DataStore<String, WebPage> createWebPageDataStore() throws IOException {
    DataFileAvroStore<String, WebPage> store = new DataFileAvroStore<String, WebPage>();
    store.initialize(String.class, WebPage.class, DataStoreFactory.createProps());
    // Read and write through the same path so a job can consume prior output.
    store.setOutputPath(WEBPAGE_OUTPUT);
    store.setInputPath(WEBPAGE_OUTPUT);
    return store;
  }
}
</s>
|
<s> package org . apache . gora . avro . store ; import static org . apache . gora . examples . WebPageDataCreator . URLS ; import static org . apache . gora . examples . WebPageDataCreator . URL_INDEXES ; import static org . apache . gora . examples . WebPageDataCreator . createWebPageData ; import java . io . IOException ; import junit . framework . Assert ; import org . apache . gora . avro . store . AvroStore . CodecType ; import org . apache . gora . examples . generated . Employee ; import org . apache . gora . examples . generated . WebPage ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . DataStoreTestUtil ; import org . apache . gora . util . GoraException ; import org . apache . hadoop . conf . Configuration ; import org . apache . hadoop . fs . Path ; import org . junit . After ; import org . junit . Before ; import org . junit . Test ; public class TestAvroStore { public static final String EMPLOYEE_OUTPUT = System . getProperty ( "<STR_LIT>" ) + "<STR_LIT>" ; public static final String WEBPAGE_OUTPUT = System . getProperty ( "<STR_LIT>" ) + "<STR_LIT>" ; protected AvroStore < String , Employee > employeeStore ; protected AvroStore < String , WebPage > webPageStore ; protected Configuration conf = new Configuration ( ) ; @ Before public void setUp ( ) throws Exception { employeeStore = createEmployeeDataStore ( ) ; employeeStore . initialize ( String . class , Employee . class , DataStoreFactory . createProps ( ) ) ; employeeStore . setOutputPath ( EMPLOYEE_OUTPUT ) ; employeeStore . setInputPath ( EMPLOYEE_OUTPUT ) ; webPageStore = new AvroStore < String , WebPage > ( ) ; webPageStore . initialize ( String . class , WebPage . class , DataStoreFactory . createProps ( ) ) ; webPageStore . setOutputPath ( WEBPAGE_OUTPUT ) ; webPageStore . 
setInputPath ( WEBPAGE_OUTPUT ) ; } @ SuppressWarnings ( "<STR_LIT:unchecked>" ) protected AvroStore < String , Employee > createEmployeeDataStore ( ) throws GoraException { return DataStoreFactory . getDataStore ( AvroStore . class , String . class , Employee . class , conf ) ; } protected AvroStore < String , WebPage > createWebPageDataStore ( ) { return new AvroStore < String , WebPage > ( ) ; } @ After public void tearDown ( ) throws Exception { deletePath ( employeeStore . getOutputPath ( ) ) ; deletePath ( webPageStore . getOutputPath ( ) ) ; employeeStore . close ( ) ; webPageStore . close ( ) ; } private void deletePath ( String output ) throws IOException { if ( output != null ) { Path path = new Path ( output ) ; path . getFileSystem ( conf ) . delete ( path , true ) ; } } @ Test public void testNewInstance ( ) throws IOException { DataStoreTestUtil . testNewPersistent ( employeeStore ) ; } @ Test public void testCreateSchema ( ) throws IOException { DataStoreTestUtil . testCreateEmployeeSchema ( employeeStore ) ; } @ Test public void testAutoCreateSchema ( ) throws IOException { DataStoreTestUtil . testAutoCreateSchema ( employeeStore ) ; } @ Test public void testPut ( ) throws IOException { DataStoreTestUtil . testPutEmployee ( employeeStore ) ; } @ Test public void testQuery ( ) throws IOException { createWebPageData ( webPageStore ) ; webPageStore . close ( ) ; webPageStore . setInputPath ( webPageStore . getOutputPath ( ) ) ; testQueryWebPages ( webPageStore ) ; } @ Test public void testQueryBinaryEncoder ( ) throws IOException { webPageStore . setCodecType ( CodecType . BINARY ) ; webPageStore . setInputPath ( webPageStore . getOutputPath ( ) ) ; createWebPageData ( webPageStore ) ; webPageStore . close ( ) ; testQueryWebPages ( webPageStore ) ; } public static void testQueryWebPages ( DataStore < String , WebPage > store ) throws IOException { Query < String , WebPage > query = store . newQuery ( ) ; Result < String , WebPage > result = query . 
execute ( ) ; int i = <NUM_LIT:0> ; while ( result . next ( ) ) { WebPage page = result . get ( ) ; DataStoreTestUtil . assertWebPage ( page , URL_INDEXES . get ( page . getUrl ( ) . toString ( ) ) ) ; i ++ ; } Assert . assertEquals ( i , URLS . length ) ; } } </s>
|
<s>
package org.apache.gora.avro.store;

import org.apache.gora.avro.store.AvroStore;
import org.apache.gora.avro.store.DataFileAvroStore;
import org.apache.gora.examples.generated.Employee;
import org.apache.gora.examples.generated.WebPage;

/**
 * Reruns the {@link TestAvroStore} suite with {@link DataFileAvroStore}
 * instances supplied through the overridden factory methods.
 */
public class TestDataFileAvroStore extends TestAvroStore {

  @Override
  protected AvroStore<String, Employee> createEmployeeDataStore() {
    return new DataFileAvroStore<String, Employee>();
  }

  @Override
  protected AvroStore<String, WebPage> createWebPageDataStore() {
    return new DataFileAvroStore<String, WebPage>();
  }
}
</s>
|
<s> package org . apache . gora . avro ; import java . io . IOException ; import junit . framework . Assert ; import org . apache . avro . util . Utf8 ; import org . apache . gora . avro . PersistentDatumReader ; import org . apache . gora . examples . WebPageDataCreator ; import org . apache . gora . examples . generated . Employee ; import org . apache . gora . examples . generated . WebPage ; import org . apache . gora . memory . store . MemStore ; import org . apache . gora . persistency . Persistent ; import org . apache . gora . query . Query ; import org . apache . gora . query . Result ; import org . apache . gora . store . DataStore ; import org . apache . gora . store . DataStoreFactory ; import org . apache . gora . store . DataStoreTestUtil ; import org . apache . hadoop . conf . Configuration ; import org . junit . Test ; public class TestPersistentDatumReader { private PersistentDatumReader < WebPage > webPageDatumReader = new PersistentDatumReader < WebPage > ( ) ; private Configuration conf = new Configuration ( ) ; private void testClone ( Persistent persistent ) throws IOException { Persistent cloned = webPageDatumReader . clone ( persistent , persistent . getSchema ( ) ) ; assertClone ( persistent , cloned ) ; } private void assertClone ( Persistent persistent , Persistent cloned ) { Assert . assertNotNull ( "<STR_LIT>" , cloned ) ; Assert . assertEquals ( "<STR_LIT>" , persistent , cloned ) ; } @ Test public void testCloneEmployee ( ) throws Exception { @ SuppressWarnings ( "<STR_LIT:unchecked>" ) MemStore < String , Employee > store = DataStoreFactory . getDataStore ( MemStore . class , String . class , Employee . class , conf ) ; Employee employee = DataStoreTestUtil . createEmployee ( store ) ; testClone ( employee ) ; } @ Test public void testCloneEmployeeOneField ( ) throws Exception { Employee employee = new Employee ( ) ; employee . 
setSsn ( new Utf8 ( "<STR_LIT>" ) ) ; testClone ( employee ) ; } @ Test public void testCloneEmployeeTwoFields ( ) throws Exception { Employee employee = new Employee ( ) ; employee . setSsn ( new Utf8 ( "<STR_LIT>" ) ) ; employee . setSalary ( <NUM_LIT:100> ) ; testClone ( employee ) ; } @ Test public void testCloneWebPage ( ) throws Exception { @ SuppressWarnings ( "<STR_LIT:unchecked>" ) DataStore < String , WebPage > store = DataStoreFactory . createDataStore ( MemStore . class , String . class , WebPage . class , conf ) ; WebPageDataCreator . createWebPageData ( store ) ; Query < String , WebPage > query = store . newQuery ( ) ; Result < String , WebPage > result = query . execute ( ) ; int tested = <NUM_LIT:0> ; while ( result . next ( ) ) { WebPage page = result . get ( ) ; testClone ( page ) ; tested ++ ; } Assert . assertEquals ( WebPageDataCreator . URLS . length , tested ) ; } } </s>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.