repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveudf/src/main/java/org/zuinnote/hadoop/ethereum/hive/udf/EthereumGetChainIdUDF.java | hiveudf/src/main/java/org/zuinnote/hadoop/ethereum/hive/udf/EthereumGetChainIdUDF.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.udf;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;
import org.zuinnote.hadoop.ethereum.format.common.EthereumTransaction;
import org.zuinnote.hadoop.ethereum.format.common.EthereumUtil;
@Description(
name = "hclEthereumGetChainId",
value = "_FUNC_(Struct<EthereumTransaction>) - calculates the chainId of a EthereumTransaction and returns a long",
extended = "Example:\n" +
" > SELECT hclEthereumGetChainId(ethereumTransactions[0]) FROM EthereumBlockChain LIMIT 1;\n")
public class EthereumGetChainIdUDF extends GenericUDF {
private static final Log LOG = LogFactory.getLog(EthereumGetChainIdUDF.class.getName());
private EthereumUDFUtil ethereumUDFUtil;
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments==null) {
throw new UDFArgumentLengthException("ethereumGetChainId only takes one argument: Struct<EthereumTransction> ");
}
if (arguments.length != 1) {
throw new UDFArgumentLengthException("ethereumGetChainId only takes one argument: Struct<EthereumTransction> ");
}
if (!(arguments[0] instanceof StructObjectInspector)) {
throw new UDFArgumentException("first argument must be a Struct containing a EthereumTransction");
}
this.ethereumUDFUtil=new EthereumUDFUtil((StructObjectInspector) arguments[0]);
return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
if ((arguments==null) || (arguments.length!=1)) {
return null;
}
EthereumTransaction eTrans = this.ethereumUDFUtil.getEthereumTransactionFromObject(arguments[0].get());
Long chainId=EthereumUtil.calculateChainId(eTrans);
if (chainId==null) {
return null;
}
return new LongWritable(chainId);
}
@Override
public String getDisplayString(String[] children) {
return "hclEthereumGetChainId()";
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/presto-connector/src/main/java/org/zuinnote/presto/bitcoin/BitcoinConnector.java | presto-connector/src/main/java/org/zuinnote/presto/bitcoin/BitcoinConnector.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.presto.bitcoin;
import com.facebook.presto.spi.connector.Connector;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.spi.connector.ConnectorSplitManager;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.facebook.presto.spi.transaction.IsolationLevel;
/**
* @author jornfranke
*
*/
public class BitcoinConnector implements Connector {
@Override
public ConnectorTransactionHandle beginTransaction(IsolationLevel isolationLevel, boolean readOnly) {
// TODO Auto-generated method stub
return null;
}
@Override
public ConnectorMetadata getMetadata(ConnectorTransactionHandle transactionHandle) {
// TODO Auto-generated method stub
return null;
}
@Override
public ConnectorSplitManager getSplitManager() {
// TODO Auto-generated method stub
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/test/java/org/zuinnote/hadoop/bitcoin/hive/serde/BitcoinHiveSerdeTest.java | hiveserde/src/test/java/org/zuinnote/hadoop/bitcoin/hive/serde/BitcoinHiveSerdeTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.hive.serde;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.math.BigInteger;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockWritable;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockReader;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import org.zuinnote.hadoop.bitcoin.format.mapred.AbstractBitcoinRecordReader;
import org.zuinnote.hadoop.bitcoin.hive.datatypes.HiveBitcoinBlock;
public class BitcoinHiveSerdeTest {
private static final int DEFAULT_BUFFERSIZE=AbstractBitcoinRecordReader.DEFAULT_BUFFERSIZE;
private static final int DEFAULT_MAXSIZE_BITCOINBLOCK=AbstractBitcoinRecordReader.DEFAULT_MAXSIZE_BITCOINBLOCK;
private static final byte[][] DEFAULT_MAGIC = {{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9}};
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void initializePositive() {
BitcoinBlockSerde testSerde = new BitcoinBlockSerde();
Configuration conf = new Configuration();
Properties tblProperties = new Properties();
// just for testing purposes - these values may have no real meaning
tblProperties.setProperty(BitcoinBlockSerde.CONF_MAXBLOCKSIZE, String.valueOf(1));
tblProperties.setProperty(BitcoinBlockSerde.CONF_FILTERMAGIC, "A0A0A0A0");
tblProperties.setProperty(BitcoinBlockSerde.CONF_USEDIRECTBUFFER,"true");
tblProperties.setProperty(BitcoinBlockSerde.CONF_ISSPLITABLE,"true");
tblProperties.setProperty(BitcoinBlockSerde.CONF_READAUXPOW,"true");
testSerde.initialize(conf,tblProperties);
assertEquals(1, conf.getInt(BitcoinBlockSerde.CONF_MAXBLOCKSIZE,2),"MAXBLOCKSIZE set correctly");
assertEquals("A0A0A0A0", conf.get(BitcoinBlockSerde.CONF_FILTERMAGIC,"B0B0B0B0"),"FILTERMAGIC set correctly");
assertTrue(conf.getBoolean(BitcoinBlockSerde.CONF_USEDIRECTBUFFER,false),"USEDIRECTBUFFER set correctly");
assertTrue(conf.getBoolean(BitcoinBlockSerde.CONF_ISSPLITABLE,false),"ISSPLITABLE set correctly");
assertTrue(conf.getBoolean(BitcoinBlockSerde.CONF_READAUXPOW,false),"ISSPLITABLE set correctly");
}
@Test
public void deserialize() throws FileNotFoundException, IOException, BitcoinBlockReadException {
BitcoinBlockSerde testSerde = new BitcoinBlockSerde();
// create a BitcoinBlock based on the genesis block test data
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
BitcoinBlock readBitcoinBlock = bbr.readBlock();
BitcoinBlockWritable theBitcoinBlock = new BitcoinBlockWritable();
theBitcoinBlock.set(readBitcoinBlock);
// deserialize it
Object deserializedObject = testSerde.deserialize(theBitcoinBlock);
assertTrue( deserializedObject instanceof HiveBitcoinBlock,"Deserialized Object is of type BitcoinBlock");
HiveBitcoinBlock deserializedBitcoinBlockStruct = (HiveBitcoinBlock)deserializedObject;
// verify certain attributes
assertEquals( 1, deserializedBitcoinBlockStruct.getTransactions().size(),"Genesis Block must contain exactly one transaction");
assertEquals( 1, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfInputs().size(),"Genesis Block must contain exactly one transaction with one input");
assertEquals( 77, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfInputs().get(0).getTxInScript().length,"Genesis Block must contain exactly one transaction with one input and script length 77");
assertEquals( 0, HiveDecimal.create(5000000000L).compareTo(deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfOutputs().get(0).getValue()), "Value must be BigInteger corresponding to 5000000000L");
assertEquals( 1, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfOutputs().size(),"Genesis Block must contain exactly one transaction with one output");
assertEquals( 67, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfOutputs().get(0).getTxOutScript().length,"Genesis Block must contain exactly one transaction with one output and script length 67");
} finally {
if (bbr!=null)
bbr.close();
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/test/java/org/zuinnote/hadoop/ethereum/hive/serde/EthereumHiveSerdeTest.java | hiveserde/src/test/java/org/zuinnote/hadoop/ethereum/hive/serde/EthereumHiveSerdeTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.serde;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockWritable;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockReader;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
import org.zuinnote.hadoop.ethereum.format.mapred.AbstractEthereumRecordReader;
import org.zuinnote.hadoop.ethereum.hive.datatypes.HiveEthereumBlock;
public class EthereumHiveSerdeTest {
static final int DEFAULT_BUFFERSIZE=AbstractEthereumRecordReader.DEFAULT_BUFFERSIZE;
static final int DEFAULT_MAXSIZE_ETHEREUMBLOCK=AbstractEthereumRecordReader. DEFAULT_MAXSIZE_ETHEREUMBLOCK;
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void initializePositive() throws SerDeException {
EthereumBlockSerde testSerde = new EthereumBlockSerde();
Configuration conf = new Configuration();
Properties tblProperties = new Properties();
// just for testing purposes - these values may have no real meaning
tblProperties.setProperty(EthereumBlockSerde.CONF_MAXBLOCKSIZE, String.valueOf(1));
tblProperties.setProperty(EthereumBlockSerde.CONF_USEDIRECTBUFFER,"true");
testSerde.initialize(conf,tblProperties);
assertEquals(1,conf.getInt(EthereumBlockSerde.CONF_MAXBLOCKSIZE,2),"MAXBLOCKSIZE set correctly");
assertTrue(conf.getBoolean(EthereumBlockSerde.CONF_USEDIRECTBUFFER,false),"USEDIRECTBUFFER set correctly");
}
@Test
public void deserialize() throws IOException, EthereumBlockReadException, SerDeException{
EthereumBlockSerde testSerde = new EthereumBlockSerde();
// create a BitcoinBlock based on the genesis block test data
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
FileInputStream fin = new FileInputStream(file);
boolean direct=false;
EthereumBlockReader ebr = new EthereumBlockReader(fin,EthereumHiveSerdeTest.DEFAULT_MAXSIZE_ETHEREUMBLOCK,EthereumHiveSerdeTest.DEFAULT_BUFFERSIZE,direct );
EthereumBlock readBlock = ebr.readBlock();
EthereumBlockWritable block = new EthereumBlockWritable();
block.set(readBlock);
Object deserializedObject = testSerde.deserialize(block);
assertTrue( deserializedObject instanceof HiveEthereumBlock,"Deserialized Object is of type HiveEthereumBlock");
HiveEthereumBlock deserializedBitcoinBlockStruct = (HiveEthereumBlock)deserializedObject;
assertEquals( 6, deserializedBitcoinBlockStruct.getEthereumTransactions().size(),"Block contains 6 transactions");
assertEquals(0, deserializedBitcoinBlockStruct.getUncleHeaders().size(),"Block contains 0 uncleHeaders");
byte[] expectedParentHash = new byte[] {(byte)0xBA,(byte)0x6D,(byte)0xD2,(byte)0x60,(byte)0x12,(byte)0xB3,(byte)0x71,(byte)0x90,(byte)0x48,(byte)0xF3,(byte)0x16,(byte)0xC6,(byte)0xED,(byte)0xB3,(byte)0x34,(byte)0x9B,(byte)0xDF,(byte)0xBD,(byte)0x61,(byte)0x31,(byte)0x9F,(byte)0xA9,(byte)0x7C,(byte)0x61,(byte)0x6A,(byte)0x61,(byte)0x31,(byte)0x18,(byte)0xA1,(byte)0xAF,(byte)0x30,(byte)0x67};
assertArrayEquals( expectedParentHash, deserializedBitcoinBlockStruct.getEthereumBlockHeader().getParentHash(),"Block contains a correct 32 byte parent hash");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinTransactionOutput.java | hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinTransactionOutput.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.hive.datatypes;
import java.io.Serializable;
import java.math.BigInteger;
import org.apache.hadoop.hive.common.type.HiveDecimal;
public class HiveBitcoinTransactionOutput implements Serializable {
/**
*
*/
private static final long serialVersionUID = 2854570630540937753L;
private HiveDecimal value;
private byte[] txOutScriptLength;
private byte[] txOutScript;
public HiveBitcoinTransactionOutput(HiveDecimal value, byte[] txOutScriptLength, byte[] txOutScript) {
this.value=value;
this.txOutScriptLength=txOutScriptLength;
this.txOutScript=txOutScript;
}
public HiveDecimal getValue() {
return this.value;
}
public byte[] getTxOutScriptLength() {
return this.txOutScriptLength;
}
public byte[] getTxOutScript() {
return this.txOutScript;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinBlock.java | hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinBlock.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.hive.datatypes;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import java.util.List;
import java.util.ArrayList;
/**
* This class is an object storing relevant fields of a Bitcoin Block.
*/
public class HiveBitcoinBlock implements Writable {
private long blockSize;
private byte[] magicNo;
private long version;
private long time;
private byte[] bits;
private long nonce;
private long transactionCounter;
private byte[] hashPrevBlock;
private byte[] hashMerkleRoot;
private List<HiveBitcoinTransaction> transactions;
private HiveBitcoinAuxPOW auxPOW;
public HiveBitcoinBlock() {
this.magicNo=new byte[0];
this.bits=new byte[0];
this.transactionCounter=0;
this.hashPrevBlock=new byte[0];
this.hashMerkleRoot=new byte[0];
this.transactions=new ArrayList<>();
this.auxPOW=new HiveBitcoinAuxPOW();
}
public long getBlockSize() {
return this.blockSize;
}
public void setBlockSize(long blockSize) {
this.blockSize=blockSize;
}
public byte[] getMagicNo() {
return this.magicNo;
}
public void setMagicNo(byte[] magicNo) {
this.magicNo=magicNo;
}
public long getVersion() {
return this.version;
}
public void setVersion(long version) {
this.version=version;
}
public long getTime() {
return this.time;
}
public void setTime(long time) {
this.time=time;
}
public byte[] getBits() {
return this.bits;
}
public void setBits(byte[] bits) {
this.bits=bits;
}
public long getNonce() {
return this.nonce;
}
public void setNonce(long nonce) {
this.nonce=nonce;
}
public long getTransactionCounter() {
return this.transactionCounter;
}
public void setTransactionCounter(long transactionCounter) {
this.transactionCounter=transactionCounter;
}
public byte[] getHashPrevBlock() {
return this.hashPrevBlock;
}
public void setHashPrevBlock(byte[] hashPrevBlock) {
this.hashPrevBlock=hashPrevBlock;
}
public byte[] getHashMerkleRoot() {
return this.hashMerkleRoot;
}
public void setHashMerkleRoot(byte[] hashMerkleRoot) {
this.hashMerkleRoot=hashMerkleRoot;
}
public List<HiveBitcoinTransaction> getTransactions() {
return this.transactions;
}
public void setTransactions(List<HiveBitcoinTransaction> transactions) {
this.transactions=transactions;
}
public HiveBitcoinAuxPOW getAuxPOW() {
return this.auxPOW;
}
public void setAuxPOW(HiveBitcoinAuxPOW auxPOW) {
this.auxPOW = auxPOW;
}
public void set(HiveBitcoinBlock newBitcoinBlock) {
this.blockSize=newBitcoinBlock.getBlockSize();
this.magicNo=newBitcoinBlock.getMagicNo();
this.version=newBitcoinBlock.getVersion();
this.time=newBitcoinBlock.getTime();
this.bits=newBitcoinBlock.getBits();
this.nonce=newBitcoinBlock.getNonce();
this.transactionCounter=newBitcoinBlock.getTransactionCounter();
this.hashPrevBlock=newBitcoinBlock.getHashPrevBlock();
this.hashMerkleRoot=newBitcoinBlock.getHashMerkleRoot();
this.transactions=newBitcoinBlock.getTransactions();
this.auxPOW=newBitcoinBlock.getAuxPOW();
}
/** Writable **/
@Override
public void write(DataOutput dataOutput) throws IOException {
throw new UnsupportedOperationException("write unsupported");
}
@Override
public void readFields(DataInput dataInput) throws IOException {
throw new UnsupportedOperationException("readFields unsupported");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinAuxPOW.java | hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinAuxPOW.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.hive.datatypes;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinAuxPOWBlockHeader;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinAuxPOWBranch;
public class HiveBitcoinAuxPOW {
private int version;
private HiveBitcoinTransaction coinbaseTransaction;
private byte[] parentBlockHeaderHash;
private BitcoinAuxPOWBranch coinbaseBranch;
private BitcoinAuxPOWBranch auxBlockChainBranch;
private BitcoinAuxPOWBlockHeader parentBlockHeader;
/*
* Creates an empy AuxPOW object in case the feature is not used (e.g. in the main Bitcoin blockchain)
*
*/
public HiveBitcoinAuxPOW() {
this.version=0;
this.coinbaseTransaction=null;
this.coinbaseBranch=null;
this.auxBlockChainBranch=null;
this.parentBlockHeader=null;
}
public HiveBitcoinAuxPOW(int version, HiveBitcoinTransaction coinbaseTransaction, byte[] parentBlockHeaderHash, BitcoinAuxPOWBranch coinbaseBranch, BitcoinAuxPOWBranch auxBlockChainBranch, BitcoinAuxPOWBlockHeader parentBlockHeader) {
this.version=version;
this.coinbaseTransaction=coinbaseTransaction;
this.parentBlockHeaderHash=parentBlockHeaderHash;
this.coinbaseBranch=coinbaseBranch;
this.auxBlockChainBranch=auxBlockChainBranch;
this.parentBlockHeader=parentBlockHeader;
}
public int getVersion() {
return version;
}
public BitcoinAuxPOWBranch getCoinbaseBranch() {
return coinbaseBranch;
}
public void setCoinbaseBranch(BitcoinAuxPOWBranch coinbaseBranch) {
this.coinbaseBranch = coinbaseBranch;
}
public BitcoinAuxPOWBranch getAuxBlockChainBranch() {
return auxBlockChainBranch;
}
public BitcoinAuxPOWBlockHeader getParentBlockHeader() {
return parentBlockHeader;
}
public HiveBitcoinTransaction getCoinbaseTransaction() {
return coinbaseTransaction;
}
public byte[] getParentBlockHeaderHash() {
return parentBlockHeaderHash;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinTransaction.java | hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/datatypes/HiveBitcoinTransaction.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.hive.datatypes;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.commons.io.output.ThresholdingOutputStream;
import org.apache.hadoop.io.Writable;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinScriptWitnessItem;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionInput;
import java.util.List;
import java.util.ArrayList;
public class HiveBitcoinTransaction implements Writable {
private long version;
private byte marker;
private byte flag;
private byte[] inCounter;
private byte[] outCounter;
private List<BitcoinTransactionInput> listOfInputs;
private List<HiveBitcoinTransactionOutput> listOfOutputs;
private List<BitcoinScriptWitnessItem> listOfScriptWitnessItem;
private long lockTime;
public HiveBitcoinTransaction() {
this.version=0;
this.marker=1;
this.flag=0;
this.inCounter=new byte[0];
this.outCounter=new byte[0];
this.listOfInputs=new ArrayList<>();
this.listOfOutputs=new ArrayList<>();
this.listOfScriptWitnessItem=new ArrayList<>();
this.lockTime=0;
}
/***
* Creates a traditional Bitcoin Transaction without ScriptWitness
*
* @param version
* @param inCounter
* @param listOfInputs
* @param outCounter
* @param listOfOutputs
* @param lockTime
*/
public HiveBitcoinTransaction(long version, byte[] inCounter, List<BitcoinTransactionInput> listOfInputs, byte[] outCounter, List<HiveBitcoinTransactionOutput> listOfOutputs, long lockTime) {
this.marker=1;
this.flag=0;
this.version=version;
this.inCounter=inCounter;
this.listOfInputs=listOfInputs;
this.outCounter=outCounter;
this.listOfOutputs=listOfOutputs;
this.listOfScriptWitnessItem=new ArrayList<>();
this.lockTime=lockTime;
}
/**
* Creates a Bitcoin Transaction with Segwitness
*
* @param marker
* @param flag
* @param version
* @param inCounter
* @param listOfInputs
* @param outCounter
* @param listOfOutputs
* @param listOfScriptWitnessItem
* @param lockTime
*/
public HiveBitcoinTransaction(byte marker, byte flag, long version, byte[] inCounter, List<BitcoinTransactionInput> listOfInputs, byte[] outCounter, List<HiveBitcoinTransactionOutput> listOfOutputs, List<BitcoinScriptWitnessItem> listOfScriptWitnessItem, long lockTime) {
this.marker=marker;
this.flag=flag;
this.version=version;
this.inCounter=inCounter;
this.listOfInputs=listOfInputs;
this.outCounter=outCounter;
this.listOfOutputs=listOfOutputs;
this.listOfScriptWitnessItem=listOfScriptWitnessItem;
this.lockTime=lockTime;
}
public long getVersion() {
return this.version;
}
public byte getMarker() {
return this.marker;
}
public byte getFlag() {
return this.flag;
}
public byte[] getInCounter() {
return this.inCounter;
}
public List<BitcoinTransactionInput> getListOfInputs() {
return this.listOfInputs;
}
public byte[] getOutCounter() {
return this.outCounter;
}
public List<HiveBitcoinTransactionOutput> getListOfOutputs() {
return this.listOfOutputs;
}
public List<BitcoinScriptWitnessItem> getBitcoinScriptWitness() {
return this.listOfScriptWitnessItem;
}
public long getLockTime() {
return this.lockTime;
}
public void set(HiveBitcoinTransaction newTransaction) {
this.version=newTransaction.getVersion();
this.marker=newTransaction.getMarker();
this.flag=newTransaction.getFlag();
this.inCounter=newTransaction.getInCounter();
this.listOfInputs=newTransaction.getListOfInputs();
this.outCounter=newTransaction.getOutCounter();
this.listOfOutputs=newTransaction.getListOfOutputs();
this.listOfScriptWitnessItem=newTransaction.getBitcoinScriptWitness();
this.lockTime=newTransaction.getLockTime();
}
/** Writable **/
@Override
public void write(DataOutput dataOutput) throws IOException {
throw new UnsupportedOperationException("write unsupported");
}
@Override
public void readFields(DataInput dataInput) throws IOException {
throw new UnsupportedOperationException("readFields unsupported");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/serde/BitcoinBlockSerde.java | hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/serde/BitcoinBlockSerde.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Hive Deserializer to represent Bitcoin Blockchain data in Hive using the BitcoinBlockFileInputFormat provided by the hadoopcryptoledger library
*/
package org.zuinnote.hadoop.bitcoin.hive.serde;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.hive.serde2.AbstractDeserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinAuxPOWBlockHeader;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinAuxPOWBranch;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionOutput;
import org.zuinnote.hadoop.bitcoin.format.mapred.AbstractBitcoinRecordReader;
import org.zuinnote.hadoop.bitcoin.hive.datatypes.HiveBitcoinAuxPOW;
import org.zuinnote.hadoop.bitcoin.hive.datatypes.HiveBitcoinBlock;
import org.zuinnote.hadoop.bitcoin.hive.datatypes.HiveBitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.hive.datatypes.HiveBitcoinTransactionOutput;
import org.zuinnote.hadoop.bitcoin.format.mapred.AbstractBitcoinFileInputFormat;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
/**
* Enables access to Bitcoin Blockchain data via Hive tables. Usage:
*
* Create table BitcoinBlockchain ROW FORMAT SERDE
* 'org.zuinnote.hadoop.bitcoin.hive.serde.BitcoinBlockSerde' STORED AS
* INPUTFORMAT
* 'org.zuinnote.hadoop.bitcoin.format.mapred.BitcoinBlockFileInputFormat'
* OUTPUTFORMAT 'org.apache.hadoop.mapred.lib.NullOutputFormat' LOCATION
* '/user/test/bitcoin/input';
*
* Example structure: describe BitcoinBlockchain
*
*/
public class BitcoinBlockSerde extends AbstractDeserializer {
public static final String CONF_MAXBLOCKSIZE = AbstractBitcoinRecordReader.CONF_MAXBLOCKSIZE;
public static final String CONF_FILTERMAGIC = AbstractBitcoinRecordReader.CONF_FILTERMAGIC;
public static final String CONF_USEDIRECTBUFFER = AbstractBitcoinRecordReader.CONF_USEDIRECTBUFFER;
public static final String CONF_ISSPLITABLE = AbstractBitcoinFileInputFormat.CONF_ISSPLITABLE;
public static final String CONF_READAUXPOW = AbstractBitcoinRecordReader.CONF_READAUXPOW;
private static final Log LOG = LogFactory.getLog(BitcoinBlockSerde.class.getName());
private ObjectInspector bitcoinBlockObjectInspector;
/** Deserializer **/
@Override
public Object deserialize(Writable blob) {
HiveBitcoinBlock result = null;
if (blob instanceof BitcoinBlock) {
result = convertToHiveBitcoinBlock((BitcoinBlock) blob);
}
return result;
}
@Override
public ObjectInspector getObjectInspector() {
return this.bitcoinBlockObjectInspector;
}
@Override
public SerDeStats getSerDeStats() {
// not supported
return null;
}
public Class<? extends Writable> getSerializedClass() {
return HiveBitcoinBlock.class;
}
@Override
public void initialize(Configuration conf, Properties tbl) {
LOG.debug("Initializing");
// get objectinspector with introspection for class BitcoinBlockStruct to reuse
// functionality
bitcoinBlockObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector(HiveBitcoinBlock.class,
ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
// pass tbl properties to Configuration
String maxBlockSizeStr = tbl.getProperty(BitcoinBlockSerde.CONF_MAXBLOCKSIZE);
if (maxBlockSizeStr != null) {
conf.setInt(BitcoinBlockSerde.CONF_MAXBLOCKSIZE, Integer.parseInt(maxBlockSizeStr));
LOG.info("Setting max block size: " + maxBlockSizeStr);
}
String filterMagicStr = tbl.getProperty(BitcoinBlockSerde.CONF_FILTERMAGIC);
if (filterMagicStr != null) {
conf.set(BitcoinBlockSerde.CONF_FILTERMAGIC, filterMagicStr);
LOG.info("Setting filter magic: " + filterMagicStr);
}
String useDirectBufferStr = tbl.getProperty(BitcoinBlockSerde.CONF_USEDIRECTBUFFER);
if (useDirectBufferStr != null) {
conf.setBoolean(BitcoinBlockSerde.CONF_USEDIRECTBUFFER, Boolean.parseBoolean(useDirectBufferStr));
LOG.info("Use direct buffer: " + useDirectBufferStr);
}
String isSplitableStr = tbl.getProperty(BitcoinBlockSerde.CONF_ISSPLITABLE);
if (isSplitableStr != null) {
conf.setBoolean(BitcoinBlockSerde.CONF_ISSPLITABLE, Boolean.parseBoolean(isSplitableStr));
LOG.info("Enable splitable heuristic: " + isSplitableStr);
}
String readAuxPOWStr = tbl.getProperty(BitcoinBlockSerde.CONF_READAUXPOW);
if (readAuxPOWStr != null) {
conf.setBoolean(BitcoinBlockSerde.CONF_READAUXPOW, Boolean.parseBoolean(readAuxPOWStr));
LOG.info("Enable read aux pow: " + readAuxPOWStr);
}
LOG.debug("Finish initializion");
}
private HiveBitcoinBlock convertToHiveBitcoinBlock(BitcoinBlock block) {
// convert to HiveBitcoinBlock
// convert transactions
List<HiveBitcoinTransaction> newTransactions = new ArrayList<>();
for (int i = 0; i < block.getTransactions().size(); i++) {
BitcoinTransaction currentTransaction = block.getTransactions().get(i);
List<HiveBitcoinTransactionOutput> newTransactionsOutputList = new ArrayList<>();
for (int j = 0; j < currentTransaction.getListOfOutputs().size(); j++) {
BitcoinTransactionOutput currentOutput = currentTransaction.getListOfOutputs().get(j);
HiveDecimal newValue = HiveDecimal.create(currentOutput.getValue());
newTransactionsOutputList.add(new HiveBitcoinTransactionOutput(newValue,
currentOutput.getTxOutScriptLength(), currentOutput.getTxOutScript()));
}
HiveBitcoinTransaction newTransaction = new HiveBitcoinTransaction(currentTransaction.getMarker(),
currentTransaction.getFlag(), currentTransaction.getVersion(), currentTransaction.getInCounter(),
currentTransaction.getListOfInputs(), currentTransaction.getOutCounter(), newTransactionsOutputList,
currentTransaction.getBitcoinScriptWitness(), currentTransaction.getLockTime());
newTransactions.add(newTransaction);
}
// convertAuxPow
// convert Transaction
HiveBitcoinAuxPOW newHiveBitcoinAuxPOW = null;
if (block.getAuxPOW() != null) {
BitcoinTransaction currentCoinbaseTransaction = block.getAuxPOW().getCoinbaseTransaction();
List<HiveBitcoinTransactionOutput> newCoinbaseTransactionOutputList = new ArrayList<>();
for (int i = 0; i < currentCoinbaseTransaction.getListOfOutputs().size(); i++) {
BitcoinTransactionOutput currentOutput = currentCoinbaseTransaction.getListOfOutputs().get(i);
HiveDecimal newValue = HiveDecimal.create(currentOutput.getValue());
newCoinbaseTransactionOutputList.add(new HiveBitcoinTransactionOutput(newValue,
currentOutput.getTxOutScriptLength(), currentOutput.getTxOutScript()));
}
HiveBitcoinTransaction newCoinbaseTransaction = new HiveBitcoinTransaction(
currentCoinbaseTransaction.getMarker(), currentCoinbaseTransaction.getFlag(),
currentCoinbaseTransaction.getVersion(), currentCoinbaseTransaction.getInCounter(),
currentCoinbaseTransaction.getListOfInputs(), currentCoinbaseTransaction.getInCounter(),
newCoinbaseTransactionOutputList, currentCoinbaseTransaction.getBitcoinScriptWitness(),
currentCoinbaseTransaction.getLockTime());
newHiveBitcoinAuxPOW = new HiveBitcoinAuxPOW((int)block.getAuxPOW().getVersion(), newCoinbaseTransaction,
block.getAuxPOW().getParentBlockHeaderHash(), block.getAuxPOW().getCoinbaseBranch(),
block.getAuxPOW().getAuxBlockChainBranch(), block.getAuxPOW().getParentBlockHeader());
}
// final result
HiveBitcoinBlock result = new HiveBitcoinBlock();
result.setBlockSize(block.getBlockSize());
result.setMagicNo(block.getMagicNo());
result.setVersion(block.getVersion());
result.setTime(block.getTime());
result.setBits(block.getBits());
result.setNonce(block.getNonce());
result.setTransactionCounter(block.getTransactionCounter());
result.setHashPrevBlock(block.getHashPrevBlock());
result.setHashMerkleRoot(block.getHashMerkleRoot());
result.setTransactions(newTransactions);
result.setAuxPOW(newHiveBitcoinAuxPOW);
return result;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/serde/BitcoinBlockStorageFormatDescriptor.java | hiveserde/src/main/java/org/zuinnote/hadoop/bitcoin/hive/serde/BitcoinBlockStorageFormatDescriptor.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.hive.serde;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hive.ql.io.AbstractStorageFormatDescriptor;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
import org.zuinnote.hadoop.bitcoin.format.mapred.BitcoinBlockFileInputFormat;
public class BitcoinBlockStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
@Override
public Set<String> getNames() {
HashSet<String> result = new HashSet<>();
result.add("BITCOINBLOCK");
return result;
}
@Override
public String getInputFormat() {
return BitcoinBlockFileInputFormat.class.getName();
}
@Override
public String getOutputFormat() {
return NullOutputFormat.class.getName();
}
@Override
public String getSerde() {
return BitcoinBlockSerde.class.getName();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/datatypes/HiveEthereumTransaction.java | hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/datatypes/HiveEthereumTransaction.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.datatypes;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.io.Writable;
/**
*
*
*/
public class HiveEthereumTransaction implements Writable {
private byte[] nonce;
private HiveDecimal value;
private byte[] valueRaw;
private byte[] receiveAddress;
private HiveDecimal gasPrice;
private byte[] gasPriceRaw;
private HiveDecimal gasLimit;
private byte[] gasLimitRaw;
private byte[] data;
private byte[] sig_v;
private byte[] sig_r;
private byte[] sig_s;
public HiveEthereumTransaction() {
// please use setter to set the data
}
@Override
public void write(DataOutput out) throws IOException {
throw new UnsupportedOperationException("write unsupported");
}
@Override
public void readFields(DataInput in) throws IOException {
throw new UnsupportedOperationException("readFields unsupported");
}
public byte[] getNonce() {
return nonce;
}
public void setNonce(byte[] nonce) {
this.nonce = nonce;
}
public HiveDecimal getValue() {
return this.value;
}
public void setValue(HiveDecimal value) {
this.value = value;
}
public byte[] getReceiveAddress() {
return receiveAddress;
}
public void setReceiveAddress(byte[] receiveAddress) {
this.receiveAddress = receiveAddress;
}
public HiveDecimal getGasPrice() {
return this.gasPrice;
}
public void setGasPrice(HiveDecimal gasPrice) {
this.gasPrice = gasPrice;
}
public HiveDecimal getGasLimit() {
return this.gasLimit;
}
public void setGasLimit(HiveDecimal gasLimit) {
this.gasLimit = gasLimit;
}
public byte[] getData() {
return data;
}
public void setData(byte[] data) {
this.data = data;
}
public void set(HiveEthereumTransaction newTransaction) {
this.nonce=newTransaction.getNonce();
this.valueRaw=newTransaction.getValueRaw();
this.value=newTransaction.getValue();
this.receiveAddress=newTransaction.getReceiveAddress();
this.gasPriceRaw=newTransaction.getGasPriceRaw();
this.gasPrice = newTransaction.getGasPrice();
this.gasLimitRaw=newTransaction.getGasLimitRaw();
this.gasLimit=newTransaction.getGasLimit();
this.data=newTransaction.getData();
this.sig_v=newTransaction.getSig_v();
this.sig_r=newTransaction.getSig_r();
this.sig_s=newTransaction.getSig_s();
}
public byte[] getSig_v() {
return sig_v;
}
public void setSig_v(byte[] sig_v) {
this.sig_v = sig_v;
}
public byte[] getSig_r() {
return sig_r;
}
public void setSig_r(byte[] sig_r) {
this.sig_r = sig_r;
}
public byte[] getSig_s() {
return sig_s;
}
public void setSig_s(byte[] sig_s) {
this.sig_s = sig_s;
}
public byte[] getValueRaw() {
return valueRaw;
}
public void setValueRaw(byte[] valueRaw) {
this.valueRaw = valueRaw;
}
public byte[] getGasPriceRaw() {
return gasPriceRaw;
}
public void setGasPriceRaw(byte[] gasPriceRaw) {
this.gasPriceRaw = gasPriceRaw;
}
public byte[] getGasLimitRaw() {
return gasLimitRaw;
}
public void setGasLimitRaw(byte[] gasLimitRaw) {
this.gasLimitRaw = gasLimitRaw;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/datatypes/HiveEthereumBlockHeader.java | hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/datatypes/HiveEthereumBlockHeader.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.datatypes;
import java.io.Serializable;
import org.apache.hadoop.hive.common.type.HiveDecimal;
/**
*
*
*/
public class HiveEthereumBlockHeader implements Serializable {
/**
*
*/
private static final long serialVersionUID = 2446091414374317679L;
private byte[] parentHash;
private byte[] uncleHash;
private byte[] coinBase;
private byte[] stateRoot;
private byte[] txTrieRoot;
private byte[] receiptTrieRoot;
private byte[] logsBloom;
private byte[] difficulty;
private long timestamp;
private HiveDecimal number;
private byte[] numberRaw;
private HiveDecimal gasLimit;
private byte[] gasLimitRaw;
private HiveDecimal gasUsed;
private byte[] gasUsedRaw;
private byte[] mixHash;
private byte[] extraData;
private byte[] nonce;
public HiveEthereumBlockHeader() {
// please use the set method to modify the data
}
public byte[] getParentHash() {
return parentHash;
}
public void setParentHash(byte[] parentHash) {
this.parentHash = parentHash;
}
public byte[] getUncleHash() {
return uncleHash;
}
public void setUncleHash(byte[] uncleHash) {
this.uncleHash = uncleHash;
}
public byte[] getCoinBase() {
return coinBase;
}
public void setCoinBase(byte[] coinBase) {
this.coinBase = coinBase;
}
public byte[] getStateRoot() {
return stateRoot;
}
public void setStateRoot(byte[] stateRoot) {
this.stateRoot = stateRoot;
}
public byte[] getTxTrieRoot() {
return txTrieRoot;
}
public void setTxTrieRoot(byte[] txTrieRoot) {
this.txTrieRoot = txTrieRoot;
}
public byte[] getReceiptTrieRoot() {
return receiptTrieRoot;
}
public void setReceiptTrieRoot(byte[] receiptTrieRoot) {
this.receiptTrieRoot = receiptTrieRoot;
}
public byte[] getLogsBloom() {
return logsBloom;
}
public void setLogsBloom(byte[] logsBloom) {
this.logsBloom = logsBloom;
}
public byte[] getDifficulty() {
return difficulty;
}
public void setDifficulty(byte[] difficulty) {
this.difficulty = difficulty;
}
public long getTimestamp() {
return timestamp;
}
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
public HiveDecimal getNumber() {
return this.number;
}
public void setNumber(HiveDecimal number) {
this.number = number;
}
public HiveDecimal getGasLimit() {
return this.gasLimit;
}
public void setGasLimit(HiveDecimal gasLimit) {
this.gasLimit = gasLimit;
}
public HiveDecimal getGasUsed() {
return this.gasUsed;
}
public void setGasUsed(HiveDecimal gasUsed) {
this.gasUsed = gasUsed;
}
public byte[] getMixHash() {
return mixHash;
}
public void setMixHash(byte[] mixHash) {
this.mixHash = mixHash;
}
public byte[] getExtraData() {
return extraData;
}
public void setExtraData(byte[] extraData) {
this.extraData = extraData;
}
public byte[] getNonce() {
return nonce;
}
public void setNonce(byte[] nonce) {
this.nonce = nonce;
}
public void set(HiveEthereumBlockHeader newEthereumBlockHeader) {
this.parentHash=newEthereumBlockHeader.getParentHash();
this.uncleHash=newEthereumBlockHeader.getUncleHash();
this.coinBase=newEthereumBlockHeader.getCoinBase();
this.stateRoot=newEthereumBlockHeader.getStateRoot();
this.txTrieRoot=newEthereumBlockHeader.getTxTrieRoot();
this.receiptTrieRoot=newEthereumBlockHeader.getReceiptTrieRoot();
this.logsBloom=newEthereumBlockHeader.getLogsBloom();
this.difficulty=newEthereumBlockHeader.getDifficulty();
this.timestamp=newEthereumBlockHeader.getTimestamp();
this.number=newEthereumBlockHeader.getNumber();
this.numberRaw=newEthereumBlockHeader.getNumberRaw();
this.gasLimit=newEthereumBlockHeader.getGasLimit();
this.gasLimitRaw=newEthereumBlockHeader.getGasLimitRaw();
this.gasUsed=newEthereumBlockHeader.getGasUsed();
this.gasUsedRaw=newEthereumBlockHeader.getGasUsedRaw();
this.mixHash=newEthereumBlockHeader.getMixHash();
this.extraData=newEthereumBlockHeader.getExtraData();
this.nonce=newEthereumBlockHeader.getNonce();
}
public byte[] getNumberRaw() {
return numberRaw;
}
public void setNumberRaw(byte[] numberRaw) {
this.numberRaw = numberRaw;
}
public byte[] getGasLimitRaw() {
return gasLimitRaw;
}
public void setGasLimitRaw(byte[] gasLimitRaw) {
this.gasLimitRaw = gasLimitRaw;
}
public byte[] getGasUsedRaw() {
return gasUsedRaw;
}
public void setGasUsedRaw(byte[] gasUsedRaw) {
this.gasUsedRaw = gasUsedRaw;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/datatypes/HiveEthereumBlock.java | hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/datatypes/HiveEthereumBlock.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.datatypes;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.io.Writable;
public class HiveEthereumBlock implements Writable {
private HiveEthereumBlockHeader ethereumBlockHeader;
private List<HiveEthereumTransaction> ethereumTransactions;
private List<HiveEthereumBlockHeader> uncleHeaders;
public HiveEthereumBlock() {
}
public HiveEthereumBlock(HiveEthereumBlockHeader ethereumBlockHeader, List<HiveEthereumTransaction> ethereumTransactions, List<HiveEthereumBlockHeader> uncleHeaders) {
this.ethereumBlockHeader=ethereumBlockHeader;
this.ethereumTransactions=ethereumTransactions;
this.uncleHeaders=uncleHeaders;
}
public HiveEthereumBlockHeader getEthereumBlockHeader() {
return ethereumBlockHeader;
}
public List<HiveEthereumBlockHeader> getUncleHeaders() {
return uncleHeaders;
}
public List<HiveEthereumTransaction> getEthereumTransactions() {
return ethereumTransactions;
}
public void set(HiveEthereumBlock newBlock) {
this.ethereumBlockHeader=newBlock.getEthereumBlockHeader();
this.uncleHeaders=newBlock.getUncleHeaders();
this.ethereumTransactions=newBlock.getEthereumTransactions();
}
@Override
public void write(DataOutput out) throws IOException {
throw new UnsupportedOperationException("write unsupported");
}
@Override
public void readFields(DataInput in) throws IOException {
throw new UnsupportedOperationException("readFields unsupported");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/serde/EthereumBlockSerde.java | hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/serde/EthereumBlockSerde.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.serde;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
import org.apache.hadoop.hive.serde2.AbstractDeserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.io.Writable;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockHeader;
import org.zuinnote.hadoop.ethereum.format.common.EthereumTransaction;
import org.zuinnote.hadoop.ethereum.format.mapred.AbstractEthereumRecordReader;
import org.zuinnote.hadoop.ethereum.hive.datatypes.HiveEthereumBlock;
import org.zuinnote.hadoop.ethereum.hive.datatypes.HiveEthereumBlockHeader;
import org.zuinnote.hadoop.ethereum.hive.datatypes.HiveEthereumTransaction;
public class EthereumBlockSerde extends AbstractDeserializer {
public static final String CONF_MAXBLOCKSIZE=AbstractEthereumRecordReader.CONF_MAXBLOCKSIZE;
public static final String CONF_USEDIRECTBUFFER=AbstractEthereumRecordReader.CONF_USEDIRECTBUFFER;
private static final Log LOG = LogFactory.getLog(EthereumBlockSerde.class.getName());
private ObjectInspector ethereumBlockObjectInspector;
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
LOG.debug("Initializing");
// get objectinspector with introspection for class BitcoinBlockStruct to reuse functionality
ethereumBlockObjectInspector = ObjectInspectorFactory
.getReflectionObjectInspector(HiveEthereumBlock.class,
ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
// pass tbl properties to Configuration
String maxBlockSizeStr=tbl.getProperty(EthereumBlockSerde.CONF_MAXBLOCKSIZE);
if (maxBlockSizeStr!=null) {
conf.setInt(EthereumBlockSerde.CONF_MAXBLOCKSIZE, Integer.parseInt(maxBlockSizeStr));
LOG.info("Setting max block size: "+maxBlockSizeStr);
}
String useDirectBufferStr=tbl.getProperty(EthereumBlockSerde.CONF_USEDIRECTBUFFER);
if (useDirectBufferStr!=null) {
conf.setBoolean(EthereumBlockSerde.CONF_USEDIRECTBUFFER, Boolean.parseBoolean(useDirectBufferStr));
LOG.info("Use direct buffer: "+useDirectBufferStr);
}
LOG.debug("Finish initializion");
}
@Override
public Object deserialize(Writable arg0) throws SerDeException {
HiveEthereumBlock result=null;
if (arg0 instanceof EthereumBlock) {
result=convertToHiveEthereumBlock((EthereumBlock) arg0);
}
return result;
}
@Override
public ObjectInspector getObjectInspector() throws SerDeException {
return this.ethereumBlockObjectInspector;
}
@Override
public SerDeStats getSerDeStats() {
// not supported
return null;
}
private HiveEthereumBlock convertToHiveEthereumBlock(EthereumBlock block) {
HiveEthereumBlockHeader ethereumBlockHeader = new HiveEthereumBlockHeader();
ethereumBlockHeader.setParentHash(block.getEthereumBlockHeader().getParentHash());
ethereumBlockHeader.setUncleHash(block.getEthereumBlockHeader().getUncleHash());
ethereumBlockHeader.setCoinBase(block.getEthereumBlockHeader().getCoinBase());
ethereumBlockHeader.setStateRoot(block.getEthereumBlockHeader().getStateRoot());
ethereumBlockHeader.setTxTrieRoot(block.getEthereumBlockHeader().getTxTrieRoot());
ethereumBlockHeader.setReceiptTrieRoot(block.getEthereumBlockHeader().getReceiptTrieRoot());
ethereumBlockHeader.setLogsBloom(block.getEthereumBlockHeader().getLogsBloom());
ethereumBlockHeader.setDifficulty(block.getEthereumBlockHeader().getDifficulty());
ethereumBlockHeader.setTimestamp(block.getEthereumBlockHeader().getTimestamp());
ethereumBlockHeader.setNumber(HiveDecimal.create(block.getEthereumBlockHeader().getNumber()));
ethereumBlockHeader.setNumberRaw(block.getEthereumBlockHeader().getNumberRaw());
ethereumBlockHeader.setGasLimit(HiveDecimal.create(block.getEthereumBlockHeader().getGasLimit()));
ethereumBlockHeader.setGasLimitRaw(block.getEthereumBlockHeader().getGasLimitRaw());
ethereumBlockHeader.setGasUsed(HiveDecimal.create(block.getEthereumBlockHeader().getGasUsed()));
ethereumBlockHeader.setGasUsedRaw(block.getEthereumBlockHeader().getGasUsedRaw());
ethereumBlockHeader.setMixHash(block.getEthereumBlockHeader().getMixHash());
ethereumBlockHeader.setExtraData(block.getEthereumBlockHeader().getExtraData());
ethereumBlockHeader.setNonce(block.getEthereumBlockHeader().getNonce());
List<HiveEthereumTransaction> ethereumTransactions = new ArrayList<>();
for (int i=0;i<block.getEthereumTransactions().size();i++) {
EthereumTransaction currentTransaction = block.getEthereumTransactions().get(i);
HiveEthereumTransaction newTransaction = new HiveEthereumTransaction();
newTransaction.setNonce(currentTransaction.getNonce());
newTransaction.setValue(HiveDecimal.create(currentTransaction.getValue()));
newTransaction.setValueRaw(currentTransaction.getValueRaw());
newTransaction.setReceiveAddress(currentTransaction.getReceiveAddress());
newTransaction.setGasPrice(HiveDecimal.create(currentTransaction.getGasPrice()));
newTransaction.setGasPriceRaw(currentTransaction.getGasPriceRaw());
newTransaction.setGasLimit(HiveDecimal.create(currentTransaction.getGasLimit()));
newTransaction.setGasLimitRaw(currentTransaction.getGasLimitRaw());
newTransaction.setData(currentTransaction.getData());
newTransaction.setSig_v(currentTransaction.getSig_v());
newTransaction.setSig_r(currentTransaction.getSig_r());
newTransaction.setSig_s(currentTransaction.getSig_s());
ethereumTransactions.add(newTransaction);
}
List<HiveEthereumBlockHeader> uncleHeaders = new ArrayList<>();
for (int i=0;i<block.getUncleHeaders().size();i++) {
EthereumBlockHeader currentUncleHeader = block.getUncleHeaders().get(i);
HiveEthereumBlockHeader newUncleHeader = new HiveEthereumBlockHeader();
newUncleHeader.setParentHash(currentUncleHeader.getParentHash());
newUncleHeader.setUncleHash(currentUncleHeader.getUncleHash());
newUncleHeader.setCoinBase(currentUncleHeader.getCoinBase());
newUncleHeader.setStateRoot(currentUncleHeader.getStateRoot());
newUncleHeader.setTxTrieRoot(currentUncleHeader.getTxTrieRoot());
newUncleHeader.setReceiptTrieRoot(currentUncleHeader.getReceiptTrieRoot());
newUncleHeader.setLogsBloom(currentUncleHeader.getLogsBloom());
newUncleHeader.setDifficulty(currentUncleHeader.getDifficulty());
newUncleHeader.setTimestamp(currentUncleHeader.getTimestamp());
newUncleHeader.setNumber(HiveDecimal.create(currentUncleHeader.getNumber()));
newUncleHeader.setNumberRaw(currentUncleHeader.getNumberRaw());
newUncleHeader.setGasLimit(HiveDecimal.create(currentUncleHeader.getGasLimit()));
newUncleHeader.setGasLimitRaw(currentUncleHeader.getGasLimitRaw());
newUncleHeader.setGasUsed(HiveDecimal.create(currentUncleHeader.getGasUsed()));
newUncleHeader.setGasUsedRaw(currentUncleHeader.getGasUsedRaw());
newUncleHeader.setMixHash(currentUncleHeader.getMixHash());
newUncleHeader.setExtraData(currentUncleHeader.getExtraData());
newUncleHeader.setNonce(currentUncleHeader.getNonce());
uncleHeaders.add(newUncleHeader);
}
return new HiveEthereumBlock(ethereumBlockHeader,ethereumTransactions,uncleHeaders);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/serde/EthereumBlockStorageFormatDescriptor.java | hiveserde/src/main/java/org/zuinnote/hadoop/ethereum/hive/serde/EthereumBlockStorageFormatDescriptor.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.hive.serde;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hive.ql.io.AbstractStorageFormatDescriptor;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
import org.zuinnote.hadoop.ethereum.format.mapred.EthereumBlockFileInputFormat;
public class EthereumBlockStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
@Override
public Set<String> getNames() {
HashSet<String> result = new HashSet<>();
result.add("ETHEREUMBLCOK");
return result;
}
@Override
public String getInputFormat() {
return EthereumBlockFileInputFormat.class.getName();
}
@Override
public String getOutputFormat() {
return NullOutputFormat.class.getName();
}
@Override
public String getSerde() {
return EthereumBlockSerde.class.getName();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcointransaction/src/test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinTransactionTest.java | examples/mapreduce-bitcointransaction/src/test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinTransactionTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.example;
import mockit.*;
import java.lang.InterruptedException;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionInput;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionOutput;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinTransactionMap;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinTransactionReducer;
public final class MapReduceBitcoinTransactionTest {
@BeforeAll
public static void oneTimeSetUp() {
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void map(@Mocked final Mapper.Context defaultContext) throws IOException,InterruptedException {
BitcoinTransactionMap mapper = new BitcoinTransactionMap();
final BytesWritable key = new BytesWritable();
final BitcoinTransaction value = new BitcoinTransaction(0,new byte[0], new ArrayList<BitcoinTransactionInput>(),new byte[0],new ArrayList<BitcoinTransactionOutput>(),0);
final Text defaultKey = new Text("Transaction Input Count:");
final IntWritable nullInt = new IntWritable(0);
new Expectations() {{
defaultContext.write(defaultKey,nullInt); times=1;
}};
mapper.map(key,value,defaultContext);
}
@Test
public void reduce(@Mocked final Reducer.Context defaultContext) throws IOException,InterruptedException {
BitcoinTransactionReducer reducer = new BitcoinTransactionReducer();
final Text defaultKey = new Text("Transaction Input Count:");
final IntWritable oneInt = new IntWritable(1);
final IntWritable twoInt = new IntWritable(2);
final LongWritable resultLong = new LongWritable(3);
final ArrayList al = new ArrayList<IntWritable>();
al.add(oneInt);
al.add(twoInt);
new Expectations() {{
defaultContext.write(defaultKey,resultLong); times=1;
}};
reducer.reduce(defaultKey,al,defaultContext);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcointransaction/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinTransactionMap.java | examples/mapreduce-bitcointransaction/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinTransactionMap.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Mapper for counting the total number of Bitcoin transaction inputs of all Bitcoin transactions
*/
package org.zuinnote.hadoop.bitcoin.example.tasks;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
import java.io.IOException;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.*;
import java.util.*;
public class BitcoinTransactionMap extends Mapper<BytesWritable, BitcoinTransaction, Text, IntWritable> {
private static final Text defaultKey = new Text("Transaction Input Count:");
@Override
public void setup(Context context) throws IOException, InterruptedException {
// no action required
}
@Override
public void map(BytesWritable key, BitcoinTransaction value, Context context) throws IOException, InterruptedException {
// get the number of inputs to transaction
context.write(defaultKey, new IntWritable(value.getListOfInputs().size()));
}
@Override
public void cleanup(Context context) {
// no action required
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcointransaction/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinTransactionReducer.java | examples/mapreduce-bitcointransaction/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinTransactionReducer.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Reducer for summarizing counts by the mapper
*/
package org.zuinnote.hadoop.bitcoin.example.tasks;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
import java.io.IOException;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
import java.util.*;
public class BitcoinTransactionReducer extends Reducer<Text, IntWritable, Text, LongWritable> {
private LongWritable result= new LongWritable();
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
long sum = 0;
for (IntWritable val: values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcointransaction/src/main/java/org/zuinnote/hadoop/bitcoin/example/driver/BitcoinTransactionCounterDriver.java | examples/mapreduce-bitcointransaction/src/main/java/org/zuinnote/hadoop/bitcoin/example/driver/BitcoinTransactionCounterDriver.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Driver for a map reduce job counting the number of transacton inputs in a given blocks from the specified files containing Bitcoin blockchain data
*/
package org.zuinnote.hadoop.bitcoin.example.driver;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinTransactionMap;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinTransactionReducer;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.*;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
public class BitcoinTransactionCounterDriver extends Configured implements Tool {
public BitcoinTransactionCounterDriver() {
// nothing needed here
}
public int run(String[] args) throws Exception {
Job job = Job.getInstance();
job.setJobName("example-hadoop-bitcoin-transactioncounter-job");
job.setJarByClass(BitcoinTransactionCounterDriver.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
job.setMapperClass(BitcoinTransactionMap.class);
job.setReducerClass(BitcoinTransactionReducer.class);
job.setInputFormatClass(BitcoinTransactionFileInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
return job.waitForCompletion(true)?0:1;
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
conf.set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
// Let ToolRunner handle generic command-line options
int res = ToolRunner.run(conf, new BitcoinTransactionCounterDriver(), args);
System.exit(res);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcointransaction/src/integration-test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinTransactionIntegrationTest.java | examples/mapreduce-bitcointransaction/src/integration-test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinTransactionIntegrationTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.Files;
import java.nio.file.FileVisitResult;
import java.nio.file.SimpleFileVisitor;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.zuinnote.hadoop.bitcoin.example.driver.BitcoinTransactionCounterDriver;
public final class MapReduceBitcoinTransactionIntegrationTest {
private static final String tmpPrefix = "hcl-integrationtest";
private static java.nio.file.Path tmpPath;
private static String CLUSTERNAME="hcl-minicluster";
private static String DFS_INPUT_DIR_NAME = "/input";
private static String DFS_OUTPUT_DIR_NAME = "/output";
private static String DEFAULT_OUTPUT_FILENAME = "part-r-00000";
private static Path DFS_INPUT_DIR = new Path(DFS_INPUT_DIR_NAME);
private static Path DFS_OUTPUT_DIR = new Path(DFS_OUTPUT_DIR_NAME);
private static int NOOFNODEMANAGERS=1;
private static int NOOFDATANODES=4;
private static boolean STARTTIMELINESERVER=true;
private static MiniDFSCluster dfsCluster;
private static MiniMRYarnCluster miniCluster;
private ArrayList<Decompressor> openDecompressors = new ArrayList<>();
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// Create temporary directory for HDFS base and shutdownhook
// create temp directory
tmpPath = Files.createTempDirectory(tmpPrefix);
// create shutdown hook to remove temp files (=HDFS MiniCluster) after shutdown, may need to rethink to avoid many threads are created
Runtime.getRuntime().addShutdownHook(new Thread(
new Runnable() {
@Override
public void run() {
try {
Files.walkFileTree(tmpPath, new SimpleFileVisitor<java.nio.file.Path>() {
@Override
public FileVisitResult visitFile(java.nio.file.Path file,BasicFileAttributes attrs)
throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(java.nio.file.Path dir, IOException e) throws IOException {
if (e == null) {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
throw e;
}
});
} catch (IOException e) {
throw new RuntimeException("Error temporary files in following path could not be deleted "+tmpPath, e);
}
}}));
// Create Configuration
Configuration conf = new Configuration();
// create HDFS cluster
File baseDir = new File(tmpPath.toString()).getAbsoluteFile();
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
dfsCluster = builder.numDataNodes(NOOFDATANODES).build();
// create Yarn cluster
YarnConfiguration clusterConf = new YarnConfiguration(conf);
conf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString());
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 64);
conf.setClass(YarnConfiguration.RM_SCHEDULER,FifoScheduler.class, ResourceScheduler.class);
miniCluster = new MiniMRYarnCluster(CLUSTERNAME, NOOFNODEMANAGERS, STARTTIMELINESERVER);
miniCluster.init(conf);
miniCluster.start();
}
@AfterAll
public static void oneTimeTearDown() {
// destroy Yarn cluster
miniCluster.stop();
// destroy HDFS cluster
dfsCluster.shutdown();
}
@BeforeEach
public void setUp() throws IOException {
// create input directory
dfsCluster.getFileSystem().mkdirs(DFS_INPUT_DIR);
}
@AfterEach
public void tearDown() throws IOException {
// Remove input and output directory
dfsCluster.getFileSystem().delete(DFS_INPUT_DIR,true);
dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR,true);
// close any open decompressor
for (Decompressor currentDecompressor: this.openDecompressors) {
if (currentDecompressor!=null) {
CodecPool.returnDecompressor(currentDecompressor);
}
}
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void mapReduceGenesisBlock() throws IOException, Exception {
ClassLoader classLoader = getClass().getClassLoader();
// put testdata on DFS
String fileName="genesis.blk";
String fileNameFullLocal=classLoader.getResource("testdata/"+fileName).getFile();
Path inputFile=new Path(fileNameFullLocal);
dfsCluster.getFileSystem().copyFromLocalFile(false, false, inputFile, DFS_INPUT_DIR);
// submit the application
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
miniCluster.getConfig().set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
// Let ToolRunner handle generic command-line options
int res = ToolRunner.run(miniCluster.getConfig(), new BitcoinTransactionCounterDriver(), new String[]{dfsCluster.getFileSystem().getUri().toString()+DFS_INPUT_DIR_NAME,dfsCluster.getFileSystem().getUri().toString()+DFS_OUTPUT_DIR_NAME});
// check if successfully executed
// note the following does only work on Linux platforms, other platforms may show issue due to the Hadoop Unit testing framework only supports Linux
// You can remove this test if you work on another platform. The application itself builds and run on a real cluster without any issues.
//assertEquals( 0, res,"Successfully executed mapreduce application");
// fetch results
List<String> resultLines = readDefaultResults(1);
// compare results
assertEquals(1,resultLines.size(),"Number of result line is 1");
assertEquals("Transaction Input Count:\t1",resultLines.get(0),"Number of transaction inputs is 1");
}
/**
* Read results from the default output directory and default outputfile name
*
* @param numOfRows number of rows to read
*
*/
private List<String> readDefaultResults(int numOfRows) throws IOException {
ArrayList<String> result = new ArrayList<>();
Path defaultOutputfile = new Path(DFS_OUTPUT_DIR_NAME+"/"+DEFAULT_OUTPUT_FILENAME);
InputStream defaultInputStream = openFile(defaultOutputfile);
BufferedReader reader=new BufferedReader(new InputStreamReader(defaultInputStream));
int i=0;
while(reader.ready())
{
if (i==numOfRows) {
break;
}
result.add(reader.readLine());
i++;
}
reader.close();
return result;
}
/*
* Opens a file using the Hadoop API. It supports uncompressed and compressed files.
*
* @param path path to the file, e.g. file://path/to/file for a local file or hdfs://path/to/file for HDFS file. All filesystem configured for Hadoop can be used
*
* @return InputStream from which the file content can be read
*
* @throws java.io.Exception in case there is an issue reading the file
*
*
*/
private InputStream openFile(Path path) throws IOException {
CompressionCodec codec=new CompressionCodecFactory(miniCluster.getConfig()).getCodec(path);
FSDataInputStream fileIn=dfsCluster.getFileSystem().open(path);
// check if compressed
if (codec==null) { // uncompressed
return fileIn;
} else { // compressed
Decompressor decompressor = CodecPool.getDecompressor(codec);
this.openDecompressors.add(decompressor); // to be returned later using close
if (codec instanceof SplittableCompressionCodec) {
long end = dfsCluster.getFileSystem().getFileStatus(path).getLen();
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, 0, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
return cIn;
} else {
return codec.createInputStream(fileIn,decompressor);
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/spark2-bitcoinblock/src/test/java/org/zuinnote/spark2/bitcoin/example/Spark2BitcoinBlockCounterTest.java | examples/spark2-bitcoinblock/src/test/java/org/zuinnote/spark2/bitcoin/example/Spark2BitcoinBlockCounterTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.spark2.bitcoin.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.ArrayList;
import scala.Tuple2;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockWritable;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
public class Spark2BitcoinBlockCounterTest {
@BeforeAll
public static void oneTimeSetUp() {
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void mapNoOfTransaction() {
Spark2BitcoinBlockCounter sparkTransformator = new Spark2BitcoinBlockCounter();
BitcoinBlockWritable testBlock = new BitcoinBlockWritable();
BitcoinTransaction testTransaction = new BitcoinTransaction();
ArrayList<BitcoinTransaction> testTransactionList = new ArrayList<BitcoinTransaction>();
testTransactionList.add(testTransaction);
testBlock.setTransactions(testTransactionList);
Tuple2<String,Long> result = sparkTransformator.mapNoOfTransaction(testBlock);
assertEquals((long)1,(long)result._2(),"One transaction should have been mapped");
}
@Test
public void reduceSumUpTransactions() {
Spark2BitcoinBlockCounter sparkTransformator = new Spark2BitcoinBlockCounter();
Long transactionCountA = new Long(1);
Long transactionCountB = new Long(2);
assertEquals((long)3,(long)sparkTransformator.reduceSumUpTransactions(transactionCountA,transactionCountB),"Transaction count should sum up to 3");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/spark2-bitcoinblock/src/main/java/org/zuinnote/spark2/bitcoin/example/Spark2BitcoinBlockCounter.java | examples/spark2-bitcoinblock/src/main/java/org/zuinnote/spark2/bitcoin/example/Spark2BitcoinBlockCounter.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Driver for a Spark 2 job counting the number of transactons in a given block from the specified files containing Bitcoin blockchain data
*/
package org.zuinnote.spark2.bitcoin.example;
import java.util.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.util.*;
import scala.Tuple2;
import org.apache.spark.sql.*;
import org.apache.spark.api.java.*;
import org.apache.spark.api.java.function.*;
import org.apache.spark.SparkConf;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.*;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
public class Spark2BitcoinBlockCounter {
public Spark2BitcoinBlockCounter() {
super();
}
public static void main(String[] args) throws Exception {
SparkConf conf = new SparkConf().setAppName("Spark2 BitcoinBlock Analytics (hadoopcryptoledger)");
JavaSparkContext sc = new JavaSparkContext(conf);
// create Hadoop Configuration
Configuration hadoopConf= new Configuration();
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
hadoopConf.set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
jobTotalNumOfTransactions(sc,hadoopConf, args[0],args[1]);
sc.close();
}
/**
* a job for counting the total number of transactions
*
* @param sc context
* @param hadoopConf Configuration for input format
* @param inputFile Input file
* @param output outputFile file
*
*
**/
public static void jobTotalNumOfTransactions(JavaSparkContext sc, Configuration hadoopConf, String inputFile, String outputFile) {
// read bitcoin data from HDFS
JavaPairRDD<BytesWritable, BitcoinBlockWritable> bitcoinBlocksRDD = sc.newAPIHadoopFile(inputFile, BitcoinBlockFileInputFormat.class, BytesWritable.class, BitcoinBlockWritable.class,hadoopConf);
// extract the no transactions / block (map)
JavaPairRDD<String, Long> noOfTransactionPair = bitcoinBlocksRDD.mapToPair(new PairFunction<Tuple2<BytesWritable,BitcoinBlockWritable>, String, Long>() {
@Override
public Tuple2<String, Long> call(Tuple2<BytesWritable,BitcoinBlockWritable> tupleBlock) {
return mapNoOfTransaction(tupleBlock._2());
}
});
// combine the results from all blocks
JavaPairRDD<String, Long> totalCount = noOfTransactionPair.reduceByKey(new Function2<Long, Long, Long>() {
@Override
public Long call(Long a, Long b) {
return reduceSumUpTransactions(a,b);
}
});
// write results to HDFS
totalCount.repartition(1).saveAsTextFile(outputFile);
}
/**
* Maps the number of transactions of a block to a tuple
*
* @param block Bitcoinblock
*
* @return Tuple containing the String "No of transactions. " and the number of transactions as long
*
**/
public static Tuple2<String,Long> mapNoOfTransaction(BitcoinBlock block) {
return new Tuple2<String, Long>("No of transactions: ",(long)(block.getTransactions().size()));
}
/**
* Sums up the transaction count within a reduce step
*
* @param a transaction count
* @param b transaction count
*
* @return sum of a and b
*
**/
public static Long reduceSumUpTransactions(Long a, Long b) {
return a+b;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/spark2-bitcoinblock/src/integration-test/java/org/zuinnote/spark2/bitcoin/example/Spark2BitcoinBlockCounterSparkMasterIntegrationTest.java | examples/spark2-bitcoinblock/src/integration-test/java/org/zuinnote/spark2/bitcoin/example/Spark2BitcoinBlockCounterSparkMasterIntegrationTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* This is an integration test for testing the application with Spark2
*/
package org.zuinnote.spark2.bitcoin.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.Files;
import java.nio.file.FileVisitResult;
import java.nio.file.SimpleFileVisitor;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class Spark2BitcoinBlockCounterSparkMasterIntegrationTest {
private static String master = "local[2]";
private static String appName = "example-spark2bitcoinblockcounter-integrationtest";
private static final String tmpPrefix = "hcl-integrationtest";
private static java.nio.file.Path tmpPath;
private static String CLUSTERNAME="hcl-minicluster";
private static String DFS_INPUT_DIR_NAME = "/input";
private static String DFS_OUTPUT_DIR_NAME = "/output";
private static String DEFAULT_OUTPUT_FILENAME = "part-00000";
private static Path DFS_INPUT_DIR = new Path(DFS_INPUT_DIR_NAME);
private static Path DFS_OUTPUT_DIR = new Path(DFS_OUTPUT_DIR_NAME);
private static int NOOFDATANODES=4;
private static MiniDFSCluster dfsCluster;
private static JavaSparkContext sc;
private static Configuration conf;
private ArrayList<Decompressor> openDecompressors = new ArrayList<>();
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// Create temporary directory for HDFS base and shutdownhook
// create temp directory
tmpPath = Files.createTempDirectory(tmpPrefix);
// create shutdown hook to remove temp files (=HDFS MiniCluster) after shutdown, may need to rethink to avoid many threads are created
Runtime.getRuntime().addShutdownHook(new Thread(
new Runnable() {
@Override
public void run() {
try {
Files.walkFileTree(tmpPath, new SimpleFileVisitor<java.nio.file.Path>() {
@Override
public FileVisitResult visitFile(java.nio.file.Path file,BasicFileAttributes attrs)
throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(java.nio.file.Path dir, IOException e) throws IOException {
if (e == null) {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
throw e;
}
});
} catch (IOException e) {
throw new RuntimeException("Error temporary files in following path could not be deleted "+tmpPath, e);
}
}}));
// Create Configuration
conf = new Configuration();
// create HDFS cluster
File baseDir = new File(tmpPath.toString()).getAbsoluteFile();
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
dfsCluster = builder.numDataNodes(NOOFDATANODES).build();
conf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString());
// create Spark Context
SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master).set( "spark.driver.host", "localhost" );
sc = new JavaSparkContext(sparkConf);
}
@AfterAll
public static void oneTimeTearDown() throws IOException {
// destroy Spark cluster
if (sc!=null) {
sc.stop();
}
// destroy HDFS cluster
dfsCluster.shutdown();
}
@BeforeEach
public void setUp() throws IOException {
// create input directory
dfsCluster.getFileSystem().mkdirs(DFS_INPUT_DIR);
}
@AfterEach
public void tearDown() throws IOException {
// Remove input and output directory
dfsCluster.getFileSystem().delete(DFS_INPUT_DIR,true);
dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR,true);
// close any open decompressor
for (Decompressor currentDecompressor: this.openDecompressors) {
if (currentDecompressor!=null) {
CodecPool.returnDecompressor(currentDecompressor);
}
}
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void application() throws IOException {
ClassLoader classLoader = getClass().getClassLoader();
// put testdata on DFS
String fileName="genesis.blk";
String fileNameFullLocal=classLoader.getResource("testdata/"+fileName).getFile();
Path inputFile=new Path(fileNameFullLocal);
dfsCluster.getFileSystem().copyFromLocalFile(false, false, inputFile, DFS_INPUT_DIR);
// configure application
conf.set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
// submit application to Spark Local
Spark2BitcoinBlockCounter bcc = new Spark2BitcoinBlockCounter();
bcc.jobTotalNumOfTransactions(sc,conf,dfsCluster.getFileSystem().getUri().toString()+DFS_INPUT_DIR_NAME,dfsCluster.getFileSystem().getUri().toString()+DFS_OUTPUT_DIR_NAME);
// fetch results
List<String> resultLines = readDefaultResults(1);
// compare results
assertEquals(1,resultLines.size(), "Number of result line is 1");
assertEquals("(No of transactions: ,1)",resultLines.get(0),"Number of transactions is 1");
}
/**
* Read results from the default output directory and default outputfile name
*
* @param numOfRows number of rows to read
*
*/
private List<String> readDefaultResults(int numOfRows) throws IOException {
ArrayList<String> result = new ArrayList<>();
Path defaultOutputfile = new Path(DFS_OUTPUT_DIR_NAME+"/"+DEFAULT_OUTPUT_FILENAME);
InputStream defaultInputStream = openFile(defaultOutputfile);
BufferedReader reader=new BufferedReader(new InputStreamReader(defaultInputStream));
int i=0;
while(reader.ready())
{
if (i==numOfRows) {
break;
}
result.add(reader.readLine());
i++;
}
reader.close();
return result;
}
/*
* Opens a file using the Hadoop API. It supports uncompressed and compressed files.
*
* @param path path to the file, e.g. file://path/to/file for a local file or hdfs://path/to/file for HDFS file. All filesystem configured for Hadoop can be used
*
* @return InputStream from which the file content can be read
*
* @throws java.io.Exception in case there is an issue reading the file
*
*
*/
private InputStream openFile(Path path) throws IOException {
CompressionCodec codec=new CompressionCodecFactory(conf).getCodec(path);
FSDataInputStream fileIn=dfsCluster.getFileSystem().open(path);
// check if compressed
if (codec==null) { // uncompressed
return fileIn;
} else { // compressed
Decompressor decompressor = CodecPool.getDecompressor(codec);
this.openDecompressors.add(decompressor); // to be returned later using close
if (codec instanceof SplittableCompressionCodec) {
long end = dfsCluster.getFileSystem().getFileStatus(path).getLen();
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, 0, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
return cIn;
} else {
return codec.createInputStream(fileIn,decompressor);
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/spark-bitcoinblock/src/test/java/org/zuinnote/spark/bitcoin/example/SparkBitcoinBlockCounterTest.java | examples/spark-bitcoinblock/src/test/java/org/zuinnote/spark/bitcoin/example/SparkBitcoinBlockCounterTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.spark.bitcoin.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.ArrayList;
import scala.Tuple2;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockWritable;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionWritable;
public class SparkBitcoinBlockCounterTest {
@BeforeAll
public static void oneTimeSetUp() {
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void mapNoOfTransaction() {
SparkBitcoinBlockCounter sparkTransformator = new SparkBitcoinBlockCounter();
BitcoinBlockWritable testBlock = new BitcoinBlockWritable();
BitcoinTransaction testTransaction = new BitcoinTransaction();
ArrayList<BitcoinTransaction> testTransactionList = new ArrayList<BitcoinTransaction>();
testTransactionList.add(testTransaction);
testBlock.setTransactions(testTransactionList);
Tuple2<String,Long> result = sparkTransformator.mapNoOfTransaction(testBlock);
assertEquals((long)1,(long)result._2(),"One transaction should have been mapped");
}
@Test
public void reduceSumUpTransactions() {
SparkBitcoinBlockCounter sparkTransformator = new SparkBitcoinBlockCounter();
Long transactionCountA = new Long(1);
Long transactionCountB = new Long(2);
assertEquals((long)3,(long)sparkTransformator.reduceSumUpTransactions(transactionCountA,transactionCountB),"Transaction count should sum up to 3");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/spark-bitcoinblock/src/main/java/org/zuinnote/spark/bitcoin/example/SparkBitcoinBlockCounter.java | examples/spark-bitcoinblock/src/main/java/org/zuinnote/spark/bitcoin/example/SparkBitcoinBlockCounter.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Driver for a Spark job counting the number of transactons in a given block from the specified files containing Bitcoin blockchain data
*/
package org.zuinnote.spark.bitcoin.example;
import java.util.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.util.*;
import scala.Tuple2;
import org.apache.hadoop.conf.*;
import org.apache.spark.api.java.*;
import org.apache.spark.api.java.function.*;
import org.apache.spark.SparkConf;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.*;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
public class SparkBitcoinBlockCounter {
public SparkBitcoinBlockCounter() {
super();
}
public static void main(String[] args) throws Exception {
SparkConf conf = new SparkConf().setAppName("Spark2 BitcoinBlock Analytics (hadoopcryptoledger)");
JavaSparkContext sc = new JavaSparkContext(conf);
// create Hadoop Configuration
Configuration hadoopConf= new Configuration();
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
hadoopConf.set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
jobTotalNumOfTransactions(sc,hadoopConf, args[0],args[1]);
sc.close();
}
/**
* a job for counting the total number of transactions
*
* @param sc context
* @param hadoopConf Configuration for input format
* @param inputFile Input file
* @param output outputFile file
*
*
**/
public static void jobTotalNumOfTransactions(JavaSparkContext sc, Configuration hadoopConf, String inputFile, String outputFile) {
// read bitcoin data from HDFS
JavaPairRDD<BytesWritable, BitcoinBlockWritable> bitcoinBlocksRDD = sc.newAPIHadoopFile(inputFile, BitcoinBlockFileInputFormat.class, BytesWritable.class, BitcoinBlockWritable.class,hadoopConf);
// extract the no transactions / block (map)
JavaPairRDD<String, Long> noOfTransactionPair = bitcoinBlocksRDD.mapToPair(new PairFunction<Tuple2<BytesWritable,BitcoinBlockWritable>, String, Long>() {
@Override
public Tuple2<String, Long> call(Tuple2<BytesWritable,BitcoinBlockWritable> tupleBlock) {
return mapNoOfTransaction(tupleBlock._2());
}
});
// combine the results from all blocks
JavaPairRDD<String, Long> totalCount = noOfTransactionPair.reduceByKey(new Function2<Long, Long, Long>() {
@Override
public Long call(Long a, Long b) {
return reduceSumUpTransactions(a,b);
}
});
// write results to HDFS
totalCount.repartition(1).saveAsTextFile(outputFile);
}
/**
* Maps the number of transactions of a block to a tuple
*
* @param block Bitcoinblock
*
* @return Tuple containing the String "No of transactions. " and the number of transactions as long
*
**/
public static Tuple2<String,Long> mapNoOfTransaction(BitcoinBlockWritable block) {
return new Tuple2<String, Long>("No of transactions: ",(long)(block.getTransactions().size()));
}
/**
* Sums up the transaction count within a reduce step
*
* @param a transaction count
* @param b transaction count
*
* @return sum of a and b
*
**/
public static Long reduceSumUpTransactions(Long a, Long b) {
return a+b;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/spark-bitcoinblock/src/integration-test/java/org/zuinnote/spark/bitcoin/example/SparkBitcoinBlockCounterSparkMasterIntegrationTest.java | examples/spark-bitcoinblock/src/integration-test/java/org/zuinnote/spark/bitcoin/example/SparkBitcoinBlockCounterSparkMasterIntegrationTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* This is an integration test for testing the application with Spark
*/
package org.zuinnote.spark.bitcoin.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.Files;
import java.nio.file.FileVisitResult;
import java.nio.file.SimpleFileVisitor;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class SparkBitcoinBlockCounterSparkMasterIntegrationTest {
private static String master = "local[2]";
private static String appName = "example-sparkbitcoinblockcounter-integrationtest";
private static final String tmpPrefix = "hcl-integrationtest";
private static java.nio.file.Path tmpPath;
private static String CLUSTERNAME="hcl-minicluster";
private static String DFS_INPUT_DIR_NAME = "/input";
private static String DFS_OUTPUT_DIR_NAME = "/output";
private static String DEFAULT_OUTPUT_FILENAME = "part-00000";
private static Path DFS_INPUT_DIR = new Path(DFS_INPUT_DIR_NAME);
private static Path DFS_OUTPUT_DIR = new Path(DFS_OUTPUT_DIR_NAME);
private static int NOOFDATANODES=4;
private static MiniDFSCluster dfsCluster;
private static JavaSparkContext sc;
private static Configuration conf;
private ArrayList<Decompressor> openDecompressors = new ArrayList<>();
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// Create temporary directory for HDFS base and shutdownhook
// create temp directory
tmpPath = Files.createTempDirectory(tmpPrefix);
// create shutdown hook to remove temp files (=HDFS MiniCluster) after shutdown, may need to rethink to avoid many threads are created
Runtime.getRuntime().addShutdownHook(new Thread( new Runnable() {
@Override
public void run() {
try {
Files.walkFileTree(tmpPath, new SimpleFileVisitor<java.nio.file.Path>() {
@Override
public FileVisitResult visitFile(java.nio.file.Path file,BasicFileAttributes attrs)
throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(java.nio.file.Path dir, IOException e) throws IOException {
if (e == null) {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
throw e;
}
});
} catch (IOException e) {
throw new RuntimeException("Error temporary files in following path could not be deleted "+tmpPath, e);
}
}}));
// Create Configuration
conf = new Configuration();
// create HDFS cluster
File baseDir = new File(tmpPath.toString()).getAbsoluteFile();
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
dfsCluster = builder.numDataNodes(NOOFDATANODES).build();
conf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString());
// create Spark Context
SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master).set( "spark.driver.host", "localhost" );
sc = new JavaSparkContext(sparkConf);
}
@AfterAll
public static void oneTimeTearDown() throws IOException {
// destroy Spark cluster
if (sc!=null) {
sc.stop();
}
// destroy HDFS cluster
dfsCluster.shutdown();
}
@BeforeEach
public void setUp() throws IOException {
// create input directory
dfsCluster.getFileSystem().mkdirs(DFS_INPUT_DIR);
}
@AfterEach
public void tearDown() throws IOException {
// Remove input and output directory
dfsCluster.getFileSystem().delete(DFS_INPUT_DIR,true);
dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR,true);
// close any open decompressor
for (Decompressor currentDecompressor: this.openDecompressors) {
if (currentDecompressor!=null) {
CodecPool.returnDecompressor(currentDecompressor);
}
}
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull("Test Data File \""+fileName+"\" is not null in resource path",fileNameGenesis);
File file = new File(fileNameGenesis);
assertTrue(file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse(file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void application() throws IOException {
ClassLoader classLoader = getClass().getClassLoader();
// put testdata on DFS
String fileName="genesis.blk";
String fileNameFullLocal=classLoader.getResource("testdata/"+fileName).getFile();
Path inputFile=new Path(fileNameFullLocal);
dfsCluster.getFileSystem().copyFromLocalFile(false, false, inputFile, DFS_INPUT_DIR);
// configure application
conf.set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
// submit application to Spark Local
SparkBitcoinBlockCounter bcc = new SparkBitcoinBlockCounter();
bcc.jobTotalNumOfTransactions(sc,conf,dfsCluster.getFileSystem().getUri().toString()+DFS_INPUT_DIR_NAME,dfsCluster.getFileSystem().getUri().toString()+DFS_OUTPUT_DIR_NAME);
// fetch results
List<String> resultLines = readDefaultResults(1);
// compare results
assertEquals(1,resultLines.size(),"Number of result line is 1");
assertEquals("(No of transactions: ,1)",resultLines.get(0),"Number of transactions is 1");
}
/**
* Read results from the default output directory and default outputfile name
*
* @param numOfRows number of rows to read
*
*/
private List<String> readDefaultResults(int numOfRows) throws IOException {
ArrayList<String> result = new ArrayList<>();
Path defaultOutputfile = new Path(DFS_OUTPUT_DIR_NAME+"/"+DEFAULT_OUTPUT_FILENAME);
InputStream defaultInputStream = openFile(defaultOutputfile);
BufferedReader reader=new BufferedReader(new InputStreamReader(defaultInputStream));
int i=0;
while(reader.ready())
{
if (i==numOfRows) {
break;
}
result.add(reader.readLine());
i++;
}
reader.close();
return result;
}
/*
* Opens a file using the Hadoop API. It supports uncompressed and compressed files.
*
* @param path path to the file, e.g. file://path/to/file for a local file or hdfs://path/to/file for HDFS file. All filesystem configured for Hadoop can be used
*
* @return InputStream from which the file content can be read
*
* @throws java.io.Exception in case there is an issue reading the file
*
*
*/
private InputStream openFile(Path path) throws IOException {
CompressionCodec codec=new CompressionCodecFactory(conf).getCodec(path);
FSDataInputStream fileIn=dfsCluster.getFileSystem().open(path);
// check if compressed
if (codec==null) { // uncompressed
return fileIn;
} else { // compressed
Decompressor decompressor = CodecPool.getDecompressor(codec);
this.openDecompressors.add(decompressor); // to be returned later using close
if (codec instanceof SplittableCompressionCodec) {
long end = dfsCluster.getFileSystem().getFileStatus(path).getLen();
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, 0, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
return cIn;
} else {
return codec.createInputStream(fileIn,decompressor);
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcoinblock/src/test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinBlockTest.java | examples/mapreduce-bitcoinblock/src/test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinBlockTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.example;
import mockit.*;
import java.lang.InterruptedException;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinBlockMap;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinBlockReducer;
public final class MapReduceBitcoinBlockTest {
@BeforeAll
public static void oneTimeSetUp() {
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void map(@Mocked final Mapper.Context defaultContext) throws IOException,InterruptedException {
BitcoinBlockMap mapper = new BitcoinBlockMap();
final BytesWritable key = new BytesWritable();
final BitcoinBlock value = new BitcoinBlock();
final Text defaultKey = new Text("Transaction Count:");
final IntWritable nullInt = new IntWritable(0);
value.setTransactions(new ArrayList<BitcoinTransaction>());
new Expectations() {{
defaultContext.write(defaultKey,nullInt); times=1;
}};
mapper.map(key,value,defaultContext);
}
@Test
public void reduce(@Mocked final Reducer.Context defaultContext) throws IOException,InterruptedException {
BitcoinBlockReducer reducer = new BitcoinBlockReducer();
final Text defaultKey = new Text("Transaction Count:");
final IntWritable oneInt = new IntWritable(1);
final IntWritable twoInt = new IntWritable(2);
final LongWritable resultLong = new LongWritable(3);
final ArrayList al = new ArrayList<IntWritable>();
al.add(oneInt);
al.add(twoInt);
new Expectations() {{
defaultContext.write(defaultKey,resultLong); times=1;
}};
reducer.reduce(defaultKey,al,defaultContext);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcoinblock/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinBlockReducer.java | examples/mapreduce-bitcoinblock/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinBlockReducer.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Reducer for summarizing counts by the mapper
*/
package org.zuinnote.hadoop.bitcoin.example.tasks;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
import java.io.IOException;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
import java.util.*;
public class BitcoinBlockReducer extends Reducer<Text, IntWritable, Text, LongWritable> {
private LongWritable result = new LongWritable();
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
long sum = 0;
for (IntWritable val: values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcoinblock/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinBlockMap.java | examples/mapreduce-bitcoinblock/src/main/java/org/zuinnote/hadoop/bitcoin/example/tasks/BitcoinBlockMap.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Mapper for counting the number of Bitcoin transactions in a file on HDFS
*/
package org.zuinnote.hadoop.bitcoin.example.tasks;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
import java.io.IOException;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.*;
import java.util.*;
public class BitcoinBlockMap extends Mapper<BytesWritable, BitcoinBlock, Text, IntWritable> {
private static final Text defaultKey = new Text("Transaction Count:");
@Override
public void setup(Context context) throws IOException, InterruptedException {
// nothing to set up
}
@Override
public void map(BytesWritable key, BitcoinBlock value, Context context) throws IOException, InterruptedException {
// get the number of transactions
context.write(defaultKey, new IntWritable(value.getTransactions().size()));
}
@Override
public void cleanup(Context context) {
// nothing to cleanup
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcoinblock/src/main/java/org/zuinnote/hadoop/bitcoin/example/driver/BitcoinBlockCounterDriver.java | examples/mapreduce-bitcoinblock/src/main/java/org/zuinnote/hadoop/bitcoin/example/driver/BitcoinBlockCounterDriver.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Driver for a map reduce job counting the number of transactons in a given blocks from the specified files containing Bitcoin blockchain data
*/
package org.zuinnote.hadoop.bitcoin.example.driver;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinBlockMap;
import org.zuinnote.hadoop.bitcoin.example.tasks.BitcoinBlockReducer;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.*;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
public class BitcoinBlockCounterDriver extends Configured implements Tool {
public BitcoinBlockCounterDriver() {
// nothing needed here
}
public int run(String[] args) throws Exception {
Job job = Job.getInstance();
job.setJobName("example-hadoop-bitcoin-transactioncounter-job");
job.setJarByClass(BitcoinBlockCounterDriver.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
job.setMapperClass(BitcoinBlockMap.class);
job.setReducerClass(BitcoinBlockReducer.class);
job.setInputFormatClass(BitcoinBlockFileInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
return job.waitForCompletion(true)?0:1;
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
conf.set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
// Let ToolRunner handle generic command-line options
int res = ToolRunner.run(conf, new BitcoinBlockCounterDriver(), args);
System.exit(res);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-bitcoinblock/src/integration-test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinBlockIntegrationTest.java | examples/mapreduce-bitcoinblock/src/integration-test/java/org/zuinnote/hadoop/bitcoin/example/MapReduceBitcoinBlockIntegrationTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.Files;
import java.nio.file.FileVisitResult;
import java.nio.file.SimpleFileVisitor;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.zuinnote.hadoop.bitcoin.example.driver.BitcoinBlockCounterDriver;
public final class MapReduceBitcoinBlockIntegrationTest {
private static final String tmpPrefix = "hcl-integrationtest";
private static java.nio.file.Path tmpPath;
private static String CLUSTERNAME="hcl-minicluster";
private static String DFS_INPUT_DIR_NAME = "/input";
private static String DFS_OUTPUT_DIR_NAME = "/output";
private static String DEFAULT_OUTPUT_FILENAME = "part-r-00000";
private static Path DFS_INPUT_DIR = new Path(DFS_INPUT_DIR_NAME);
private static Path DFS_OUTPUT_DIR = new Path(DFS_OUTPUT_DIR_NAME);
private static int NOOFNODEMANAGERS=1;
private static int NOOFDATANODES=4;
private static boolean STARTTIMELINESERVER=true;
private static MiniDFSCluster dfsCluster;
private static MiniMRYarnCluster miniCluster;
private ArrayList<Decompressor> openDecompressors = new ArrayList<>();
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// Create temporary directory for HDFS base and shutdownhook
// create temp directory
tmpPath = Files.createTempDirectory(tmpPrefix);
// create shutdown hook to remove temp files (=HDFS MiniCluster) after shutdown, may need to rethink to avoid many threads are created
Runtime.getRuntime().addShutdownHook(new Thread(
new Runnable() {
@Override
public void run() {
try {
Files.walkFileTree(tmpPath, new SimpleFileVisitor<java.nio.file.Path>() {
@Override
public FileVisitResult visitFile(java.nio.file.Path file,BasicFileAttributes attrs)
throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(java.nio.file.Path dir, IOException e) throws IOException {
if (e == null) {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
throw e;
}
});
} catch (IOException e) {
throw new RuntimeException("Error temporary files in following path could not be deleted "+tmpPath, e);
}
}}));
// Create Configuration
Configuration conf = new Configuration();
// create HDFS cluster
File baseDir = new File(tmpPath.toString()).getAbsoluteFile();
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
dfsCluster = builder.numDataNodes(NOOFDATANODES).build();
// create Yarn cluster
YarnConfiguration clusterConf = new YarnConfiguration(conf);
conf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString());
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 64);
conf.setClass(YarnConfiguration.RM_SCHEDULER,FifoScheduler.class, ResourceScheduler.class);
miniCluster = new MiniMRYarnCluster(CLUSTERNAME, NOOFNODEMANAGERS, STARTTIMELINESERVER);
miniCluster.init(conf);
miniCluster.start();
}
@AfterAll
public static void oneTimeTearDown() {
// destroy Yarn cluster
miniCluster.stop();
// destroy HDFS cluster
dfsCluster.shutdown();
}
@BeforeEach
public void setUp() throws IOException {
// create input directory
dfsCluster.getFileSystem().mkdirs(DFS_INPUT_DIR);
}
@AfterEach
public void tearDown() throws IOException {
// Remove input and output directory
dfsCluster.getFileSystem().delete(DFS_INPUT_DIR,true);
dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR,true);
// close any open decompressor
for (Decompressor currentDecompressor: this.openDecompressors) {
if (currentDecompressor!=null) {
CodecPool.returnDecompressor(currentDecompressor);
}
}
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void mapReduceGenesisBlock() throws IOException, Exception {
ClassLoader classLoader = getClass().getClassLoader();
// put testdata on DFS
String fileName="genesis.blk";
String fileNameFullLocal=classLoader.getResource("testdata/"+fileName).getFile();
Path inputFile=new Path(fileNameFullLocal);
dfsCluster.getFileSystem().copyFromLocalFile(false, false, inputFile, DFS_INPUT_DIR);
// submit the application
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
miniCluster.getConfig().set("hadoopcryptoledger.bitcoinblockinputformat.filter.magic","F9BEB4D9");
// Let ToolRunner handle generic command-line options
int res = ToolRunner.run(miniCluster.getConfig(), new BitcoinBlockCounterDriver(), new String[]{dfsCluster.getFileSystem().getUri().toString()+DFS_INPUT_DIR_NAME,dfsCluster.getFileSystem().getUri().toString()+DFS_OUTPUT_DIR_NAME});
// check if successfully executed
// note the following does only work on Linux platforms, other platforms may show issue due to the Hadoop Unit testing framework only supports Linux
// You can remove this test if you work on another platform. The application itself builds and run on a real cluster without any issues.
// assertEquals( 0, res,"Successfully executed mapreduce application");
// fetch results
List<String> resultLines = readDefaultResults(1);
// compare results
assertEquals(1,resultLines.size(),"Number of result line is 1");
assertEquals("Transaction Count:\t1",resultLines.get(0),"Number of transactions is 1");
}
/**
* Read results from the default output directory and default outputfile name
*
* @param numOfRows number of rows to read
*
*/
private List<String> readDefaultResults(int numOfRows) throws IOException {
ArrayList<String> result = new ArrayList<>();
Path defaultOutputfile = new Path(DFS_OUTPUT_DIR_NAME+"/"+DEFAULT_OUTPUT_FILENAME);
InputStream defaultInputStream = openFile(defaultOutputfile);
BufferedReader reader=new BufferedReader(new InputStreamReader(defaultInputStream));
int i=0;
while(reader.ready())
{
if (i==numOfRows) {
break;
}
result.add(reader.readLine());
i++;
}
reader.close();
return result;
}
/*
* Opens a file using the Hadoop API. It supports uncompressed and compressed files.
*
* @param path path to the file, e.g. file://path/to/file for a local file or hdfs://path/to/file for HDFS file. All filesystem configured for Hadoop can be used
*
* @return InputStream from which the file content can be read
*
* @throws java.io.Exception in case there is an issue reading the file
*
*
*/
private InputStream openFile(Path path) throws IOException {
CompressionCodec codec=new CompressionCodecFactory(miniCluster.getConfig()).getCodec(path);
FSDataInputStream fileIn=dfsCluster.getFileSystem().open(path);
// check if compressed
if (codec==null) { // uncompressed
return fileIn;
} else { // compressed
Decompressor decompressor = CodecPool.getDecompressor(codec);
this.openDecompressors.add(decompressor); // to be returned later using close
if (codec instanceof SplittableCompressionCodec) {
long end = dfsCluster.getFileSystem().getFileStatus(path).getLen();
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, 0, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
return cIn;
} else {
return codec.createInputStream(fileIn,decompressor);
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-ethereumblock/src/test/java/org/zuinnote/hadoop/ethereum/example/MapReduceEthereumBlockTest.java | examples/mapreduce-ethereumblock/src/test/java/org/zuinnote/hadoop/ethereum/example/MapReduceEthereumBlockTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.example;
import mockit.*;
import java.lang.InterruptedException;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumTransaction;
import org.zuinnote.hadoop.ethereum.example.tasks.EthereumBlockMap;
import org.zuinnote.hadoop.ethereum.example.tasks.EthereumBlockReducer;
public final class MapReduceEthereumBlockTest {
@BeforeAll
public static void oneTimeSetUp() {
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void map(@Mocked final Mapper.Context defaultContext) throws IOException,InterruptedException {
EthereumBlockMap mapper = new EthereumBlockMap();
final BytesWritable key = new BytesWritable( );
final EthereumBlock value = new EthereumBlock(null,new ArrayList<EthereumTransaction>(),null);
final Text defaultKey = new Text("Transaction Count:");
final IntWritable nullInt = new IntWritable(0);
new Expectations() {{
defaultContext.write(defaultKey,nullInt); times=1;
}};
mapper.map(key,value,defaultContext);
}
@Test
public void reduce(@Mocked final Reducer.Context defaultContext) throws IOException,InterruptedException {
EthereumBlockReducer reducer = new EthereumBlockReducer();
final Text defaultKey = new Text("Transaction Count:");
final IntWritable oneInt = new IntWritable(1);
final IntWritable twoInt = new IntWritable(2);
final LongWritable resultLong = new LongWritable(3);
final ArrayList al = new ArrayList<IntWritable>();
al.add(oneInt);
al.add(twoInt);
new Expectations() {{
defaultContext.write(defaultKey,resultLong); times=1;
}};
reducer.reduce(defaultKey,al,defaultContext);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-ethereumblock/src/main/java/org/zuinnote/hadoop/ethereum/example/tasks/EthereumBlockMap.java | examples/mapreduce-ethereumblock/src/main/java/org/zuinnote/hadoop/ethereum/example/tasks/EthereumBlockMap.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Mapper for counting the number of Bitcoin transactions in a file on HDFS
*/
package org.zuinnote.hadoop.ethereum.example.tasks;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
import java.io.IOException;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
import java.util.*;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
public class EthereumBlockMap extends Mapper<BytesWritable, EthereumBlock, Text, IntWritable> {
private static final Text defaultKey = new Text("Transaction Count:");
@Override
public void setup(Context context) throws IOException, InterruptedException {
// nothing to set up
}
@Override
public void map(BytesWritable key, EthereumBlock value, Context context) throws IOException, InterruptedException {
// get the number of transactions
context.write(defaultKey, new IntWritable(value.getEthereumTransactions().size()));
}
@Override
public void cleanup(Context context) {
// nothing to cleanup
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-ethereumblock/src/main/java/org/zuinnote/hadoop/ethereum/example/tasks/EthereumBlockReducer.java | examples/mapreduce-ethereumblock/src/main/java/org/zuinnote/hadoop/ethereum/example/tasks/EthereumBlockReducer.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Reducer for summarizing counts by the mapper
*/
package org.zuinnote.hadoop.ethereum.example.tasks;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
import java.io.IOException;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
import java.util.*;
public class EthereumBlockReducer extends Reducer<Text, IntWritable, Text, LongWritable> {
private LongWritable result = new LongWritable();
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
long sum = 0;
for (IntWritable val: values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-ethereumblock/src/main/java/org/zuinnote/hadoop/ethereum/example/driver/EthereumBlockCounterDriver.java | examples/mapreduce-ethereumblock/src/main/java/org/zuinnote/hadoop/ethereum/example/driver/EthereumBlockCounterDriver.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Simple Driver for a map reduce job counting the number of transactons in a given blocks from the specified files containing Bitcoin blockchain data
*/
package org.zuinnote.hadoop.ethereum.example.driver;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;
import org.zuinnote.hadoop.ethereum.format.mapreduce.EthereumBlockFileInputFormat;
import org.zuinnote.hadoop.ethereum.example.tasks.EthereumBlockMap;
import org.zuinnote.hadoop.ethereum.example.tasks.EthereumBlockReducer;
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
public class EthereumBlockCounterDriver extends Configured implements Tool {
public EthereumBlockCounterDriver() {
// nothing needed here
}
public int run(String[] args) throws Exception {
Job job = Job.getInstance();
job.setJobName("example-hadoop-ethereum-transactioncounter-job");
job.setJarByClass(EthereumBlockCounterDriver.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
job.setMapperClass(EthereumBlockMap.class);
job.setReducerClass(EthereumBlockReducer.class);
job.setInputFormatClass(EthereumBlockFileInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
return job.waitForCompletion(true)?0:1;
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
/** Set as an example some of the options to configure the Ethereum fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
conf.set("hadoopcryptoledeger.ethereumblockinputformat.usedirectbuffer","false");
// Let ToolRunner handle generic command-line options
int res = ToolRunner.run(conf, new EthereumBlockCounterDriver(), args);
System.exit(res);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/examples/mapreduce-ethereumblock/src/integration-test/java/org/zuinnote/hadoop/ethereum/example/MapReduceEthereumBlockIntegrationTest.java | examples/mapreduce-ethereumblock/src/integration-test/java/org/zuinnote/hadoop/ethereum/example/MapReduceEthereumBlockIntegrationTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.example;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.Files;
import java.nio.file.FileVisitResult;
import java.nio.file.SimpleFileVisitor;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.zuinnote.hadoop.ethereum.example.driver.EthereumBlockCounterDriver;
public final class MapReduceEthereumBlockIntegrationTest {
private static final String tmpPrefix = "hcl-integrationtest";
private static java.nio.file.Path tmpPath;
private static String CLUSTERNAME="hcl-minicluster";
private static String DFS_INPUT_DIR_NAME = "/input";
private static String DFS_OUTPUT_DIR_NAME = "/output";
private static String DEFAULT_OUTPUT_FILENAME = "part-r-00000";
private static Path DFS_INPUT_DIR = new Path(DFS_INPUT_DIR_NAME);
private static Path DFS_OUTPUT_DIR = new Path(DFS_OUTPUT_DIR_NAME);
private static int NOOFNODEMANAGERS=1;
private static int NOOFDATANODES=4;
private static boolean STARTTIMELINESERVER=true;
private static MiniDFSCluster dfsCluster;
private static MiniMRYarnCluster miniCluster;
private ArrayList<Decompressor> openDecompressors = new ArrayList<>();
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// Create temporary directory for HDFS base and shutdownhook
// create temp directory
tmpPath = Files.createTempDirectory(tmpPrefix);
// create shutdown hook to remove temp files (=HDFS MiniCluster) after shutdown, may need to rethink to avoid many threads are created
Runtime.getRuntime().addShutdownHook(new Thread(
new Runnable() {
@Override
public void run() {
try {
Files.walkFileTree(tmpPath, new SimpleFileVisitor<java.nio.file.Path>() {
@Override
public FileVisitResult visitFile(java.nio.file.Path file,BasicFileAttributes attrs)
throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(java.nio.file.Path dir, IOException e) throws IOException {
if (e == null) {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
throw e;
}
});
} catch (IOException e) {
throw new RuntimeException("Error temporary files in following path could not be deleted "+tmpPath, e);
}
}}));
// Create Configuration
Configuration conf = new Configuration();
// create HDFS cluster
File baseDir = new File(tmpPath.toString()).getAbsoluteFile();
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
dfsCluster = builder.numDataNodes(NOOFDATANODES).build();
// create Yarn cluster
YarnConfiguration clusterConf = new YarnConfiguration(conf);
conf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString());
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 64);
conf.setClass(YarnConfiguration.RM_SCHEDULER,FifoScheduler.class, ResourceScheduler.class);
miniCluster = new MiniMRYarnCluster(CLUSTERNAME, NOOFNODEMANAGERS, STARTTIMELINESERVER);
miniCluster.init(conf);
miniCluster.start();
}
@AfterAll
public static void oneTimeTearDown() {
// destroy Yarn cluster
miniCluster.stop();
// destroy HDFS cluster
dfsCluster.shutdown();
}
@BeforeEach
public void setUp() throws IOException {
// create input directory
dfsCluster.getFileSystem().mkdirs(DFS_INPUT_DIR);
}
@AfterEach
public void tearDown() throws IOException {
// Remove input and output directory
dfsCluster.getFileSystem().delete(DFS_INPUT_DIR,true);
dfsCluster.getFileSystem().delete(DFS_OUTPUT_DIR,true);
// close any open decompressor
for (Decompressor currentDecompressor: this.openDecompressors) {
if (currentDecompressor!=null) {
CodecPool.returnDecompressor(currentDecompressor);
}
}
}
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void mapReduceBlock1346406() throws IOException, Exception {
ClassLoader classLoader = getClass().getClassLoader();
// put testdata on DFS
String fileName="eth1346406.bin";
String fileNameFullLocal=classLoader.getResource("testdata/"+fileName).getFile();
Path inputFile=new Path(fileNameFullLocal);
dfsCluster.getFileSystem().copyFromLocalFile(false, false, inputFile, DFS_INPUT_DIR);
// submit the application
/** Set as an example some of the options to configure the Bitcoin fileformat **/
/** Find here all configuration options: https://github.com/ZuInnoTe/hadoopcryptoledger/wiki/Hadoop-File-Format **/
miniCluster.getConfig().set("hadoopcryptoledeger.ethereumblockinputformat.usedirectbuffer","false");
// Let ToolRunner handle generic command-line options
int res = ToolRunner.run(miniCluster.getConfig(), new EthereumBlockCounterDriver(), new String[]{dfsCluster.getFileSystem().getUri().toString()+DFS_INPUT_DIR_NAME,dfsCluster.getFileSystem().getUri().toString()+DFS_OUTPUT_DIR_NAME});
// check if successfully executed
// note the following does only work on Linux platforms, other platforms may show issue due to the Hadoop Unit testing framework only supports Linux
// You can remove this test if you work on another platform. The application itself builds and run on a real cluster without any issues.
//assertEquals( 0, res,"Successfully executed mapreduce application");
// fetch results
List<String> resultLines = readDefaultResults(1);
// compare results
assertEquals(1,resultLines.size(),"Number of result line is 1");
assertEquals("Transaction Count:\t6",resultLines.get(0),"Number of transactions is 6");
}
/**
* Read results from the default output directory and default outputfile name
*
* @param numOfRows number of rows to read
*
*/
private List<String> readDefaultResults(int numOfRows) throws IOException {
ArrayList<String> result = new ArrayList<>();
Path defaultOutputfile = new Path(DFS_OUTPUT_DIR_NAME+"/"+DEFAULT_OUTPUT_FILENAME);
InputStream defaultInputStream = openFile(defaultOutputfile);
BufferedReader reader=new BufferedReader(new InputStreamReader(defaultInputStream));
int i=0;
while(reader.ready())
{
if (i==numOfRows) {
break;
}
result.add(reader.readLine());
i++;
}
reader.close();
return result;
}
/*
* Opens a file using the Hadoop API. It supports uncompressed and compressed files.
*
* @param path path to the file, e.g. file://path/to/file for a local file or hdfs://path/to/file for HDFS file. All filesystem configured for Hadoop can be used
*
* @return InputStream from which the file content can be read
*
* @throws java.io.Exception in case there is an issue reading the file
*
*
*/
private InputStream openFile(Path path) throws IOException {
CompressionCodec codec=new CompressionCodecFactory(miniCluster.getConfig()).getCodec(path);
FSDataInputStream fileIn=dfsCluster.getFileSystem().open(path);
// check if compressed
if (codec==null) { // uncompressed
return fileIn;
} else { // compressed
Decompressor decompressor = CodecPool.getDecompressor(codec);
this.openDecompressors.add(decompressor); // to be returned later using close
if (codec instanceof SplittableCompressionCodec) {
long end = dfsCluster.getFileSystem().getFileStatus(path).getLen();
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, 0, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
return cIn;
} else {
return codec.createInputStream(fileIn,decompressor);
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/test/java/org/zuinnote/flink/bitcoin/FlinkBitcoinDataSourceTest.java | flinkdatasource/src/test/java/org/zuinnote/flink/bitcoin/FlinkBitcoinDataSourceTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.flink.bitcoin;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
public class FlinkBitcoinDataSourceTest {
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// one-time initialization code
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion2BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion3BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataMultiBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void restoreStateBitcoinBlock() throws HadoopCryptoLedgerConfigurationException, IOException {
// test if state is correctly restored
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
BitcoinBlockFlinkInputFormat inputFormat = new BitcoinBlockFlinkInputFormat(1024*1024, "F9BEB4D9",false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BitcoinBlock reuse = new BitcoinBlock();
BitcoinBlock nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
assertEquals(1,nextBlock.getTransactions().size(),"First block contains exactly one transction");
// save state
Long state = inputFormat.getCurrentState();
assertEquals(293,state.longValue(),"state 293");
// read 2nd block
nextBlock=inputFormat.nextRecord(reuse);
assertEquals(2,nextBlock.getTransactions().size(),"Second block contains two transactions");
// restore state
inputFormat.reopen(blockInputSplit, state);
// read 2nd block again
nextBlock=inputFormat.nextRecord(reuse);
assertEquals(2,nextBlock.getTransactions().size(),"Second block contains two transactions");
// read 3rd block
nextBlock=inputFormat.nextRecord(reuse);
assertEquals(343,nextBlock.getTransactions().size(),"Third block contains 343 transactions");
nextBlock=inputFormat.nextRecord(reuse);
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void restoreStateBitcoinRawBlock() throws HadoopCryptoLedgerConfigurationException, IOException {
// test if state is correctly restored
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
BitcoinRawBlockFlinkInputFormat inputFormat = new BitcoinRawBlockFlinkInputFormat(1024*1024, "F9BEB4D9",false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BytesWritable reuse = new BytesWritable();
BytesWritable nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
// save state
Long state = inputFormat.getCurrentState();
assertEquals(293,state.longValue(),"state 293");
// read 2nd block
nextBlock=inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"Second block after state save exist");
// restore state
inputFormat.reopen(blockInputSplit, state);
// read 2nd block again
nextBlock=inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"Second block after state restore exist");
// read 3rd block
nextBlock=inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"Third block after state restore exist");
nextBlock=inputFormat.nextRecord(reuse);
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void restoreStateBitcoinTransaction() throws HadoopCryptoLedgerConfigurationException, IOException {
// test if state is correctly restored
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit transactionInputSplit = new FileInputSplit(0,file,0, -1, null);
BitcoinTransactionFlinkInputFormat inputFormat = new BitcoinTransactionFlinkInputFormat(1024*1024, "F9BEB4D9",false);
inputFormat.open(transactionInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BitcoinTransaction reuse = new BitcoinTransaction();
BitcoinTransaction nextTransaction = inputFormat.nextRecord(reuse);
assertNotNull(nextTransaction,"First Transaction returned");
nextTransaction = inputFormat.nextRecord(reuse);
assertNotNull(nextTransaction,"Second Transaction returned");
// save state
Tuple2<Long,Long> state = inputFormat.getCurrentState();
assertEquals(775,(long)state.f0,"state buffer position: 775");
assertEquals(1,(long)state.f1,"state transacton position: 1");
nextTransaction = inputFormat.nextRecord(reuse);
assertNotNull(nextTransaction,"Third Transaction returned after state save");
// restore state
inputFormat.reopen(transactionInputSplit, state);
nextTransaction = inputFormat.nextRecord(reuse);
assertNotNull(nextTransaction,"Third Transaction returned after state restore");
// further transactions
int remainingTransactionCounter=0;
while (inputFormat.nextRecord(reuse)!=null) {
remainingTransactionCounter++;
}
assertEquals(343,remainingTransactionCounter,"Reamining transactions after state restore from block 3: 343");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void parseBitcoinBlockGenesis() throws HadoopCryptoLedgerConfigurationException, IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
BitcoinBlockFlinkInputFormat inputFormat = new BitcoinBlockFlinkInputFormat(1024*1024, "F9BEB4D9",false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BitcoinBlock reuse = new BitcoinBlock();
BitcoinBlock nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
assertEquals(1,nextBlock.getTransactions().size(),"First block contains exactly one transction");
nextBlock=inputFormat.nextRecord(reuse);
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void parseBitcoinRawBlock() throws HadoopCryptoLedgerConfigurationException, IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
BitcoinRawBlockFlinkInputFormat inputFormat = new BitcoinRawBlockFlinkInputFormat(1024*1024,"F9BEB4D9",false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BytesWritable reuse = new BytesWritable();
BytesWritable nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
assertEquals( 293, nextBlock.getLength(),"First Block must have size of 293");
nextBlock=inputFormat.nextRecord(reuse);
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void parseBitcoinTransaction() throws HadoopCryptoLedgerConfigurationException, IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
BitcoinTransactionFlinkInputFormat inputFormat = new BitcoinTransactionFlinkInputFormat(1024*1024, "F9BEB4D9",false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BitcoinTransaction reuse = new BitcoinTransaction();
int transactCount=0;
while (!inputFormat.reachedEnd() && (inputFormat.nextRecord(reuse)!=null)) {
transactCount++;
}
assertEquals( 1, transactCount,"Genesis Block must contain exactly one transactions");
}
} | java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/test/java/org/zuinnote/flink/ethereum/FlinkEthereumDataSourceTest.java | flinkdatasource/src/test/java/org/zuinnote/flink/ethereum/FlinkEthereumDataSourceTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.flink.ethereum;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
public class FlinkEthereumDataSourceTest {
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// one-time initialization code
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void checkTestDataBlock0to10Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock3346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void restoreStateEthereumBlock() throws IOException {
// test if state is correctly restored
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
EthereumBlockFlinkInputFormat inputFormat = new EthereumBlockFlinkInputFormat(1024*1024, false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
EthereumBlock reuse = new EthereumBlock();
EthereumBlock nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
assertEquals(0,nextBlock.getEthereumTransactions().size(),"First block contains 0 transactions");
byte[] expectedParentHash = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals( expectedParentHash, nextBlock.getEthereumBlockHeader().getParentHash(),"Block 0 contains a correct 32 byte parent hash");
// save state
Long state = inputFormat.getCurrentState();
assertEquals(540,state.longValue(),"state 540");
// read 2nd block
nextBlock=inputFormat.nextRecord(reuse);
assertEquals( 0, nextBlock.getEthereumTransactions().size(),"Block 1 contains 0 transactions");
assertEquals(0, nextBlock.getUncleHeaders().size(),"Block 1 contains 0 uncleHeaders");
expectedParentHash = new byte[] {(byte) 0xD4,(byte) 0xE5,0x67,0x40,(byte) 0xF8,0x76,(byte) 0xAE,(byte) 0xF8,(byte) 0xC0,0x10,(byte) 0xB8,0x6A,0x40,(byte) 0xD5,(byte) 0xF5,0x67,0x45,(byte) 0xA1,0x18,(byte) 0xD0,(byte) 0x90,0x6A,0x34,(byte) 0xE6,(byte) 0x9A,(byte) 0xEC,(byte) 0x8C,0x0D,(byte) 0xB1,(byte) 0xCB,(byte) 0x8F,(byte) 0xA3};
assertArrayEquals( expectedParentHash, nextBlock.getEthereumBlockHeader().getParentHash(),"Block 1 contains a correct 32 byte parent hash");
// restore state
inputFormat.reopen(blockInputSplit, state);
// read 2nd block again
nextBlock=inputFormat.nextRecord(reuse);
assertEquals( 0, nextBlock.getEthereumTransactions().size(),"Block 1 contains 0 transactions");
assertEquals(0, nextBlock.getUncleHeaders().size(),"Block 1 contains 0 uncleHeaders");
expectedParentHash = new byte[] {(byte) 0xD4,(byte) 0xE5,0x67,0x40,(byte) 0xF8,0x76,(byte) 0xAE,(byte) 0xF8,(byte) 0xC0,0x10,(byte) 0xB8,0x6A,0x40,(byte) 0xD5,(byte) 0xF5,0x67,0x45,(byte) 0xA1,0x18,(byte) 0xD0,(byte) 0x90,0x6A,0x34,(byte) 0xE6,(byte) 0x9A,(byte) 0xEC,(byte) 0x8C,0x0D,(byte) 0xB1,(byte) 0xCB,(byte) 0x8F,(byte) 0xA3};
assertArrayEquals( expectedParentHash, nextBlock.getEthereumBlockHeader().getParentHash(),"Block 1 contains a correct 32 byte parent hash");
// read remaing blocks (9)
int remainingBlockCounter=0;
while (nextBlock!=null) {
nextBlock=inputFormat.nextRecord(reuse);
if (nextBlock!=null) {
remainingBlockCounter++;
}
}
assertEquals(9,remainingBlockCounter,"Remaining blocks 9");
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void restoreStateBitcoinRawBlock() throws IOException {
// test if state is correctly restored
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
EthereumRawBlockFlinkInputFormat inputFormat = new EthereumRawBlockFlinkInputFormat(1024*1024, false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BytesWritable reuse = new BytesWritable();
BytesWritable nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
// save state
Long state = inputFormat.getCurrentState();
assertEquals(540,state.longValue(),"state 540");
// read 2nd block
nextBlock=inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"Second block after state save exist");
// restore state
inputFormat.reopen(blockInputSplit, state);
// read 2nd block again
nextBlock=inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"Second block after state restore exist");
// read remaing blocks (9)
int remainingBlockCounter=0;
while (nextBlock!=null) {
nextBlock=inputFormat.nextRecord(reuse);
if (nextBlock!=null) {
remainingBlockCounter++;
}
}
assertEquals(9,remainingBlockCounter,"Remaining blocks 9");
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void parseEthereumBlock1346406() throws IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
EthereumBlockFlinkInputFormat inputFormat = new EthereumBlockFlinkInputFormat(1024*1024, false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
EthereumBlock reuse = new EthereumBlock();
EthereumBlock nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
assertEquals(6,nextBlock.getEthereumTransactions().size(),"First block contains exactly 6 transactions");
nextBlock=inputFormat.nextRecord(reuse);
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
@Test
public void parseEthereumRawBlock() throws IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputSplit blockInputSplit = new FileInputSplit(0,file,0, -1, null);
EthereumRawBlockFlinkInputFormat inputFormat = new EthereumRawBlockFlinkInputFormat(1024*1024,false);
inputFormat.open(blockInputSplit);
assertFalse(inputFormat.reachedEnd(),"End not reached");
BytesWritable reuse = new BytesWritable();
BytesWritable nextBlock = inputFormat.nextRecord(reuse);
assertNotNull(nextBlock,"First Block returned");
assertEquals( 1223, nextBlock.getLength(),"First Block must have size of 1223");
nextBlock=inputFormat.nextRecord(reuse);
assertNull(nextBlock,"No further block");
assertTrue(inputFormat.reachedEnd(),"End reached");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/AbstractBitcoinFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/AbstractBitcoinFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Flink Data Source base for Bitcoin Input formats
*/
package org.zuinnote.flink.bitcoin;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.FileInputFormat;
import org.apache.flink.core.fs.FileInputSplit;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockReader;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinUtil;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader;
public abstract class AbstractBitcoinFlinkInputFormat<E> extends FileInputFormat<E> {
private static final Log LOG = LogFactory.getLog(AbstractBitcoinFlinkInputFormat.class.getName());
private transient BitcoinBlockReader bbr;
private int maxSizeBitcoinBlock;
private byte [][] specificMagicArray;
private boolean useDirectBuffer;
private boolean readAuxPOW;
/**
*
*/
private static final long serialVersionUID = -4661705676237973665L;
public AbstractBitcoinFlinkInputFormat() throws HadoopCryptoLedgerConfigurationException {
this(AbstractBitcoinRecordReader.DEFAULT_MAXSIZE_BITCOINBLOCK,AbstractBitcoinRecordReader.DEFAULT_MAGIC,AbstractBitcoinRecordReader.DEFAULT_USEDIRECTBUFFER);
}
public AbstractBitcoinFlinkInputFormat(int maxSizeBitcoinBlock, String specificMagicStr, boolean useDirectBuffer) throws HadoopCryptoLedgerConfigurationException {
this(maxSizeBitcoinBlock,specificMagicStr,useDirectBuffer,false);
}
public AbstractBitcoinFlinkInputFormat(int maxSizeBitcoinBlock, String specificMagicStr, boolean useDirectBuffer, boolean readAuxPOW) throws HadoopCryptoLedgerConfigurationException {
this.unsplittable=true;
this.maxSizeBitcoinBlock=maxSizeBitcoinBlock;
this.useDirectBuffer=useDirectBuffer;
if ((specificMagicStr!=null) && (specificMagicStr.length()>0)) {
String[] specificMagicStringArray=specificMagicStr.split(",");
this.specificMagicArray=new byte[specificMagicStringArray.length][4]; // each magic is always 4 byte
for (int i=0;i<specificMagicStringArray.length;i++) {
byte[] currentMagicNo=BitcoinUtil.convertHexStringToByteArray(specificMagicStringArray[i]);
if (currentMagicNo.length!=4) {
throw new HadoopCryptoLedgerConfigurationException("Error: Configuration. Magic number has not a length of 4 bytes. Index: "+i);
}
this.specificMagicArray[i]=currentMagicNo;
}
}
this.readAuxPOW=readAuxPOW;
}
/*
* Reads data supplied by Flink with @see org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockReader
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.FileInputFormat#open(org.apache.flink.core.fs.FileInputSplit)
*/
@Override
public void open(FileInputSplit split) throws IOException {
super.open(split);
LOG.debug("Initialize Bitcoin reader");
// temporary measure to set buffer size to 1, otherwise we cannot guarantee that checkpointing works
bbr = new BitcoinBlockReader(this.stream,this.maxSizeBitcoinBlock,1,this.specificMagicArray,this.useDirectBuffer,this.readAuxPOW);
}
/*
* Returns the BitcoinBlockReader used to parse this stream
*
* @return BitcoinBlockReader
*
*/
public BitcoinBlockReader getBbr() {
return this.bbr;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/BitcoinRawBlockFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/BitcoinRawBlockFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Flink Data Source for the Bitcoin Raw Block format
*/
package org.zuinnote.flink.bitcoin;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.CheckpointableInputFormat;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.hadoop.io.BytesWritable;
import org.apache.commons.logging.Log;
public class BitcoinRawBlockFlinkInputFormat extends AbstractBitcoinFlinkInputFormat<BytesWritable> implements CheckpointableInputFormat<FileInputSplit, Long> {
private static final Log LOG = LogFactory.getLog(BitcoinRawBlockFlinkInputFormat.class.getName());
/**
*
*/
private static final long serialVersionUID = 4150883073922261077L;
private boolean isEndReached;
public BitcoinRawBlockFlinkInputFormat(int maxSizeBitcoinBlock, String specificMagicStr,
boolean useDirectBuffer) throws HadoopCryptoLedgerConfigurationException {
this(maxSizeBitcoinBlock, specificMagicStr, useDirectBuffer,false);
}
public BitcoinRawBlockFlinkInputFormat(int maxSizeBitcoinBlock, String specificMagicStr,
boolean useDirectBuffer, boolean readAuxPOW) throws HadoopCryptoLedgerConfigurationException {
super(maxSizeBitcoinBlock, specificMagicStr, useDirectBuffer,readAuxPOW);
this.isEndReached=false;
}
@Override
public boolean reachedEnd() throws IOException {
return this.isEndReached;
}
/*
* Saves the current state of the stream
*
* @return current position in stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#getCurrentState()
*/
@Override
public Long getCurrentState() throws IOException {
return this.stream.getPos();
}
/*
* Reopens the stream at a specific previously stored position and initializes the BitcoinBlockReader
*
* @param split FileInputSplit
* @param state position in the stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#reopen(org.apache.flink.core.io.InputSplit, java.io.Serializable)
*/
@Override
public void reopen(FileInputSplit split, Long state) throws IOException {
try {
this.open(split);
} finally {
this.stream.seek(state);
}
}
@Override
public BytesWritable nextRecord(BytesWritable reuse) throws IOException {
ByteBuffer dataBlock=null;
if ((this.currentSplit.getLength()<0) ||(this.stream.getPos()<=this.currentSplit.getStart()+this.currentSplit.getLength())) {
try {
dataBlock=this.getBbr().readRawBlock();
} catch(BitcoinBlockReadException e) {
LOG.error(e);
}
if (dataBlock==null) {
this.isEndReached=true;
} else {
byte[] dataBlockArray;
if (dataBlock.hasArray()) {
dataBlockArray=dataBlock.array();
} else {
dataBlockArray=new byte[dataBlock.capacity()];
dataBlock.get(dataBlockArray);
}
reuse.set(dataBlockArray,0,dataBlockArray.length);
return reuse;
}
}
else {
this.isEndReached=true;
}
return null;
}
} | java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/BitcoinBlockFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/BitcoinBlockFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Flink Data Source for the Bitcoin Block format
*/
package org.zuinnote.flink.bitcoin;
import java.io.IOException;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.CheckpointableInputFormat;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.commons.logging.Log;
public class BitcoinBlockFlinkInputFormat extends AbstractBitcoinFlinkInputFormat<BitcoinBlock> implements CheckpointableInputFormat<FileInputSplit, Long> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockFlinkInputFormat.class.getName());
/**
*
*/
private static final long serialVersionUID = 4150883073922261077L;
private boolean isEndReached;
public BitcoinBlockFlinkInputFormat(int maxSizeBitcoinBlock, String specificMagicStr,
boolean useDirectBuffer) throws HadoopCryptoLedgerConfigurationException {
this(maxSizeBitcoinBlock, specificMagicStr, useDirectBuffer,false);
}
public BitcoinBlockFlinkInputFormat(int maxSizeBitcoinBlock, String specificMagicStr,
boolean useDirectBuffer, boolean readAuxPOW) throws HadoopCryptoLedgerConfigurationException {
super(maxSizeBitcoinBlock, specificMagicStr, useDirectBuffer);
this.isEndReached=false;
}
@Override
public boolean reachedEnd() throws IOException {
return this.isEndReached;
}
/*
* Saves the current state of the stream
*
* @return current position in stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#getCurrentState()
*/
@Override
public Long getCurrentState() throws IOException {
return this.stream.getPos();
}
/*
* Reopens the stream at a specific previously stored position and initializes the BitcoinBlockReader
*
* @param split FileInputSplit
* @param state position in the stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#reopen(org.apache.flink.core.io.InputSplit, java.io.Serializable)
*/
@Override
public void reopen(FileInputSplit split, Long state) throws IOException {
try {
this.open(split);
} finally {
this.stream.seek(state);
}
}
@Override
public BitcoinBlock nextRecord(BitcoinBlock reuse) throws IOException {
BitcoinBlock dataBlock=null;
if ((this.currentSplit.getLength()<0) ||(this.stream.getPos()<=this.currentSplit.getStart()+this.currentSplit.getLength())) {
try {
dataBlock=this.getBbr().readBlock();
} catch(BitcoinBlockReadException e) {
LOG.error(e);
}
if (dataBlock==null) {
this.isEndReached=true;
}
} else {
this.isEndReached=true;
}
return dataBlock;
}
} | java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/BitcoinTransactionFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/bitcoin/BitcoinTransactionFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Flink Data Source for the Bitcoin Transaction format
*/
package org.zuinnote.flink.bitcoin;
import java.io.IOException;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.CheckpointableInputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.commons.logging.Log;
public class BitcoinTransactionFlinkInputFormat extends AbstractBitcoinFlinkInputFormat<BitcoinTransaction> implements CheckpointableInputFormat<FileInputSplit, Tuple2<Long,Long>> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockFlinkInputFormat.class.getName());
/**
*
*/
private static final long serialVersionUID = 4150883073922261077L;
private boolean isEndReached;
private transient BitcoinBlock currentBitcoinBlock;
private long currentTransactionCounterInBlock;
public BitcoinTransactionFlinkInputFormat(int maxSizeBitcoinBlock,String specificMagicStr,
boolean useDirectBuffer) throws HadoopCryptoLedgerConfigurationException {
this(maxSizeBitcoinBlock, specificMagicStr, useDirectBuffer,false);
}
public BitcoinTransactionFlinkInputFormat(int maxSizeBitcoinBlock,String specificMagicStr,
boolean useDirectBuffer, boolean readAuxPOW) throws HadoopCryptoLedgerConfigurationException {
super(maxSizeBitcoinBlock, specificMagicStr, useDirectBuffer,readAuxPOW);
this.isEndReached=false;
}
@Override
public boolean reachedEnd() throws IOException {
return this.isEndReached;
}
/*
* Saves the current state of the stream
*
* @return current position in stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#getCurrentState()
*/
@Override
public Tuple2<Long,Long> getCurrentState() throws IOException {
return new Tuple2<>(this.stream.getPos(), this.currentTransactionCounterInBlock);
}
/*
* Reopens the stream at a specific previously stored position and initializes the BitcoinBlockReader
*
* @param split FileInputSplit
* @param state position in the stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#reopen(org.apache.flink.core.io.InputSplit, java.io.Serializable)
*/
@Override
public void reopen(FileInputSplit split, Tuple2<Long,Long> state) throws IOException {
try {
this.open(split);
} finally {
this.stream.seek(state.f0);
this.currentTransactionCounterInBlock=state.f1;
}
}
@Override
public BitcoinTransaction nextRecord(BitcoinTransaction reuse) throws IOException {
BitcoinTransaction currentTransaction=null;
if ((this.currentSplit.getLength()<0) ||(this.stream.getPos()<=this.currentSplit.getStart()+this.currentSplit.getLength())) {
if ((currentBitcoinBlock==null) || (currentBitcoinBlock.getTransactions().size()==currentTransactionCounterInBlock)){
try {
currentBitcoinBlock=getBbr().readBlock();
currentTransactionCounterInBlock=0;
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
}
if (currentBitcoinBlock==null) {
this.isEndReached=true;
} else {
currentTransaction=currentBitcoinBlock.getTransactions().get((int) currentTransactionCounterInBlock);
currentTransactionCounterInBlock++;
}
} else {
this.isEndReached=true;
}
return currentTransaction;
}
} | java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/ethereum/AbstractEthereumFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/ethereum/AbstractEthereumFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.flink.ethereum;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.FileInputFormat;
import org.apache.flink.core.fs.FileInputSplit;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockReader;
import org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader;
public abstract class AbstractEthereumFlinkInputFormat<E> extends FileInputFormat<E> {
private static final Log LOG = LogFactory.getLog(AbstractEthereumFlinkInputFormat.class.getName());
private transient EthereumBlockReader ebr;
private int maxSizeEthereumBlock;
private boolean useDirectBuffer;
/**
*
*/
private static final long serialVersionUID = 5750478952540634456L;
public AbstractEthereumFlinkInputFormat() {
this(AbstractEthereumRecordReader.DEFAULT_MAXSIZE_ETHEREUMBLOCK,AbstractEthereumRecordReader.DEFAULT_USEDIRECTBUFFER);
}
public AbstractEthereumFlinkInputFormat(int maxSizeEthereumBlock, boolean useDirectBuffer) {
this.unsplittable=true;
this.maxSizeEthereumBlock=maxSizeEthereumBlock;
this.useDirectBuffer=useDirectBuffer;
}
/*
* Reads data supplied by Flink with @see org.zuinnote.hadoop.ethereum.format.common.EthereumBlockReader
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.FileInputFormat#open(org.apache.flink.core.fs.FileInputSplit)
*/
@Override
public void open(FileInputSplit split) throws IOException {
super.open(split);
LOG.debug("Initialize Ethereum reader");
// temporary measure to set buffer size to 1, otherwise we cannot guarantee that checkpointing works
ebr = new EthereumBlockReader(this.stream,this.maxSizeEthereumBlock,1,this.useDirectBuffer);
}
/*
* Returns the EthereumBlockReader used to parse this stream
*
* @return EthereumBlockReader
*
*/
public EthereumBlockReader getEbr() {
return this.ebr;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/ethereum/EthereumBlockFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/ethereum/EthereumBlockFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.flink.ethereum;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.CheckpointableInputFormat;
import org.apache.flink.core.fs.FileInputSplit;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
public class EthereumBlockFlinkInputFormat extends AbstractEthereumFlinkInputFormat<EthereumBlock> implements CheckpointableInputFormat<FileInputSplit, Long> {
/**
*
*/
private static final long serialVersionUID = 8852751104331531470L;
private static final Log LOG = LogFactory.getLog(EthereumBlockFlinkInputFormat.class.getName());
private boolean isEndReached;
/***
*
*
* @param maxSizeEthereumBlock
* @param useDirectBuffer
*/
public EthereumBlockFlinkInputFormat(int maxSizeEthereumBlock, boolean useDirectBuffer) {
super(maxSizeEthereumBlock, useDirectBuffer);
this.isEndReached=false;
}
/*
* Reopens the stream at a specific previously stored position and initializes the BitcoinBlockReader
*
* @param split FileInputSplit
* @param state position in the stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#reopen(org.apache.flink.core.io.InputSplit, java.io.Serializable)
*/
@Override
public void reopen(FileInputSplit split, Long state) throws IOException {
try {
this.open(split);
} finally {
this.stream.seek(state);
}
}
@Override
public EthereumBlock nextRecord(EthereumBlock reuse) throws IOException {
EthereumBlock dataBlock=null;
if ((this.currentSplit.getLength()<0) ||(this.stream.getPos()<=this.currentSplit.getStart()+this.currentSplit.getLength())) {
try {
dataBlock=this.getEbr().readBlock();
} catch (EthereumBlockReadException e) {
LOG.error(e);
throw new RuntimeException(e.toString());
}
if (dataBlock==null) {
this.isEndReached=true;
}
} else {
this.isEndReached=true;
}
return dataBlock;
}
@Override
public boolean reachedEnd() throws IOException {
return this.isEndReached;
}
/*
* Saves the current state of the stream
*
* @return current position in stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#getCurrentState()
*/
@Override
public Long getCurrentState() throws IOException {
return this.stream.getPos();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/flinkdatasource/src/main/java/org/zuinnote/flink/ethereum/EthereumRawBlockFlinkInputFormat.java | flinkdatasource/src/main/java/org/zuinnote/flink/ethereum/EthereumRawBlockFlinkInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.flink.ethereum;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.flink.api.common.io.CheckpointableInputFormat;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.hadoop.io.BytesWritable;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
public class EthereumRawBlockFlinkInputFormat extends AbstractEthereumFlinkInputFormat<BytesWritable> implements CheckpointableInputFormat<FileInputSplit, Long>{
/**
*
*/
private static final long serialVersionUID = 8890497690418004762L;
private static final Log LOG = LogFactory.getLog(EthereumRawBlockFlinkInputFormat.class.getName());
private boolean isEndReached;
/***
*
* @param maxSizeEthereumBlock
* @param useDirectBuffer
*/
public EthereumRawBlockFlinkInputFormat(int maxSizeEthereumBlock, boolean useDirectBuffer) {
super(maxSizeEthereumBlock, useDirectBuffer);
this.isEndReached=false;
}
@Override
public boolean reachedEnd() throws IOException {
return this.isEndReached;
}
@Override
public BytesWritable nextRecord(BytesWritable reuse) throws IOException {
ByteBuffer dataBlock=null;
if ((this.currentSplit.getLength()<0) ||(this.stream.getPos()<=this.currentSplit.getStart()+this.currentSplit.getLength())) {
try {
dataBlock=this.getEbr().readRawBlock();
} catch (EthereumBlockReadException e) {
LOG.error(e);
throw new RuntimeException(e.toString());
}
if (dataBlock==null) {
this.isEndReached=true;
} else {
byte[] dataBlockArray;
if (dataBlock.hasArray()) {
dataBlockArray=dataBlock.array();
} else {
dataBlockArray=new byte[dataBlock.capacity()];
dataBlock.get(dataBlockArray);
}
reuse.set(dataBlockArray,0,dataBlockArray.length);
return reuse;
}
}
else {
this.isEndReached=true;
}
return null;
}
/*
* Saves the current state of the stream
*
* @return current position in stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#getCurrentState()
*/
@Override
public Long getCurrentState() throws IOException {
return this.stream.getPos();
}
/*
* Reopens the stream at a specific previously stored position and initializes the BitcoinBlockReader
*
* @param split FileInputSplit
* @param state position in the stream
*
* (non-Javadoc)
* @see org.apache.flink.api.common.io.CheckpointableInputFormat#reopen(org.apache.flink.core.io.InputSplit, java.io.Serializable)
*/
@Override
public void reopen(FileInputSplit split, Long state) throws IOException {
try {
this.open(split);
} finally {
this.stream.seek(state);
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/namecoin/format/common/NamecoinAuxPOWTest.java | inputformat/src/test/java/org/zuinnote/hadoop/namecoin/format/common/NamecoinAuxPOWTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.namecoin.format.common;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlock;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockReader;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
public class NamecoinAuxPOWTest {
static final int DEFAULT_BUFFERSIZE=64*1024;
static final int DEFAULT_MAXSIZE_BITCOINBLOCK=8 * 1024 * 1024;
static final byte[][] DEFAULT_MAGIC = {{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xFE}}; // namecoin
private static final byte[][] TESTNET3_MAGIC = {{(byte)0x0B,(byte)0x11,(byte)0x09,(byte)0x07}};
private static final byte[][] MULTINET_MAGIC = {{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9},{(byte)0x0B,(byte)0x11,(byte)0x09,(byte)0x07}};
@Test
public void checkTestDataNamecoinGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="namecoingenesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataNamecoinRandomBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="namecoinblock.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataNamecoinThreeDifferentOpinOneBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="namecointhreedifferentopinoneblock.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void readNoAuxPowNamecoinGenesisBlock() throws BitcoinBlockReadException, IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="namecoingenesis.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
boolean auxPow=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct,auxPow);
BitcoinBlock theBitcoinBlock = bbr.readBlock();
assertNotNull(theBitcoinBlock,"Namecoin Genesis Block contains a block");
assertNull(theBitcoinBlock.getAuxPOW(),"Namecoin Genesis Block has no AuxPowInformation");
assertEquals( 1, theBitcoinBlock.getTransactions().size(),"Namecoin Genesis Block must contain exactly one transaction");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfInputs().size(),"Namecoin Genesis Block must contain exactly one transaction with one input");
assertEquals( 84, theBitcoinBlock.getTransactions().get(0).getListOfInputs().get(0).getTxInScript().length,"Namecoin Genesis Block must contain exactly one transaction with one input and script length 84");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().size(),"Namecoin Genesis Block must contain exactly one transaction with one output");
assertEquals( 67, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().get(0).getTxOutScript().length,"Namecoin Genesis Block must contain exactly one transaction with one output and script length 67");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void readAuxPowNamecoinThreeDifferentOpinOneBlock() throws BitcoinBlockReadException, IOException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="namecointhreedifferentopinoneblock.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
boolean auxPow=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct,auxPow);
BitcoinBlock theBitcoinBlock = bbr.readBlock();
assertNotNull(theBitcoinBlock,"Namecoin Three Different Op in One Block contains a block");
assertNotNull(theBitcoinBlock.getAuxPOW(),"Namecoin Three Different Op in Block has AuxPowInformation");
assertEquals( 7, theBitcoinBlock.getTransactions().size(),"Namecoin Three Different Op in Block must contain exactly 7 transactions");
} finally {
if (bbr!=null)
bbr.close();
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/namecoin/format/common/NamecoinUtilTest.java | inputformat/src/test/java/org/zuinnote/hadoop/namecoin/format/common/NamecoinUtilTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.namecoin.format.common;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinUtil;
public class NamecoinUtilTest {
@Test
public void extractNamecoinFieldFirstUpdate() {
String firstUpdateScript ="520A642F666C6173687570641460C7B068EDEA60281DAF424C38D8DAB87C96CF993D7B226970223A223134352E3234392E3130362E323238222C226D6170223A7B222A223A7B226970223A223134352E3234392E3130362E323238227D7D7D6D6D76A91451B4FC93AAB8CBDBD0AC9BC8EAF824643FC1E29B88AC";
String[] result=NamecoinUtil.extractNamecoinField(BitcoinUtil.convertHexStringToByteArray(firstUpdateScript));
assertNotNull( result,"Valid result obtained");
// test for domain name
assertEquals("d/flashupd",result[0],"Domain name of first update detected correctly");
// test for domain value
assertEquals("{\"ip\":\"145.249.106.228\",\"map\":{\"*\":{\"ip\":\"145.249.106.228\"}}}",result[1],"Domain value of first update detected correctly");
}
@Test
public void extractNamecoinFieldUpdate() {
String updateScript = "5309642F70616E656C6B612D7B226970223A22382E382E382E38222C226D6170223A7B222A223A7B226970223A22382E382E382E38227D7D7D6D7576A9148D804B079AC79AD0CA108A4E5B679DB591FF069B88AC";
String[] result=NamecoinUtil.extractNamecoinField(BitcoinUtil.convertHexStringToByteArray(updateScript));
assertNotNull( result,"Valid result obtained");
// test for domain name
assertEquals("d/panelka",result[0],"Domain name of first update detected correctly");
// test for domain value
assertEquals("{\"ip\":\"8.8.8.8\",\"map\":{\"*\":{\"ip\":\"8.8.8.8\"}}}",result[1],"Domain value of first update detected correctly");
}
@Test
public void extractNamecoinFieldInvalid() {
String[] resultNull = NamecoinUtil.extractNamecoinField(null);
assertNull(resultNull,"Null script leads to null result");
String[] resultInvalid = NamecoinUtil.extractNamecoinField(new byte[] {0x01,0x02,0x03});
assertNull(resultInvalid,"Invalid script leads to null result");
}
@Test
public void getNameOperationPositive() {
// new
String newScript = "511459C39A7CC5E0B91801294A272AD558B1F67A4E6D6D76A914DD900A6C1223698FC262E28C8A1D8D73B40B375188AC";
String resultOpNew=NamecoinUtil.getNameOperation(BitcoinUtil.convertHexStringToByteArray(newScript));
assertEquals(NamecoinUtil.STR_OP_NAME_NEW,resultOpNew,"Script containing new op detected correctly");
// firstupdate
String firstUpdateScript ="520A642F666C6173687570641460C7B068EDEA60281DAF424C38D8DAB87C96CF993D7B226970223A223134352E3234392E3130362E323238222C226D6170223A7B222A223A7B226970223A223134352E3234392E3130362E323238227D7D7D6D6D76A91451B4FC93AAB8CBDBD0AC9BC8EAF824643FC1E29B88AC";
String resultOpFirstUpdate=NamecoinUtil.getNameOperation(BitcoinUtil.convertHexStringToByteArray(firstUpdateScript));
assertEquals(NamecoinUtil.STR_OP_NAME_FIRSTUPDATE,resultOpFirstUpdate,"Script containing firstupdate op detected correctly");
// update
String updateScript = "5309642F70616E656C6B612D7B226970223A22382E382E382E38222C226D6170223A7B222A223A7B226970223A22382E382E382E38227D7D7D6D7576A9148D804B079AC79AD0CA108A4E5B679DB591FF069B88AC";
String resultOpUpdate=NamecoinUtil.getNameOperation(BitcoinUtil.convertHexStringToByteArray(updateScript));
assertEquals(NamecoinUtil.STR_OP_NAME_UDPATE,resultOpUpdate,"Script containing updateScript op detected correctly");
}
@Test
public void getNameOperationNegative() {
// null
assertEquals(NamecoinUtil.getNameOperation(null),NamecoinUtil.STR_OP_UNKNOWN,"Script containing null correctly");
// zero bytes
assertEquals(NamecoinUtil.getNameOperation(new byte[0]),NamecoinUtil.STR_OP_UNKNOWN,"Script containing byte array zero length correctly");
// two byte not namecoin op
assertEquals(NamecoinUtil.getNameOperation(new byte[] {0x01,0x02}),NamecoinUtil.STR_OP_UNKNOWN,"Script containing no namecoin op correctly");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinFormatHadoopTest.java | inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinFormatHadoopTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.zuinnote.hadoop.bitcoin.format.common.*;
import org.zuinnote.hadoop.bitcoin.format.mapred.BitcoinBlockFileInputFormat;
/**
*
*/
public class BitcoinFormatHadoopTest {
private static JobConf defaultConf = new JobConf();
private static FileSystem localFs = null;
private static Reporter reporter = Reporter.NULL;
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// one-time initialization code
defaultConf.set("fs.defaultFS", "file:///");
localFs = FileSystem.getLocal(defaultConf);
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion2BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion3BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4GzipCompressedBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4comp.blk.gz";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4Bzip2CompressedBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4comp.blk.bz2";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1SeekBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataMultiBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void readBitcoinRawBlockInputFormatGenesisBlock() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameGenesis);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable genesisKey = new BytesWritable();
BytesWritable genesisBlock = new BytesWritable();
assertTrue( reader.next(genesisKey,genesisBlock),"Input Split for genesis block contains at least one block");
assertEquals( 293, genesisBlock.getLength(),"Genesis Block must have size of 293");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in genesis Block");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion1() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 1");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 482, block.getLength(),"Random block version 1 must have size of 482 bytes");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 1");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion2() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 2");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 191198, block.getLength(),"Random block version 2 must have size of 191.198 bytes");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 2");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion3() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 3");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 932199, block.getLength(),"Random block version 3 must have size of 932.199 bytes");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 3");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion4() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 4");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 998039, block.getLength(),"Random block version 4 must have a size of 998.039 bytes");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 4");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatReqSeekBlockVersion1() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block requiring seek version 1");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 482, block.getLength(),"Random block requiring seek version 1 must have a size of 482 bytes");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block requiring seek version 1");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatMultiBlock() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for multiblock");
RecordReader<BytesWritable, BytesWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.next(key,block),"Input Split for multi block contains the genesis block");
assertEquals( 293, block.getLength(),"Genesis Block must have size of 293");
assertTrue( reader.next(key,block),"Input Split for block version contains block version 1");
assertEquals( 482, block.getLength(),"Random block version 1 must have size of 482 bytes");
assertTrue( reader.next(key,block),"Input Split for block version contains block version 2");
assertEquals( 191198, block.getLength(),"Random block version 2 must have size of 191.198 bytes");
BytesWritable emptyKey = new BytesWritable();
BytesWritable emptyBlock = new BytesWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in multi block");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatGenesisBlock() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameGenesis);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable genesisKey = new BytesWritable();
BitcoinBlockWritable genesisBlock = new BitcoinBlockWritable();
assertTrue( reader.next(genesisKey,genesisBlock),"Input Split for genesis block contains at least one block");
assertEquals( 1, genesisBlock.getTransactions().size(),"Genesis Block must contain exactly one transaction");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in genesis Block");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion1() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 1");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 2, block.getTransactions().size(),"Random block version 1 must contain exactly two transactions");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 1");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion2() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 2");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 343, block.getTransactions().size(),"Random block version 2 must contain exactly 343 transactions");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 2");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion3() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 3");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 1645, block.getTransactions().size(),"Random block version 3 must contain exactly 1645 transactions");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 3");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion4() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 4");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 936, block.getTransactions().size(),"Random block version 4 must contain exactly 936 transactions");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block version 4");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatReqSeekBlockVersion1() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block requiring seek version 1");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 2, block.getTransactions().size(),"Random block requiring seek version 1 must contain exactly two transactions");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in block requiring seek version 1");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatMultiBlock() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for multiblock");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.next(key,block),"Input Split for multi block contains the genesis block");
assertEquals( 1, block.getTransactions().size(),"Genesis Block must contain exactly one transaction");
assertTrue( reader.next(key,block),"Input Split for block version contains block version 1");
assertEquals( 2, block.getTransactions().size(),"Random block version 1 must contain exactly two transactions");
assertTrue( reader.next(key,block),"Input Split for block version contains at least one block");
assertEquals( 343, block.getTransactions().size(),"Random block version 2 must contain exactly 343 transactions");
BytesWritable emptyKey = new BytesWritable();
BitcoinBlockWritable emptyBlock = new BitcoinBlockWritable();
assertFalse( reader.next(emptyKey,emptyBlock),"No further blocks in multi block");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatGenesisBlock() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinTransactionWritable transaction = new BitcoinTransactionWritable();
int transactCount=0;
while (reader.next(key,transaction)) {
transactCount++;
}
assertEquals( 1, transactCount,"Genesis Block must contain exactly one transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion1() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 1");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinTransactionWritable transaction = new BitcoinTransactionWritable();
int transactCount=0;
while (reader.next(key,transaction)) {
transactCount++;
}
assertEquals( 2, transactCount,"Block version 1 must contain exactly two transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion2() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 2");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinTransactionWritable transaction = new BitcoinTransactionWritable();
int transactCount=0;
while (reader.next(key,transaction)) {
transactCount++;
}
assertEquals( 343, transactCount,"Block version 2 must contain exactly 343 transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion3() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 3");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinTransactionWritable transaction = new BitcoinTransactionWritable();
int transactCount=0;
while (reader.next(key,transaction)) {
transactCount++;
}
assertEquals( 1645, transactCount,"Block version 3 must contain exactly 1645 transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion4() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block version 4");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
BitcoinTransactionWritable transaction = new BitcoinTransactionWritable();
int transactCount=0;
while (reader.next(key,transaction)) {
transactCount++;
}
assertEquals( 936, transactCount,"Block version 4 must contain exactly 936 transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion1ReqSeek() throws IOException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | true |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinFormatHadoopTest.java | inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinFormatHadoopTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import java.lang.InterruptedException;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.*;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.zuinnote.hadoop.bitcoin.format.mapreduce.BitcoinBlockFileInputFormat;
import org.zuinnote.hadoop.bitcoin.format.common.*;
/**
*
*/
public class BitcoinFormatHadoopTest {
private static Configuration defaultConf = new Configuration();
private static FileSystem localFs = null;
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// one-time initialization code
defaultConf.set("fs.defaultFS", "file:///");
localFs = FileSystem.getLocal(defaultConf);
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion2BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion3BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4GzipCompressedBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4comp.blk.gz";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4Bzip2CompressedBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4comp.blk.bz2";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1SeekBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataMultiBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void readBitcoinRawBlockInputFormatGenesisBlock() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameGenesis);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for genesis block");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable genesisKey = new BytesWritable();
BytesWritable genesisBlock = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for genesis block contains at least one block");
genesisKey=reader.getCurrentKey();
genesisBlock=reader.getCurrentValue();
assertEquals( 293, genesisBlock.getLength(),"Genesis Block must have size of 293");
assertFalse( reader.nextKeyValue(),"No further blocks in genesis Block");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion1() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 1");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 482, block.getLength(),"Random block version 1 must have size of 482 bytes");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 1");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion2() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 2");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 191198, block.getLength(),"Random block version 2 must have size of 191.198 bytes");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 2");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion3() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 3");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 932199, block.getLength(),"Random block version 3 must have size of 932.199 bytes");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 3");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatBlockVersion4() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 4");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 998039, block.getLength(),"Random block version 4 must have a size of 998.039 bytes");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 4");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatReqSeekBlockVersion1() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block requiring seek version 1");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 482, block.getLength(),"Random block requiring seek version 1 must have a size of 482 bytes");
assertFalse( reader.nextKeyValue(),"No further blocks in block requiring seek version 1");
reader.close();
}
@Test
public void readBitcoinRawBlockInputFormatMultiBlock() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinRawBlockFileInputFormat format = new BitcoinRawBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for multiblock");
RecordReader<BytesWritable, BytesWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BytesWritable block = new BytesWritable();
assertTrue( reader.nextKeyValue(),"Input Split for multi block contains the genesis block");
block=reader.getCurrentValue();
assertEquals( 293, block.getLength(),"Genesis Block must have size of 293");
assertTrue( reader.nextKeyValue(),"Input Split for block version contains block version 1");
block=reader.getCurrentValue();
assertEquals( 482, block.getLength(),"Random block version 1 must have size of 482 bytes");
assertTrue( reader.nextKeyValue(),"Input Split for block version contains block version 2");
block=reader.getCurrentValue();
assertEquals( 191198, block.getLength(),"Random block version 2 must have size of 191.198 bytes");
assertFalse( reader.nextKeyValue(),"No further blocks in multi block");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatGenesisBlock() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameGenesis);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for genesis block");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable genesisKey = new BytesWritable();
BitcoinBlockWritable genesisBlock = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for genesis block contains at least one block");
genesisBlock=reader.getCurrentValue();
assertEquals( 1, genesisBlock.getTransactions().size(),"Genesis Block must contain exactly one transaction");
assertFalse( reader.nextKeyValue(),"No further blocks in genesis Block");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion1() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 1");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 2, block.getTransactions().size(),"Random block version 1 must contain exactly two transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 1");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion2() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 2");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 343, block.getTransactions().size(),"Random block version 2 must contain exactly 343 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 2");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion3() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1,splits.size(),"Only one split generated for block version 3");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 1645, block.getTransactions().size(),"Random block version 3 must contain exactly 1645 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 3");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatBlockVersion4() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 4");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 936, block.getTransactions().size(),"Random block version 4 must contain exactly 936 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block version 4");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatReqSeekBlockVersion1() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block requiring seek version 1");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 2, block.getTransactions().size(),"Random block requiring seek version 1 must contain exactly two transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block requiring seek version 1");
reader.close();
}
@Test
public void readBitcoinBlockInputFormatMultiBlock() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multiblock.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinBlockFileInputFormat format = new BitcoinBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1,splits.size(),"Only one split generated for multiblock");
RecordReader<BytesWritable, BitcoinBlockWritable> reader = format.createRecordReader(splits.get(0),context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
BitcoinBlockWritable block = new BitcoinBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for multi block contains the genesis block");
block=reader.getCurrentValue();
assertEquals( 1, block.getTransactions().size(),"Genesis Block must contain exactly one transaction");
assertTrue( reader.nextKeyValue(),"Input Split for block version contains block version 1");
block=reader.getCurrentValue();
assertEquals( 2, block.getTransactions().size(),"Random block version 1 must contain exactly two transactions");
assertTrue( reader.nextKeyValue(),"Input Split for block version contains at least one block");
block=reader.getCurrentValue();
assertEquals( 343, block.getTransactions().size(),"Random block version 2 must contain exactly 343 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in multi block");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatGenesisBlock() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for genesis block");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
int transactCount=0;
while (reader.nextKeyValue()) {
transactCount++;
}
assertEquals( 1, transactCount,"Genesis Block must contain exactly one transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion1() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 1");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.createRecordReader(splits.get(0),context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
int transactCount=0;
while (reader.nextKeyValue()) {
transactCount++;
}
assertEquals( 2, transactCount,"Block version 1 must contain exactly two transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion2() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 2");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
int transactCount=0;
while (reader.nextKeyValue()) {
transactCount++;
}
assertEquals( 343, transactCount,"Block version 2 must contain exactly 343 transactions");
reader.close();
}
@Test
public void readBitcoinTransactionInputFormatBlockVersion3() throws IOException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
BitcoinTransactionFileInputFormat format = new BitcoinTransactionFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block version 3");
RecordReader<BytesWritable, BitcoinTransactionWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | true |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptPatternParserTest.java | inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptPatternParserTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinScriptPatternParser;
public class BitcoinScriptPatternParserTest {
@Test
public void testPaymentPubKeyGenesis() {
byte[] txOutScriptGenesis= new byte[]{(byte)0x41,(byte)0x04,(byte)0x67,(byte)0x8A,(byte)0xFD,(byte)0xB0,(byte)0xFE,(byte)0x55,(byte)0x48,(byte)0x27,(byte)0x19,(byte)0x67,(byte)0xF1,(byte)0xA6,(byte)0x71,(byte)0x30,(byte)0xB7,(byte)0x10,(byte)0x5C,(byte)0xD6,(byte)0xA8,(byte)0x28,(byte)0xE0,(byte)0x39,(byte)0x09,(byte)0xA6,(byte)0x79,(byte)0x62,(byte)0xE0,(byte)0xEA,(byte)0x1F,(byte)0x61,(byte)0xDE,(byte)0xB6,(byte)0x49,(byte)0xF6,(byte)0xBC,(byte)0x3F,(byte)0x4C,(byte)0xEF,(byte)0x38,(byte)0xC4,(byte)0xF3,(byte)0x55,(byte)0x04,(byte)0xE5,(byte)0x1E,(byte)0xC1,(byte)0x12,(byte)0xDE,(byte)0x5C,(byte)0x38,(byte)0x4D,(byte)0xF7,(byte)0xBA,(byte)0x0B,(byte)0x8D,(byte)0x57,(byte)0x8A,(byte)0x4C,(byte)0x70,(byte)0x2B,(byte)0x6B,(byte)0xF1,(byte)0x1D,(byte)0x5F,(byte)0xAC};
String result = BitcoinScriptPatternParser.getPaymentDestination(txOutScriptGenesis);
String comparatorText = "bitcoinpubkey_4104678AFDB0FE5548271967F1A67130B7105CD6A828E03909A67962E0EA1F61DEB649F6BC3F4CEF38C4F35504E51EC112DE5C384DF7BA0B8D578A4C702B6BF11D5F";
assertEquals( comparatorText,result,"TxOutScript from Genesis should be payment to a pubkey address");
}
@Test
public void testPaymentNull() {
String result = BitcoinScriptPatternParser.getPaymentDestination(null);
assertNull( result,"Null as script returns null");
}
@Test
public void testPaymentAnyone() {
String result = BitcoinScriptPatternParser.getPaymentDestination(new byte[0]);
assertEquals( "anyone",result,"Empty script means anyone can spend");
}
@Test
public void testPaymentUnspendable() {
String result = BitcoinScriptPatternParser.getPaymentDestination(new byte[]{0x6a});
assertEquals( "unspendable",result,"Unspendable script");
}
@Test
public void testPaymentInvalid() {
String result = BitcoinScriptPatternParser.getPaymentDestination(new byte[]{0x00});
assertNull( result,"Invalid script returns null");
}
@Test
public void testPaymentP2Hash() {
byte[] txOutScriptP2Hash= new byte[]{(byte)0x76,(byte)0xa9,(byte)0x14,(byte)0xfd,(byte)0x92,(byte)0xaa,(byte)0xfe,(byte)0x55,(byte)0x5c,(byte)0x07,(byte)0xe8,(byte)0x90,(byte)0xe8,(byte)0x07,(byte)0x5e,(byte)0xd6,(byte)0x1f,(byte)0x39,(byte)0xca,(byte)0x90,(byte)0x52,(byte)0x2b,(byte)0x8f,(byte)0x88,(byte)0xAC};
String result = BitcoinScriptPatternParser.getPaymentDestination(txOutScriptP2Hash);
String comparatorText = "bitcoinaddress_FD92AAFE555C07E890E8075ED61F39CA90522B8F";
assertEquals( comparatorText, result,"Payment destination of script should be p2hash");
}
@Test
public void testPaymentPuzzle() {
byte[] txOutScriptPuzzle= new byte[]{(byte)0xAA,(byte)0x20,(byte)0x6f,(byte)0xe2,(byte)0x8c,(byte)0x0a,(byte)0xb6,(byte)0xf1,(byte)0xb3,(byte)0x72,(byte)0xc1,(byte)0xa6,(byte)0xa2,(byte)0x46,(byte)0xae,(byte)0x63,(byte)0xf7,(byte)0x4f,(byte)0x93,(byte)0x1e,(byte)0x83,(byte)0x65,(byte)0xe1,(byte)0x5a,(byte)0x08,(byte)0x9c,(byte)0x68,(byte)0xd6,(byte)0x19,(byte)0x00,(byte)0x00,(byte)0x00,(byte)0x00,(byte)0x00,(byte)0x87};
String result = BitcoinScriptPatternParser.getPaymentDestination(txOutScriptPuzzle);
String comparatorText = "puzzle_206FE28C0AB6F1B372C1A6A246AE63F74F931E8365E15A089C68D61900000000";
assertEquals( comparatorText, result,"Payment destination of script should be puzzle");
}
@Test
public void testPaymentSegwitP2HashSynthetic() {
// synthetic because it is not based on real Bitcoin blockchain data
byte[] txOutScriptP2Hash= new byte[]{(byte)0x00,(byte)0x14,(byte)0x00,(byte)0x01,(byte)0x02,(byte)0x03,(byte)0x04,(byte)0x05,(byte)0x06,(byte)0x07,(byte)0x08,(byte)0x09,(byte)0x0A,(byte)0x0B,(byte)0x0C,(byte)0x0D,(byte)0x0E,(byte)0x0F,(byte)0x10,(byte)0x11,(byte)0x12,(byte)0x13};
String result = BitcoinScriptPatternParser.getPaymentDestination(txOutScriptP2Hash);
String comparatorText = "P2WPKH_000102030405060708090A0B0C0D0E0F10111213";
assertEquals( comparatorText, result,"Payment destination of script should be P2WPKH");
}
@Test
public void testPaymentSegwitP2WSHSynthetic() {
// synthetic because it is not based on real Bitcoin blockchain data
byte[] txOutScriptP2Hash= new byte[]{(byte)0x00,(byte)0x20,(byte)0x00,(byte)0x01,(byte)0x02,(byte)0x03,(byte)0x04,(byte)0x05,(byte)0x06,(byte)0x07,(byte)0x08,(byte)0x09,(byte)0x0A,(byte)0x0B,(byte)0x0C,(byte)0x0D,(byte)0x0E,(byte)0x0F,(byte)0x10,(byte)0x11,(byte)0x12,(byte)0x13,(byte)0x14,(byte)0x15,(byte)0x16,(byte)0x17,(byte)0x18,(byte)0x19,(byte)0x1A,(byte)0x1B,(byte)0x1C,(byte)0x1D,(byte)0x1E,(byte)0x1F};
String result = BitcoinScriptPatternParser.getPaymentDestination(txOutScriptP2Hash);
String comparatorText = "P2WSH_000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F";
assertEquals( comparatorText, result,"Payment destination of script should be P2WSH");
}
@Test
public void testPaymentSegwitPubKeySynthetic() {
// synthetic because it is not based on real Bitcoin blockchain data
byte[] txOutScriptP2WPKHP2SH = new byte[]{(byte)0xA9,(byte)0x14,(byte)0x00,(byte)0x01,(byte)0x02,(byte)0x03,(byte)0x04,(byte)0x05,(byte)0x06,(byte)0x07,(byte)0x08,(byte)0x09,(byte)0x0A,(byte)0x0B,(byte)0x0C,(byte)0x0D,(byte)0x0E,(byte)0x0F,(byte)0x10,(byte)0x11,(byte)0x12,(byte)0x13,(byte)0x87};
String result = BitcoinScriptPatternParser.getPaymentDestination(txOutScriptP2WPKHP2SH);
String comparatorText = "P2WPKHP2SH_000102030405060708090A0B0C0D0E0F10111213";
assertEquals( comparatorText,result,"Payment destination of script should be P2WPKHP2SH");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinFormatReaderTest.java | inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinFormatReaderTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockReader;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
public class BitcoinFormatReaderTest {
static final int DEFAULT_BUFFERSIZE=64*1024;
static final int DEFAULT_MAXSIZE_BITCOINBLOCK=8 * 1024 * 1024;
static final byte[][] DEFAULT_MAGIC = {{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9}};
private static final byte[][] TESTNET3_MAGIC = {{(byte)0x0B,(byte)0x11,(byte)0x09,(byte)0x07}};
private static final byte[][] MULTINET_MAGIC = {{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9},{(byte)0x0B,(byte)0x11,(byte)0x09,(byte)0x07}};
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion2BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion3BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion4BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataVersion1SeekBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="reqseekversion1.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataTestnet3GenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataTestnet3Version4BlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3version4.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataMultiNetAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multinet.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataScriptWitnessNetAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="scriptwitness.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataScriptWitness2NetAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="scriptwitness2.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void parseGenesisBlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameGenesis);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer genesisByteBuffer = bbr.readRawBlock();
assertFalse( genesisByteBuffer.isDirect(),"Raw Genesis Block is HeapByteBuffer");
assertEquals( 293, genesisByteBuffer.limit(),"Raw Genesis block has a size of 293 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion1BlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version1ByteBuffer = bbr.readRawBlock();
assertFalse( version1ByteBuffer.isDirect(),"Random Version 1 Raw Block is HeapByteBuffer");
assertEquals( 482, version1ByteBuffer.limit(),"Random Version 1 Raw Block has a size of 482 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion2BlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version2ByteBuffer = bbr.readRawBlock();
assertFalse( version2ByteBuffer.isDirect(),"Random Version 2 Raw Block is HeapByteBuffer");
assertEquals( 191198, version2ByteBuffer.limit(),"Random Version 2 Raw Block has a size of 191.198 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion3BlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version3ByteBuffer = bbr.readRawBlock();
assertFalse( version3ByteBuffer.isDirect(),"Random Version 3 Raw Block is HeapByteBuffer");
assertEquals( 932199, version3ByteBuffer.limit(),"Random Version 3 Raw Block has a size of 932.199 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion4BlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version4ByteBuffer = bbr.readRawBlock();
assertFalse( version4ByteBuffer.isDirect(),"Random Version 4 Raw Block is HeapByteBuffer");
assertEquals( 998039, version4ByteBuffer.limit(),"Random Version 4 Raw Block has a size of 998.039 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseTestNet3GenesisBlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameGenesis);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.TESTNET3_MAGIC,direct);
ByteBuffer genesisByteBuffer = bbr.readRawBlock();
assertFalse( genesisByteBuffer.isDirect(),"Raw TestNet3 Genesis Block is HeapByteBuffer");
assertEquals( 293, genesisByteBuffer.limit(),"Raw TestNet3 Genesis block has a size of 293 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseTestNet3Version4BlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.TESTNET3_MAGIC,direct);
ByteBuffer version4ByteBuffer = bbr.readRawBlock();
assertFalse( version4ByteBuffer.isDirect(),"Random TestNet3 Version 4 Raw Block is HeapByteBuffer");
assertEquals( 749041, version4ByteBuffer.limit(),"Random TestNet3 Version 4 Raw Block has a size of 749.041 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseMultiNetAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multinet.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameGenesis);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.MULTINET_MAGIC,direct);
ByteBuffer firstMultinetByteBuffer = bbr.readRawBlock();
assertFalse( firstMultinetByteBuffer.isDirect(),"First MultiNetBlock is HeapByteBuffer");
assertEquals( 293, firstMultinetByteBuffer.limit(),"First MultiNetBlock has a size of 293 bytes");
ByteBuffer secondMultinetByteBuffer = bbr.readRawBlock();
assertFalse( secondMultinetByteBuffer.isDirect(),"Second MultiNetBlock is HeapByteBuffer");
assertEquals( 191198, secondMultinetByteBuffer.limit(),"Second MultiNetBlock has a size of 191.198 bytes");
ByteBuffer thirdMultinetByteBuffer = bbr.readRawBlock();
assertFalse( thirdMultinetByteBuffer.isDirect(),"Third MultiNetBlock is HeapByteBuffer");
assertEquals( 749041, thirdMultinetByteBuffer.limit(),"Third MultiNetBlock has a size of 749.041 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseScriptWitnessBlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="scriptwitness.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer scriptwitnessByteBuffer = bbr.readRawBlock();
assertFalse( scriptwitnessByteBuffer.isDirect(),"Random ScriptWitness Raw Block is HeapByteBuffer");
assertEquals( 999283, scriptwitnessByteBuffer.limit(),"Random ScriptWitness Raw Block has a size of 999283 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseScriptWitness2BlockAsBitcoinRawBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="scriptwitness2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer scriptwitnessByteBuffer = bbr.readRawBlock();
assertFalse( scriptwitnessByteBuffer.isDirect(),"Random ScriptWitness Raw Block is HeapByteBuffer");
assertEquals( 1000039, scriptwitnessByteBuffer.limit(),"Random ScriptWitness Raw Block has a size of 1000039 bytes");
scriptwitnessByteBuffer = bbr.readRawBlock();
assertFalse( scriptwitnessByteBuffer.isDirect(),"Random ScriptWitness Raw Block is HeapByteBuffer");
assertEquals( 999312, scriptwitnessByteBuffer.limit(),"Random ScriptWitness Raw Block has a size of 999312 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseGenesisBlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer genesisByteBuffer = bbr.readRawBlock();
assertTrue( genesisByteBuffer.isDirect(),"Raw Genesis Block is DirectByteBuffer");
assertEquals( 293, genesisByteBuffer.limit(),"Raw Genesis Block has a size of 293 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion1BlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version1ByteBuffer = bbr.readRawBlock();
assertTrue( version1ByteBuffer.isDirect(),"Random Version 1 Raw Block is DirectByteBuffer");
assertEquals( 482, version1ByteBuffer.limit(),"Random Version 1 Raw Block has a size of 482 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion2BlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version2ByteBuffer = bbr.readRawBlock();
assertTrue( version2ByteBuffer.isDirect(),"Random Version 2 Raw Block is DirectByteBuffer");
assertEquals( 191198, version2ByteBuffer.limit(),"Random Version 2 Raw Block has a size of 191.198 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion3BlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version3.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version3ByteBuffer = bbr.readRawBlock();
assertTrue( version3ByteBuffer.isDirect(),"Random Version 3 Raw Block is DirectByteBuffer");
assertEquals( 932199, version3ByteBuffer.limit(),"Random Version 3 Raw Block has a size of 932.199 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion4BlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer version4ByteBuffer = bbr.readRawBlock();
assertTrue( version4ByteBuffer.isDirect(),"Random Version 4 Raw Block is DirectByteBuffer");
assertEquals( 998039, version4ByteBuffer.limit(),"Random Version 4 Raw Block has a size of 998.039 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseTestNet3GenesisBlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3genesis.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameGenesis);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.TESTNET3_MAGIC,direct);
ByteBuffer genesisByteBuffer = bbr.readRawBlock();
assertTrue( genesisByteBuffer.isDirect(),"Raw TestNet3 Genesis Block is DirectByteBuffer");
assertEquals( 293, genesisByteBuffer.limit(),"Raw TestNet3 Genesis block has a size of 293 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseTestNet3Version4BlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3version4.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.TESTNET3_MAGIC,direct);
ByteBuffer version4ByteBuffer = bbr.readRawBlock();
assertTrue( version4ByteBuffer.isDirect(),"Random TestNet3 Version 4 Raw Block is DirectByteBuffer");
assertEquals( 749041, version4ByteBuffer.limit(),"Random TestNet3 Version 4 Raw Block has a size of 749.041 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseMultiNetAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="multinet.blk";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameGenesis);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.MULTINET_MAGIC,direct);
ByteBuffer firstMultinetByteBuffer = bbr.readRawBlock();
assertTrue( firstMultinetByteBuffer.isDirect(),"First MultiNetBlock is DirectByteBuffer");
assertEquals( 293, firstMultinetByteBuffer.limit(),"First MultiNetBlock has a size of 293 bytes");
ByteBuffer secondMultinetByteBuffer = bbr.readRawBlock();
assertTrue( secondMultinetByteBuffer.isDirect(),"Second MultiNetBlock is DirectByteBuffer");
assertEquals( 191198, secondMultinetByteBuffer.limit(),"Second MultiNetBlock has a size of 191.198 bytes");
ByteBuffer thirdMultinetByteBuffer = bbr.readRawBlock();
assertTrue( thirdMultinetByteBuffer.isDirect(),"Third MultiNetBlock is DirectByteBuffer");
assertEquals( 749041, thirdMultinetByteBuffer.limit(),"Third MultiNetBlock has a size of 749.041 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseScriptWitnessBlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="scriptwitness.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer scriptwitnessByteBuffer = bbr.readRawBlock();
assertTrue( scriptwitnessByteBuffer.isDirect(),"Random ScriptWitness Raw Block is DirectByteBuffer");
assertEquals( 999283, scriptwitnessByteBuffer.limit(),"Random ScriptWitness Raw Block has a size of 999283 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseScriptWitness2BlockAsBitcoinRawBlockDirect() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="scriptwitness2.blk";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
BitcoinBlockReader bbr = null;
boolean direct=true;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
ByteBuffer scriptwitnessByteBuffer = bbr.readRawBlock();
assertTrue( scriptwitnessByteBuffer.isDirect(),"Random ScriptWitness Raw Block is DirectByteBuffer");
assertEquals( 1000039, scriptwitnessByteBuffer.limit(),"Random ScriptWitness Raw Block has a size of 1000039 bytes");
scriptwitnessByteBuffer = bbr.readRawBlock();
assertTrue( scriptwitnessByteBuffer.isDirect(),"Random ScriptWitness Raw Block is DirectByteBuffer");
assertEquals( 999312, scriptwitnessByteBuffer.limit(),"Random ScriptWitness Raw Block has a size of 999312 bytes");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseGenesisBlockAsBitcoinBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
BitcoinBlock theBitcoinBlock = bbr.readBlock();
assertEquals( 1, theBitcoinBlock.getTransactions().size(),"Genesis Block must contain exactly one transaction");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfInputs().size(),"Genesis Block must contain exactly one transaction with one input");
assertEquals( 77, theBitcoinBlock.getTransactions().get(0).getListOfInputs().get(0).getTxInScript().length,"Genesis Block must contain exactly one transaction with one input and script length 77");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().size(),"Genesis Block must contain exactly one transaction with one output");
assertEquals( BigInteger.valueOf(5000000000L),theBitcoinBlock.getTransactions().get(0).getListOfOutputs().get(0).getValue(), "Value must be BigInteger corresponding to 5000000000L");
assertEquals( 67, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().get(0).getTxOutScript().length,"Genesis Block must contain exactly one transaction with one output and script length 67");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseTestNet3GenesisBlockAsBitcoinBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="testnet3genesis.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.TESTNET3_MAGIC,direct);
BitcoinBlock theBitcoinBlock = bbr.readBlock();
assertEquals( 1, theBitcoinBlock.getTransactions().size(),"TestNet3 Genesis Block must contain exactly one transaction");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfInputs().size(),"TestNet3 Genesis Block must contain exactly one transaction with one input");
assertEquals( 77, theBitcoinBlock.getTransactions().get(0).getListOfInputs().get(0).getTxInScript().length,"TestNet3 Genesis Block must contain exactly one transaction with one input and script length 77");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().size(),"TestNet3 Genesis Block must contain exactly one transaction with one output");
assertEquals( 67, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().get(0).getTxOutScript().length,"TestNet3 Genesis Block must contain exactly one transaction with one output and script length 67");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion1BlockAsBitcoinBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version1.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
BitcoinBlock theBitcoinBlock = bbr.readBlock();
assertEquals( 2, theBitcoinBlock.getTransactions().size(),"Random Version 1 Block must contain exactly two transactions");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfInputs().size(),"Random Version 1 Block must contain exactly two transactions of which the first has one input");
assertEquals( 8, theBitcoinBlock.getTransactions().get(0).getListOfInputs().get(0).getTxInScript().length,"Random Version 1 Block must contain exactly two transactions of which the first has one input and script length 8");
assertEquals( 1, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().size(),"Random Version 1 Block must contain exactly two transactions of which the first has one output");
assertEquals( 67, theBitcoinBlock.getTransactions().get(0).getListOfOutputs().get(0).getTxOutScript().length,"Random Version 1 Block must contain exactly two transactions of which the first has one output and script length 67");
} finally {
if (bbr!=null)
bbr.close();
}
}
@Test
public void parseVersion2BlockAsBitcoinBlockHeap() throws FileNotFoundException, IOException, BitcoinBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="version2.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
FileInputStream fin = new FileInputStream(file);
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | true |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinUtilTest.java | inputformat/src/test/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinUtilTest.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import java.util.ArrayList;
import java.nio.ByteBuffer;
import java.io.IOException;
import java.math.BigInteger;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.security.NoSuchAlgorithmException;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinUtil;
public class BitcoinUtilTest {
@Test
public void convertSignedIntToUnsigned() {
long unsignedint = BitcoinUtil.convertSignedIntToUnsigned(-1);
assertEquals( 4294967295L,unsignedint,"-1 from signed int must be 4294967295L unsigned");
}
@Test
public void convertIntToByteArray() {
byte[] intByteArray = BitcoinUtil.convertIntToByteArray(1);
byte[] comparatorArray = new byte[]{0x00,0x00,0x00,0x01};
assertArrayEquals( comparatorArray,intByteArray,"1 in int must be equivalent to the array {0x00,0x00,0x00,0x01}");
}
@Test
public void convertLongToByteArray() {
byte[] longByteArray = BitcoinUtil.convertLongToByteArray(1L);
byte[] comparatorArray = new byte[]{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01};
assertArrayEquals( comparatorArray,longByteArray,"1 in int must be equivalent to the array {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01}");
}
@Test
public void convertVarIntByteBufferToByteArray() {
//note we will test here only one possible var int, because convertVarIntByteBufferToByteArray calls BitcoinUtil.getVarIntSize internally and thus we will do more tests when testing this function
byte[] originVarInt = new byte[]{0x01};
ByteBuffer testByteBuffer = ByteBuffer.allocate(1).put(originVarInt);
testByteBuffer.flip();
byte[] varIntByteArray = BitcoinUtil.convertVarIntByteBufferToByteArray(testByteBuffer);
assertArrayEquals( originVarInt,varIntByteArray,"0x01 in ByteBuffer must be equivalent to the varint represented in array {0x01}");
}
@Test
public void convertVarIntByteBufferToLong() {
// note we will test here only one possible varint, because convertVarIntByteBufferToLong calls BitcoinUtil.getVarInt internally and thus we will do more tests when testing this function
ByteBuffer testByteBuffer = ByteBuffer.allocate(1).put(new byte[]{0x01});
testByteBuffer.flip();
long convertedLong = BitcoinUtil.convertVarIntByteBufferToLong(testByteBuffer);
assertEquals(1L, convertedLong,"0x01 in ByteBuffer must be 1 as a long");
}
@Test
public void getVarIntByte() {
byte[] originalVarInt = new byte[] {0x01};
long convertedVarInt = BitcoinUtil.getVarInt(originalVarInt);
assertEquals( 1L,convertedVarInt,"varInt {0x01} must be 1 as long");
}
@Test
public void getVarIntWord() {
byte[] originalVarInt = new byte[] {(byte)0xFD,0x01,0x00};
long convertedVarInt = BitcoinUtil.getVarInt(originalVarInt);
assertEquals( 1L, convertedVarInt,"varInt {0xFD,0x01,0x00} must be 1 as long");
}
@Test
public void getVarIntDWord() {
byte[] originalVarInt = new byte[] {(byte)0xFE,0x01,0x00,0x00,0x00};
long convertedVarInt = BitcoinUtil.getVarInt(originalVarInt);
assertEquals( 1L, convertedVarInt,"varInt {0xFE,0x01,0x00,0x00,0x00} must be 1 as long");
}
@Test
public void getVarIntQWord() {
byte[] originalVarInt = new byte[] {(byte)0xFF,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
long convertedVarInt = BitcoinUtil.getVarInt(originalVarInt);
assertEquals( 1L, convertedVarInt,"varInt {0xFF,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00} must be 1 as long");
}
@Test
public void getVarIntSizeByte() {
byte[] originalVarInt = new byte[] {0x02};
byte varIntSize = BitcoinUtil.getVarIntSize(originalVarInt[0]);
assertEquals( 1, varIntSize,"varInt {0x02} must be of size 1");
}
@Test
public void getVarIntSizeWord() {
byte[] originalVarInt = new byte[] {(byte)0xFD,0x01,0x00};
byte varIntSize = BitcoinUtil.getVarIntSize(originalVarInt[0]);
assertEquals( 3, varIntSize,"varInt {0xFD,0x01,0x00} must be of size 3");
}
@Test
public void getVarIntSizeDWord() {
byte[] originalVarInt = new byte[] {(byte)0xFE,0x01,0x00,0x00,0x00};
byte varIntSize = BitcoinUtil.getVarIntSize(originalVarInt[0]);
assertEquals( 5, varIntSize,"varInt {0xFE,0x01,0x00,0x00,0x00} must be of size 5");
}
@Test
public void getVarIntSizeQWord() {
byte[] originalVarInt = new byte[] {(byte)0xFF,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
byte varIntSize = BitcoinUtil.getVarIntSize(originalVarInt[0]);
assertEquals( 9, varIntSize,"varInt {0xFF,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00} must be of size 9");
}
@Test
public void getSize() {
byte[] blockSize = new byte[] {(byte)0x1D,0x01,0x00,0x00}; // this is the size of the genesis block
long blockSizeLong = BitcoinUtil.getSize(blockSize);
assertEquals( 285, blockSizeLong,"Size in Array {0x1D,0x01,0x00,0x00} must be 285");
}
@Test
public void reverseByteArray() {
byte[] originalByteArray = new byte[]{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08};
byte[] resultByteArray = BitcoinUtil.reverseByteArray(originalByteArray);
byte[] reverseByteArray = new byte[]{0x08,0x07,0x06,0x05,0x04,0x03,0x02,0x01};
assertArrayEquals( reverseByteArray, resultByteArray,"{0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08} is equivalent to {0x08,0x07,0x06,0x05,0x04,0x03,0x02,0x01} in reverse order");
}
@Test
public void convertHexStringToByteArray() {
String hexString = "01FF02";
byte[] resultArray = BitcoinUtil.convertHexStringToByteArray(hexString);
byte[] expectedByteArray = new byte[]{0x01,(byte)0xFF,0x02};
assertArrayEquals( expectedByteArray, resultArray,"String \""+hexString+"\" is equivalent to byte array {0x01,0xFF,0x02}");
}
@Test
public void convertByteArrayToHexString() {
byte[] hexByteArray = new byte[]{0x01,(byte)0xFF,0x02};
String resultString = BitcoinUtil.convertByteArrayToHexString(hexByteArray);
String expectedString = "01FF02";
assertEquals( expectedString, resultString,"Byte array {0x01,0xFF,0x02} is equivalent to Hex String \""+expectedString+"\"");
}
@Test
public void convertIntToDate() {
int genesisBlockTimeStamp=1231006505;
Date genesisBlockDate = BitcoinUtil.convertIntToDate(genesisBlockTimeStamp);
SimpleDateFormat simpleFormat = new SimpleDateFormat("yyyy-MM-dd");
String genesisBlockDateString = simpleFormat.format(genesisBlockDate);
String expectedDate="2009-01-03";
assertEquals( expectedDate, genesisBlockDateString,"1231006505 is equivalent to the Date 2009-01-03");
}
@Test
public void compareMagicsPos() {
byte[] inputMagic1 = new byte[]{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9};
byte[] inputMagic2 = new byte[]{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9};
boolean isSame = BitcoinUtil.compareMagics(inputMagic1,inputMagic2);
assertTrue( isSame,"Magic 1 {0xF9,0xBE,0xB4,0xD9} is equivalent to Magic 2 {0xF9,0xBE,0xB4,0xD9}");
}
@Test
public void compareMagicsNeg() {
byte[] inputMagic1 = new byte[]{(byte)0xF9,(byte)0xBE,(byte)0xB4,(byte)0xD9};
byte[] inputMagic2 = new byte[]{(byte)0xFA,(byte)0xBF,(byte)0xB5,(byte)0xDA};
boolean isSame = BitcoinUtil.compareMagics(inputMagic1,inputMagic2);
assertFalse( isSame,"Magic 1 {0xF9,0xBE,0xB4,0xD9} is not equivalent to Magic 2 {0xFA,0xBF,0xB5,0xDA}");
}
@Test
public void getTransactionHash() throws NoSuchAlgorithmException, IOException {
// reconstruct the transaction from the genesis block
int version=1;
byte[] inCounter = new byte[]{0x01};
byte[] previousTransactionHash = new byte[]{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
long previousTxOutIndex = 4294967295L;
byte[] txInScriptLength = new byte[]{(byte)0x4D};
byte[] txInScript= new byte[]{(byte)0x04,(byte)0xFF,(byte)0xFF,(byte)0x00,(byte)0x1D,(byte)0x01,(byte)0x04,(byte)0x45,(byte)0x54,(byte)0x68,(byte)0x65,(byte)0x20,(byte)0x54,(byte)0x69,(byte)0x6D,(byte)0x65,(byte)0x73,(byte)0x20,(byte)0x30,(byte)0x33,(byte)0x2F,(byte)0x4A,(byte)0x61,(byte)0x6E,(byte)0x2F,(byte)0x32,(byte)0x30,(byte)0x30,(byte)0x39,(byte)0x20,(byte)0x43,(byte)0x68,(byte)0x61,(byte)0x6E,(byte)0x63,(byte)0x65,(byte)0x6C,(byte)0x6C,(byte)0x6F,(byte)0x72,(byte)0x20,(byte)0x6F,(byte)0x6E,(byte)0x20,(byte)0x62,(byte)0x72,(byte)0x69,(byte)0x6E,(byte)0x6B,(byte)0x20,(byte)0x6F,(byte)0x66,(byte)0x20,(byte)0x73,(byte)0x65,(byte)0x63,(byte)0x6F,(byte)0x6E,(byte)0x64,(byte)0x20,(byte)0x62,(byte)0x61,(byte)0x69,(byte)0x6C,(byte)0x6F,(byte)0x75,(byte)0x74,(byte)0x20,(byte)0x66,(byte)0x6F,(byte)0x72,(byte)0x20,(byte)0x62,(byte)0x61,(byte)0x6E,(byte)0x6B,(byte)0x73};
long seqNo=4294967295L;
byte[] outCounter = new byte[]{0x01};
long value=5000000000L;
byte[] txOutScriptLength=new byte[]{(byte)0x43};
byte[] txOutScript=new byte[]{(byte)0x41,(byte)0x04,(byte)0x67,(byte)0x8A,(byte)0xFD,(byte)0xB0,(byte)0xFE,(byte)0x55,(byte)0x48,(byte)0x27,(byte)0x19,(byte)0x67,(byte)0xF1,(byte)0xA6,(byte)0x71,(byte)0x30,(byte)0xB7,(byte)0x10,(byte)0x5C,(byte)0xD6,(byte)0xA8,(byte)0x28,(byte)0xE0,(byte)0x39,(byte)0x09,(byte)0xA6,(byte)0x79,(byte)0x62,(byte)0xE0,(byte)0xEA,(byte)0x1F,(byte)0x61,(byte)0xDE,(byte)0xB6,(byte)0x49,(byte)0xF6,(byte)0xBC,(byte)0x3F,(byte)0x4C,(byte)0xEF,(byte)0x38,(byte)0xC4,(byte)0xF3,(byte)0x55,(byte)0x04,(byte)0xE5,(byte)0x1E,(byte)0xC1,(byte)0x12,(byte)0xDE,(byte)0x5C,(byte)0x38,(byte)0x4D,(byte)0xF7,(byte)0xBA,(byte)0x0B,(byte)0x8D,(byte)0x57,(byte)0x8A,(byte)0x4C,(byte)0x70,(byte)0x2B,(byte)0x6B,(byte)0xF1,(byte)0x1D,(byte)0x5F,(byte)0xAC};
int lockTime = 0;
List<BitcoinTransactionInput> genesisInput = new ArrayList<BitcoinTransactionInput>(1);
genesisInput.add(new BitcoinTransactionInput(previousTransactionHash,previousTxOutIndex,txInScriptLength,txInScript,seqNo));
List<BitcoinTransactionOutput> genesisOutput = new ArrayList<BitcoinTransactionOutput>(1);
genesisOutput.add(new BitcoinTransactionOutput(BigInteger.valueOf(value),txOutScriptLength,txOutScript));
BitcoinTransaction genesisTransaction = new BitcoinTransaction(version,inCounter,genesisInput,outCounter,genesisOutput,lockTime);
byte[] genesisTransactionHash=BitcoinUtil.getTransactionHash(genesisTransaction);
byte[] expectedHash = BitcoinUtil.reverseByteArray(new byte[]{(byte)0x4A,(byte)0x5E,(byte)0x1E,(byte)0x4B,(byte)0xAA,(byte)0xB8,(byte)0x9F,(byte)0x3A,(byte)0x32,(byte)0x51,(byte)0x8A,(byte)0x88,(byte)0xC3,(byte)0x1B,(byte)0xC8,(byte)0x7F,(byte)0x61,(byte)0x8F,(byte)0x76,(byte)0x67,(byte)0x3E,(byte)0x2C,(byte)0xC7,(byte)0x7A,(byte)0xB2,(byte)0x12,(byte)0x7B,(byte)0x7A,(byte)0xFD,(byte)0xED,(byte)0xA3,(byte)0x3B});
assertArrayEquals( expectedHash, genesisTransactionHash,"Hash for Genesis Transaction correctly calculated");
}
@Test
public void getTransactionHashSegWit() throws NoSuchAlgorithmException, IOException {
// reconstruct the transaction from the a random segwit block
int version=2;
byte marker=0x00;
byte flag=0x01;
byte[] inCounter = new byte[]{0x01};
byte[] previousTransactionHash = new byte[]{(byte)0x07,(byte)0x21,(byte)0x35,(byte)0x23,(byte)0x6D,(byte)0x2E,(byte)0xBC,(byte)0x78,(byte)0xB6,(byte)0xAC,(byte)0xE1,(byte)0x88,(byte)0x97,(byte)0x03,(byte)0xB1,(byte)0x84,(byte)0x85,(byte)0x52,(byte)0x87,(byte)0x12,(byte)0xBD,(byte)0x70,(byte)0xE0,(byte)0x7F,(byte)0x4A,(byte)0x90,(byte)0x11,(byte)0x40,(byte)0xDE,(byte)0x38,(byte)0xA2,(byte)0xE8};
long previousTxOutIndex = 1L;
byte[] txInScriptLength = new byte[]{(byte)0x17};
byte[] txInScript= new byte[]{(byte)0x16,(byte)0x00,(byte)0x14,(byte)0x4D,(byte)0x4D,(byte)0x83,(byte)0xED,(byte)0x5F,(byte)0x10,(byte)0x7B,(byte)0x8D,(byte)0x45,(byte)0x1E,(byte)0x59,(byte)0xA0,(byte)0x43,(byte)0x1A,(byte)0x13,(byte)0x92,(byte)0x79,(byte)0x6B,(byte)0x26,(byte)0x04};
long seqNo=4294967295L;
byte[] outCounter = new byte[]{0x02};
long value_1=1009051983L;
byte[] txOutScriptLength_1=new byte[]{(byte)0x17};
byte[] txOutScript_1=new byte[]{(byte)0xA9,(byte)0x14,(byte)0xF0,(byte)0x50,(byte)0xC5,(byte)0x91,(byte)0xEA,(byte)0x98,(byte)0x26,(byte)0x73,(byte)0xCC,(byte)0xED,(byte)0xF5,(byte)0x21,(byte)0x13,(byte)0x65,(byte)0x7B,(byte)0x67,(byte)0x83,(byte)0x03,(byte)0xE6,(byte)0xA1,(byte)0x87};
long value_2=59801109L;
byte[] txOutScriptLength_2=new byte[]{(byte)0x19};
byte[] txOutScript_2=new byte[]{(byte)0x76,(byte)0xA9,(byte)0x14,(byte)0xFB,(byte)0x2E,(byte)0x13,(byte)0x83,(byte)0x5E,(byte)0x39,(byte)0x88,(byte)0xC7,(byte)0x8F,(byte)0x76,(byte)0x0D,(byte)0x4A,(byte)0xC8,(byte)0x1E,(byte)0x04,(byte)0xEA,(byte)0xF1,(byte)0x94,(byte)0xEA,(byte)0x92,(byte)0x88,(byte)0xAC};
// there is only one input so we have only one list of stack items containing 2 items in this case
byte[] noOfStackItems = new byte[]{0x02};
byte[] segwitnessLength_1=new byte[]{(byte)0x48};
byte[] segwitnessScript_1 = new byte[]{(byte)0x30,(byte)0x45,(byte)0x02,(byte)0x21,(byte)0x00,(byte)0xBB,(byte)0x5F,(byte)0x78,(byte)0xE8,(byte)0xA1,(byte)0xBA,(byte)0x5E,(byte)0x14,(byte)0x26,(byte)0x1B,(byte)0x0A,(byte)0xD3,(byte)0x95,(byte)0x56,(byte)0xAF,(byte)0x9B,(byte)0x21,(byte)0xD9,(byte)0x1F,(byte)0x67,(byte)0x5D,(byte)0x38,(byte)0xC8,(byte)0xCD,(byte)0xAD,(byte)0x7E,(byte)0x7F,(byte)0x5D,(byte)0x21,(byte)0x00,(byte)0x4A,(byte)0xBD,(byte)0x02,(byte)0x20,(byte)0x4C,(byte)0x1E,(byte)0xAC,(byte)0xF1,(byte)0xF9,(byte)0xAC,(byte)0x1D,(byte)0xCC,(byte)0x61,(byte)0x63,(byte)0xF2,(byte)0x07,(byte)0xFC,(byte)0xBC,(byte)0x49,(byte)0x8B,(byte)0x32,(byte)0x4C,(byte)0xBE,(byte)0xF5,(byte)0x7F,(byte)0x83,(byte)0x9F,(byte)0xA2,(byte)0xC2,(byte)0x55,(byte)0x57,(byte)0x4B,(byte)0x2F,(byte)0x37,(byte)0x19,(byte)0xBC,(byte)0x01};
byte[] segwitnessLength_2=new byte[]{(byte)0x21};
byte[] segwitnessScript_2 = new byte[]{(byte)0x03,(byte)0xC5,(byte)0x3F,(byte)0xEA,(byte)0x9A,(byte)0xE5,(byte)0x61,(byte)0xB6,(byte)0x05,(byte)0x74,(byte)0xB2,(byte)0xD5,(byte)0x10,(byte)0x27,(byte)0x3F,(byte)0x7C,(byte)0x51,(byte)0x60,(byte)0x69,(byte)0x7E,(byte)0xB4,(byte)0x7B,(byte)0x48,(byte)0x8E,(byte)0x95,(byte)0xAD,(byte)0x62,(byte)0x91,(byte)0xBB,(byte)0xCB,(byte)0x5E,(byte)0x43,(byte)0xA2};
int lockTime = 0;
List<BitcoinTransactionInput> randomScriptWitnessInput = new ArrayList<BitcoinTransactionInput>(1);
randomScriptWitnessInput.add(new BitcoinTransactionInput(previousTransactionHash,previousTxOutIndex,txInScriptLength,txInScript,seqNo));
List<BitcoinTransactionOutput> randomScriptWitnessOutput = new ArrayList<BitcoinTransactionOutput>(2);
randomScriptWitnessOutput.add(new BitcoinTransactionOutput(BigInteger.valueOf(value_1),txOutScriptLength_1,txOutScript_1));
randomScriptWitnessOutput.add(new BitcoinTransactionOutput(BigInteger.valueOf(value_2),txOutScriptLength_2,txOutScript_2));
List<BitcoinScriptWitnessItem> randomScriptWitnessSWI = new ArrayList<BitcoinScriptWitnessItem>(1);
List<BitcoinScriptWitness> randomScriptWitnessSW = new ArrayList<BitcoinScriptWitness>(2);
randomScriptWitnessSW.add(new BitcoinScriptWitness(segwitnessLength_1,segwitnessScript_1));
randomScriptWitnessSW.add(new BitcoinScriptWitness(segwitnessLength_2,segwitnessScript_2));
randomScriptWitnessSWI.add(new BitcoinScriptWitnessItem(noOfStackItems,randomScriptWitnessSW));
BitcoinTransaction randomScriptWitnessTransaction = new BitcoinTransaction(marker,flag,version,inCounter,randomScriptWitnessInput,outCounter,randomScriptWitnessOutput,randomScriptWitnessSWI,lockTime);
byte[] randomScriptWitnessTransactionHash=BitcoinUtil.getTransactionHash(randomScriptWitnessTransaction);
byte[] expectedHash = BitcoinUtil.reverseByteArray(new byte[]{(byte)0x47,(byte)0x52,(byte)0x1C,(byte)0x2A,(byte)0x13,(byte)0x45,(byte)0x5E,(byte)0x92,(byte)0xD3,(byte)0xBD,(byte)0x56,(byte)0x3F,(byte)0xAD,(byte)0xA5,(byte)0x78,(byte)0x6E,(byte)0x85,(byte)0xB4,(byte)0x5E,(byte)0x96,(byte)0x85,(byte)0xA8,(byte)0xC9,(byte)0xA3,(byte)0xFE,(byte)0xB8,(byte)0x9A,(byte)0x4F,(byte)0xB5,(byte)0x0D,(byte)0xAF,(byte)0xF5});
assertArrayEquals( expectedHash, randomScriptWitnessTransactionHash,"Hash for Random ScriptWitness Transaction correctly calculated (txId)");
byte[] randomScriptWitnessTransactionHashSegWit=BitcoinUtil.getTransactionHashSegwit(randomScriptWitnessTransaction);
//74700E2CE030013E2E10FCFD06DF99C7826E41C725CA5C467660BFA4874F65BF
byte[] expectedHashSegwit = BitcoinUtil.reverseByteArray(new byte[]{(byte)0x74,(byte)0x70,(byte)0x0E,(byte)0x2C,(byte)0xE0,(byte)0x30,(byte)0x01,(byte)0x3E,(byte)0x2E,(byte)0x10,(byte)0xFC,(byte)0xFD,(byte)0x06,(byte)0xDF,(byte)0x99,(byte)0xC7,(byte)0x82,(byte)0x6E,(byte)0x41,(byte)0xC7,(byte)0x25,(byte)0xCA,(byte)0x5C,(byte)0x46,(byte)0x76,(byte)0x60,(byte)0xBF,(byte)0xA4,(byte)0x87,(byte)0x4F,(byte)0x65,(byte)0xBF});
assertArrayEquals( expectedHashSegwit, randomScriptWitnessTransactionHashSegWit,"Hash for Random ScriptWitness Transaction correctly calculated (wtxId)");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/mapred/EthereumFormatHadoopTest.java | inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/mapred/EthereumFormatHadoopTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapred;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockWritable;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockHeader;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
/**
* @author jornfranke
*
*/
public class EthereumFormatHadoopTest {
private static Configuration defaultConf = new Configuration();
private static FileSystem localFs = null;
private static Reporter reporter = Reporter.NULL;
private final static char[] hexArray = "0123456789ABCDEF".toCharArray();
private static String bytesToHex(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
for ( int j = 0; j < bytes.length; j++ ) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// one-time initialization code
defaultConf.set("fs.defaultFS", "file:///");
localFs = FileSystem.getLocal(defaultConf);
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock3346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock0to10Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock351000to3510010Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406Bzip2CompressedAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.bz2";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406AGzipCompressedvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.gz";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock403419() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block403419.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock447533() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block447533.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void readEthereumBlockInputFormatGenesisBlock() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for genesis block contains at least one block");
assertEquals( 0, block.getEthereumTransactions().size(),"Genesis Block must have 0 transactions");
assertFalse( reader.next(key,block),"No further blocks in genesis Block");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock403419() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block403419.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block 403419");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 403419 contains at least one block");
assertEquals( 2, block.getEthereumTransactions().size(),"Block 403419 must have 2 transactions");
EthereumBlockHeader ethereumBlockHeader = block.getEthereumBlockHeader();
assertEquals(
"f8b483dba2c3b7176a3da549ad41a48bb3121069",
bytesToHex(ethereumBlockHeader.getCoinBase()).toLowerCase(),
"Block 403419 was mined by f8b483dba2c3b7176a3da549ad41a48bb3121069"
);
assertEquals(
"08741fa532c05804d9c1086a311e47cc024bbc43980f561041ad1fbb3c223322",
bytesToHex(ethereumBlockHeader.getParentHash()).toLowerCase(),
"The parent of block 403419 has hash 08741fa532c05804d9c1086a311e47cc024bbc43980f561041ad1fbb3c223322"
);
assertFalse( reader.next(key,block),"No further lock 403419 in genesis Block");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock447533() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block447533.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for block 447533");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 447533 contains at least one block");
assertEquals( 2, block.getEthereumTransactions().size(),"Block 447533 must have 2 transactions");
EthereumBlockHeader ethereumBlockHeader = block.getEthereumBlockHeader();
assertEquals(
"a027231f42c80ca4125b5cb962a21cd4f812e88f",
bytesToHex(ethereumBlockHeader.getCoinBase()).toLowerCase(),
"Block 447533 was mined by a027231f42c80ca4125b5cb962a21cd4f812e88f"
);
assertEquals(
"043559b70c54f0eea6a90b384286d7ab312129603e750075d09fd35e66f8068a",
bytesToHex(ethereumBlockHeader.getParentHash()).toLowerCase(),
"The parent of block 447533 has hash 043559b70c54f0eea6a90b384286d7ab312129603e750075d09fd35e66f8068a"
);
assertFalse( reader.next(key,block),"No further block in block 447533");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 1 contains at least one block");
assertEquals( 0, block.getEthereumTransactions().size(),"Block 1 must have 0 transactions");
assertFalse( reader.next(key,block),"No further blocks in block 1");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1346406() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 1346406 contains at least one block");
assertEquals( 6, block.getEthereumTransactions().size(),"Block 1346406 must have 6 transactions");
assertFalse( reader.next(key,block),"No further blocks in block 1346406");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock3346406() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 3346406 contains at least one block");
assertEquals( 7, block.getEthereumTransactions().size(),"Block 3346406 must have 7 transactions");
assertFalse( reader.next(key,block),"No further blocks in block 3346406");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock0to10() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
int count=0;
while (count<11) {
if (reader.next(key,block)) {
count++;
}
}
assertEquals(11,count,"Block 0..10 contains 11 blocks");
assertFalse( reader.next(key,block),"No further blocks in block 0..10");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock3510000to3510010() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
int count=0;
while (count<11) {
if (reader.next(key,block)) {
count++;
}
}
assertEquals(11,count,"Block 3510000 .. 3510010 contains 11 blocks");
assertFalse( reader.next(key,block),"No further blocks in block 3510000 .. 3510010");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1346406GzipCompressed() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.gz";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 1346406 contains at least one block");
assertEquals( 6, block.getEthereumTransactions().size(),"Block 1346406 must have 6 transactions");
assertFalse( reader.next(key,block),"No further blocks in block 1346406");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1346406Bzip2Compressed() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
JobConf job = new JobConf(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.bz2";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
format.configure(job);
InputSplit[] inputSplits = format.getSplits(job,1);
assertEquals( 1, inputSplits.length,"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.getRecordReader(inputSplits[0], job, reporter);
assertNotNull( reader,"Format returned null RecordReader");
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.next(key,block),"Input Split for block 1346406 contains at least one block");
assertEquals( 6, block.getEthereumTransactions().size(),"Block 1346406 must have 6 transactions");
assertFalse( reader.next(key,block),"No further blocks in block 1346406");
reader.close();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/mapreduce/EthereumFormatHadoopTest.java | inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/mapreduce/EthereumFormatHadoopTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapreduce;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockWritable;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockHeader;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
/**
*
*
*/
public class EthereumFormatHadoopTest {
private static Configuration defaultConf = new Configuration();
private static FileSystem localFs = null;
private final static char[] hexArray = "0123456789ABCDEF".toCharArray();
private static String bytesToHex(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
for ( int j = 0; j < bytes.length; j++ ) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
@BeforeAll
public static void oneTimeSetUp() throws IOException {
// one-time initialization code
defaultConf.set("fs.defaultFS", "file:///");
localFs = FileSystem.getLocal(defaultConf);
}
@AfterAll
public static void oneTimeTearDown() {
// one-time cleanup code
}
@BeforeEach
public void setUp() {
}
@AfterEach
public void tearDown() {
}
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock3346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock0to10Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock351000to3510010Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406Bzip2CompressedAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.bz2";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406AGzipCompressedvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.gz";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock403419() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block403419.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock447533() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block447533.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void readEthereumBlockInputFormatGenesisBlock() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for genesis block");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for genesis block contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 0, block.getEthereumTransactions().size(),"Genesis Block must have 0 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in genesis Block");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock403419() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block403419.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 403419");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 403419 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 2, block.getEthereumTransactions().size(),"Block 403419 must have 2 transactions");
EthereumBlockHeader ethereumBlockHeader = block.getEthereumBlockHeader();
assertEquals(
"f8b483dba2c3b7176a3da549ad41a48bb3121069",
bytesToHex(ethereumBlockHeader.getCoinBase()).toLowerCase(),
"Block 403419 was mined by f8b483dba2c3b7176a3da549ad41a48bb3121069"
);
assertEquals(
"08741fa532c05804d9c1086a311e47cc024bbc43980f561041ad1fbb3c223322",
bytesToHex(ethereumBlockHeader.getParentHash()).toLowerCase(),
"The parent of block 403419 has hash 08741fa532c05804d9c1086a311e47cc024bbc43980f561041ad1fbb3c223322"
);
assertFalse( reader.nextKeyValue(),"No further blocks in block 403419");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock447533() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="block447533.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 447533");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 403419 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 2, block.getEthereumTransactions().size(),"Block 447533 must have 2 transactions");
EthereumBlockHeader ethereumBlockHeader = block.getEthereumBlockHeader();
assertEquals(
"a027231f42c80ca4125b5cb962a21cd4f812e88f",
bytesToHex(ethereumBlockHeader.getCoinBase()).toLowerCase(),
"Block 447533 was mined by a027231f42c80ca4125b5cb962a21cd4f812e88f"
);
assertEquals(
"043559b70c54f0eea6a90b384286d7ab312129603e750075d09fd35e66f8068a",
bytesToHex(ethereumBlockHeader.getParentHash()).toLowerCase(),
"The parent of block 447533 has hash 043559b70c54f0eea6a90b384286d7ab312129603e750075d09fd35e66f8068a"
);
assertFalse( reader.nextKeyValue(),"No further blocks in block 447533");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 1");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 1 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 0, block.getEthereumTransactions().size(),"Block 1 must have 0 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block 1");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1346406() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 1346406");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 1346406 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 6, block.getEthereumTransactions().size(),"Block 1346406 must have 6 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block 1346406");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock3346406() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 3346406");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 3346406 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 7, block.getEthereumTransactions().size(),"Block 3346406 must have 7 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block 3346406");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock0to10() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 0..10");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
int count=0;
while (count<11) {
if (reader.nextKeyValue()) {
count++;
}
}
assertEquals(11,count,"Block 0..10 contains 11 blocks");
assertFalse( reader.nextKeyValue(),"No further blocks in block 0..10");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock3510000to3510010() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 3510000 .. 3510010");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
int count=0;
while (count<11) {
if (reader.nextKeyValue()) {
count++;
}
}
assertEquals(11,count,"Block 3510000 .. 3510010 contains 11 blocks");
assertFalse( reader.nextKeyValue(),"No further blocks in block 3510000 .. 3510010");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1346406GzipCompressed() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.gz";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 1346406");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 1346406 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 6, block.getEthereumTransactions().size(),"Block 1346406 must have 6 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block 1346406");
reader.close();
}
@Test
public void readEthereumBlockInputFormatBlock1346406Bzip2Compressed() throws IOException, EthereumBlockReadException, ParseException, InterruptedException {
Configuration conf = new Configuration(defaultConf);
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin.bz2";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
Path file = new Path(fileNameBlock);
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, file);
EthereumBlockFileInputFormat format = new EthereumBlockFileInputFormat();
List<InputSplit> splits = format.getSplits(job);
TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
assertEquals( 1, splits.size(),"Only one split generated for block 1346406");
RecordReader<BytesWritable, EthereumBlockWritable> reader = format.createRecordReader(splits.get(0), context);
assertNotNull( reader,"Format returned null RecordReader");
reader.initialize(splits.get(0),context);
BytesWritable key = new BytesWritable();
EthereumBlockWritable block = new EthereumBlockWritable();
assertTrue( reader.nextKeyValue(),"Input Split for block 1346406 contains at least one block");
key=reader.getCurrentKey();
block=reader.getCurrentValue();
assertEquals( 6, block.getEthereumTransactions().size(),"Block 1346406 must have 6 transactions");
assertFalse( reader.nextKeyValue(),"No further blocks in block 1346406");
reader.close();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumUtilBlockTest.java | inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumUtilBlockTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.common;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
public class EthereumUtilBlockTest {
static final int DEFAULT_BUFFERSIZE=64*1024;
static final int DEFAULT_MAXSIZE_ETHEREUM_BLOCK =1 * 1024 * 1024;
public final static int CHAIN_ID=1;
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void calculateChainIdBlock1346406() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUM_BLOCK, this.DEFAULT_BUFFERSIZE,direct);
EthereumBlock eblock = ebr.readBlock();
List<EthereumTransaction> eTrans = eblock.getEthereumTransactions();
EthereumTransaction trans0 = eTrans.get(0);
assertNull(EthereumUtil.calculateChainId(trans0),"Block 1346406 Transaction 1 is Ethereum MainNet");
EthereumTransaction trans1 = eTrans.get(1);
assertNull(EthereumUtil.calculateChainId(trans1),"Block 1346406 Transaction 2 is Ethereum MainNet");
EthereumTransaction trans2 = eTrans.get(2);
assertNull(EthereumUtil.calculateChainId(trans2),"Block 1346406 Transaction 3 is Ethereum MainNet");
EthereumTransaction trans3 = eTrans.get(3);
assertNull(EthereumUtil.calculateChainId(trans3),"Block 1346406 Transaction 4 is Ethereum MainNet");
EthereumTransaction trans4 = eTrans.get(4);
assertNull(EthereumUtil.calculateChainId(trans4),"Block 1346406 Transaction 5 is Ethereum MainNet");
EthereumTransaction trans5 = eTrans.get(5);
assertNull(EthereumUtil.calculateChainId(trans5),"Block 1346406 Transaction 6 is Ethereum MainNet");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void getTransActionHashBlock1346406() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUM_BLOCK, this.DEFAULT_BUFFERSIZE,direct);
EthereumBlock eblock = ebr.readBlock();
List<EthereumTransaction> eTrans = eblock.getEthereumTransactions();
EthereumTransaction trans0 = eTrans.get(0);
byte[] expectedHash = new byte[] {(byte)0xe2,(byte)0x7e,(byte)0x92,(byte)0x88,(byte)0xe2,(byte)0x9c,(byte)0xc8,(byte)0xeb,(byte)0x78,(byte)0xf9,(byte)0xf7,(byte)0x68,(byte)0xd8,(byte)0x9b,(byte)0xf1,(byte)0xcd,(byte)0x4b,(byte)0x68,(byte)0xb7,(byte)0x15,(byte)0xa3,(byte)0x8b,(byte)0x95,(byte)0xd4,(byte)0x6d,(byte)0x77,(byte)0x86,(byte)0x18,(byte)0xcb,(byte)0x10,(byte)0x4d,(byte)0x58};
assertArrayEquals(expectedHash,EthereumUtil.getTransactionHash(trans0),"Block 1346406 Transaction 1 hash is correctly calculated");
EthereumTransaction trans1 = eTrans.get(1);
expectedHash = new byte[] {(byte)0x7a,(byte)0x23,(byte)0x2a,(byte)0xa2,(byte)0xae,(byte)0x6a,(byte)0x5e,(byte)0x1f,(byte)0x32,(byte)0xca,(byte)0x3a,(byte)0xc9,(byte)0x3f,(byte)0x4f,(byte)0xdb,(byte)0x77,(byte)0x98,(byte)0x3e,(byte)0x93,(byte)0x2b,(byte)0x38,(byte)0x09,(byte)0x93,(byte)0x56,(byte)0x44,(byte)0x42,(byte)0x08,(byte)0xc6,(byte)0x9d,(byte)0x40,(byte)0x86,(byte)0x81};
assertArrayEquals(expectedHash,EthereumUtil.getTransactionHash(trans1),"Block 1346406 Transaction 2 hash is correctly calculated");
EthereumTransaction trans2 = eTrans.get(2);
expectedHash = new byte[] {(byte)0x14,(byte)0x33,(byte)0xe3,(byte)0xcb,(byte)0x66,(byte)0x2f,(byte)0x66,(byte)0x8d,(byte)0x87,(byte)0xb8,(byte)0x35,(byte)0x55,(byte)0x34,(byte)0x5a,(byte)0x20,(byte)0xcc,(byte)0xf8,(byte)0x70,(byte)0x6f,(byte)0x25,(byte)0x21,(byte)0x49,(byte)0x18,(byte)0xe2,(byte)0xf8,(byte)0x1f,(byte)0xe3,(byte)0xd2,(byte)0x1c,(byte)0x9d,(byte)0x5b,(byte)0x23};
assertArrayEquals(expectedHash,EthereumUtil.getTransactionHash(trans2),"Block 1346406 Transaction 3 hash is correctly calculated");
EthereumTransaction trans3 = eTrans.get(3);
expectedHash = new byte[] {(byte)0x39,(byte)0x22,(byte)0xf7,(byte)0xf6,(byte)0x0a,(byte)0x33,(byte)0xa1,(byte)0x2d,(byte)0x13,(byte)0x9d,(byte)0x67,(byte)0xfa,(byte)0x53,(byte)0x30,(byte)0xdb,(byte)0xfd,(byte)0xba,(byte)0x42,(byte)0xa4,(byte)0xb7,(byte)0x67,(byte)0x29,(byte)0x6e,(byte)0xff,(byte)0x64,(byte)0x15,(byte)0xee,(byte)0xa3,(byte)0x2d,(byte)0x8a,(byte)0x7b,(byte)0x2b};
assertArrayEquals(expectedHash,EthereumUtil.getTransactionHash(trans3),"Block 1346406 Transaction 4 hash is correctly calculated");
EthereumTransaction trans4 = eTrans.get(4);
expectedHash = new byte[] {(byte)0xbb,(byte)0x7c,(byte)0xaa,(byte)0x23,(byte)0x38,(byte)0x5a,(byte)0x0f,(byte)0x73,(byte)0x75,(byte)0x3f,(byte)0x9e,(byte)0x28,(byte)0xd8,(byte)0xf0,(byte)0x60,(byte)0x2f,(byte)0xe2,(byte)0xe7,(byte)0x2d,(byte)0x87,(byte)0xe1,(byte)0xe0,(byte)0x95,(byte)0x52,(byte)0x75,(byte)0x28,(byte)0xd1,(byte)0x44,(byte)0x88,(byte)0x5d,(byte)0x6b,(byte)0x51};
assertArrayEquals(expectedHash,EthereumUtil.getTransactionHash(trans4),"Block 1346406 Transaction 5 hash is correctly calculated");
EthereumTransaction trans5 = eTrans.get(5);
expectedHash = new byte[] {(byte)0xbc,(byte)0xde,(byte)0x6f,(byte)0x49,(byte)0x84,(byte)0x2c,(byte)0x6d,(byte)0x73,(byte)0x8d,(byte)0x64,(byte)0x32,(byte)0x8f,(byte)0x78,(byte)0x09,(byte)0xb1,(byte)0xd4,(byte)0x9b,(byte)0xf0,(byte)0xff,(byte)0x3f,(byte)0xfa,(byte)0x46,(byte)0x0f,(byte)0xdd,(byte)0xd2,(byte)0x7f,(byte)0xd4,(byte)0x2b,(byte)0x7a,(byte)0x01,(byte)0xfc,(byte)0x9a};
assertArrayEquals(expectedHash,EthereumUtil.getTransactionHash(trans5),"Block 1346406 Transaction 6 hash is correctly calculated");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void getTransActionSendAddressBlock1346406() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUM_BLOCK, this.DEFAULT_BUFFERSIZE,direct);
EthereumBlock eblock = ebr.readBlock();
List<EthereumTransaction> eTrans = eblock.getEthereumTransactions();
EthereumTransaction trans0 = eTrans.get(0);
byte[] expectedSentAddress = new byte[] {(byte)0x39,(byte)0x42,(byte)0x4b,(byte)0xd2,(byte)0x8a,(byte)0x22,(byte)0x23,(byte)0xda,(byte)0x3e,(byte)0x14,(byte)0xbf,(byte)0x79,(byte)0x3c,(byte)0xf7,(byte)0xf8,(byte)0x20,(byte)0x8e,(byte)0xe9,(byte)0x98,(byte)0x0a};
assertArrayEquals(expectedSentAddress,EthereumUtil.getSendAddress(trans0, EthereumUtilBlockTest.CHAIN_ID),"Block 1346406 Transaction 1 send address is correctly calculated");
EthereumTransaction trans1 = eTrans.get(1);
expectedSentAddress = new byte[] {(byte)0x4b,(byte)0xb9,(byte)0x60,(byte)0x91,(byte)0xee,(byte)0x9d,(byte)0x80,(byte)0x2e,(byte)0xd0,(byte)0x39,(byte)0xc4,(byte)0xd1,(byte)0xa5,(byte)0xf6,(byte)0x21,(byte)0x6f,(byte)0x90,(byte)0xf8,(byte)0x1b,(byte)0x01};
assertArrayEquals(expectedSentAddress,EthereumUtil.getSendAddress(trans1, EthereumUtilBlockTest.CHAIN_ID),"Block 1346406 Transaction 2 send address is correctly calculated");
EthereumTransaction trans2 = eTrans.get(2);
expectedSentAddress = new byte[] {(byte)0x63,(byte)0xa9,(byte)0x97,(byte)0x5b,(byte)0xa3,(byte)0x1b,(byte)0x0b,(byte)0x96,(byte)0x26,(byte)0xb3,(byte)0x43,(byte)0x00,(byte)0xf7,(byte)0xf6,(byte)0x27,(byte)0x14,(byte)0x7d,(byte)0xf1,(byte)0xf5,(byte)0x26};
assertArrayEquals(expectedSentAddress,EthereumUtil.getSendAddress(trans2, EthereumUtilBlockTest.CHAIN_ID),"Block 1346406 Transaction 3 send address is correctly calculated");
EthereumTransaction trans3 = eTrans.get(3);
expectedSentAddress = new byte[] {(byte)0x63,(byte)0xa9,(byte)0x97,(byte)0x5b,(byte)0xa3,(byte)0x1b,(byte)0x0b,(byte)0x96,(byte)0x26,(byte)0xb3,(byte)0x43,(byte)0x00,(byte)0xf7,(byte)0xf6,(byte)0x27,(byte)0x14,(byte)0x7d,(byte)0xf1,(byte)0xf5,(byte)0x26};
assertArrayEquals(expectedSentAddress,EthereumUtil.getSendAddress(trans3, EthereumUtilBlockTest.CHAIN_ID),"Block 1346406 Transaction 4 send address is correctly calculated");
EthereumTransaction trans4 = eTrans.get(4);
expectedSentAddress = new byte[] {(byte)0x63,(byte)0xa9,(byte)0x97,(byte)0x5b,(byte)0xa3,(byte)0x1b,(byte)0x0b,(byte)0x96,(byte)0x26,(byte)0xb3,(byte)0x43,(byte)0x00,(byte)0xf7,(byte)0xf6,(byte)0x27,(byte)0x14,(byte)0x7d,(byte)0xf1,(byte)0xf5,(byte)0x26};
assertArrayEquals(expectedSentAddress,EthereumUtil.getSendAddress(trans4, EthereumUtilBlockTest.CHAIN_ID),"Block 1346406 Transaction 5 send address is correctly calculated");
EthereumTransaction trans5 = eTrans.get(5);
expectedSentAddress = new byte[] {(byte)0x63,(byte)0xa9,(byte)0x97,(byte)0x5b,(byte)0xa3,(byte)0x1b,(byte)0x0b,(byte)0x96,(byte)0x26,(byte)0xb3,(byte)0x43,(byte)0x00,(byte)0xf7,(byte)0xf6,(byte)0x27,(byte)0x14,(byte)0x7d,(byte)0xf1,(byte)0xf5,(byte)0x26};
assertArrayEquals(expectedSentAddress,EthereumUtil.getSendAddress(trans5, EthereumUtilBlockTest.CHAIN_ID),"Block 1346406 Transaction 6 send address is correctly calculated");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumUtilDecodeTest.java | inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumUtilDecodeTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.common;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.common.rlp.RLPElement;
import org.zuinnote.hadoop.ethereum.format.common.rlp.RLPList;
import org.zuinnote.hadoop.ethereum.format.common.rlp.RLPObject;
import java.nio.ByteBuffer;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class EthereumUtilDecodeTest {
public final static byte[] TEST_RLP_ELEMENT_INT_1024 = new byte[] {(byte) 0x82,0x04,0x00};
public final static byte[] TEST_RLP_LIST_SET3 = new byte[] {(byte) 0xc7,(byte) 0xc0,(byte) 0xc1,(byte) 0xc0,(byte) 0xc3,(byte) 0xc0,(byte) 0xc1,(byte) 0xc0};
public final static byte[] TEST_RLP_LIST_LARGE_LIST = new byte[] {(byte) 0xf8,0x50,(byte) 0xb8,0x4E, 'a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z'};
@Test
public void decodeRLPElementString() {
final byte[] TEST_RLP_ELEMENT_STRING = new byte[] {(byte) 0x83,'d','o','g'};
ByteBuffer wrapper = ByteBuffer.wrap(TEST_RLP_ELEMENT_STRING);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPElement element = (RLPElement)elementObject;
byte[] expectedIndicator = new byte[] {(byte) 0x83};
assertArrayEquals( expectedIndicator, element.getIndicator(),"Indicator is correct");
byte[] expectedRawData = new byte[]{'d','o','g'};
assertArrayEquals( expectedRawData,element.getRawData(),"Raw data is correct");
}
@Test
public void decodeRLPListString() {
final byte[] TEST_RLP_LIST_STRING = new byte[] {(byte) 0xc8,(byte) 0x83,'c','a','t',(byte) 0x83,'d','o','g'};
ByteBuffer wrapper = ByteBuffer.wrap(TEST_RLP_LIST_STRING);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPList list = (RLPList)elementObject;
assertEquals(2,list.getRlpList().size(),"List of size 2");
RLPElement firstElement = (RLPElement) list.getRlpList().get(0);
byte[] expectedIndicator = new byte[]{(byte) 0x83};
assertArrayEquals(expectedIndicator,firstElement.getIndicator(),"First element indicator is correct");
byte[] expectedRawData=new byte[]{'c','a','t'};
assertArrayEquals(expectedRawData,firstElement.getRawData(),"First element raw data is correct");
RLPElement secondElement = (RLPElement) list.getRlpList().get(1);
expectedIndicator=new byte[]{(byte) 0x83};
assertArrayEquals(expectedIndicator,secondElement.getIndicator(),"Second element indicator is correct");
expectedRawData=new byte[]{'d','o','g'};
assertArrayEquals(expectedRawData,secondElement.getRawData(),"Second element raw data is correct");
}
@Test
public void decodeRLPEmptyString() {
final byte[] TEST_RLP_EMPTY_STRING = new byte[]{(byte) 0x80};
ByteBuffer wrapper = ByteBuffer.wrap(TEST_RLP_EMPTY_STRING);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPElement element = (RLPElement)elementObject;
byte[] expectedIndicator = new byte[] {(byte) 0x80};
assertArrayEquals( expectedIndicator, element.getIndicator(),"Indicator is correct");
assertEquals( 0,element.getRawData().length,"Raw data is zero");
}
@Test
public void decodeRLPEmptyList() {
final byte[] TEST_RLP_EMPTY_LIST = new byte[]{(byte) 0xc0};
ByteBuffer wrapper = ByteBuffer.wrap(TEST_RLP_EMPTY_LIST);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPList list = (RLPList)elementObject;
assertEquals(0,list.getRlpList().size(),"List of size 0");
}
@Test
public void decodeRLPElementInt15() {
final byte[] TEST_RLP_ELEMENT_INT_15 = new byte[] {0x0f};
ByteBuffer wrapper = ByteBuffer.wrap(TEST_RLP_ELEMENT_INT_15);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPElement element = (RLPElement)elementObject;
byte[] expectedIndicator = new byte[] {(byte) 0x0f};
assertArrayEquals( expectedIndicator, element.getIndicator(),"Indicator is correct");
assertEquals( 1,element.getRawData().length,"Raw data has length 1");
assertEquals( 15,element.getRawData()[0],"Raw data contains the element 15");
}
@Test
public void decodeRLPElementInt1024() {
ByteBuffer wrapper = ByteBuffer.wrap(EthereumUtilDecodeTest.TEST_RLP_ELEMENT_INT_1024);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPElement element = (RLPElement)elementObject;
byte[] expectedIndicator = new byte[] {(byte) 0x82};
assertArrayEquals( expectedIndicator, element.getIndicator(),"Indicator is correct");
assertEquals( 2,element.getRawData().length,"Raw data has length 2");
assertArrayEquals( ByteBuffer.allocate(2).putShort((short) 1024).array(),element.getRawData(),"Raw data contains the element 1024");
}
@Test
public void decodeRLPListSet3() {
ByteBuffer wrapper = ByteBuffer.wrap(EthereumUtilDecodeTest.TEST_RLP_LIST_SET3);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPList list = (RLPList)elementObject;
assertEquals(3,list.getRlpList().size(),"List of size 3");
RLPList firstList=(RLPList) list.getRlpList().get(0);
assertEquals(0,firstList.getRlpList().size(),"First list is empty");
RLPList secondList=(RLPList) list.getRlpList().get(1);
assertEquals(1,secondList.getRlpList().size(),"Second list contains one object");
RLPList secondListOneObject=(RLPList) secondList.getRlpList().get(0);
assertEquals(0,secondListOneObject.getRlpList().size(),"Second List First Element is an empty list");
RLPList thirdList = (RLPList) list.getRlpList().get(2);
assertEquals(2,thirdList.getRlpList().size(),"Third list contains two objects");
RLPList thirdListFirstObject = (RLPList) thirdList.getRlpList().get(0);
assertEquals(0,thirdListFirstObject.getRlpList().size(),"Third list first object is an empyt list");
RLPList thirdListSecondObject = (RLPList) thirdList.getRlpList().get(1);
assertEquals(1,thirdListSecondObject.getRlpList().size(),"Third list second object is a list of size 1");
RLPList thirdListSecondObjectListOneObject = (RLPList) thirdListSecondObject.getRlpList().get(0);
assertEquals(0,thirdListSecondObjectListOneObject.getRlpList().size(),"Third list second object list first item is an empty list");
}
@Test
public void decodeRLPElementLargeString() {
ByteBuffer wrapper = ByteBuffer.wrap(EthereumUtilDecodeTest.TEST_RLP_LIST_LARGE_LIST);
RLPObject elementObject = EthereumUtil.rlpDecodeNextItem(wrapper);
RLPList list = (RLPList)elementObject;
assertEquals(1,list.getRlpList().size(),"List contains one object");
RLPElement element = (RLPElement) list.getRlpList().get(0);
byte[] expectedIndicator = new byte[] {(byte) 0xb8,(byte)0x4E};
assertArrayEquals( expectedIndicator, element.getIndicator(),"List first object indicator is correct");
byte[] expectedRawData = new byte[]{'a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z'};
assertArrayEquals( expectedRawData,element.getRawData(),"List first object raw data is correct");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumUtilEncodeTest.java | inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumUtilEncodeTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.common;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class EthereumUtilEncodeTest {
@Test
public void encodeRLPElementLargeByteArray() {
String hexString="0A19B14A0000000000000000000000008F8221AFBB33998D8584A2B05749BA73C37A938A000000000000000000000000000000000000000000000028A857425466F80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004FEFA17B72400000000000000000000000000000000000000000000000000000000000000496606000000000000000000000000000000000000000000000000000000000B4594E6000000000000000000000000439F3E3AEB991155923B2E9D79B40169C38238C6000000000000000000000000000000000000000000000000000000000000001C6CAFDE39C9387CA142932617D966B4EEDA67ADE303604D3D7F57D796393324225DF009CCAA166300AC48C95013B8159F545CF11CC17B2BB23903FE1E5E02DD3D0000000000000000000000000000000000000000000000284041E810FF8A0000";
byte[] byteArray=EthereumUtil.convertHexStringToByteArray(hexString);
byte[] rlpEncode = EthereumUtil.encodeRLPElement(byteArray);
String rlpHexString = "B90164"+hexString;
assertEquals(rlpHexString,EthereumUtil.convertByteArrayToHexString(rlpEncode));
}
@Test
public void encodeRLPElementLargeByteArray2()
{
String hexString="6060604052341561000F57600080FD5B6040516060806105B08339810160405280805191906020018051919060200180519150505B600160A060020A038316151561004957600080FD5B60008054600160A060020A03808616600160A060020A0319928316179092556001805485841690831617905560028054928416929091169190911790555B5050505B6105168061009A6000396000F300606060405236156100675763FFFFFFFF60E060020A6000350416631072CBEA81146100A55780636C3B58E0146100C957806373FFD5B7146100F65780637822ED491461010E5780638DA5CB5B1461013D578063DC5F346B1461016C578063F444FDD814610199575B5B6000341161007557600080FD5B600154600160A060020A03163460405160006040518083038185876187965A03F19250505015156100A257FE5B5B005B34156100B057600080FD5B6100A2600160A060020A03600435166024356101C8565B005B34156100D457600080FD5B6100A2600160A060020A036004351660243560443560643560843561033A565B005B341561010157600080FD5B6100A26004356103AA565B005B341561011957600080FD5B61012161044D565B604051600160A060020A03909116815260200160405180910390F35B341561014857600080FD5B61012161045C565B604051600160A060020A03909116815260200160405180910390F35B341561017757600080FD5B6100A2600160A060020A036004351660243560443560643560843561046B565B005B34156101A457600080FD5B6101216104DB565B604051600160A060020A03909116815260200160405180910390F35B60025460009033600160A060020A03908116911614806101F6575060005433600160A060020A039081169116145B151561020157600080FD5B600160A060020A038316151561021357FE5B82600160A060020A03166370A082313060006040516020015260405160E060020A63FFFFFFFF8416028152600160A060020A039091166004820152602401602060405180830381600087803B151561026A57600080FD5B6102C65A03F1151561027B57600080FD5B50505060405180519150506000811180156102965750600082115B80156102A25750818110155B15156102AD57600080FD5B600154600160A060020A038085169163A9059CBB91168460006040516020015260405160E060020A63FFFFFFFF8516028152600160A060020A0390921660048301526024820152604401602060405180830381600087803B151561031057600080FD5B6102C65A03F1151561032157600080FD5B50505060405180519050151561033357FE5B5B5B505050565B60005433600160A060020A0390811691161461035557600080FD5B600160A060020A038516151561036A57600080FD5B80828486081461037957600080FD5B6001805473FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1916600160A060020A0387161790555B5B5050505050565B60025433600160A060020A03908116911614806103D5575060005433600160A060020A039081169116145B15156103E057600080FD5B600030600160A060020A0316311180156103FA5750600081115B801561041057508030600160A060020A03163110155B151561041B57600080FD5B600154600160A060020A03168160405160006040518083038185876187965A03F192505050151561044857FE5B5B5B50565B600154600160A060020A031681565B600054600160A060020A031681565B60005433600160A060020A0390811691161461048657600080FD5B600160A060020A038516151561049B57600080FD5B8082848608146104AA57600080FD5B6002805473FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1916600160A060020A0387161790555B5B5050505050565B600254600160A060020A0316815600A165627A7A72305820CF04E4D1DA97DCE96924EC4676A82FC1B7E1DF58CA22EC7328A5718A498BF8120029000000000000000000000000FE30E28B9DB06769D1D5F263905CDA31C4B2E0950000000000000000000000007B74C19124A9CA92C6141A2ED5F92130FC2791F20000000000000000000000007250ABCACDF1B5E19EC216619EA2614DC2FF6B7A";
byte[] byteArray=EthereumUtil.convertHexStringToByteArray(hexString);
byte[] rlpEncode = EthereumUtil.encodeRLPElement(byteArray);
String rlpHexString = "B90610"+hexString;
assertEquals(rlpHexString,EthereumUtil.convertByteArrayToHexString(rlpEncode));
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumFormatReaderTest.java | inputformat/src/test/java/org/zuinnote/hadoop/ethereum/format/common/EthereumFormatReaderTest.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.common;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.jupiter.api.Test;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
/**
* @author jornfranke
*
*/
public class EthereumFormatReaderTest {
static final int DEFAULT_BUFFERSIZE=64*1024;
static final int DEFAULT_MAXSIZE_ETHEREUMBLOCK=1 * 1024 * 1024;
private static final Log LOG = LogFactory.getLog(EthereumFormatReaderTest.class.getName());
public static int CHAIN_ID=1;
@Test
public void checkTestDataGenesisBlockAvailable() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock1346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock3346406Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock0to10Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock351000to3510010Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlock4800251Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth4800251.bin";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void checkTestDataBlocks70000007000010Available() {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth70000007000010";
String fileNameGenesis=classLoader.getResource("testdata/"+fileName).getFile();
assertNotNull(fileNameGenesis,"Test Data File \""+fileName+"\" is not null in resource path");
File file = new File(fileNameGenesis);
assertTrue( file.exists(),"Test Data File \""+fileName+"\" exists");
assertFalse( file.isDirectory(),"Test Data File \""+fileName+"\" is not a directory");
}
@Test
public void parseGenesisBlockAsEthereumRawBlockHeap() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertFalse( blockByteBuffer.isDirect(),"Raw Genesis Block is HeapByteBuffer");
assertEquals( 540, blockByteBuffer.limit(),"Raw Genesis block has a size of 540 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseGenesisBlockAsEthereumRawBlockDirect() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertTrue( blockByteBuffer.isDirect(),"Raw Genesis Block is DirectByteBuffer");
assertEquals( 540, blockByteBuffer.limit(),"Raw Genesis block has a size of 540 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlockOneAsEthereumRawBlockHeap() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertFalse( blockByteBuffer.isDirect(),"Raw block 1 is HeapByteBuffer");
assertEquals( 537, blockByteBuffer.limit(),"Raw block 1 has a size of 537 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlockOneAsEthereumRawBlockDirect() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertTrue( blockByteBuffer.isDirect(),"Raw block 1 is DirectByteBuffer");
assertEquals( 537, blockByteBuffer.limit(),"Raw block 1 has a size of 537 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock1346406AsEthereumRawBlockHeap() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertFalse( blockByteBuffer.isDirect(),"Raw block 1346406 is HeapByteBuffer");
assertEquals( 1223, blockByteBuffer.limit(),"Raw block 1346406 has a size of 1223 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock1346406AsEthereumRawBlockDirect() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertTrue( blockByteBuffer.isDirect(),"Raw block 1346406 is DirectByteBuffer");
assertEquals( 1223, blockByteBuffer.limit(),"Raw block 1346406 has a size of 1223 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock3346406AsEthereumRawBlockHeap() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertFalse( blockByteBuffer.isDirect(),"Raw block 3346406 is HeapByteBuffer");
assertEquals( 2251, blockByteBuffer.limit(),"Raw block 3346406 has a size of 2251 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock3346406AsEthereumRawBlockDirect() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth3346406.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertTrue( blockByteBuffer.isDirect(),"Raw block 3346406 is DirectByteBuffer");
assertEquals( 2251, blockByteBuffer.limit(),"Raw block 3346406 has a size of 2251 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock0to10AsEthereumRawBlockHeap() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertFalse( blockByteBuffer.isDirect(),"Raw block 0 is HeapByteBuffer");
assertEquals( 540, blockByteBuffer.limit(),"Raw block 0 has a size of 540 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 1 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 544, blockByteBuffer.limit(),"Raw block 2 has a size of 544 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1079, blockByteBuffer.limit(),"Raw block 3 has a size of 1079 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1079, blockByteBuffer.limit(),"Raw block 4 has a size of 1079 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 5 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 6 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1078, blockByteBuffer.limit(),"Raw block 7 has a size of 1078 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 8 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 544, blockByteBuffer.limit(),"Raw block 9 has a size of 544 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 10 has a size of 537 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock0to10AsEthereumRawBlockDirect() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth0to10.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertTrue( blockByteBuffer.isDirect(),"Raw block 0 is DirectByteBuffer");
assertEquals( 540, blockByteBuffer.limit(),"Raw block 0 has a size of 540 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 1 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 544, blockByteBuffer.limit(),"Raw block 2 has a size of 544 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1079, blockByteBuffer.limit(),"Raw block 3 has a size of 1079 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1079, blockByteBuffer.limit(),"Raw block 4 has a size of 1079 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 5 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 6 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1078, blockByteBuffer.limit(),"Raw block 7 has a size of 1078 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 8 has a size of 537 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 544, blockByteBuffer.limit(),"Raw block 9 has a size of 544 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 537, blockByteBuffer.limit(),"Raw block 10 has a size of 537 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock35100to351010AsEthereumRawBlockHeap() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertFalse( blockByteBuffer.isDirect(),"Raw block 3510000 is HeapByteBuffer");
assertEquals( 2842, blockByteBuffer.limit(),"Raw block 3510000 has a size of 2842 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 539, blockByteBuffer.limit(),"Raw block 3510001 has a size of 539 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 2595, blockByteBuffer.limit(),"Raw block 3510002 has a size of 2595 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 11636, blockByteBuffer.limit(),"Raw block 3510003 has a size of 11636 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1335, blockByteBuffer.limit(),"Raw block 3510004 has a size of 1335 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 9126, blockByteBuffer.limit(),"Raw block 3510005 has a size of 9126 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 7807, blockByteBuffer.limit(),"Raw block 3510006 has a size of 7807 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 532, blockByteBuffer.limit(),"Raw block 3510007 has a size of 532 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1393, blockByteBuffer.limit(),"Raw block 3510008 has a size of 1393 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1217, blockByteBuffer.limit(),"Raw block 3510009 has a size of 1217 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1016, blockByteBuffer.limit(),"Raw block 3510010 has a size of 1016 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseBlock35100to351010AsEthereumRawBlockDirect() throws IOException, EthereumBlockReadException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth351000to3510010.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
ByteBuffer blockByteBuffer = ebr.readRawBlock();
assertTrue( blockByteBuffer.isDirect(),"Raw block 3510000 is DirecteBuffer");
assertEquals( 2842, blockByteBuffer.limit(),"Raw block 3510000 has a size of 2842 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 539, blockByteBuffer.limit(),"Raw block 3510001 has a size of 539 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 2595, blockByteBuffer.limit(),"Raw block 3510002 has a size of 2595 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 11636, blockByteBuffer.limit(),"Raw block 3510003 has a size of 11636 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1335, blockByteBuffer.limit(),"Raw block 3510004 has a size of 1335 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 9126, blockByteBuffer.limit(),"Raw block 3510005 has a size of 9126 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 7807, blockByteBuffer.limit(),"Raw block 3510006 has a size of 7807 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 532, blockByteBuffer.limit(),"Raw block 3510007 has a size of 532 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1393, blockByteBuffer.limit(),"Raw block 3510008 has a size of 1393 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1217, blockByteBuffer.limit(),"Raw block 3510009 has a size of 1217 bytes");
blockByteBuffer = ebr.readRawBlock();
assertEquals( 1016, blockByteBuffer.limit(),"Raw block 3510010 has a size of 1016 bytes");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseGenesisBlockAsEthereumBlockHeap() throws IOException, EthereumBlockReadException, ParseException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=false;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
EthereumBlock eblock = ebr.readBlock();
EthereumBlockHeader eblockHeader = eblock.getEthereumBlockHeader();
List<EthereumTransaction> eTransactions = eblock.getEthereumTransactions();
List<EthereumBlockHeader> eUncles = eblock.getUncleHeaders();
assertEquals( 0, eTransactions.size(),"Genesis block contains 0 transactions");
assertEquals(0, eUncles.size(),"Genesis block contains 0 uncleHeaders");
byte[] expectedParentHash = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals( expectedParentHash, eblockHeader.getParentHash(),"Genesis block contains a 32 byte hash consisting only of 0x00");
byte[] expectedUncleHash = new byte[] {(byte) 0x1D,(byte) 0xCC,0x4D,(byte) 0xE8,(byte) 0xDE, (byte) 0xC7,(byte) 0x5D,
(byte) 0x7A,(byte) 0xAB,(byte) 0x85,(byte) 0xB5,(byte) 0x67,(byte) 0xB6,(byte) 0xCC,(byte) 0xD4,
0x1A,(byte) 0xD3,(byte)0x12, 0x45,0x1B,(byte) 0x94,(byte) 0x8A,0x74,0x13,(byte) 0xF0,
(byte) 0xA1,0x42,(byte) 0xFD,0x40,(byte) 0xD4,(byte) 0x93,0x47};
assertArrayEquals( expectedUncleHash, eblockHeader.getUncleHash(),"Genesis block contains a correct 32 byte uncle hash");
byte[] expectedCoinbase = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals(expectedCoinbase,eblockHeader.getCoinBase(),"Genesis block contains a 20 byte coinbase consisting only of 0x00");
byte[] expectedStateRoot= new byte[] {(byte) 0xD7,(byte) 0xF8,(byte) 0x97,0x4F,(byte) 0xB5,(byte) 0xAC,0x78,(byte) 0xD9,(byte) 0xAC,0x09,(byte) 0x9B,(byte) 0x9A,(byte) 0xD5,0x01,(byte) 0x8B,(byte) 0xED,(byte) 0xC2,(byte) 0xCE,0x0A,0x72,(byte) 0xDA,(byte) 0xD1,(byte) 0x82,0x7A,0x17,0x09,(byte) 0xDA,0x30,0x58,0x0F,0x05,0x44};
assertArrayEquals(expectedStateRoot,eblockHeader.getStateRoot(),"Genesis block contains a correct 32 byte stateroot");
byte[] expectedTxTrieRoot= new byte[] {0x56,(byte) 0xE8,0x1F,0x17,0x1B,(byte) 0xCC,0x55,(byte) 0xA6,(byte) 0xFF,(byte) 0x83,0x45,(byte) 0xE6,(byte) 0x92,(byte) 0xC0,(byte) 0xF8,0x6E,0x5B,0x48,(byte) 0xE0,0x1B,(byte) 0x99,0x6C,(byte) 0xAD,(byte) 0xC0,0x01,0x62,0x2F,(byte) 0xB5,(byte) 0xE3,0x63,(byte) 0xB4,0x21};
assertArrayEquals(expectedTxTrieRoot,eblockHeader.getTxTrieRoot(),"Genesis block contains a correct 32 byte txTrieRoot");
byte[] expectedReceiptTrieRoot=new byte[] {0x56,(byte) 0xE8,0x1F,0x17,0x1B,(byte) 0xCC,0x55,(byte) 0xA6,(byte) 0xFF,(byte) 0x83,0x45,(byte) 0xE6,(byte) 0x92,(byte) 0xC0,(byte) 0xF8,0x6E,0x5B,0x48,(byte) 0xE0,0x1B,(byte) 0x99,0x6C,(byte) 0xAD,(byte) 0xC0,0x01,0x62,0x2F,(byte) 0xB5,(byte) 0xE3,0x63,(byte) 0xB4,0x21};
assertArrayEquals(expectedReceiptTrieRoot,eblockHeader.getReceiptTrieRoot(),"Genesis block contains a correct 32 byte ReceiptTrieRoot");
byte[] expectedLogsBloom = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals( expectedLogsBloom, eblockHeader.getLogsBloom(),"Genesis block contains a 256 byte log bloom consisting only of 0x00");
byte[] expectedDifficulty = new byte[] {0x04,0x00,0x00,0x00,0x00};
assertArrayEquals( expectedDifficulty, eblockHeader.getDifficulty(),"Genesis block contains a correct 5 byte difficulty");
assertEquals(0L, eblockHeader.getTimestamp(),"Genesis block contains a timestamp of 0");
long expectedNumber = 0L;
assertEquals( BigInteger.valueOf(expectedNumber), eblockHeader.getNumber(),"Genesis block contains a number 0");
byte[] expectedGasLimit = new byte[] {0x13,(byte) 0x88};
assertArrayEquals( expectedGasLimit, eblockHeader.getGasLimitRaw(),"Genesis block contains a correct 2 byte gas limit");
long expectedGasUsed = 0L;
assertEquals( BigInteger.valueOf(expectedGasUsed), eblockHeader.getGasUsed(),"Genesis block contains a gas used of 0");
byte[] expectedMixHash= new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals( expectedMixHash, eblockHeader.getMixHash(),"Genesis block contains a correct 32 byte mix hash consisting only of 0x00");
byte[] expectedExtraData= new byte[] {0x11,(byte) 0xBB,(byte) 0xE8,(byte) 0xDB,0x4E,0x34,0x7B,0x4E,(byte) 0x8C,(byte) 0x93,0x7C,0x1C,(byte) 0x83,0x70,(byte) 0xE4,(byte) 0xB5,(byte) 0xED,0x33,(byte) 0xAD,(byte) 0xB3,(byte) 0xDB,0x69,(byte) 0xCB,(byte) 0xDB,0x7A,0x38,(byte) 0xE1,(byte) 0xE5,0x0B,0x1B,(byte) 0x82,(byte) 0xFA};
assertArrayEquals( expectedExtraData, eblockHeader.getExtraData(),"Genesis block contains correct 32 byte extra data");
byte[] expectedNonce = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x42};
assertArrayEquals( expectedNonce, eblockHeader.getNonce(),"Genesis block contains a correct 8 byte nonce");
} finally {
if (ebr!=null) {
ebr.close();
}
}
}
@Test
public void parseGenesisBlockAsEthereumBlockDirect() throws IOException, EthereumBlockReadException, ParseException {
ClassLoader classLoader = getClass().getClassLoader();
String fileName="ethgenesis.bin";
String fileNameBlock=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fileNameBlock);
boolean direct=true;
FileInputStream fin = new FileInputStream(file);
EthereumBlockReader ebr = null;
try {
ebr = new EthereumBlockReader(fin,this.DEFAULT_MAXSIZE_ETHEREUMBLOCK, this.DEFAULT_BUFFERSIZE,direct);
EthereumBlock eblock = ebr.readBlock();
EthereumBlockHeader eblockHeader = eblock.getEthereumBlockHeader();
List<EthereumTransaction> eTransactions = eblock.getEthereumTransactions();
List<EthereumBlockHeader> eUncles = eblock.getUncleHeaders();
assertEquals( 0, eTransactions.size(),"Genesis block contains 0 transactions");
assertEquals(0, eUncles.size(),"Genesis block contains 0 uncleHeaders");
byte[] expectedParentHash = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals( expectedParentHash, eblockHeader.getParentHash(),"Genesis block contains a 32 byte hash consisting only of 0x00");
byte[] expectedUncleHash = new byte[] {(byte) 0x1D,(byte) 0xCC,0x4D,(byte) 0xE8,(byte) 0xDE, (byte) 0xC7,(byte) 0x5D,
(byte) 0x7A,(byte) 0xAB,(byte) 0x85,(byte) 0xB5,(byte) 0x67,(byte) 0xB6,(byte) 0xCC,(byte) 0xD4,
0x1A,(byte) 0xD3,(byte)0x12, 0x45,0x1B,(byte) 0x94,(byte) 0x8A,0x74,0x13,(byte) 0xF0,
(byte) 0xA1,0x42,(byte) 0xFD,0x40,(byte) 0xD4,(byte) 0x93,0x47};
assertArrayEquals( expectedUncleHash, eblockHeader.getUncleHash(),"Genesis block contains a correct 32 byte uncle hash");
byte[] expectedCoinbase = new byte[] {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
assertArrayEquals(expectedCoinbase,eblockHeader.getCoinBase(),"Genesis block contains a 20 byte coinbase consisting only of 0x00");
byte[] expectedStateRoot= new byte[] {(byte) 0xD7,(byte) 0xF8,(byte) 0x97,0x4F,(byte) 0xB5,(byte) 0xAC,0x78,(byte) 0xD9,(byte) 0xAC,0x09,(byte) 0x9B,(byte) 0x9A,(byte) 0xD5,0x01,(byte) 0x8B,(byte) 0xED,(byte) 0xC2,(byte) 0xCE,0x0A,0x72,(byte) 0xDA,(byte) 0xD1,(byte) 0x82,0x7A,0x17,0x09,(byte) 0xDA,0x30,0x58,0x0F,0x05,0x44};
assertArrayEquals(expectedStateRoot,eblockHeader.getStateRoot(),"Genesis block contains a correct 32 byte stateroot");
byte[] expectedTxTrieRoot= new byte[] {0x56,(byte) 0xE8,0x1F,0x17,0x1B,(byte) 0xCC,0x55,(byte) 0xA6,(byte) 0xFF,(byte) 0x83,0x45,(byte) 0xE6,(byte) 0x92,(byte) 0xC0,(byte) 0xF8,0x6E,0x5B,0x48,(byte) 0xE0,0x1B,(byte) 0x99,0x6C,(byte) 0xAD,(byte) 0xC0,0x01,0x62,0x2F,(byte) 0xB5,(byte) 0xE3,0x63,(byte) 0xB4,0x21};
assertArrayEquals(expectedTxTrieRoot,eblockHeader.getTxTrieRoot(),"Genesis block contains a correct 32 byte txTrieRoot");
byte[] expectedReceiptTrieRoot=new byte[] {0x56,(byte) 0xE8,0x1F,0x17,0x1B,(byte) 0xCC,0x55,(byte) 0xA6,(byte) 0xFF,(byte) 0x83,0x45,(byte) 0xE6,(byte) 0x92,(byte) 0xC0,(byte) 0xF8,0x6E,0x5B,0x48,(byte) 0xE0,0x1B,(byte) 0x99,0x6C,(byte) 0xAD,(byte) 0xC0,0x01,0x62,0x2F,(byte) 0xB5,(byte) 0xE3,0x63,(byte) 0xB4,0x21};
assertArrayEquals(expectedReceiptTrieRoot,eblockHeader.getReceiptTrieRoot(),"Genesis block contains a correct 32 byte ReceiptTrieRoot");
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | true |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/namecoin/format/common/NamecoinUtil.java | inputformat/src/main/java/org/zuinnote/hadoop/namecoin/format/common/NamecoinUtil.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.namecoin.format.common;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinUtil;
public class NamecoinUtil {
private static final Log LOG = LogFactory.getLog(NamecoinUtil.class.getName());
public final static byte OP_NAME_NEW=0x51;
public final static byte OP_NAME_FIRSTUPDATE=0x52;
public final static byte OP_NAME_UDPATE=0x53;
public final static String STR_OP_UNKNOWN="unknown";
public final static String STR_OP_NAME_NEW="OP_NAME_NEW";
public final static String STR_OP_NAME_FIRSTUPDATE="OP_NAME_FIRSTUPDATE";
public final static String STR_OP_NAME_UDPATE="OP_NAME_UPDATE";
/**
* Extracts a Namecoin field name (String) and value field (a JSON object) from a script. Please note that not all Namecoin transactions do contain Namecoin fields, some are coinbase (ie mining) transactions and others are regular transactions to transfer Namecoins (comparable to Bitcoin transactions=
* Additionally, you can extract only information related to the OPs NAME_FIRSTUPDATE and NAME_UPDATE, because NAME_NEW does not contain the name, but only a script hash that is used by NAME_FIRSTUPDATE to define the name
*
* There are certain naming conventions that helps to identify the type of field, e.g. if name starts with:
* (1) d/ then it is a domain name
* (2) s/ or dd/ then it contains further domain data
* (3) id/ it contains a public online identity
*
* See also:
* https://wiki.namecoin.org/index.php?title=Domain_Name_Specification
* https://wiki.namecoin.org/index.php?title=Identity
*
* @param scriptPubKey Output script potentially containing a Namecoin operation
* @return Array of size 2 where the first entry is the name (e.g. d/example) and the second entry is a JSON object serialized as String, null if not a valid Namecoin DNS field
*/
public static String[] extractNamecoinField(byte[] scriptPubKey) {
// check if valid script
if ((scriptPubKey==null) || (scriptPubKey.length<2)) {
return null;
}
// only firstupdate and update work
if (!((scriptPubKey[0]==NamecoinUtil.OP_NAME_UDPATE)|| (scriptPubKey[0]==NamecoinUtil.OP_NAME_FIRSTUPDATE))) {
return null;
}
String[] result = new String[2];
// convert script into ByteBuffer
ByteBuffer scriptByteBuffer = ByteBuffer.wrap(scriptPubKey);
// skip op
scriptByteBuffer.get();
// read name
// get size
long nameSize=BitcoinUtil.convertVarIntByteBufferToLong(scriptByteBuffer);
// extract name
byte[] nameByteArray = new byte[(int)nameSize];
scriptByteBuffer.get(nameByteArray);
String name = new String(nameByteArray,Charset.forName("UTF-8"));
result[0]=name;
if (scriptPubKey[0]==NamecoinUtil.OP_NAME_FIRSTUPDATE) {
// skip intermediate information
long intermediateInformationSize = BitcoinUtil.convertVarIntByteBufferToLong(scriptByteBuffer);
byte[] intermediateInformation=new byte[(int)intermediateInformationSize];
scriptByteBuffer.get(intermediateInformation);
}
// read value
long valueSize = BitcoinUtil.convertVarIntByteBufferToLong(scriptByteBuffer);
byte[] valueByteArray = new byte[(int)valueSize];
scriptByteBuffer.get(valueByteArray);
String value = new String (valueByteArray, Charset.forName("UTF-8"));
result[1]=value;
return result;
}
/**
* Determines the name operation (if any) of the given script. Please note that not all Namecoin transactions do contain Namecoin fields, some are coinbase (ie mining) transactions and others are regular transactions to transfer Namecoins (comparable to Bitcoin transactions
*
* @param scriptPubKey Output script potentially containing a Namecoin operation
* @return Name operation: "OP_NAME_NEW", "OP_NAME_FIRST_UPDATE", "OP_NAME_UPDATE" or in case it cannot be determined: "unknown"
*/
public static String getNameOperation(byte[] scriptPubKey) {
if (scriptPubKey==null) {
return NamecoinUtil.STR_OP_UNKNOWN;
}
if (scriptPubKey.length>1) {
byte nameOp=scriptPubKey[0];
switch(nameOp) {
case NamecoinUtil.OP_NAME_NEW:
return NamecoinUtil.STR_OP_NAME_NEW;
case NamecoinUtil.OP_NAME_FIRSTUPDATE:
return NamecoinUtil.STR_OP_NAME_FIRSTUPDATE;
case NamecoinUtil.OP_NAME_UDPATE:
return NamecoinUtil.STR_OP_NAME_UDPATE;
default:
return NamecoinUtil.STR_OP_UNKNOWN;
}
}
// in all other cases we do not know
return NamecoinUtil.STR_OP_UNKNOWN;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinRawBlockRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinRawBlockRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
public class BitcoinRawBlockRecordReader extends AbstractBitcoinRecordReader<BytesWritable, BytesWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinRawBlockRecordReader.class.getName());
public BitcoinRawBlockRecordReader(FileSplit split,JobConf job, Reporter reporter) throws IOException,HadoopCryptoLedgerConfigurationException,BitcoinBlockReadException {
super(split,job,reporter);
}
/**
*
* Create an empty key
*
* @return key
*/
@Override
public BytesWritable createKey() {
return new BytesWritable();
}
/**
*
* Create an empty value
*
* @return value
*/
@Override
public BytesWritable createValue() {
return new BytesWritable();
}
/**
*
* Read a next block.
*
* @param key is a 64 byte array (hashMerkleRoot and prevHashBlock)
* @param value is a deserialized Java object of class BitcoinBlock
*
* @return true if next block is available, false if not
*/
@Override
public boolean next(BytesWritable key, BytesWritable value) throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
ByteBuffer dataBlock=null;
try {
dataBlock=getBbr().readRawBlock();
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
if (dataBlock==null) {
return false;
}
byte[] newKey=getBbr().getKeyFromRawBlock(dataBlock);
key.set(newKey,0,newKey.length);
byte[] dataBlockArray;
if (dataBlock.hasArray()) {
dataBlockArray=dataBlock.array();
} else {
dataBlockArray=new byte[dataBlock.capacity()];
dataBlock.get(dataBlockArray);
}
value.set(dataBlockArray,0,dataBlockArray.length);
return true;
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinTransactionRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinTransactionRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public class BitcoinTransactionRecordReader extends AbstractBitcoinRecordReader<BytesWritable, BitcoinTransactionWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockRecordReader.class.getName());
private int currentTransactionCounterInBlock=0;
private BitcoinBlock currentBitcoinBlock;
public BitcoinTransactionRecordReader(FileSplit split,JobConf job, Reporter reporter) throws IOException,HadoopCryptoLedgerConfigurationException,BitcoinBlockReadException {
super(split,job,reporter);
}
/**
*
* Create an empty key
*
* @return key
*/
@Override
public BytesWritable createKey() {
return new BytesWritable();
}
/**
*
* Create an empty value
*
* @return value
*/
@Override
public BitcoinTransactionWritable createValue() {
return new BitcoinTransactionWritable();
}
/**
*
* Read a next block.
*
* @param key is a 68 byte array (hashMerkleRoot, prevHashBlock, transActionCounter)
* @param value is a deserialized Java object of class BitcoinBlock
*
* @return true if next block is available, false if not
*/
@Override
public boolean next(BytesWritable key, BitcoinTransactionWritable value) throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
if ((currentBitcoinBlock==null) || (currentBitcoinBlock.getTransactions().size()==currentTransactionCounterInBlock)){
try {
currentBitcoinBlock=getBbr().readBlock();
currentTransactionCounterInBlock=0;
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
}
if (currentBitcoinBlock==null) {
return false;
}
BitcoinTransaction currentTransaction=currentBitcoinBlock.getTransactions().get(currentTransactionCounterInBlock);
// the unique identifier that is linked in other transaction is usually its hash
byte[] newKey = BitcoinUtil.getTransactionHash(currentTransaction);
key.set(newKey,0,newKey.length);
value.set(currentTransaction);
currentTransactionCounterInBlock++;
return true;
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinBlockRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinBlockRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
/**
* Reads records as blocks of the bitcoin blockchain. Note that it can be tricky to find the start of a block in a split. The BitcoinBlockReader provides a method (seekBlockStart) for this.
*
*/
public class BitcoinBlockRecordReader extends AbstractBitcoinRecordReader<BytesWritable, BitcoinBlockWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockRecordReader.class.getName());
public BitcoinBlockRecordReader(FileSplit split,JobConf job, Reporter reporter) throws IOException,HadoopCryptoLedgerConfigurationException,BitcoinBlockReadException {
super(split,job,reporter);
}
/**
*
* Create an empty key
*
* @return key
*/
@Override
public BytesWritable createKey() {
return new BytesWritable();
}
/**
*
* Create an empty value
*
* @return value
*/
@Override
public BitcoinBlockWritable createValue() {
return new BitcoinBlockWritable();
}
/**
*
* Read a next block.
*
* @param key is a 64 byte array (hashMerkleRoot and prevHashBlock)
* @param value is a deserialized Java object of class BitcoinBlock
*
* @return true if next block is available, false if not
*/
@Override
public boolean next(BytesWritable key, BitcoinBlockWritable value) throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
BitcoinBlock dataBlock=null;
try {
dataBlock=getBbr().readBlock();
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
if (dataBlock==null) {
return false;
}
byte[] hashMerkleRoot=dataBlock.getHashMerkleRoot();
byte[] hashPrevBlock=dataBlock.getHashPrevBlock();
byte[] newKey=new byte[hashMerkleRoot.length+hashPrevBlock.length];
for (int i=0;i<hashMerkleRoot.length;i++) {
newKey[i]=hashMerkleRoot[i];
}
for (int j=0;j<hashPrevBlock.length;j++) {
newKey[j+hashMerkleRoot.length]=hashPrevBlock[j];
}
key.set(newKey,0,newKey.length);
value.set(dataBlock);
return true;
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/AbstractBitcoinRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/AbstractBitcoinRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public abstract class AbstractBitcoinRecordReader<K,V> implements RecordReader<K,V> {
public static final String CONF_BUFFERSIZE=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.CONF_BUFFERSIZE;
public static final String CONF_MAXBLOCKSIZE=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.CONF_MAXBLOCKSIZE;
public static final String CONF_FILTERMAGIC=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.CONF_FILTERMAGIC;
public static final String CONF_USEDIRECTBUFFER=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.CONF_USEDIRECTBUFFER;
public static final String CONF_READAUXPOW=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.CONF_READAUXPOW;
public static final int DEFAULT_BUFFERSIZE=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.DEFAULT_BUFFERSIZE;
public static final int DEFAULT_MAXSIZE_BITCOINBLOCK=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.DEFAULT_MAXSIZE_BITCOINBLOCK;
public static final String DEFAULT_MAGIC = org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.DEFAULT_MAGIC;
public static final boolean DEFAULT_USEDIRECTBUFFER=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.DEFAULT_USEDIRECTBUFFER;
public static final boolean DEFAULT_READAUXPOW=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinRecordReader.DEFAULT_READAUXPOW;
private static final Log LOG = LogFactory.getLog(AbstractBitcoinRecordReader.class.getName());
private int bufferSize=0;
private int maxSizeBitcoinBlock=0;
private boolean useDirectBuffer=false;
private boolean readAuxPOW=false;
private String specificMagic="";
private String[] specificMagicStringArray;
private byte[][] specificMagicByteArray;
private CompressionCodec codec;
private Decompressor decompressor;
private Reporter reporter;
private Configuration conf;
private long start;
private long end;
private final Seekable filePosition;
private FSDataInputStream fileIn;
private BitcoinBlockReader bbr;
/**
* Creates an Abstract Record Reader for Bitcoin blocks
* @param split Split to use (assumed to be a file split)
* @param job Configuration:
* io.file.buffer.size: Size of in-memory specified in the given Configuration. If io.file.buffer.size is not specified the default buffersize (maximum size of a bitcoin block) will be used. The configuration hadoopcryptoledger.bitcoinblockinputformat.filter.magic allows specifying the magic identifier of the block. The magic is a comma-separated list of Hex-values (e.g. F9BEB4D9,FABFB5DA,0B110907,0B110907). The default magic is always F9BEB4D9. One needs to specify at least one magic, otherwise it will be difficult to find blocks in splits. Furthermore, one may specify hadoopcryptoledger.bitcoinblockinputformat.maxblocksize, which defines the maximum size a bitcoin block may have. By default it is 8M). If you want to experiment with performance using DirectByteBuffer instead of HeapByteBuffer you can use "hadoopcryptoledeger.bitcoinblockinputformat.usedirectbuffer" (default: false). Note that it might have some unwanted consequences such as circumwenting Yarn memory management. The option is experimental and might be removed in future versions.
* @param reporter Reporter
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
* @throws org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException in case of an invalid HadoopCryptoLedger-specific configuration of the inputformat
* @throws org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException in case the Bitcoin data contains invalid blocks (e.g. magic might be different)
*
*/
public AbstractBitcoinRecordReader(FileSplit split,JobConf job, Reporter reporter) throws IOException,HadoopCryptoLedgerConfigurationException,BitcoinBlockReadException {
LOG.debug("Reading configuration");
// parse configuration
this.reporter=reporter;
this.conf=job;
this.maxSizeBitcoinBlock=conf.getInt(AbstractBitcoinRecordReader.CONF_MAXBLOCKSIZE,AbstractBitcoinRecordReader.DEFAULT_MAXSIZE_BITCOINBLOCK);
this.bufferSize=conf.getInt(AbstractBitcoinRecordReader.CONF_BUFFERSIZE,AbstractBitcoinRecordReader.DEFAULT_BUFFERSIZE);
this.specificMagic=conf.get(AbstractBitcoinRecordReader.CONF_FILTERMAGIC);
// we need to provide at least
if ((this.specificMagic==null) || (this.specificMagic.length()==0)) {
this.specificMagic=AbstractBitcoinRecordReader.DEFAULT_MAGIC;
}
if ((this.specificMagic!=null) && (this.specificMagic.length()>0)) {
this.specificMagicStringArray=specificMagic.split(",");
specificMagicByteArray=new byte[specificMagicStringArray.length][4]; // each magic is always 4 byte
for (int i=0;i<specificMagicStringArray.length;i++) {
byte[] currentMagicNo=BitcoinUtil.convertHexStringToByteArray(specificMagicStringArray[i]);
if (currentMagicNo.length!=4) {
throw new HadoopCryptoLedgerConfigurationException("Error: Configuration. Magic number has not a length of 4 bytes. Index: "+i);
}
specificMagicByteArray[i]=currentMagicNo;
}
}
this.useDirectBuffer=conf.getBoolean(AbstractBitcoinRecordReader.CONF_USEDIRECTBUFFER,AbstractBitcoinRecordReader.DEFAULT_USEDIRECTBUFFER);
this.readAuxPOW=conf.getBoolean(AbstractBitcoinRecordReader.CONF_READAUXPOW,AbstractBitcoinRecordReader.DEFAULT_READAUXPOW);
// Initialize start and end of split
start = split.getStart();
end = start + split.getLength();
final Path file = split.getPath();
codec = new CompressionCodecFactory(job).getCodec(file);
final FileSystem fs = file.getFileSystem(job);
fileIn = fs.open(file);
// open stream
if (isCompressedInput()) { // decompress
LOG.debug("Decompressing file");
decompressor = CodecPool.getDecompressor(codec);
if (codec instanceof SplittableCompressionCodec) {
LOG.debug("SplittableCompressionCodec");
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, start, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
bbr = new BitcoinBlockReader(cIn, this.maxSizeBitcoinBlock,this.bufferSize,this.specificMagicByteArray,this.useDirectBuffer,this.readAuxPOW);
start = cIn.getAdjustedStart();
end = cIn.getAdjustedEnd();
filePosition = cIn; // take pos from compressed stream
} else {
LOG.debug("Not-splitable compression codec");
bbr = new BitcoinBlockReader(codec.createInputStream(fileIn,decompressor), this.maxSizeBitcoinBlock,this.bufferSize,this.specificMagicByteArray,this.useDirectBuffer,this.readAuxPOW);
filePosition = fileIn;
}
} else {
LOG.debug("Processing file without compression");
fileIn.seek(start);
bbr = new BitcoinBlockReader(fileIn, this.maxSizeBitcoinBlock,this.bufferSize,this.specificMagicByteArray,this.useDirectBuffer,this.readAuxPOW);
filePosition = fileIn;
}
// initialize reader
// seek to block start (for the case a block overlaps a split)
LOG.debug("Seeking to block start");
this.reporter.setStatus("Seeking Block start");
bbr.seekBlockStart();
this.reporter.setStatus("Ready to read");
}
/**
* Get the current file position in a compressed or uncompressed file.
*
* @return file position
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
public long getFilePosition() throws IOException {
return filePosition.getPos();
}
/**
* Get the end of file
*
* @return end of file position
*
*/
public long getEnd() {
return end;
}
/**
* Get the current Block Reader
*
* @return end of file position
*
*/
public BitcoinBlockReader getBbr() {
return this.bbr;
}
/*
* Returns how much of the file has been processed in terms of bytes
*
* @return progress percentage
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized float getProgress() throws IOException {
if (start == end) {
return 0.0f;
} else {
return Math.min(1.0f, (getFilePosition() - start) / (float)(end - start));
}
}
/*
* Determines if the input is compressed or not
*
* @return true if compressed, false if not
*/
private boolean isCompressedInput() {
return codec != null;
}
/*
* Get current position in the stream
*
* @return position
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized long getPos() throws IOException {
return filePosition.getPos();
}
/*
* Clean up InputStream and Decompressor after use
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized void close() throws IOException {
try {
if (bbr != null) {
bbr.close();
}
} finally {
if (decompressor != null) {
CodecPool.returnDecompressor(decompressor);
decompressor = null;
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinRawBlockFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinRawBlockFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
public class BitcoinRawBlockFileInputFormat extends AbstractBitcoinFileInputFormat<BytesWritable,BytesWritable> {
private static final Log LOGFI = LogFactory.getLog(BitcoinRawBlockFileInputFormat.class.getName());
@Override
public RecordReader<BytesWritable,BytesWritable> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
/** Create reader **/
try {
return new BitcoinRawBlockRecordReader( (FileSplit) split,job,reporter);
} catch (HadoopCryptoLedgerConfigurationException|BitcoinBlockReadException e) {
// log
LOGFI.error(e);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/AbstractBitcoinFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/AbstractBitcoinFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
public abstract class AbstractBitcoinFileInputFormat<K,V> extends FileInputFormat<K,V> implements JobConfigurable {
public static final String CONF_IS_SPLITTABLE =org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinFileInputFormat.CONF_IS_SPLITTABLE;
public static final boolean DEFAULT_IS_SPLITTABLE=org.zuinnote.hadoop.bitcoin.format.mapreduce.AbstractBitcoinFileInputFormat.DEFAULT_IS_SPLITTABLE;
private boolean isSplittable =DEFAULT_IS_SPLITTABLE;
private CompressionCodecFactory compressionCodecs = null;
@Override
public abstract RecordReader<K,V> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException;
@Override
public void configure(JobConf conf) {
this.compressionCodecs = new CompressionCodecFactory(conf);
this.isSplittable =conf.getBoolean(AbstractBitcoinFileInputFormat.CONF_IS_SPLITTABLE,AbstractBitcoinFileInputFormat.DEFAULT_IS_SPLITTABLE);
}
/**
*
* This method is experimental and derived from TextInputFormat. It is not necessary and not recommended to compress the blockchain files. Instead it is recommended to extract relevant data from the blockchain files once and store them in a format suitable for analytics (including compression), such as ORC or Parquet.
*
*/
@Override
protected boolean isSplitable(FileSystem fs, Path file) {
if (!(this.isSplittable)) {
return false;
}
final CompressionCodec codec = compressionCodecs.getCodec(file);
if (null == codec) {
return true;
}
return codec instanceof SplittableCompressionCodec;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinTransactionFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinTransactionFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public class BitcoinTransactionFileInputFormat extends AbstractBitcoinFileInputFormat<BytesWritable,BitcoinTransactionWritable> {
private static final Log LOGFI = LogFactory.getLog(BitcoinTransactionFileInputFormat.class.getName());
@Override
public RecordReader<BytesWritable,BitcoinTransactionWritable> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
/** Create reader **/
try {
return new BitcoinTransactionRecordReader( (FileSplit) split,job,reporter);
} catch (HadoopCryptoLedgerConfigurationException|BitcoinBlockReadException e) {
// log
LOGFI.error(e);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinBlockFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapred/BitcoinBlockFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapred;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public class BitcoinBlockFileInputFormat extends AbstractBitcoinFileInputFormat<BytesWritable,BitcoinBlockWritable> {
private static final Log LOGFI = LogFactory.getLog(BitcoinBlockFileInputFormat.class.getName());
@Override
public RecordReader<BytesWritable,BitcoinBlockWritable> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
/** Create reader **/
try {
return new BitcoinBlockRecordReader( (FileSplit) split,job,reporter);
} catch (HadoopCryptoLedgerConfigurationException|BitcoinBlockReadException e) {
// log
LOGFI.error(e);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinRawBlockRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinRawBlockRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.conf.Configuration;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
public class BitcoinRawBlockRecordReader extends AbstractBitcoinRecordReader<BytesWritable, BytesWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinRawBlockRecordReader.class.getName());
private BytesWritable currentKey=new BytesWritable();
private BytesWritable currentValue=new BytesWritable();
public BitcoinRawBlockRecordReader(Configuration conf) throws HadoopCryptoLedgerConfigurationException {
super(conf);
}
/**
*
* get current key after calling next()
*
* @return key is a 64 byte array (hashMerkleRoot and prevHashBlock)
*/
@Override
public BytesWritable getCurrentKey() {
return this.currentKey;
}
/**
*
* get current value after calling next()
*
* @return value is a deserialized Java object of class BitcoinBlock
*/
@Override
public BytesWritable getCurrentValue() {
return this.currentValue;
}
/**
*
* Read a next block.
*
* @return true if next block is available, false if not
*/
@Override
public boolean nextKeyValue() throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
ByteBuffer dataBlock=null;
try {
dataBlock=getBbr().readRawBlock();
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
if (dataBlock==null) {
return false;
}
byte[] newKey=getBbr().getKeyFromRawBlock(dataBlock);
this.currentKey.set(newKey,0,newKey.length);
byte[] dataBlockArray;
if (dataBlock.hasArray()) {
dataBlockArray=dataBlock.array();
} else {
dataBlockArray=new byte[dataBlock.capacity()];
dataBlock.get(dataBlockArray);
}
this.currentValue.set(dataBlockArray,0,dataBlockArray.length);
return true;
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinTransactionRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinTransactionRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.conf.Configuration;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public class BitcoinTransactionRecordReader extends AbstractBitcoinRecordReader<BytesWritable, BitcoinTransactionWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockRecordReader.class.getName());
private int currentTransactionCounterInBlock=0;
private BitcoinBlock currentBitcoinBlock;
private BytesWritable currentKey=new BytesWritable();
private BitcoinTransactionWritable currentValue=new BitcoinTransactionWritable();
public BitcoinTransactionRecordReader(Configuration conf) throws HadoopCryptoLedgerConfigurationException {
super(conf);
}
/**
*
* get current key after calling next()
*
* @return key is is a 68 byte array (hashMerkleRoot, prevHashBlock, transActionCounter)
*/
@Override
public BytesWritable getCurrentKey() {
return this.currentKey;
}
/**
*
* get current value after calling next()
*
* @return value is a deserialized Java object of class BitcoinTransaction
*/
@Override
public BitcoinTransactionWritable getCurrentValue() {
return this.currentValue;
}
/**
*
* Read a next block.
*
*
* @return true if next block is available, false if not
*/
@Override
public boolean nextKeyValue() throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
if ((currentBitcoinBlock==null) || (currentBitcoinBlock.getTransactions().size()==currentTransactionCounterInBlock)){
try {
currentBitcoinBlock=getBbr().readBlock();
currentTransactionCounterInBlock=0;
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
}
if (currentBitcoinBlock==null) {
return false;
}
BitcoinTransaction currentTransaction=currentBitcoinBlock.getTransactions().get(currentTransactionCounterInBlock);
// the unique identifier that is linked in other transaction is usually its hash
byte[] newKey = BitcoinUtil.getTransactionHash(currentTransaction);
this.currentKey.set(newKey,0,newKey.length);
this.currentValue.set(currentTransaction);
currentTransactionCounterInBlock++;
return true;
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinBlockRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinBlockRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import java.io.IOException;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.conf.Configuration;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
/**
* Reads records as blocks of the bitcoin blockchain. Note that it can be tricky to find the start of a block in a split. The BitcoinBlockReader provides a method (seekBlockStart) for this.
*
*/
public class BitcoinBlockRecordReader extends AbstractBitcoinRecordReader<BytesWritable, BitcoinBlockWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockRecordReader.class.getName());
private BytesWritable currentKey=new BytesWritable();
private BitcoinBlockWritable currentValue=new BitcoinBlockWritable();
public BitcoinBlockRecordReader(Configuration conf) throws HadoopCryptoLedgerConfigurationException {
super(conf);
}
/**
*
* get current key after calling next()
*
* @return key is a 64 byte array (hashMerkleRoot and prevHashBlock)
*/
@Override
public BytesWritable getCurrentKey() {
return this.currentKey;
}
/**
*
* get current value after calling next()
*
* @return is a deserialized Java object of class BitcoinBlock
*/
@Override
public BitcoinBlockWritable getCurrentValue() {
return this.currentValue;
}
/**
*
* Read a next block.
*
*
* @return true if next block is available, false if not
*/
@Override
public boolean nextKeyValue() throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
BitcoinBlock dataBlock=null;
try {
dataBlock=getBbr().readBlock();
} catch (BitcoinBlockReadException e) {
// log
LOG.error(e);
}
if (dataBlock==null) {
return false;
}
byte[] hashMerkleRoot=dataBlock.getHashMerkleRoot();
byte[] hashPrevBlock=dataBlock.getHashPrevBlock();
byte[] newKey=new byte[hashMerkleRoot.length+hashPrevBlock.length];
for (int i=0;i<hashMerkleRoot.length;i++) {
newKey[i]=hashMerkleRoot[i];
}
for (int j=0;j<hashPrevBlock.length;j++) {
newKey[j+hashMerkleRoot.length]=hashPrevBlock[j];
}
this.currentKey.set(newKey,0,newKey.length);
this.currentValue.set(dataBlock);
return true;
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/AbstractBitcoinRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/AbstractBitcoinRecordReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import java.io.IOException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public abstract class AbstractBitcoinRecordReader<K,V> extends RecordReader<K,V> {
public static final String CONF_BUFFERSIZE="io.file.buffer.size";
public static final String CONF_MAXBLOCKSIZE="hadoopcryptoledger.bitcoinblockinputformat.maxblocksize";
public static final String CONF_FILTERMAGIC="hadoopcryptoledger.bitcoinblockinputformat.filter.magic";
public static final String CONF_USEDIRECTBUFFER="hadoopcryptoledeger.bitcoinblockinputformat.usedirectbuffer";
public static final String CONF_READAUXPOW="hadoopcryptoledger.bitcoinblockinputformat.readauxpow";
public static final int DEFAULT_BUFFERSIZE=64*1024;
public static final int DEFAULT_MAXSIZE_BITCOINBLOCK=8 * 1024 * 1024;
public static final String DEFAULT_MAGIC = "F9BEB4D9";
public static final boolean DEFAULT_USEDIRECTBUFFER=false;
public static final boolean DEFAULT_READAUXPOW=false;
private static final Log LOG = LogFactory.getLog(AbstractBitcoinRecordReader.class.getName());
private int bufferSize=0;
private int maxSizeBitcoinBlock=0;
private boolean useDirectBuffer=false;
private boolean readAuxPOW=false;
private String specificMagic="";
private String[] specificMagicStringArray;
private byte[][] specificMagicByteArray;
private CompressionCodec codec;
private Decompressor decompressor;
private long start;
private long end;
private Seekable filePosition;
private BitcoinBlockReader bbr;
/**
* Creates an Abstract Record Reader for Bitcoin blocks
* @param conf Configuration:
* io.file.buffer.size: Size of in-memory specified in the given Configuration. If io.file.buffer.size is not specified the default buffersize (maximum size of a bitcoin block) will be used. The configuration hadoopcryptoledger.bitcoinblockinputformat.filter.magic allows specifying the magic identifier of the block. The magic is a comma-separated list of Hex-values (e.g. F9BEB4D9,FABFB5DA,0B110907,0B110907). The default magic is always F9BEB4D9. One needs to specify at least one magic, otherwise it will be difficult to find blocks in splits. Furthermore, one may specify hadoopcryptoledger.bitcoinblockinputformat.maxblocksize, which defines the maximum size a bitcoin block may have. By default it is 8M). If you want to experiment with performance using DirectByteBuffer instead of HeapByteBuffer you can use "hadoopcryptoledeger.bitcoinblockinputformat.usedirectbuffer" (default: false). Note that it might have some unwanted consequences such as circumwenting Yarn memory management. The option is experimental and might be removed in future versions.
*
* @throws org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException in case of an invalid HadoopCryptoLedger-specific configuration of the inputformat
*
*/
public AbstractBitcoinRecordReader(Configuration conf) throws HadoopCryptoLedgerConfigurationException {
// parse configuration
this.maxSizeBitcoinBlock=conf.getInt(AbstractBitcoinRecordReader.CONF_MAXBLOCKSIZE,AbstractBitcoinRecordReader.DEFAULT_MAXSIZE_BITCOINBLOCK);
this.bufferSize=conf.getInt(AbstractBitcoinRecordReader.CONF_BUFFERSIZE,AbstractBitcoinRecordReader.DEFAULT_BUFFERSIZE);
this.specificMagic=conf.get(AbstractBitcoinRecordReader.CONF_FILTERMAGIC);
// we need to provide at least
if ((this.specificMagic==null) || (this.specificMagic.length()==0)) {
this.specificMagic=AbstractBitcoinRecordReader.DEFAULT_MAGIC;
}
if ((this.specificMagic!=null) && (this.specificMagic.length()>0)) {
this.specificMagicStringArray=specificMagic.split(",");
specificMagicByteArray=new byte[specificMagicStringArray.length][4]; // each magic is always 4 byte
for (int i=0;i<specificMagicStringArray.length;i++) {
byte[] currentMagicNo=BitcoinUtil.convertHexStringToByteArray(specificMagicStringArray[i]);
if (currentMagicNo.length!=4) {
throw new HadoopCryptoLedgerConfigurationException("Error: Configuration. Magic number has not a length of 4 bytes. Index: "+i);
}
specificMagicByteArray[i]=currentMagicNo;
}
}
this.useDirectBuffer=conf.getBoolean(AbstractBitcoinRecordReader.CONF_USEDIRECTBUFFER,AbstractBitcoinRecordReader.DEFAULT_USEDIRECTBUFFER);
this.readAuxPOW=conf.getBoolean(AbstractBitcoinRecordReader.CONF_READAUXPOW,AbstractBitcoinRecordReader.DEFAULT_READAUXPOW);
}
/**
* Initializes reader
* @param split Split to use (assumed to be a file split)
* @param context context of the job
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
* @throws java.lang.InterruptedException in case of thread interruption
*
*/
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
FileSplit fSplit = (FileSplit)split;
// Initialize start and end of split
start = fSplit.getStart();
end = start + fSplit.getLength();
final Path file = fSplit.getPath();
codec = new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
final FileSystem fs = file.getFileSystem(context.getConfiguration());
FSDataInputStream fileIn = fs.open(file);
// open stream
if (isCompressedInput()) { // decompress
decompressor = CodecPool.getDecompressor(codec);
if (codec instanceof SplittableCompressionCodec) {
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, start, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
bbr = new BitcoinBlockReader(cIn, this.maxSizeBitcoinBlock,this.bufferSize,this.specificMagicByteArray,this.useDirectBuffer,this.readAuxPOW);
start = cIn.getAdjustedStart();
end = cIn.getAdjustedEnd();
filePosition = cIn; // take pos from compressed stream
} else {
bbr = new BitcoinBlockReader(codec.createInputStream(fileIn,decompressor), this.maxSizeBitcoinBlock,this.bufferSize,this.specificMagicByteArray,this.useDirectBuffer,readAuxPOW);
filePosition = fileIn;
}
} else {
fileIn.seek(start);
bbr = new BitcoinBlockReader(fileIn, this.maxSizeBitcoinBlock,this.bufferSize,this.specificMagicByteArray,this.useDirectBuffer,readAuxPOW);
filePosition = fileIn;
}
// seek to block start (for the case a block overlaps a split)
try {
bbr.seekBlockStart();
} catch (BitcoinBlockReadException bbre) {
LOG.error("Error reading Bitcoin blockchhain data");
LOG.error(bbre);
}
}
/**
* Get the current Block Reader
*
* @return end of file position
*
*/
public BitcoinBlockReader getBbr() {
return this.bbr;
}
/*
* Returns how much of the file has been processed in terms of bytes
*
* @return progress percentage
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public float getProgress() throws IOException {
if (start == end) {
return 0.0f;
} else {
return Math.min(1.0f, (getFilePosition() - start) / (float)(end - start));
}
}
/**
* Get the end of file
*
* @return end of file position
*
*/
public long getEnd() {
return end;
}
/**
* Returns current position in file
*
* @return current position in file
*
*
*/
public long getFilePosition() throws IOException {
return this.filePosition.getPos();
}
/*
* Clean up InputStream and Decompressor after use
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized void close() throws IOException {
try {
if (bbr != null) {
bbr.close();
}
} finally {
if (decompressor != null) {
CodecPool.returnDecompressor(decompressor);
decompressor = null;
}
}
}
/*
* Determines if the input is compressed or not
*
* @return true if compressed, false if not
*/
private boolean isCompressedInput() {
return codec != null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinRawBlockFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinRawBlockFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
public class BitcoinRawBlockFileInputFormat extends AbstractBitcoinFileInputFormat<BytesWritable,BytesWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinRawBlockFileInputFormat.class.getName());
@Override
public RecordReader<BytesWritable,BytesWritable> createRecordReader(InputSplit split, TaskAttemptContext ctx) throws IOException {
/** Create reader **/
try {
return new BitcoinRawBlockRecordReader(ctx.getConfiguration());
} catch (HadoopCryptoLedgerConfigurationException e) {
// log
LOG.error(e);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/AbstractBitcoinFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/AbstractBitcoinFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
public abstract class AbstractBitcoinFileInputFormat<K,V> extends FileInputFormat<K,V> {
public static final String CONF_IS_SPLITTABLE ="hadoopcryptoledeger.bitcoinblockinputformat.issplitable";
public static final boolean DEFAULT_IS_SPLITTABLE =false;
@Override
public abstract RecordReader<K,V> createRecordReader(InputSplit split, TaskAttemptContext ctx) throws IOException;
/**
*
* This method is experimental and derived from TextInputFormat. It is not necessary and not recommended to compress the blockchain files. Instead it is recommended to extract relevant data from the blockchain files once and store them in a format suitable for analytics (including compression), such as ORC or Parquet.
*
*/
@Override
protected boolean isSplitable(JobContext context, Path file) {
boolean isSplitable=context.getConfiguration().getBoolean(AbstractBitcoinFileInputFormat.CONF_IS_SPLITTABLE,AbstractBitcoinFileInputFormat.DEFAULT_IS_SPLITTABLE);
if (!(isSplitable)) {
return false;
}
final CompressionCodec codec = new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
if (null == codec) {
return true;
}
return codec instanceof SplittableCompressionCodec;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinTransactionFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinTransactionFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public class BitcoinTransactionFileInputFormat extends AbstractBitcoinFileInputFormat<BytesWritable,BitcoinTransactionWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinTransactionFileInputFormat.class.getName());
@Override
public RecordReader<BytesWritable,BitcoinTransactionWritable> createRecordReader(InputSplit split, TaskAttemptContext ctx) throws IOException {
/** Create reader **/
try {
return new BitcoinTransactionRecordReader(ctx.getConfiguration());
} catch (HadoopCryptoLedgerConfigurationException e) {
// log
LOG.error(e);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinBlockFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/mapreduce/BitcoinBlockFileInputFormat.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.mapreduce;
import org.zuinnote.hadoop.bitcoin.format.exception.HadoopCryptoLedgerConfigurationException;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.zuinnote.hadoop.bitcoin.format.common.*;
public class BitcoinBlockFileInputFormat extends AbstractBitcoinFileInputFormat<BytesWritable,BitcoinBlockWritable> {
private static final Log LOG = LogFactory.getLog(BitcoinBlockFileInputFormat.class.getName());
@Override
public RecordReader<BytesWritable,BitcoinBlockWritable> createRecordReader(InputSplit split, TaskAttemptContext ctx) throws IOException {
/** Create reader **/
try {
return new BitcoinBlockRecordReader(ctx.getConfiguration());
} catch (HadoopCryptoLedgerConfigurationException e) {
// log
LOG.error(e);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/exception/BitcoinBlockReadException.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/exception/BitcoinBlockReadException.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.exception;
public class BitcoinBlockReadException extends Exception {
public BitcoinBlockReadException(String message) {
super(message);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/exception/HadoopCryptoLedgerConfigurationException.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/exception/HadoopCryptoLedgerConfigurationException.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.exception;
public class HadoopCryptoLedgerConfigurationException extends Exception {
public HadoopCryptoLedgerConfigurationException(String message) {
super(message);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransactionWritable.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransactionWritable.java | /**
* Copyright 2021 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.commons.io.output.ThresholdingOutputStream;
import org.apache.hadoop.io.Writable;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
public class BitcoinTransactionWritable extends BitcoinTransaction implements Writable {
/** Writable **/
@Override
public void write(DataOutput dataOutput) throws IOException {
throw new UnsupportedOperationException("write unsupported");
}
@Override
public void readFields(DataInput dataInput) throws IOException {
throw new UnsupportedOperationException("readFields unsupported");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinBlockWritable.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinBlockWritable.java | /**
* Copyright 2021 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
/**
* Dedicated class for Hadoop-based applications
*/
public class BitcoinBlockWritable extends BitcoinBlock implements Writable {
/** Writable **/
@Override
public void write(DataOutput dataOutput) throws IOException {
throw new UnsupportedOperationException("write unsupported");
}
@Override
public void readFields(DataInput dataInput) throws IOException {
throw new UnsupportedOperationException("readFields unsupported");
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransactionInput.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransactionInput.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
public class BitcoinTransactionInput implements Serializable {
/**
*
*/
private static final long serialVersionUID = 283893453089295979L;
private byte[] prevTransactionHash;
private long previousTxOutIndex;
private byte[] txInScriptLength;
private byte[] txInScript;
private long seqNo;
public BitcoinTransactionInput(byte[] prevTransactionHash, long previousTxOutIndex, byte[] txInScriptLength, byte[] txInScript, long seqNo) {
this.prevTransactionHash=prevTransactionHash;
this.previousTxOutIndex=previousTxOutIndex;
this.txInScriptLength=txInScriptLength;
this.txInScript=txInScript;
this.seqNo=seqNo;
}
public byte[] getPrevTransactionHash() {
return this.prevTransactionHash;
}
public long getPreviousTxOutIndex() {
return this.previousTxOutIndex;
}
public byte[] getTxInScriptLength() {
return this.txInScriptLength;
}
public byte[] getTxInScript() {
return this.txInScript;
}
public long getSeqNo() {
return this.seqNo;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinAuxPOWBranch.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinAuxPOWBranch.java | /**
* Copyright 2018 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
import java.util.List;
public class BitcoinAuxPOWBranch implements Serializable {
private byte[] numberOfLinks;
private List<byte[]> links;
private byte[] branchSideBitmask;
public BitcoinAuxPOWBranch(byte[] numberOfLinks, List<byte[]> links, byte[] branchSideBitmask) {
this.numberOfLinks=numberOfLinks;
this.links=links;
this.branchSideBitmask=branchSideBitmask;
}
public byte[] getNumberOfLinks() {
return this.numberOfLinks;
}
public List<byte[]> getLinks() {
return this.links;
}
public byte[] getBranchSideBitmask() {
return this.branchSideBitmask;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptPatternParser.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptPatternParser.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.util.Arrays;
public class BitcoinScriptPatternParser {
private BitcoinScriptPatternParser() {
}
/**
* Get the payment destination from an scriptPubKey (output script of a transaction). This is based on standard scripts accepted by the Bitcoin network (https://en.bitcoin.it/wiki/Script).
*
* @param scriptPubKey output script of a transaction
*
* @return a string in the following format in case of (1) a transfer for pay-to-witness-public-key-hash: P2WPKH_address (2) for pay-to-witness-public-key-hash pay-to-witness-public-key-hash (P2WPKH) nested in BIP16 P2SH: P2WPKHP2SH_address (3) a transaction for 1-of-2 multi-signature version 0 pay-to-witness-script-hash (P2WSH): P2WSH_address (4) a standard transfer to a Bitcoin address : "bitcoinaddress_ADRESS" where ADDRESS is the Bitcoin address, (5) an (obsolete) transfer to a public key: "bitcoinpupkey_PUBKEY" where PUBKEY is the public key, (6) in case of output that cannot be spent: "unspendable", (7) in case anyone can spend: "anyone", (8) in case of transaction puzzle: "puzzle_HASH256" where HASH256 is the puzzle (9) in all other cases null (different type of Bitcoin transaction)
**/
public static String getPaymentDestination(byte[] scriptPubKey) {
if (scriptPubKey==null) {
return null;
}
// test if anyone can spend output
if (scriptPubKey.length==0) {
return "anyone"; // need to check also ScriptSig for OP_TRUE
}
// test if standard transaction Bitcoin address (20 byte) including Segwit
String payToHashSegwit = checkP2WPKHP2SH(scriptPubKey);
if (payToHashSegwit!=null) {
return payToHashSegwit;
}
// test if standard transaction to Bitcoin address (32 byte) including Segwit
String payToP2WSHSegwit = checkP2WSH(scriptPubKey);
if (payToP2WSHSegwit!=null) {
return payToP2WSHSegwit;
}
// test if standard transaction public key including Segwit
String payToPubKeySegwit = checkP2WPKH(scriptPubKey);
if (payToPubKeySegwit!=null) {
return payToPubKeySegwit;
}
// test if standard transaction to Bitcoin address
String payToHash = checkPayToHash(scriptPubKey);
if (payToHash!=null) {
return payToHash;
}
// test if obsolete transaction to public key
String payToPubKey = checkPayToPubKey(scriptPubKey);
if (payToPubKey!=null) {
return payToPubKey;
}
// test if puzzle
if ((scriptPubKey.length>0) && ((scriptPubKey[0] & 0xFF)==0xAA) && ((scriptPubKey[scriptPubKey.length-1] & 0xFF)==0x87)) {
byte[] puzzle = Arrays.copyOfRange(scriptPubKey, 1, scriptPubKey.length-2);
return "puzzle_"+BitcoinUtil.convertByteArrayToHexString(puzzle);
}
// test if unspendable
if ((scriptPubKey.length>0) && ((scriptPubKey[0] & 0xFF)==0x6a)) {
return "unspendable";
}
return null;
}
/**
* Checks if scriptPubKey is about a transaction for pay-to-witness-public-key-hash (P2WPKH)
* Note that we return only the keyhash, but more information can be found in the witness (cf. https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki)
*
*
* @param scriptPubKey
* @return null, if transaction is not about P2WPKH, a string starting with "P2WPKH_keyhash"
*/
private static String checkP2WPKH(byte[] scriptPubKey) {
if ((scriptPubKey.length==22) && (scriptPubKey[0]==0) && (scriptPubKey[1]==0x14)){
byte[] keyhash = Arrays.copyOfRange(scriptPubKey, 2, 22);
return "P2WPKH_"+BitcoinUtil.convertByteArrayToHexString(keyhash);
}
return null;
}
/**
* Checks if scriptPubKey is about a transaction for pay-to-witness-public-key-hash (P2WPKH) nested in BIP16 P2SH
* Note that we return only the keyhash, but more information can be found in
* (1) the witness (cf. https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki)
* (2) in scriptSig (keyhash)
*
* @param scriptPubKey
* @return null, if transaction is not about P2WPKHP2SH_, a string starting with "P2WPKHP2SH_keyhash"
*/
private static String checkP2WPKHP2SH(byte[] scriptPubKey) {
boolean validLength=scriptPubKey.length==23;
if (!(validLength)) {
return null;
}
boolean validStart=((scriptPubKey[0] & 0xFF)==0xA9) && ((scriptPubKey[1] & 0xFF)==0x14);
boolean validEnd=(scriptPubKey[22] & 0xFF)==0x87;
if (validStart && validEnd){
byte[] keyhash = Arrays.copyOfRange(scriptPubKey, 2, 22);
return "P2WPKHP2SH_"+BitcoinUtil.convertByteArrayToHexString(keyhash);
}
return null;
}
/**
* Checks if scriptPubKey is about a transaction for 1-of-2 multi-signature version 0 pay-to-witness-script-hash (P2WSH)
* Note that we return only the keyhash, but more information can be found in the witness (cf. https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki)
*
*
* @param scriptPubKey
* @return null, if transaction is not about P2WSH, a string starting with "P2WSH_keyhash"
*/
private static String checkP2WSH(byte[] scriptPubKey) {
if ((scriptPubKey.length==34) && (scriptPubKey[0]==0) && (scriptPubKey[1]==0x20)){
byte[] keyhash = Arrays.copyOfRange(scriptPubKey, 2, 34);
return "P2WSH_"+BitcoinUtil.convertByteArrayToHexString(keyhash);
}
return null;
}
/***
* Checks if scriptPubKey is about a transaction for paying to a hash
*
* @param scriptPubKey of transaction
*
* @return null, if transaction not about paying to hash, a string starting with "bitcoinaddress_" and ending with the hex values as String of the hash address
*
*/
private static String checkPayToHash(byte[] scriptPubKey) {
// test start
boolean validLength=scriptPubKey.length==25;
if (!(validLength)) {
return null;
}
boolean validStart=((scriptPubKey[0] & 0xFF)==0x76) && ((scriptPubKey[1] & 0xFF)==0xA9) && ((scriptPubKey[2] & 0xFF)==0x14);
boolean validEnd=((scriptPubKey[23] & 0xFF)==0x88) && ((scriptPubKey[24] & 0xFF)==0xAC);
if (validStart && validEnd) {
byte[] bitcoinAddress = Arrays.copyOfRange(scriptPubKey, 3, 23);
return "bitcoinaddress_"+BitcoinUtil.convertByteArrayToHexString(bitcoinAddress);
}
return null;
}
/***
* Checks if scriptPubKey is about a transaction for paying to a public key
*
* @param scriptPubKey of transaction
*
* @return null, if transaction not about paying to hash, a string starting with "bitcoinpubkey_" and ending with the hex values as String of the public key
*
*/
private static String checkPayToPubKey(byte[] scriptPubKey) {
if ((scriptPubKey.length>0) && ((scriptPubKey[scriptPubKey.length-1] & 0xFF)==0xAC)) {
byte[] publicKey =Arrays.copyOfRange(scriptPubKey, 0, scriptPubKey.length-1);
return "bitcoinpubkey_"+BitcoinUtil.convertByteArrayToHexString(publicKey);
}
return null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinAuxPOWBlockHeader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinAuxPOWBlockHeader.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
public class BitcoinAuxPOWBlockHeader implements Serializable {
private long version;
private byte[] previousBlockHash;
private byte[] merkleRoot;
private long time;
private byte[] bits;
private long nonce;
public BitcoinAuxPOWBlockHeader(long version, byte[] previousBlockHash, byte[] merkleRoot, long time, byte[] bits, long nonce) {
this.version=version;
this.previousBlockHash=previousBlockHash;
this.merkleRoot=merkleRoot;
this.time=time;
this.bits=bits;
this.nonce=nonce;
}
public long getVersion() {
return version;
}
public byte[] getPreviousBlockHash() {
return previousBlockHash;
}
public byte[] getMerkleRoot() {
return merkleRoot;
}
public long getTime() {
return time;
}
public byte[] getBits() {
return bits;
}
public long getNonce() {
return nonce;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransaction.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransaction.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.IOException;
import org.apache.commons.io.output.ThresholdingOutputStream;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
public class BitcoinTransaction implements Serializable {
private long version;
private byte marker;
private byte flag;
private byte[] inCounter;
private byte[] outCounter;
private List<BitcoinTransactionInput> listOfInputs;
private List<BitcoinTransactionOutput> listOfOutputs;
private List<BitcoinScriptWitnessItem> listOfScriptWitnessItem;
private long lockTime;
public BitcoinTransaction() {
this.version=0;
this.marker=1;
this.flag=0;
this.inCounter=new byte[0];
this.outCounter=new byte[0];
this.listOfInputs=new ArrayList<>();
this.listOfOutputs=new ArrayList<>();
this.listOfScriptWitnessItem=new ArrayList<>();
this.lockTime=0;
}
/***
* Creates a traditional Bitcoin Transaction without ScriptWitness
*
* @param version
* @param inCounter
* @param listOfInputs
* @param outCounter
* @param listOfOutputs
* @param lockTime
*/
public BitcoinTransaction(long version, byte[] inCounter, List<BitcoinTransactionInput> listOfInputs, byte[] outCounter, List<BitcoinTransactionOutput> listOfOutputs, long lockTime) {
this.marker=1;
this.flag=0;
this.version=version;
this.inCounter=inCounter;
this.listOfInputs=listOfInputs;
this.outCounter=outCounter;
this.listOfOutputs=listOfOutputs;
this.listOfScriptWitnessItem=new ArrayList<>();
this.lockTime=lockTime;
}
/**
* Creates a Bitcoin Transaction with Segwitness
*
* @param marker
* @param flag
* @param version
* @param inCounter
* @param listOfInputs
* @param outCounter
* @param listOfOutputs
* @param listOfScriptWitnessItem
* @param lockTime
*/
public BitcoinTransaction(byte marker, byte flag, long version, byte[] inCounter, List<BitcoinTransactionInput> listOfInputs, byte[] outCounter, List<BitcoinTransactionOutput> listOfOutputs, List<BitcoinScriptWitnessItem> listOfScriptWitnessItem, long lockTime) {
this.marker=marker;
this.flag=flag;
this.version=version;
this.inCounter=inCounter;
this.listOfInputs=listOfInputs;
this.outCounter=outCounter;
this.listOfOutputs=listOfOutputs;
this.listOfScriptWitnessItem=listOfScriptWitnessItem;
this.lockTime=lockTime;
}
public long getVersion() {
return this.version;
}
public byte getMarker() {
return this.marker;
}
public byte getFlag() {
return this.flag;
}
public byte[] getInCounter() {
return this.inCounter;
}
public List<BitcoinTransactionInput> getListOfInputs() {
return this.listOfInputs;
}
public byte[] getOutCounter() {
return this.outCounter;
}
public List<BitcoinTransactionOutput> getListOfOutputs() {
return this.listOfOutputs;
}
public List<BitcoinScriptWitnessItem> getBitcoinScriptWitness() {
return this.listOfScriptWitnessItem;
}
public long getLockTime() {
return this.lockTime;
}
public void set(BitcoinTransaction newTransaction) {
this.version=newTransaction.getVersion();
this.marker=newTransaction.getMarker();
this.flag=newTransaction.getFlag();
this.inCounter=newTransaction.getInCounter();
this.listOfInputs=newTransaction.getListOfInputs();
this.outCounter=newTransaction.getOutCounter();
this.listOfOutputs=newTransaction.getListOfOutputs();
this.listOfScriptWitnessItem=newTransaction.getBitcoinScriptWitness();
this.lockTime=newTransaction.getLockTime();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinUtil.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinUtil.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Date;
import javax.xml.bind.DatatypeConverter; // Hex Converter for configuration options
import java.security.MessageDigest; // needed for SHA2-256 calculation
import java.security.NoSuchAlgorithmException;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
public class BitcoinUtil {
private static final Log LOG = LogFactory.getLog(BitcoinUtil.class.getName());
private BitcoinUtil() {
}
/**
* Converts a signed int to an unsigned (long)
*
* @param signedInt signed int that should be interpreted as unsigned
*
* @return long corresponding to signed int
*
*/
public static long convertSignedIntToUnsigned(int signedInt) {
return signedInt & 0x00000000ffffffffL;
}
/**
*
* Converts an int to a byte array
*
* @param intToConvert int that should be converted into a byte array
*
* @return byte array corresponding to int
*
**/
public static byte[] convertIntToByteArray(int intToConvert) {
return ByteBuffer.allocate(4).putInt(intToConvert).array();
}
/**
*
* Converts a long to a byte array
*
* @param longToConvert long that should be converted into a byte array
*
* @return byte array corresponding to long
*
**/
public static byte[] convertLongToByteArray(long longToConvert) {
return ByteBuffer.allocate(8).putLong(longToConvert).array();
}
/**
*
* Converts a Big Integer to a byte array
*
* @param bigIntegerToConvert BigInteger that should be converted into a byte array
* @param exactArraySize exact size of array
* @return byte array corresponding to BigInteger
*
**/
public static byte[] convertBigIntegerToByteArray(BigInteger bigIntegerToConvert,int exactArraySize) {
if ((bigIntegerToConvert==null) || (bigIntegerToConvert.signum()==-1)) {// negative
return null;
}
byte[] tempResult = bigIntegerToConvert.toByteArray();
byte [] result = new byte[exactArraySize];
int removeSign=0;
if ((tempResult.length>1) && (tempResult[0]==0)) { // remove sign
removeSign=1;
}
byte[] reverseTempResult = BitcoinUtil.reverseByteArray(tempResult);
for (int i=0;i<result.length;i++) {
if (i<reverseTempResult.length-removeSign) {
result[i]=reverseTempResult[i];
}
}
return result;
}
/**
* Converts a variable length integer (https://en.bitcoin.it/wiki/Protocol_documentation#Variable_length_integer) from a ByteBuffer to byte array
*
* @param byteBuffer Bytebuffer where to read from the variable length integer
*
* @return byte[] of the variable length integer (including marker)
*
*/
public static byte[] convertVarIntByteBufferToByteArray(ByteBuffer byteBuffer) {
// get the size
byte originalVarIntSize=byteBuffer.get();
byte varIntSize=getVarIntSize(originalVarIntSize);
// reserveBuffer
byte[] varInt=new byte[varIntSize];
varInt[0]=originalVarIntSize;
byteBuffer.get(varInt,1,varIntSize-1);
return varInt;
}
/**
* Converts a variable length integer (https://en.bitcoin.it/wiki/Protocol_documentation#Variable_length_integer) from a ByteBuffer to long
*
* @param byteBuffer Bytebuffer where to read from the variable length integer
*
* @return long corresponding to variable length integer. Please note that it is signed long and not unsigned long as int the Bitcoin specification. Should be in practice not relevant.
*
*/
public static long convertVarIntByteBufferToLong(ByteBuffer byteBuffer) {
byte[] varIntByteArray=convertVarIntByteBufferToByteArray(byteBuffer);
return getVarInt(varIntByteArray);
}
/**
* Converts a variable length integer (https://en.bitcoin.it/wiki/Protocol_documentation#Variable_length_integer) to BigInteger
*
* @param varInt byte array containing variable length integer
*
* @return BigInteger corresponding to variable length integer
*
*/
public static BigInteger getVarIntBI(byte[] varInt) {
BigInteger result=BigInteger.ZERO;
if (varInt.length==0) {
return result;
}
int unsignedByte=varInt[0] & 0xFF;
if (unsignedByte<0xFD) {
return new BigInteger(new byte[] {(byte) unsignedByte});
}
int intSize=0;
if (unsignedByte==0xFD) {
intSize=3;
}
else if (unsignedByte==0xFE) {
intSize=5;
}
else if (unsignedByte==0XFF) {
intSize=9;
}
byte[] rawDataInt=reverseByteArray(Arrays.copyOfRange(varInt, 1, intSize));
return new BigInteger(rawDataInt);
}
/**
* Converts a variable length integer (https://en.bitcoin.it/wiki/Protocol_documentation#Variable_length_integer) to long
*
* @param varInt byte array containing variable length integer
*
* @return long corresponding to variable length integer
*
*/
public static long getVarInt(byte[] varInt) {
long result=0;
if (varInt.length==0) {
return result;
}
int unsignedByte=varInt[0] & 0xFF;
if (unsignedByte<0xFD) {
return unsignedByte;
}
int intSize=0;
if (unsignedByte==0xFD) {
intSize=3;
}
else if (unsignedByte==0xFE) {
intSize=5;
}
else if (unsignedByte==0XFF) {
intSize=9;
}
byte[] rawDataInt=reverseByteArray(Arrays.copyOfRange(varInt, 1, intSize));
ByteBuffer byteBuffer = ByteBuffer.wrap(rawDataInt);
if (intSize==3) {
result=byteBuffer.getShort();
}
else if (intSize==5) {
result=byteBuffer.getInt();
}
else if (intSize==9) {
result=byteBuffer.getLong(); // Need to handle sign - available only in JDK8
}
return result;
}
/**
* Determines size of a variable length integer (https://en.bitcoin.it/wiki/Protocol_documentation#Variable_length_integer)
*
* @param firstByteVarInt first byte of the variable integeer
*
* @return byte with the size of the variable int (either 2, 3, 5 or 9) - does include the marker!
*
*/
public static byte getVarIntSize(byte firstByteVarInt) {
int unsignedByte=firstByteVarInt & 0xFF;
if (unsignedByte==0xFD) {
return 3;
}
if (unsignedByte==0xFE) {
return 5;
}
if (unsignedByte==0xFF) {
return 9;
}
return 1; //<0xFD
}
/**
* Reads a size from a reversed byte order, such as block size in the block header
*
* @param byteSize byte array with a length of exactly 4
*
* @return size, returns 0 in case of invalid block size
*
*/
public static long getSize(byte[] byteSize) {
if (byteSize.length!=4) {
return 0;
}
ByteBuffer converterBuffer = ByteBuffer.wrap(byteSize);
converterBuffer.order(ByteOrder.LITTLE_ENDIAN);
return convertSignedIntToUnsigned(converterBuffer.getInt());
}
/**
* Reverses the order of the byte array
*
* @param inputByteArray array to be reversed
*
* @return inputByteArray in reversed order
*
**/
public static byte[] reverseByteArray(byte[] inputByteArray) {
byte[] result=new byte[inputByteArray.length];
for (int i=inputByteArray.length-1;i>=0;i--) {
result[result.length-1-i]=inputByteArray[i];
}
return result;
}
/**
* Converts a Hex String to Byte Array. Only used for configuration not for parsing. Hex String is in format of xsd:hexBinary
*
* @param hexString String in Hex format.
*
* @return byte array corresponding to String in Hex format
*
*/
public static byte[] convertHexStringToByteArray(String hexString) {
return DatatypeConverter.parseHexBinary(hexString);
}
/**
* Converts a Byte Array to Hex String. Only used for configuration not for parsing. Hex String is in format of xsd:hexBinary
*
* @param byteArray byte array to convert
*
* @return String in Hex format corresponding to byteArray
*
*/
public static String convertByteArrayToHexString(byte[] byteArray) {
return DatatypeConverter.printHexBinary(byteArray);
}
/**
* Converts an int to a date
*
* @param dateInt timestamp in integer format
*
* @return Date corresponding to dateInt
*
*/
public static Date convertIntToDate(int dateInt) {
return new Date(dateInt*1000L);
}
/**
* Compares two Bitcoin magics
*
* @param magic1 first magic
* @param magic2 second magics
*
* @return false, if do not match, true if match
*
*/
public static boolean compareMagics (byte[] magic1,byte[] magic2) {
if (magic1.length!=magic2.length) {
return false;
}
for (int i=0;i<magic1.length;i++) {
if (magic1[i]!=magic2[i]) {
return false;
}
}
return true;
}
/**
* Calculates the double SHA256-Hash of a transaction in little endian format. This could be used for certain analysis scenario where one want to investigate the referenced transaction used as an input for a Transaction. Furthermore, it can be used as a unique identifier of the transaction
*
* It corresponds to the Bitcoin specification of txid (https://bitcoincore.org/en/segwit_wallet_dev/)
*
* @param transaction The BitcoinTransaction of which we want to calculate the hash
*
* @return byte array containing the hash of the transaction. Note: This one can be compared to a prevTransactionHash. However, if you want to search for it in popular blockchain explorers then you need to apply the function BitcoinUtil.reverseByteArray to it!
*
*
* @throws java.io.IOException in case of errors reading from the InputStream
*
*/
public static byte[] getTransactionHash(BitcoinTransaction transaction) throws IOException{
// convert transaction to byte array
ByteArrayOutputStream transactionBAOS = new ByteArrayOutputStream();
byte[] version = reverseByteArray(convertIntToByteArray((int)transaction.getVersion()));
transactionBAOS.write(version);
byte[] inCounter = transaction.getInCounter();
transactionBAOS.write(inCounter);
for (int i=0;i<transaction.getListOfInputs().size();i++) {
transactionBAOS.write(transaction.getListOfInputs().get(i).getPrevTransactionHash());
transactionBAOS.write(reverseByteArray(convertIntToByteArray((int)(transaction.getListOfInputs().get(i).getPreviousTxOutIndex()))));
transactionBAOS.write(transaction.getListOfInputs().get(i).getTxInScriptLength());
transactionBAOS.write(transaction.getListOfInputs().get(i).getTxInScript());
transactionBAOS.write(reverseByteArray(convertIntToByteArray((int)(transaction.getListOfInputs().get(i).getSeqNo()))));
}
byte[] outCounter = transaction.getOutCounter();
transactionBAOS.write(outCounter);
for (int j=0;j<transaction.getListOfOutputs().size();j++) {
transactionBAOS.write(convertBigIntegerToByteArray(transaction.getListOfOutputs().get(j).getValue(),8));
transactionBAOS.write(transaction.getListOfOutputs().get(j).getTxOutScriptLength());
transactionBAOS.write(transaction.getListOfOutputs().get(j).getTxOutScript());
}
byte[] lockTime=reverseByteArray(convertIntToByteArray((int)transaction.getLockTime()));
transactionBAOS.write(lockTime);
byte[] transactionByteArray= transactionBAOS.toByteArray();
byte[] firstRoundHash;
byte[] secondRoundHash;
try {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
firstRoundHash = digest.digest(transactionByteArray);
secondRoundHash = digest.digest(firstRoundHash);
} catch (NoSuchAlgorithmException nsae) {
LOG.error(nsae);
return new byte[0];
}
return secondRoundHash;
}
/**
* Calculates the double SHA256-Hash of a transaction in little endian format. It serve as a unique identifier of a transaction, but cannot be used to link the outputs of other transactions as input
*
* It corresponds to the Bitcoin specification of wtxid (https://bitcoincore.org/en/segwit_wallet_dev/)
*
* @param transaction The BitcoinTransaction of which we want to calculate the hash
*
* @return byte array containing the hash of the transaction. Note: This one can be compared to a prevTransactionHash. However, if you want to search for it in popular blockchain explorers then you need to apply the function BitcoinUtil.reverseByteArray to it!
*
*
* @throws java.io.IOException in case of errors reading from the InputStream
*
*/
public static byte[] getTransactionHashSegwit(BitcoinTransaction transaction) throws IOException{
// convert transaction to byte array
ByteArrayOutputStream transactionBAOS = new ByteArrayOutputStream();
byte[] version = reverseByteArray(convertIntToByteArray((int)transaction.getVersion()));
transactionBAOS.write(version);
// check if segwit
boolean segwit=false;
if ((transaction.getMarker()==0) && (transaction.getFlag()!=0)) {
segwit=true;
// we still need to check the case that all witness script stack items for all input transactions are of size 0 => traditional transaction hash calculation
// cf. https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
// A non-witness program (defined hereinafter) txin MUST be associated with an empty witness field, represented by a 0x00. If all txins are not witness program, a transaction's wtxid is equal to its txid.
boolean emptyWitness=true;
for (int k=0;k<transaction.getBitcoinScriptWitness().size();k++) {
BitcoinScriptWitnessItem currentItem = transaction.getBitcoinScriptWitness().get(k);
if (currentItem.getStackItemCounter().length>1) {
emptyWitness=false;
break;
} else if ((currentItem.getStackItemCounter().length==1) && (currentItem.getStackItemCounter()[0]!=0x00)) {
emptyWitness=false;
break;
}
}
if (emptyWitness==true) {
return BitcoinUtil.getTransactionHashSegwit(transaction);
}
transactionBAOS.write(transaction.getMarker());
transactionBAOS.write(transaction.getFlag());
}
byte[] inCounter = transaction.getInCounter();
transactionBAOS.write(inCounter);
for (int i=0;i<transaction.getListOfInputs().size();i++) {
transactionBAOS.write(transaction.getListOfInputs().get(i).getPrevTransactionHash());
transactionBAOS.write(reverseByteArray(convertIntToByteArray((int)(transaction.getListOfInputs().get(i).getPreviousTxOutIndex()))));
transactionBAOS.write(transaction.getListOfInputs().get(i).getTxInScriptLength());
transactionBAOS.write(transaction.getListOfInputs().get(i).getTxInScript());
transactionBAOS.write(reverseByteArray(convertIntToByteArray((int)(transaction.getListOfInputs().get(i).getSeqNo()))));
}
byte[] outCounter = transaction.getOutCounter();
transactionBAOS.write(outCounter);
for (int j=0;j<transaction.getListOfOutputs().size();j++) {
transactionBAOS.write(convertBigIntegerToByteArray(transaction.getListOfOutputs().get(j).getValue(),8));
transactionBAOS.write(transaction.getListOfOutputs().get(j).getTxOutScriptLength());
transactionBAOS.write(transaction.getListOfOutputs().get(j).getTxOutScript());
}
if (segwit) {
for (int k=0;k<transaction.getBitcoinScriptWitness().size();k++) {
BitcoinScriptWitnessItem currentItem = transaction.getBitcoinScriptWitness().get(k);
transactionBAOS.write(currentItem.getStackItemCounter());
for (int l=0;l<currentItem.getScriptWitnessList().size();l++) {
transactionBAOS.write(currentItem.getScriptWitnessList().get(l).getWitnessScriptLength());
transactionBAOS.write(currentItem.getScriptWitnessList().get(l).getWitnessScript());
}
}
}
byte[] lockTime=reverseByteArray(convertIntToByteArray((int)transaction.getLockTime()));
transactionBAOS.write(lockTime);
byte[] transactionByteArray= transactionBAOS.toByteArray();
byte[] firstRoundHash;
byte[] secondRoundHash;
try {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
firstRoundHash = digest.digest(transactionByteArray);
secondRoundHash = digest.digest(firstRoundHash);
} catch (NoSuchAlgorithmException nsae) {
LOG.error(nsae);
return new byte[0];
}
return secondRoundHash;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinBlockReader.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinBlockReader.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException;
import org.zuinnote.hadoop.ethereum.format.common.EthereumUtil;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.io.BufferedInputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
/**
* This class reads Bitcoin blocks (in raw network format) from an input stream and returns Java objects of the class BitcoinBlock. It reuses code from the LineRecordReader due to its robustness and well-tested functionality.
*
**/
public class BitcoinBlockReader {
private static final Log LOG = LogFactory.getLog(BitcoinBlockReader.class.getName());
private int maxSizeBitcoinBlock=0;
private boolean useDirectBuffer=false;
private boolean readAuxPow=false;
private boolean filterSpecificMagic=false;
private byte[][] specificMagicByteArray;
private ByteBuffer preAllocatedDirectByteBuffer;
private InputStream bin;
/**
* Create a BitcoinBlock reader that reads from the given stream and uses the given parameters for configuration. Note it assumed that the validity of this configuration is checked by BitcoinBlockRecordReader
* @param in Input stream to read from
* @param maxSizeBitcoinBlock Maximum size of a Bitcoinblock.
* @param bufferSize size of the memory buffer for the givenInputStream
* @param specificMagicByteArray filters by specific block magic numbers if not null.
* @param useDirectBuffer experimental feature to use a DirectByteBuffer instead of a HeapByteBuffer
**/
public BitcoinBlockReader(InputStream in, int maxSizeBitcoinBlock, int bufferSize, byte[][] specificMagicByteArray, boolean useDirectBuffer) {
this(in,maxSizeBitcoinBlock,bufferSize, specificMagicByteArray, useDirectBuffer, false);
}
/**
* Create a BitcoinBlock reader that reads from the given stream and uses the given parameters for configuration. Note it assumed that the validity of this configuration is checked by BitcoinBlockRecordReader
* @param in Input stream to read from
* @param maxSizeBitcoinBlock Maximum size of a Bitcoinblock.
* @param bufferSize size of the memory buffer for the givenInputStream
* @param specificMagicByteArray filters by specific block magic numbers if not null.
* @param useDirectBuffer experimental feature to use a DirectByteBuffer instead of a HeapByteBuffer
* @param readAuxPow true if auxPow information should be parsed, false if not
**/
public BitcoinBlockReader(InputStream in, int maxSizeBitcoinBlock, int bufferSize, byte[][] specificMagicByteArray, boolean useDirectBuffer, boolean readAuxPow) {
this.maxSizeBitcoinBlock=maxSizeBitcoinBlock;
this.specificMagicByteArray=specificMagicByteArray;
this.useDirectBuffer=useDirectBuffer;
if (specificMagicByteArray!=null) {
this.filterSpecificMagic=true;
}
if (bufferSize==0) { // use original stream
this.bin=in;
} else {
this.bin=new BufferedInputStream(in,bufferSize);
}
if (this.useDirectBuffer) { // in case of a DirectByteBuffer we do allocation only once for the maximum size of one block, otherwise we will have a high cost for reallocation
preAllocatedDirectByteBuffer=ByteBuffer.allocateDirect(this.maxSizeBitcoinBlock);
}
this.readAuxPow=readAuxPow;
}
/**
* Seek for a valid block start according to the following algorithm:
* (1) find the magic of the block
* (2) Check that the block can be fully read and that block size is smaller than maximum block size
* This functionality is particularly useful for file processing in Big Data systems, such as Hadoop and Co where we work indepently on different filesplits and cannot expect that the Bitcoin block starts directly at the beginning of the stream;
*
* @throws org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException in case of format errors of the Bitcoin Blockchain data
*
**/
public void seekBlockStart() throws BitcoinBlockReadException {
if (!(this.filterSpecificMagic)) {
throw new BitcoinBlockReadException("Error: Cannot seek to a block start, because no magic(s) are defined.");
}
findMagic();
// validate it is a full block
checkFullBlock();
}
/**
* Read a block into a Java object of the class Bitcoin Block. This makes analysis very easy, but might be slower for some type of analytics where you are only interested in small parts of the block. In this case it is recommended to use {@link #readRawBlock}
*
* @return BitcoinBlock
* @throws org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException in case of errors of reading the Bitcoin Blockchain data
*/
public BitcoinBlock readBlock() throws BitcoinBlockReadException {
ByteBuffer rawByteBuffer = readRawBlock();
if (rawByteBuffer==null) {
return null;
}
// start parsing
// initialize byte arrays
byte[] currentMagicNo=new byte[4];
byte[] currentBits=new byte[4];
byte[] currentHashMerkleRoot=new byte[32];
byte[] currentHashPrevBlock=new byte[32];
// magic no
rawByteBuffer.get(currentMagicNo,0,4);
// blocksize
int currentBlockSize=rawByteBuffer.getInt();
// version
int currentVersion=rawByteBuffer.getInt();
// hashPrevBlock
rawByteBuffer.get(currentHashPrevBlock,0,32);
// hashMerkleRoot
rawByteBuffer.get(currentHashMerkleRoot,0,32);
// time
int currentTime=rawByteBuffer.getInt();
// bits/difficulty
rawByteBuffer.get(currentBits,0,4);
// nonce
int currentNonce=rawByteBuffer.getInt();
// parse AuxPOW (if available)
BitcoinAuxPOW auxPOW=parseAuxPow(rawByteBuffer);
// read var int from transaction counter
long currentTransactionCounter=BitcoinUtil.convertVarIntByteBufferToLong(rawByteBuffer);
// parse transactions
List<BitcoinTransaction> allBlockTransactions=parseTransactions(rawByteBuffer,currentTransactionCounter);
if (allBlockTransactions.size()!=currentTransactionCounter) {
throw new BitcoinBlockReadException("Error: Number of Transactions ("+allBlockTransactions.size()+") does not correspond to transaction counter in block ("+currentTransactionCounter+")");
}
BitcoinBlock result=new BitcoinBlock();
result.setMagicNo(currentMagicNo);
result.setBlockSize(currentBlockSize);
result.setVersion(currentVersion);
result.setTime(currentTime);
result.setBits(currentBits);
result.setNonce(currentNonce);
result.setTransactionCounter(currentTransactionCounter);
result.setHashPrevBlock(currentHashPrevBlock);
result.setHashMerkleRoot(currentHashMerkleRoot);
result.setAuxPOW(auxPOW);
result.setTransactions(allBlockTransactions);
return result;
}
/**
* Parses AuxPOW information (cf. https://en.bitcoin.it/wiki/Merged_mining_specification)
*
* @param rawByteBuffer
* @return
*/
public BitcoinAuxPOW parseAuxPow(ByteBuffer rawByteBuffer) {
if (!this.readAuxPow) {
return null;
}
// in case it does not contain auxpow we need to reset
rawByteBuffer.mark();
int currentVersion=rawByteBuffer.getInt();
byte[] currentInCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
byte[] currentTransactionInputPrevTransactionHash=new byte[32];
rawByteBuffer.get(currentTransactionInputPrevTransactionHash,0,32);
byte[] prevTxOutIdx = new byte[4];
rawByteBuffer.get(prevTxOutIdx,0,4);
// detect auxPow
rawByteBuffer.reset();
byte[] expectedPrevTransactionHash=new byte[]{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
byte[] expectedPrevOutIdx = new byte[] {(byte)0xFF,(byte)0xFF,(byte)0xFF,(byte)0xFF};
if ((!(Arrays.equals(prevTxOutIdx,expectedPrevOutIdx))||(!(Arrays.equals(currentTransactionInputPrevTransactionHash,expectedPrevTransactionHash))))) {
return null;
}
// continue reading auxPow
// txIn (for all of them)
currentVersion=rawByteBuffer.getInt();
currentInCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentNoOfInputs=BitcoinUtil.getVarInt(currentInCounterVarInt);
List<BitcoinTransactionInput> currentTransactionInput = parseTransactionInputs(rawByteBuffer, currentNoOfInputs);
// txOut (for all of them)
byte[] currentOutCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentNoOfOutput=BitcoinUtil.getVarInt(currentOutCounterVarInt);
List<BitcoinTransactionOutput> currentTransactionOutput = parseTransactionOutputs(rawByteBuffer,currentNoOfOutput);
int lockTime = rawByteBuffer.getInt();
BitcoinTransaction coinbaseTransaction= new BitcoinTransaction(currentVersion,currentInCounterVarInt,currentTransactionInput, currentOutCounterVarInt, currentTransactionOutput,lockTime);
// read branches
// coinbase branch
byte[] coinbaseParentBlockHeaderHash=new byte[32];
rawByteBuffer.get(coinbaseParentBlockHeaderHash,0,32);
BitcoinAuxPOWBranch coinbaseBranch = parseAuxPOWBranch(rawByteBuffer);
// auxchain branch
BitcoinAuxPOWBranch auxChainBranch = parseAuxPOWBranch(rawByteBuffer);
// parent Block header
byte[] parentBlockBits=new byte[4];
byte[] parentBlockHashMerkleRoot=new byte[32];
byte[] parentBlockHashPrevBlock=new byte[32];
// version
int parentBlockVersion=rawByteBuffer.getInt();
// hashPrevBlock
rawByteBuffer.get(parentBlockHashPrevBlock,0,32);
// hashMerkleRoot
rawByteBuffer.get(parentBlockHashMerkleRoot,0,32);
// time
int parentBlockTime=rawByteBuffer.getInt();
// bits/difficulty
rawByteBuffer.get(parentBlockBits,0,4);
// nonce
int parentBlockNonce=rawByteBuffer.getInt();
BitcoinAuxPOWBlockHeader parentBlockheader = new BitcoinAuxPOWBlockHeader(parentBlockVersion, parentBlockHashPrevBlock, parentBlockHashMerkleRoot, parentBlockTime, parentBlockBits, parentBlockNonce);
return new BitcoinAuxPOW(currentVersion, coinbaseTransaction, coinbaseParentBlockHeaderHash, coinbaseBranch, auxChainBranch, parentBlockheader);
}
/**
* Parse an AUXPowBranch
*
* @param rawByteBuffer ByteBuffer from which the AuxPOWBranch should be parsed
*
* @return AuxPOWBranch
*/
public BitcoinAuxPOWBranch parseAuxPOWBranch(ByteBuffer rawByteBuffer) {
byte[] noOfLinksVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentNoOfLinks=BitcoinUtil.getVarInt(noOfLinksVarInt);
ArrayList<byte[]> links = new ArrayList((int)currentNoOfLinks);
for (int i=0;i<currentNoOfLinks;i++) {
byte[] currentLink = new byte[32];
rawByteBuffer.get(currentLink,0,32);
links.add(currentLink);
}
byte[] branchSideBitmask=new byte[4];
rawByteBuffer.get(branchSideBitmask,0,4);
return new BitcoinAuxPOWBranch(noOfLinksVarInt, links, branchSideBitmask);
}
/**
* Parses the Bitcoin transactions in a byte buffer.
*
* @param rawByteBuffer ByteBuffer from which the transactions have to be parsed
* @param noOfTransactions Number of expected transactions
*
* @return Array of transactions
*
*
*/
public List<BitcoinTransaction> parseTransactions(ByteBuffer rawByteBuffer,long noOfTransactions) {
ArrayList<BitcoinTransaction> resultTransactions = new ArrayList<>((int)noOfTransactions);
// read all transactions from ByteBuffer
for (int k=0;k<noOfTransactions;k++) {
// read version
int currentVersion=rawByteBuffer.getInt();
// read inCounter
byte[] currentInCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentNoOfInputs=BitcoinUtil.getVarInt(currentInCounterVarInt);
boolean segwit=false;
byte marker=1;
byte flag=0;
// check segwit marker
if (currentNoOfInputs==0) {
// this seems to be segwit - lets be sure
// check segwit flag
rawByteBuffer.mark();
byte segwitFlag = rawByteBuffer.get();
if (segwitFlag!=0) {
// load the real number of inputs
segwit=true;
marker=0;
flag=segwitFlag;
currentInCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
currentNoOfInputs=BitcoinUtil.getVarInt(currentInCounterVarInt);
} else {
LOG.warn("It seems a block with 0 transaction inputs was found");
rawByteBuffer.reset();
}
}
// read inputs
List<BitcoinTransactionInput> currentTransactionInput = parseTransactionInputs(rawByteBuffer, currentNoOfInputs);
// read outCounter
byte[] currentOutCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentNoOfOutput=BitcoinUtil.getVarInt(currentOutCounterVarInt);
// read outputs
List<BitcoinTransactionOutput> currentTransactionOutput = parseTransactionOutputs(rawByteBuffer,currentNoOfOutput);
List<BitcoinScriptWitnessItem> currentListOfTransactionSegwits;
if (segwit) {
// read segwit data
// for each transaction input there is at least some segwit data item
// read scriptWitness size
currentListOfTransactionSegwits=new ArrayList<>();
for (int i=0;i<currentNoOfInputs;i++) {
// get no of witness items for input
byte[] currentWitnessCounterVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentNoOfWitnesses=BitcoinUtil.getVarInt(currentWitnessCounterVarInt);
List<BitcoinScriptWitness> currentTransactionSegwit = new ArrayList<>((int)currentNoOfWitnesses);
for (int j=0;j<(int)currentNoOfWitnesses;j++) {
// read size of segwit script
byte[] currentTransactionSegwitScriptLength=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentTransactionSegwitScriptSize=BitcoinUtil.getVarInt(currentTransactionSegwitScriptLength);
int currentTransactionSegwitScriptSizeInt= (int)currentTransactionSegwitScriptSize;
// read segwit script
byte[] currentTransactionInSegwitScript=new byte[currentTransactionSegwitScriptSizeInt];
rawByteBuffer.get(currentTransactionInSegwitScript,0,currentTransactionSegwitScriptSizeInt);
// add segwit
currentTransactionSegwit.add(new BitcoinScriptWitness(currentTransactionSegwitScriptLength,currentTransactionInSegwitScript));
}
currentListOfTransactionSegwits.add(new BitcoinScriptWitnessItem(currentWitnessCounterVarInt,currentTransactionSegwit));
}
} else {
currentListOfTransactionSegwits=new ArrayList<>();
}
// lock_time
int currentTransactionLockTime = rawByteBuffer.getInt();
// add transaction
resultTransactions.add(new BitcoinTransaction(marker,flag,currentVersion,currentInCounterVarInt,currentTransactionInput,currentOutCounterVarInt,currentTransactionOutput,currentListOfTransactionSegwits,currentTransactionLockTime));
}
return resultTransactions;
}
/*
* Parses the Bitcoin transaction inputs in a byte buffer.
*
* @param rawByteBuffer ByteBuffer from which the transaction inputs have to be parsed
* @param noOfTransactionInputs Number of expected transaction inputs
*
* @return Array of transactions
*
*/
public List<BitcoinTransactionInput> parseTransactionInputs(ByteBuffer rawByteBuffer, long noOfTransactionInputs) {
ArrayList<BitcoinTransactionInput> currentTransactionInput = new ArrayList<>((int)noOfTransactionInputs);
for (int i=0;i<noOfTransactionInputs;i++) {
// read previous Hash of Transaction
byte[] currentTransactionInputPrevTransactionHash=new byte[32];
rawByteBuffer.get(currentTransactionInputPrevTransactionHash,0,32);
// read previousTxOutIndex
long currentTransactionInputPrevTxOutIdx=BitcoinUtil.convertSignedIntToUnsigned(rawByteBuffer.getInt());
// read InScript length (Potential Internal Exceed Java Type)
byte[] currentTransactionTxInScriptLengthVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentTransactionTxInScriptSize=BitcoinUtil.getVarInt(currentTransactionTxInScriptLengthVarInt);
// read inScript
int currentTransactionTxInScriptSizeInt=(int)currentTransactionTxInScriptSize;
byte[] currentTransactionInScript=new byte[currentTransactionTxInScriptSizeInt];
rawByteBuffer.get(currentTransactionInScript,0,currentTransactionTxInScriptSizeInt);
// read sequence no
long currentTransactionInputSeqNo=BitcoinUtil.convertSignedIntToUnsigned(rawByteBuffer.getInt());
// add input
currentTransactionInput.add(new BitcoinTransactionInput(currentTransactionInputPrevTransactionHash,currentTransactionInputPrevTxOutIdx,currentTransactionTxInScriptLengthVarInt,currentTransactionInScript,currentTransactionInputSeqNo));
}
return currentTransactionInput;
}
/*
* Parses the Bitcoin transaction outputs in a byte buffer.
*
* @param rawByteBuffer ByteBuffer from which the transaction outputs have to be parsed
* @param noOfTransactionInputs Number of expected transaction outputs
*
* @return Array of transactions
*
*/
public List<BitcoinTransactionOutput> parseTransactionOutputs(ByteBuffer rawByteBuffer, long noOfTransactionOutputs) {
ArrayList<BitcoinTransactionOutput> currentTransactionOutput = new ArrayList<>((int)(noOfTransactionOutputs));
for (int i=0;i<noOfTransactionOutputs;i++) {
// read value
byte[] currentTransactionOutputValueArray = new byte[8];
rawByteBuffer.get(currentTransactionOutputValueArray);
BigInteger currentTransactionOutputValue = new BigInteger(1,EthereumUtil.reverseByteArray(currentTransactionOutputValueArray));
// read outScript length (Potential Internal Exceed Java Type)
byte[] currentTransactionTxOutScriptLengthVarInt=BitcoinUtil.convertVarIntByteBufferToByteArray(rawByteBuffer);
long currentTransactionTxOutScriptSize=BitcoinUtil.getVarInt(currentTransactionTxOutScriptLengthVarInt);
int currentTransactionTxOutScriptSizeInt=(int)(currentTransactionTxOutScriptSize);
// read outScript
byte[] currentTransactionOutScript=new byte[currentTransactionTxOutScriptSizeInt];
rawByteBuffer.get(currentTransactionOutScript,0,currentTransactionTxOutScriptSizeInt);
currentTransactionOutput.add(new BitcoinTransactionOutput(currentTransactionOutputValue,currentTransactionTxOutScriptLengthVarInt,currentTransactionOutScript));
}
return currentTransactionOutput;
}
/*
* Reads a raw Bitcoin block into a ByteBuffer. This method is recommended if you are only interested in a small part of the block and do not need the deserialization of the full block, ie in case you generally skip a lot of blocks
*
*
* @return ByteBuffer containing the block
*
* @throws org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException in case of format errors of the Bitcoin Blockchain data
**/
public ByteBuffer readRawBlock() throws BitcoinBlockReadException {
try {
byte[] blockSizeByte = new byte[0];
while (blockSizeByte.length==0) { // in case of filtering by magic no we skip blocks until we reach a valid magicNo or end of Block
// check if more to read
if (this.bin.available()<1) {
return null;
}
blockSizeByte=skipBlocksNotInFilter();
}
// check if it is larger than maxsize, include 8 bytes for the magic and size header
long blockSize=BitcoinUtil.getSize(blockSizeByte)+8;
if (blockSize==0) {
throw new BitcoinBlockReadException("Error: Blocksize too small");
}
if (blockSize<0) {
throw new BitcoinBlockReadException("Error: This block size cannot be handled currently (larger then largest number in positive signed int)");
}
if (blockSize>this.maxSizeBitcoinBlock) {
throw new BitcoinBlockReadException("Error: Block size is larger then defined in configuration - Please increase it if this is a valid block");
}
// read full block into ByteBuffer
int blockSizeInt=(int)(blockSize);
byte[] fullBlock=new byte[blockSizeInt];
int totalByteRead=0;
int readByte;
while ((readByte=this.bin.read(fullBlock,totalByteRead,blockSizeInt-totalByteRead))>-1) {
totalByteRead+=readByte;
if (totalByteRead>=blockSize) {
break;
}
}
if (totalByteRead!=blockSize) {
throw new BitcoinBlockReadException("Error: Could not read full block");
}
ByteBuffer result;
if (!(this.useDirectBuffer)) {
result=ByteBuffer.wrap(fullBlock);
} else {
preAllocatedDirectByteBuffer.clear(); // clear out old bytebuffer
preAllocatedDirectByteBuffer.limit(fullBlock.length); // limit the bytebuffer
result=preAllocatedDirectByteBuffer;
result.put(fullBlock);
result.flip(); // put in read mode
}
result.order(ByteOrder.LITTLE_ENDIAN);
return result;
} catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
}
/**
* This function is used to read from a raw Bitcoin block some identifier. Note: Does not change ByteBuffer position
*
* @param rawByteBuffer ByteBuffer as read by readRawBlock
* @return byte array containing hashMerkleRoot and prevHashBlock
*
*/
public byte[] getKeyFromRawBlock (ByteBuffer rawByteBuffer) {
rawByteBuffer.mark();
byte[] magicNo=new byte[4];
byte[] hashMerkleRoot=new byte[32];
byte[] hashPrevBlock=new byte[32];
// magic no (skip)
rawByteBuffer.get(magicNo,0,4);
// blocksize (skip)
rawByteBuffer.getInt();
// version (skip)
rawByteBuffer.getInt();
// hashPrevBlock
rawByteBuffer.get(hashPrevBlock,0,32);
// hashMerkleRoot
rawByteBuffer.get(hashMerkleRoot,0,32);
byte[] result=new byte[hashMerkleRoot.length+hashPrevBlock.length];
for (int i=0;i<hashMerkleRoot.length;i++) {
result[i]=hashMerkleRoot[i];
}
for (int j=0;j<hashPrevBlock.length;j++) {
result[j+hashMerkleRoot.length]=hashPrevBlock[j];
}
rawByteBuffer.reset();
return result;
}
/**
* Closes the reader
*
* @throws java.io.IOException in case of errors reading from the InputStream
*
*/
public void close() throws IOException {
this.bin.close();
}
/*
* Finds the start of a block by looking for the specified magics in the current InputStream
*
* @throws org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException in case of errors reading Blockchain data
*
*/
private void findMagic() throws BitcoinBlockReadException {
// search if first byte of any magic matches
// search up to maximum size of a bitcoin block
int currentSeek=0;
while(currentSeek!=this.maxSizeBitcoinBlock) {
int firstByte=-1;
try {
this.bin.mark(4); // magic is always 4 bytes
firstByte=this.bin.read();
} catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
if (firstByte==-1) {
throw new BitcoinBlockReadException("Error: Did not find defined magic within current stream");
}
try {
if (checkForMagicBytes(firstByte)) {
return;
}
} catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
if (currentSeek==this.maxSizeBitcoinBlock) {
throw new BitcoinBlockReadException("Error: Cannot seek to a block start, because no valid block found within the maximum size of a Bitcoin block. Check data or increase maximum size of Bitcoin block.");
}
// increase by one byte if magic not found yet
try {
this.bin.reset();
if (this.bin.skip(1)!=1) {
LOG.error("Error cannot skip 1 byte in InputStream");
}
} catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
currentSeek++;
}
}
/*
* Checks if there is a full Bitcoin Block at the current position of the InputStream
*
* @throws org.zuinnote.hadoop.bitcoin.format.exception.BitcoinBlockReadException in case of errors reading Blockchain data
*
*/
private void checkFullBlock() throws BitcoinBlockReadException {
// now we can check that we have a full block
try {
this.bin.mark(this.maxSizeBitcoinBlock);
// skip maigc
long skipMagic=this.bin.skip(4);
if (skipMagic!=4) {
throw new BitcoinBlockReadException("Error: Cannot seek to a block start, because no valid block found. Cannot skip forward magic");
}
}
catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
// read size
// blocksize
byte[] blockSizeArray = new byte[4];
try {
int maxByteRead=4;
int totalByteRead=0;
int readByte;
while ((readByte=this.bin.read(blockSizeArray,totalByteRead,maxByteRead-totalByteRead))>-1) {
totalByteRead+=readByte;
if (totalByteRead>=maxByteRead) {
break;
}
}
if (totalByteRead!=maxByteRead) {
throw new BitcoinBlockReadException("Error: Cannot seek to a block start, because no valid block found. Cannot read size of block");
}
}
catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
long blockSize=BitcoinUtil.getSize(blockSizeArray);
if (this.maxSizeBitcoinBlock<blockSize) {
throw new BitcoinBlockReadException("Error: Cannot seek to a block start, because no valid block found. Max bitcoin block size is smaller than current block size.");
}
int blockSizeInt=(int)blockSize;
byte[] blockRead=new byte[blockSizeInt];
int totalByteRead=0;
int readByte;
try {
while ((readByte=this.bin.read(blockRead,totalByteRead,blockSizeInt-totalByteRead))>-1) {
totalByteRead+=readByte;
if (totalByteRead>=blockSize) {
break;
}
}
} catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
if (totalByteRead!=blockSize) {
throw new BitcoinBlockReadException("Error: Cannot seek to a block start, because no valid block found. Cannot skip to end of block");
}
try {
this.bin.reset();
} catch (IOException e) {
LOG.error(e);
throw new BitcoinBlockReadException(e.toString());
}
// it is a full block
}
/*
* Skips blocks in inputStream which are not specified in the magic filter
*
* @return null or byte array containing the size of the block (not the block itself)
*
* @throws java.io.IOException in case of errors reading from InputStream
*
*/
private byte[] skipBlocksNotInFilter() throws IOException {
byte[] magicNo=new byte[4];
byte[] blockSizeByte=new byte[4];
// mark bytestream so we can peak into it
this.bin.mark(8);
// read magic
int maxByteRead=4;
int totalByteRead=0;
int readByte;
while ((readByte=this.bin.read(magicNo,totalByteRead,maxByteRead-totalByteRead))>-1) {
totalByteRead+=readByte;
if (totalByteRead>=maxByteRead) {
break;
}
}
if (totalByteRead!=maxByteRead) {
return new byte[0];
}
// read blocksize
maxByteRead=4;
totalByteRead=0;
while ((readByte=this.bin.read(blockSizeByte,totalByteRead,maxByteRead-totalByteRead))>-1) {
totalByteRead+=readByte;
if (totalByteRead>=maxByteRead) {
break;
}
}
if (totalByteRead!=maxByteRead) {
return new byte[0];
}
long blockSize=BitcoinUtil.getSize(blockSizeByte)+8;
// read the full block
this.bin.reset();
//filter by magic numbers?
if (filterSpecificMagic) {
for (int i=0;i<specificMagicByteArray.length;i++) {
byte[] currentFilter=specificMagicByteArray[i];
boolean doesMatchOneMagic=BitcoinUtil.compareMagics(currentFilter,magicNo);
// correspond to filter? read it!
if (doesMatchOneMagic) {
return blockSizeByte;
}
}
// Skip block if not found
if (this.bin.skip(blockSize)!=blockSize) {
LOG.error("Cannot skip block in InputStream");
}
return new byte[0];
} else {
return blockSizeByte;
}
}
/**
* Checks in BufferedInputStream (bin) for the magic(s) specified in specificMagicByteArray
*
* @param firstByte first byte (as int) of the byteBuffer
*
* @retrun true if one of the magics has been identified, false if not
*
* @throws java.io.IOException in case of issues reading from BufferedInputStream
*
*/
private boolean checkForMagicBytes(int firstByte) throws IOException {
byte[] fullMagic=null;
for (int i=0;i<this.specificMagicByteArray.length;i++) {
// compare first byte and decide if we want to read full magic
int currentMagicFirstbyte=this.specificMagicByteArray[i][0] & 0xFF;
if (firstByte==currentMagicFirstbyte) {
if (fullMagic==null) { // read full magic
fullMagic=new byte[4];
fullMagic[0]=this.specificMagicByteArray[i][0];
int maxByteRead=4;
int totalByteRead=1;
int readByte;
while ((readByte=this.bin.read(fullMagic,totalByteRead,maxByteRead-totalByteRead))>-1) {
totalByteRead+=readByte;
if (totalByteRead>=maxByteRead) {
break;
}
}
if (totalByteRead!=maxByteRead) {
return false;
}
}
// compare full magics
if (BitcoinUtil.compareMagics(fullMagic,this.specificMagicByteArray[i])) {
this.bin.reset();
return true;
}
}
}
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransactionOutput.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinTransactionOutput.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
import java.math.BigInteger;
public class BitcoinTransactionOutput implements Serializable {
/**
*
*/
private static final long serialVersionUID = 2854570630540937753L;
private BigInteger value;
private byte[] txOutScriptLength;
private byte[] txOutScript;
public BitcoinTransactionOutput(BigInteger value, byte[] txOutScriptLength, byte[] txOutScript) {
this.value=value;
this.txOutScriptLength=txOutScriptLength;
this.txOutScript=txOutScript;
}
public BigInteger getValue() {
return this.value;
}
public byte[] getTxOutScriptLength() {
return this.txOutScriptLength;
}
public byte[] getTxOutScript() {
return this.txOutScript;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptWitness.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptWitness.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
public class BitcoinScriptWitness implements Serializable {
/**
*
*/
private static final long serialVersionUID = 273511143914429994L;
private byte[] witnessScriptLength;
private byte[] witnessScript;
public BitcoinScriptWitness(byte[] witnessScriptLength,byte[] witnessScript) {
this.witnessScriptLength=witnessScriptLength;
this.witnessScript=witnessScript;
}
public byte[] getWitnessScriptLength() {
return this.witnessScriptLength;
}
public byte[] getWitnessScript() {
return this.witnessScript;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinAuxPOW.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinAuxPOW.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
public class BitcoinAuxPOW implements Serializable {
private long version;
private BitcoinTransaction coinbaseTransaction;
private byte[] parentBlockHeaderHash;
private BitcoinAuxPOWBranch coinbaseBranch;
private BitcoinAuxPOWBranch auxBlockChainBranch;
private BitcoinAuxPOWBlockHeader parentBlockHeader;
/*
* Creates an empy AuxPOW object in case the feature is not used (e.g. in the main Bitcoin blockchain)
*
*/
public BitcoinAuxPOW() {
this.version=0L;
this.coinbaseTransaction=null;
this.coinbaseBranch=null;
this.auxBlockChainBranch=null;
this.parentBlockHeader=null;
}
public BitcoinAuxPOW(long version, BitcoinTransaction coinbaseTransaction, byte[] parentBlockHeaderHash, BitcoinAuxPOWBranch coinbaseBranch, BitcoinAuxPOWBranch auxBlockChainBranch, BitcoinAuxPOWBlockHeader parentBlockHeader) {
this.version=version;
this.coinbaseTransaction=coinbaseTransaction;
this.parentBlockHeaderHash=parentBlockHeaderHash;
this.coinbaseBranch=coinbaseBranch;
this.auxBlockChainBranch=auxBlockChainBranch;
this.parentBlockHeader=parentBlockHeader;
}
public long getVersion() {
return version;
}
public BitcoinAuxPOWBranch getCoinbaseBranch() {
return coinbaseBranch;
}
public void setCoinbaseBranch(BitcoinAuxPOWBranch coinbaseBranch) {
this.coinbaseBranch = coinbaseBranch;
}
public BitcoinAuxPOWBranch getAuxBlockChainBranch() {
return auxBlockChainBranch;
}
public BitcoinAuxPOWBlockHeader getParentBlockHeader() {
return parentBlockHeader;
}
public BitcoinTransaction getCoinbaseTransaction() {
return coinbaseTransaction;
}
public byte[] getParentBlockHeaderHash() {
return parentBlockHeaderHash;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptWitnessItem.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinScriptWitnessItem.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.Serializable;
import java.util.List;
/**
*
*
*/
public class BitcoinScriptWitnessItem implements Serializable {
/**
*
*/
private static final long serialVersionUID = -8500521021303513414L;
private byte[] stackItemCounter;
private List<BitcoinScriptWitness> scriptWitnessList;
public BitcoinScriptWitnessItem(byte[] stackItemCounter, List<BitcoinScriptWitness> scriptWitnessList) {
this.stackItemCounter=stackItemCounter;
this.scriptWitnessList=scriptWitnessList;
}
public byte[] getStackItemCounter() {
return this.stackItemCounter;
}
public List<BitcoinScriptWitness> getScriptWitnessList() {
return this.scriptWitnessList;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinBlock.java | inputformat/src/main/java/org/zuinnote/hadoop/bitcoin/format/common/BitcoinBlock.java | /**
* Copyright 2016 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.bitcoin.format.common;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
/**
* This class is an object storing relevant fields of a Bitcoin Block.
*/
public class BitcoinBlock implements Serializable {
private long blockSize;
private byte[] magicNo;
private long version;
private long time;
private byte[] bits;
private long nonce;
private long transactionCounter;
private byte[] hashPrevBlock;
private byte[] hashMerkleRoot;
private List<BitcoinTransaction> transactions;
private BitcoinAuxPOW auxPOW;
public BitcoinBlock() {
this.magicNo=new byte[0];
this.bits=new byte[0];
this.transactionCounter=0L;
this.hashPrevBlock=new byte[0];
this.hashMerkleRoot=new byte[0];
this.transactions=new ArrayList<>();
this.auxPOW=new BitcoinAuxPOW();
}
public long getBlockSize() {
return this.blockSize;
}
public void setBlockSize(long blockSize) {
this.blockSize=blockSize;
}
public byte[] getMagicNo() {
return this.magicNo;
}
public void setMagicNo(byte[] magicNo) {
this.magicNo=magicNo;
}
public long getVersion() {
return this.version;
}
public void setVersion(long version) {
this.version=version;
}
public long getTime() {
return this.time;
}
public void setTime(long time) {
this.time=time;
}
public byte[] getBits() {
return this.bits;
}
public void setBits(byte[] bits) {
this.bits=bits;
}
public long getNonce() {
return this.nonce;
}
public void setNonce(long nonce) {
this.nonce=nonce;
}
public long getTransactionCounter() {
return this.transactionCounter;
}
public void setTransactionCounter(long transactionCounter) {
this.transactionCounter=transactionCounter;
}
public byte[] getHashPrevBlock() {
return this.hashPrevBlock;
}
public void setHashPrevBlock(byte[] hashPrevBlock) {
this.hashPrevBlock=hashPrevBlock;
}
public byte[] getHashMerkleRoot() {
return this.hashMerkleRoot;
}
public void setHashMerkleRoot(byte[] hashMerkleRoot) {
this.hashMerkleRoot=hashMerkleRoot;
}
public List<BitcoinTransaction> getTransactions() {
return this.transactions;
}
public void setTransactions(List<BitcoinTransaction> transactions) {
this.transactions=transactions;
}
public BitcoinAuxPOW getAuxPOW() {
return this.auxPOW;
}
public void setAuxPOW(BitcoinAuxPOW auxPOW) {
this.auxPOW = auxPOW;
}
public void set(BitcoinBlock newBitcoinBlock) {
this.blockSize=newBitcoinBlock.getBlockSize();
this.magicNo=newBitcoinBlock.getMagicNo();
this.version=newBitcoinBlock.getVersion();
this.time=newBitcoinBlock.getTime();
this.bits=newBitcoinBlock.getBits();
this.nonce=newBitcoinBlock.getNonce();
this.transactionCounter=newBitcoinBlock.getTransactionCounter();
this.hashPrevBlock=newBitcoinBlock.getHashPrevBlock();
this.hashMerkleRoot=newBitcoinBlock.getHashMerkleRoot();
this.transactions=newBitcoinBlock.getTransactions();
this.auxPOW=newBitcoinBlock.getAuxPOW();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/AbstractEthereumRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/AbstractEthereumRecordReader.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapred;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockReader;
/**
*
*/
public abstract class AbstractEthereumRecordReader <K,V> implements RecordReader<K,V> {
public static final String CONF_BUFFERSIZE=org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader.CONF_BUFFERSIZE;
public static final String CONF_MAXBLOCKSIZE= org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader.CONF_MAXBLOCKSIZE;
public static final String CONF_USEDIRECTBUFFER= org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader.CONF_USEDIRECTBUFFER;
public static final int DEFAULT_BUFFERSIZE= org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader.DEFAULT_BUFFERSIZE;
public static final int DEFAULT_MAXSIZE_ETHEREUMBLOCK= org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader.DEFAULT_MAXSIZE_ETHEREUMBLOCK;
public static final boolean DEFAULT_USEDIRECTBUFFER= org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader.DEFAULT_USEDIRECTBUFFER;
private static final Log LOG = LogFactory.getLog(AbstractEthereumRecordReader.class.getName());
private int bufferSize=0;
private int maxSizeEthereumBlock=0;
private boolean useDirectBuffer=false;
private CompressionCodec codec;
private Decompressor decompressor;
private Reporter reporter;
private Configuration conf;
private long start;
private long end;
private final Seekable filePosition;
private FSDataInputStream fileIn;
private EthereumBlockReader ebr;
/**
* Creates an Abstract Record Reader for Ethereum blocks
* @param split Split to use (assumed to be a file split)
* @param job Configuration:
* io.file.buffer.size: Size of in-memory specified in the given Configuration. If io.file.buffer.size is not specified the default buffersize will be used. Furthermore, one may specify hadoopcryptoledger.ethereumblockinputformat.maxblocksize, which defines the maximum size a Ethereum block may have. By default it is 1M). If you want to experiment with performance using DirectByteBuffer instead of HeapByteBuffer you can use "hadoopcryptoledeger.ethereumblockinputformat.usedirectbuffer" (default: false). Note that it might have some unwanted consequences such as circumwenting Yarn memory management. The option is experimental and might be removed in future versions.
* @param reporter Reporter
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
public AbstractEthereumRecordReader(FileSplit split,JobConf job, Reporter reporter) throws IOException {
LOG.debug("Reading configuration");
// parse configuration
this.reporter=reporter;
this.conf=job;
this.maxSizeEthereumBlock=conf.getInt(AbstractEthereumRecordReader.CONF_MAXBLOCKSIZE,AbstractEthereumRecordReader.DEFAULT_MAXSIZE_ETHEREUMBLOCK);
this.bufferSize=conf.getInt(AbstractEthereumRecordReader.CONF_BUFFERSIZE,AbstractEthereumRecordReader.DEFAULT_BUFFERSIZE);
this.useDirectBuffer=conf.getBoolean(AbstractEthereumRecordReader.CONF_USEDIRECTBUFFER,AbstractEthereumRecordReader.DEFAULT_USEDIRECTBUFFER);
// Initialize start and end of split
start = split.getStart();
end = start + split.getLength();
final Path file = split.getPath();
codec = new CompressionCodecFactory(job).getCodec(file);
final FileSystem fs = file.getFileSystem(job);
fileIn = fs.open(file);
// open stream
if (isCompressedInput()) { // decompress
LOG.debug("Decompressing file");
decompressor = CodecPool.getDecompressor(codec);
if (codec instanceof SplittableCompressionCodec) {
LOG.debug("SplittableCompressionCodec");
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, start, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
ebr = new EthereumBlockReader(cIn, this.maxSizeEthereumBlock,this.bufferSize,this.useDirectBuffer);
start = cIn.getAdjustedStart();
end = cIn.getAdjustedEnd();
filePosition = cIn; // take pos from compressed stream
} else {
LOG.debug("Not-splitable compression codec");
ebr = new EthereumBlockReader(codec.createInputStream(fileIn,decompressor), this.maxSizeEthereumBlock,this.bufferSize,this.useDirectBuffer);
filePosition = fileIn;
}
} else {
LOG.debug("Processing file without compression");
fileIn.seek(start);
ebr = new EthereumBlockReader(fileIn, this.maxSizeEthereumBlock,this.bufferSize,this.useDirectBuffer);
filePosition = fileIn;
}
// initialize reader
this.reporter.setStatus("Ready to read");
}
/**
* Get the current file position in a compressed or uncompressed file.
*
* @return file position
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
public long getFilePosition() throws IOException {
return filePosition.getPos();
}
/**
* Get the end of file
*
* @return end of file position
*
*/
public long getEnd() {
return end;
}
/**
* Get the current Block Reader
*
* @return end of file position
*
*/
public EthereumBlockReader getEbr() {
return this.ebr;
}
/*
* Returns how much of the file has been processed in terms of bytes
*
* @return progress percentage
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized float getProgress() throws IOException {
if (start == end) {
return 0.0f;
} else {
return Math.min(1.0f, (getFilePosition() - start) / (float)(end - start));
}
}
/*
* Determines if the input is compressed or not
*
* @return true if compressed, false if not
*/
private boolean isCompressedInput() {
return codec != null;
}
/*
* Get current position in the stream
*
* @return position
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized long getPos() throws IOException {
return filePosition.getPos();
}
/*
* Clean up InputStream and Decompressor after use
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized void close() throws IOException {
try {
if (ebr != null) {
ebr.close();
}
} finally {
if (decompressor != null) {
CodecPool.returnDecompressor(decompressor);
decompressor = null;
}
}
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/EthereumBlockRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/EthereumBlockRecordReader.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapred;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockWritable;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
/**
*
*/
public class EthereumBlockRecordReader extends AbstractEthereumRecordReader<BytesWritable,EthereumBlockWritable> {
private static final Log LOG = LogFactory.getLog(EthereumBlockRecordReader.class.getName());
public EthereumBlockRecordReader(FileSplit split, JobConf job, Reporter reporter) throws IOException {
super(split,job,reporter);
}
/**
*
* Read a next block.
*
* @param key is a 32 byte array (parentHash)
* @param value is a deserialized Java object of class EthereumBlock
*
* @return true if next block is available, false if not
*/
@Override
public boolean next(BytesWritable key, EthereumBlockWritable value) throws IOException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
EthereumBlock dataBlock=null;
try {
dataBlock=getEbr().readBlock();
} catch (EthereumBlockReadException e) {
LOG.error(e);
throw new RuntimeException(e.toString());
}
if (dataBlock==null) {
return false;
}
byte[] newKey=dataBlock.getEthereumBlockHeader().getParentHash();
key.set(newKey,0,newKey.length);
value.set(dataBlock);
return true;
}
return false;
}
@Override
public BytesWritable createKey() {
return new BytesWritable();
}
@Override
public EthereumBlockWritable createValue() {
return new EthereumBlockWritable();
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/EthereumBlockFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/EthereumBlockFileInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapred;
import java.io.IOException;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
/**
*
*/
public class EthereumBlockFileInputFormat extends AbstractEthereumFileInputFormat {
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
return new EthereumBlockRecordReader((FileSplit)split,job,reporter);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/AbstractEthereumFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapred/AbstractEthereumFileInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapred;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
/**
*
*/
public abstract class AbstractEthereumFileInputFormat <K,V> extends FileInputFormat<K,V> implements JobConfigurable{
private CompressionCodecFactory compressionCodecs = null;
@Override
public void configure(JobConf job) {
this.compressionCodecs = new CompressionCodecFactory(job);
}
/**
*
* Ethereum blockchain data is not splitable by definition. However, you can export the blockchains in files of a certain size (recommended: HDFS blocksize) to benefit from parallelism. It is not necessary and not recommended to compress the blockchain files. Instead it is recommended to extract relevant data from the blockchain files once and store them in a format suitable for analytics (including compression), such as ORC or Parquet.
*
*/
@Override
protected boolean isSplitable(FileSystem fs, Path file) {
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/AbstractEthereumRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/AbstractEthereumRecordReader.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapreduce;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
*
*/
public abstract class AbstractEthereumRecordReader<K,V> extends RecordReader<K,V> {
public static final String CONF_BUFFERSIZE="io.file.buffer.size";
public static final String CONF_MAXBLOCKSIZE="hadoopcryptoledger.ethereumblockinputformat.maxblocksize";
public static final String CONF_USEDIRECTBUFFER="hadoopcryptoledeger.ethereumblockinputformat.usedirectbuffer";
public static final int DEFAULT_BUFFERSIZE=64*1024;
public static final int DEFAULT_MAXSIZE_ETHEREUMBLOCK=1 * 1024 * 1024;
public static final boolean DEFAULT_USEDIRECTBUFFER=false;
private static final Log LOG = LogFactory.getLog(AbstractEthereumRecordReader.class.getName());
private int bufferSize=0;
private int maxSizeEthereumBlock=0;
private boolean useDirectBuffer=false;
private CompressionCodec codec;
private Decompressor decompressor;
private long start;
private long end;
private Seekable filePosition;
private EthereumBlockReader ebr;
/***
* Creates an Abstract RecordReader for Ethereum blocks
*
*
* @param conf Configuration:
* io.file.buffer.size: Size of in-memory specified in the given Configuration. If io.file.buffer.size is not specified the default buffersize will be used. Furthermore, one may specify hadoopcryptoledger.ethereumblockinputformat.maxblocksize, which defines the maximum size a Ethereum block may have. By default it is 1M). If you want to experiment with performance using DirectByteBuffer instead of HeapByteBuffer you can use "hadoopcryptoledeger.ethereumblockinputformat.usedirectbuffer" (default: false). Note that it might have some unwanted consequences such as circumwenting Yarn memory management. The option is experimental and might be removed in future versions.
*/
public AbstractEthereumRecordReader(Configuration conf) {
this.maxSizeEthereumBlock=conf.getInt(AbstractEthereumRecordReader.CONF_MAXBLOCKSIZE,AbstractEthereumRecordReader.DEFAULT_MAXSIZE_ETHEREUMBLOCK);
this.bufferSize=conf.getInt(AbstractEthereumRecordReader.CONF_BUFFERSIZE,AbstractEthereumRecordReader.DEFAULT_BUFFERSIZE);
this.useDirectBuffer=conf.getBoolean(AbstractEthereumRecordReader.CONF_USEDIRECTBUFFER,AbstractEthereumRecordReader.DEFAULT_USEDIRECTBUFFER);
}
/***
* Initializes readers
*
* @param split Split to be used (asssumed to be a file split)
* ϟaram context context of the job
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
* @throws java.lang.InterruptedException in case of thread interruption
*
*/
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
FileSplit fSplit = (FileSplit)split;
// Initialize start and end of split
start = fSplit.getStart();
end = start + fSplit.getLength();
final Path file = fSplit.getPath();
codec = new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
final FileSystem fs = file.getFileSystem(context.getConfiguration());
FSDataInputStream fileIn = fs.open(file);
// open stream
if (isCompressedInput()) { // decompress
decompressor = CodecPool.getDecompressor(codec);
if (codec instanceof SplittableCompressionCodec) {
final SplitCompressionInputStream cIn =((SplittableCompressionCodec)codec).createInputStream(fileIn, decompressor, start, end,SplittableCompressionCodec.READ_MODE.CONTINUOUS);
ebr = new EthereumBlockReader(cIn, this.maxSizeEthereumBlock,this.bufferSize,this.useDirectBuffer);
start = cIn.getAdjustedStart();
end = cIn.getAdjustedEnd();
filePosition = cIn; // take pos from compressed stream
} else {
ebr = new EthereumBlockReader(codec.createInputStream(fileIn,decompressor), this.maxSizeEthereumBlock,this.bufferSize,this.useDirectBuffer);
filePosition = fileIn;
}
} else {
fileIn.seek(start);
ebr = new EthereumBlockReader(fileIn, this.maxSizeEthereumBlock,this.bufferSize,this.useDirectBuffer);
filePosition = fileIn;
}
}
/**
* Get the current Block Reader
*
* @return end of file position
*
*/
public EthereumBlockReader getEbr() {
return this.ebr;
}
/*
* Returns how much of the file has been processed in terms of bytes
*
* @return progress percentage
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public float getProgress() throws IOException {
if (start == end) {
return 0.0f;
} else {
return Math.min(1.0f, (getFilePosition() - start) / (float)(end - start));
}
}
/**
* Get the end of file
*
* @return end of file position
*
*/
public long getEnd() {
return end;
}
/**
* Returns current position in file
*
* @return current position in file
*
*
*/
public long getFilePosition() throws IOException {
return this.filePosition.getPos();
}
/*
* Clean up InputStream and Decompressor after use
*
*
* @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
*
*/
@Override
public synchronized void close() throws IOException {
try {
if (this.ebr != null) {
ebr.close();
}
} finally {
if (decompressor != null) {
CodecPool.returnDecompressor(decompressor);
decompressor = null;
}
}
}
/*
* Determines if the input is compressed or not
*
* @return true if compressed, false if not
*/
private boolean isCompressedInput() {
return codec != null;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/EthereumBlockRecordReader.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/EthereumBlockRecordReader.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapreduce;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlock;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockWritable;
import org.zuinnote.hadoop.ethereum.format.exception.EthereumBlockReadException;
import org.zuinnote.hadoop.ethereum.format.mapreduce.AbstractEthereumRecordReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
*
*/
public class EthereumBlockRecordReader extends AbstractEthereumRecordReader<BytesWritable,EthereumBlockWritable>{
private static final Log LOG = LogFactory.getLog(EthereumBlockRecordReader.class.getName());
private BytesWritable currentKey=new BytesWritable();
private EthereumBlockWritable currentValue=new EthereumBlockWritable();
public EthereumBlockRecordReader(Configuration conf) {
super(conf);
}
/**
*
* Read a next block.
*
*
* @return true if next block is available, false if not
*/
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
// read all the blocks, if necessary a block overlapping a split
while(getFilePosition()<=getEnd()) { // did we already went beyond the split (remote) or do we have no further data left?
EthereumBlock dataBlock=null;
try {
dataBlock=getEbr().readBlock();
} catch (EthereumBlockReadException e) {
LOG.error(e);
throw new InterruptedException(e.toString());
}
if (dataBlock==null) {
return false;
}
byte[] newKey=dataBlock.getEthereumBlockHeader().getParentHash();
this.currentKey.set(newKey,0,newKey.length);
this.currentValue.set(dataBlock);
return true;
}
return false;
}
/**
*
* get current key after calling next()
*
* @return key is a 32byte array (parentHash)
*/
@Override
public BytesWritable getCurrentKey() throws IOException, InterruptedException {
return this.currentKey;
}
/**
*
* get current value after calling next()
*
* @return is a deserialized Java object of class EthereumBlock
*/
@Override
public EthereumBlockWritable getCurrentValue() throws IOException, InterruptedException {
return this.currentValue;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/EthereumBlockFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/EthereumBlockFileInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapreduce;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.zuinnote.hadoop.ethereum.format.common.EthereumBlockWritable;
/**
*
*/
public class EthereumBlockFileInputFormat extends AbstractEthereumFileInputFormat<BytesWritable,EthereumBlockWritable>{
@Override
public RecordReader<BytesWritable, EthereumBlockWritable> createRecordReader(InputSplit split, TaskAttemptContext ctx)
throws IOException {
/** Create reader **/
return new EthereumBlockRecordReader(ctx.getConfiguration());
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/AbstractEthereumFileInputFormat.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/mapreduce/AbstractEthereumFileInputFormat.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.mapreduce;
import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
/**
*
*/
public abstract class AbstractEthereumFileInputFormat<K,V> extends FileInputFormat<K,V> {
@Override
public abstract RecordReader<K,V> createRecordReader(InputSplit split, TaskAttemptContext ctx) throws IOException;
/***
* The Ethereum format is not splitable due to its RLP encoding which makes it difficult to find the start/end of a RLP encoded item. Instead it is recommended to export blocks to files of the size of a HDFS block.
*
*
*/
@Override
protected boolean isSplitable(JobContext context, Path file) {
return false;
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
ZuInnoTe/hadoopcryptoledger | https://github.com/ZuInnoTe/hadoopcryptoledger/blob/b2df90b216a6024b3179c3afaa6f2bcfc975784c/inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/exception/EthereumBlockReadException.java | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/exception/EthereumBlockReadException.java | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.exception;
/**
*
*/
public class EthereumBlockReadException extends Exception {
public EthereumBlockReadException(String message) {
super(message);
}
}
| java | Apache-2.0 | b2df90b216a6024b3179c3afaa6f2bcfc975784c | 2026-01-05T02:40:55.994732Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.