From f21091edd819e1dfd5c4edf53be038b70f239fd9 Mon Sep 17 00:00:00 2001 From: gavalian Date: Thu, 17 Aug 2017 12:04:21 -0400 Subject: [PATCH 1/2] changed the record header structure. Now the first half looks like EVIO --- java/org/jlab/coda/hipo/Compressor.java | 6 + java/org/jlab/coda/hipo/RecordStream.java | 135 ++++++++++++++++++++-- java/org/jlab/coda/hipo/TestWriter.java | 37 ++++-- java/org/jlab/coda/hipo/Writer.java | 2 +- 4 files changed, 161 insertions(+), 19 deletions(-) diff --git a/java/org/jlab/coda/hipo/Compressor.java b/java/org/jlab/coda/hipo/Compressor.java index 569783762..cb54e2801 100644 --- a/java/org/jlab/coda/hipo/Compressor.java +++ b/java/org/jlab/coda/hipo/Compressor.java @@ -20,6 +20,12 @@ */ public class Compressor { + + public static int RECORD_UNCOMPRESSED = 0; + public static int RECORD_COMPRESSION_LZ4 = 1; + public static int RECORD_COMPRESSION_LZ4_BEST = 2; + public static int RECORD_COMPRESSION_GZIP = 3; + public static final int MTU = 1024*1024; LZ4Factory factory = null; diff --git a/java/org/jlab/coda/hipo/RecordStream.java b/java/org/jlab/coda/hipo/RecordStream.java index 21bbefb07..4ac732400 100644 --- a/java/org/jlab/coda/hipo/RecordStream.java +++ b/java/org/jlab/coda/hipo/RecordStream.java @@ -22,7 +22,7 @@ public class RecordStream { private int MAX_BUFFER_SIZE = 8*1024*1024; private int MAX_EVENT_COUNT = 1024*1024; - private final int HEADER_SIZE = 48; + private final int HEADER_SIZE = 16*4; private final int RECORD_UID_WORD_LE = 0x43455248; private final int RECORD_UID_WORD_BE = 0x48524543; @@ -36,8 +36,33 @@ public class RecordStream { ByteBuffer recordDataCompressed = null; ByteBuffer recordBinary = null; + /** + * Compression information. Compression types. + */ + Compressor dataCompressor = null; + private int compressionType = Compressor.RECORD_COMPRESSION_LZ4; - Compressor dataCompressor = null; + /** + * BLOCK INFORMATION to be written to the header + * These words are part of the EVIO header format, and + * are written into header exactly as it was in EVIO. + */ + private int blockNumber = 0; + private int blockVersion = 6; + private int blockBitInfo = 0; + private int reservedWord = 0; + private int reservedWordSecond = 0; + /** + * UNIQUE identifiers part of the new HIPO Header. There are + * Two long words reserved to be used for tagging event records + * for fast search through the file. + */ + private long recordHeaderUniqueWordFirst = 0L; + private long recordHeaderUniqueWordSecond = 0L; + + /** + * Default constructor. + */ public RecordStream(){ /*recordStream = new ByteArrayOutputStream(MAX_BUFFER_SIZE); byte[] index = new byte[MAX_EVENT_COUNT*4]; @@ -59,7 +84,68 @@ public RecordStream(int size){ recordIndex.order(ByteOrder.LITTLE_ENDIAN); recordIndex.putInt(0, 0); } - + /** + * sets unique words for the record header, there are two LONG + * words at the end of each record. + * @param uw1 first unique word (LONG) + * @param uw2 second unique word (LONG) + */ + public void setUniqueWords(long uw1, long uw2){ + recordHeaderUniqueWordFirst = uw1; + recordHeaderUniqueWordSecond = uw2; + } + /** + * Sets compression type. Available compressions are: + * 0 - uncompressed + * 1 - LZ4 fast compression + * 2 - LZ4 best compression + * 3 - GZIP compression + * @param type compression type (0-4) + */ + public void setCompressionType(int type){ + compressionType = type; + if(compressionType<0||compressionType>3){ + System.out.println("[WARNING !] unknown compression type " + + type + ". 
using uncompressed buffers."); + compressionType = 0; + } + } + /** + * Set the version word for the record. Default is 6 + * @param version version number + */ + public void setVersion(int version){ + blockVersion = version; + } + /** + * sets the bit info for the record, this will be written into + * the high 24 bits of the word #6 (starting count from #1), the + * lower 8 bits are the version. + * @param bitinfo bit information for the record + */ + public void setBitInfo(int bitinfo){ + blockBitInfo = bitinfo; + } + /** + * set block number, for checking the order of the blocks + * that are coming in from DAQ. + * @param blkn block number + */ + public void setBlockNumber(int blkn){ + blockNumber = blkn; + } + /** + * Sets the reserved word for the block header. It is written + * to word #7 in the record header (counting from #1). + * @param rw reserved word (32 bits) + */ + public void setReservedWord(int rw){ + this.reservedWord = rw; + } + /** + * Allocates all buffers for constructing the record stream. + * @param size + */ private void allocate(int size){ MAX_BUFFER_SIZE = size; @@ -142,12 +228,28 @@ public void reset(){ recordEvents.putInt( 0, 4); // the length of the data is reset recordBinary.putInt( 4, 0); // set the size of the binary output buffer to 0 } + /** + * constructs the word that describes compression, includes the compression + * type (upper 8 bits) and compressed data size (lower 24 bits). + * @param ctype compression type + * @param csize compressed buffer size + * @return word with combined type and size + */ + private int getCompressionWord(int ctype, int csize){ + int word = ((ctype<<24)&0xFF000000)|(csize&0x00FFFFFF); + return word; + } + + private int getVersionWord(){ + int versionWord = ((this.blockBitInfo<<8)&(0xFFFFFF00))|blockVersion; + return versionWord; + } /** * Builds the record. First compresses the data buffer. * Then the header is constructed. 
*/ public void build(){ - + int indexSize = recordIndex.getInt( 0) - 4; int eventSize = recordEvents.getInt( 0) - 4; @@ -163,14 +265,25 @@ public void build(){ //System.out.println(" DATA SIZE = " + dataBufferSize + " COMPRESSED SIZE = " + compressedSize); int nevents = recordIndex.getInt(0)/4; + int recordWordCount = (compressedSize + this.HEADER_SIZE)/4; + if( (compressedSize+this.HEADER_SIZE)%4!=0) recordWordCount+=1; + + recordBinary.position(0); - recordBinary.putInt( 0, this.RECORD_UID_WORD_LE); - recordBinary.putInt( 4, compressedSize + HEADER_SIZE); - recordBinary.putInt( 8, dataBufferSize); - recordBinary.putInt( 12, compressedSize); - recordBinary.putInt( 16, nevents); - recordBinary.putInt( 20, 0); - recordBinary.putInt( 24, recordIndex.getInt(0)); + recordBinary.putInt( 0, recordWordCount); + recordBinary.putInt( 4, blockNumber); + recordBinary.putInt( 8, 16); + recordBinary.putInt( 12, nevents); + recordBinary.putInt( 16, reservedWord); + recordBinary.putInt( 20, getVersionWord()); + recordBinary.putInt( 24, reservedWordSecond); + recordBinary.putInt( 28, RECORD_UID_WORD_LE); + recordBinary.putInt( 32, dataBufferSize); + recordBinary.putInt( 36, getCompressionWord(compressionType, compressedSize)); + recordBinary.putInt( 40, 0); + recordBinary.putInt( 44, indexSize/4); + recordBinary.putLong( 48, recordHeaderUniqueWordFirst); + recordBinary.putLong( 56, recordHeaderUniqueWordSecond); recordBinary.position(HEADER_SIZE); recordBinary.put(recordDataCompressed.array(), 0, compressedSize); diff --git a/java/org/jlab/coda/hipo/TestWriter.java b/java/org/jlab/coda/hipo/TestWriter.java index 0f702a7a5..ae5fc7b5b 100644 --- a/java/org/jlab/coda/hipo/TestWriter.java +++ b/java/org/jlab/coda/hipo/TestWriter.java @@ -7,9 +7,12 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.logging.Level; import java.util.logging.Logger; +import org.jlab.coda.jevio.EvioCompactReader; +import org.jlab.coda.jevio.EvioException; /** * @@ -79,26 +82,46 @@ public static void streamRecord(){ public static void writerTest(){ Writer writer = new Writer("compressed_file.evio",ByteOrder.BIG_ENDIAN); - byte[] array = TestWriter.generateBuffer(); - for(int i = 0; i < 3400000; i++){ - //byte[] array = TestWriter.generateBuffer(); + //byte[] array = TestWriter.generateBuffer(); + for(int i = 0; i < 340000; i++){ + byte[] array = TestWriter.generateBuffer(); writer.addEvent(array); } writer.close(); } - public static void main(String[] args){ + + public static void convertor() { + String filename = "/Users/gavalian/Work/Software/project-1a.0.0/clas_000810.evio.324"; + try { + EvioCompactReader reader = new EvioCompactReader(filename); + int nevents = reader.getEventCount(); + Writer writer = new Writer("converted_000810.evio",ByteOrder.LITTLE_ENDIAN); + System.out.println(" OPENED FILE EVENT COUNT = " + nevents); + for(int i = 1; i < nevents; i++){ + ByteBuffer buffer = reader.getEventBuffer(i,true); + writer.addEvent(buffer.array()); + //System.out.println(" EVENT # " + i + " size = " + buffer.array().length ); + } + writer.close(); + } catch (EvioException ex) { + Logger.getLogger(TestWriter.class.getName()).log(Level.SEVERE, null, ex); + } catch (IOException ex) { + Logger.getLogger(TestWriter.class.getName()).log(Level.SEVERE, null, ex); + } + } + + public static void main(String[] args){ + TestWriter.convertor(); - TestWriter.writerTest(); + //TestWriter.writerTest(); //TestWriter.streamRecord(); //TestWriter.byteStream(); 
- - /* byte[] header = TestWriter.generateBuffer(32); diff --git a/java/org/jlab/coda/hipo/Writer.java b/java/org/jlab/coda/hipo/Writer.java index 82f8c1d5b..4aa94aeea 100644 --- a/java/org/jlab/coda/hipo/Writer.java +++ b/java/org/jlab/coda/hipo/Writer.java @@ -213,7 +213,7 @@ public void addEvent(byte[] buffer){ private void writeOutput(){ outputRecordStream.build(); ByteBuffer buffer = outputRecordStream.getBinaryBuffer(); - int bufferSize = buffer.getInt(4); + int bufferSize = buffer.getInt(0)*4; try { outStreamRandom.write(buffer.array(), 0, bufferSize); From 28be29e16b62d701bb7f70c36c7465774909b579 Mon Sep 17 00:00:00 2001 From: gavalian Date: Fri, 18 Aug 2017 14:20:44 -0400 Subject: [PATCH 2/2] Changed the EVIO file header in new implementation to match old evio header --- java/org/jlab/coda/hipo/RecordStream.java | 77 +++++++++-------------- java/org/jlab/coda/hipo/TestWriter.java | 11 +++- java/org/jlab/coda/hipo/Writer.java | 48 ++++++++++++-- 3 files changed, 83 insertions(+), 53 deletions(-) diff --git a/java/org/jlab/coda/hipo/RecordStream.java b/java/org/jlab/coda/hipo/RecordStream.java index 4ac732400..1b90371e3 100644 --- a/java/org/jlab/coda/hipo/RecordStream.java +++ b/java/org/jlab/coda/hipo/RecordStream.java @@ -47,10 +47,10 @@ public class RecordStream { * These words are part of the EVIO header format, and * are written into header exactly as it was in EVIO. */ - private int blockNumber = 0; - private int blockVersion = 6; - private int blockBitInfo = 0; - private int reservedWord = 0; + private int blockNumber = 0; + private int blockVersion = 6; + private int blockBitInfo = 0; + private int reservedWord = 0; private int reservedWordSecond = 0; /** * UNIQUE identifiers part of the new HIPO Header. There are @@ -142,11 +142,19 @@ public void setBlockNumber(int blkn){ public void setReservedWord(int rw){ this.reservedWord = rw; } + /** + * sets the value of the second reserved word in the header. + * @param rw2 word value (32 bits) + */ + public void setReservedWordSecond(int rw2){ + reservedWordSecond = rw2; + } /** * Allocates all buffers for constructing the record stream. * @param size */ private void allocate(int size){ + MAX_BUFFER_SIZE = size; byte[] ri = new byte[MAX_EVENT_COUNT*4]; @@ -207,16 +215,6 @@ public boolean addEvent(byte[] event, int position, int length){ */ public boolean addEvent(byte[] event){ return addEvent(event,0,event.length); - /*int size = event.length; - int count = recordIndex.getInt(0); - recordIndex.putInt( (count+1)*4, size); - recordIndex.putInt( 0, count+1); - - try { - recordStream.write(event); - } catch (IOException ex) { - Logger.getLogger(RecordStream.class.getName()).log(Level.SEVERE, null, ex); - }*/ } /** * Reset internal buffers. The capacity of the ByteArray stream is set to 0. 
@@ -239,7 +237,12 @@ private int getCompressionWord(int ctype, int csize){ int word = ((ctype<<24)&0xFF000000)|(csize&0x00FFFFFF); return word; } - + /** + * Returns the word containing the version number of the record + * (lower 8 bits) and bit information that is provided by user + * (upper 24 bits) + * @return + */ private int getVersionWord(){ int versionWord = ((this.blockBitInfo<<8)&(0xFFFFFF00))|blockVersion; return versionWord; @@ -266,10 +269,10 @@ public void build(){ int nevents = recordIndex.getInt(0)/4; int recordWordCount = (compressedSize + this.HEADER_SIZE)/4; - if( (compressedSize+this.HEADER_SIZE)%4!=0) recordWordCount+=1; - + if( (compressedSize+this.HEADER_SIZE)%4!=0) recordWordCount+=1; recordBinary.position(0); + recordBinary.putInt( 0, recordWordCount); recordBinary.putInt( 4, blockNumber); recordBinary.putInt( 8, 16); @@ -287,38 +290,20 @@ public void build(){ recordBinary.position(HEADER_SIZE); recordBinary.put(recordDataCompressed.array(), 0, compressedSize); - - /* - int size = recordIndex.getInt(0)*4 + recordStream.size(); - byte[] buffer = new byte[size]; - - byte[] indexBuffer = recordIndex.array(); - byte[] dataBuffer = recordStream.toByteArray(); - int dataOffset = recordIndex.getInt(0)*4; - System.arraycopy( dataBuffer, 0, buffer, dataOffset, dataBuffer.length); - System.arraycopy( indexBuffer, 4, buffer, 0, dataOffset); - - byte[] dataCompressedBuffer = Compressor.getCompressedBuffer(1, buffer); - byte[] recordBuffer = new byte[48+dataCompressedBuffer.length]; - - System.arraycopy(dataCompressedBuffer, 0, recordBuffer, 48, dataCompressedBuffer.length); - ByteBuffer byteBuffer = ByteBuffer.wrap(recordBuffer); - byteBuffer.order(ByteOrder.LITTLE_ENDIAN); - byteBuffer.putInt( 0, RECORD_UID_WORD_LE); - byteBuffer.putInt( 4, recordBuffer.length); - byteBuffer.putInt( 8, dataBuffer.length); - byteBuffer.putInt( 12, dataCompressedBuffer.length); - byteBuffer.putInt( 16, recordIndex.getInt(0)); - byteBuffer.putInt( 20, 0); - byteBuffer.putInt( 24, recordIndex.getInt(0)*4); - - return byteBuffer.array(); - */ } + + /** + * returns number of events written so far into the buffer + * @return event count + */ public int getEventCount(){ - return this.recordIndex.getInt(0)/4; + return this.recordIndex.getInt(0)/4 - 1; } - + /** + * returns reference to internal ByteBuffer used to construct + * binary representation of the record. 
+ * @return + */ public ByteBuffer getBinaryBuffer(){ return this.recordBinary; } diff --git a/java/org/jlab/coda/hipo/TestWriter.java b/java/org/jlab/coda/hipo/TestWriter.java index ae5fc7b5b..9bda7fadb 100644 --- a/java/org/jlab/coda/hipo/TestWriter.java +++ b/java/org/jlab/coda/hipo/TestWriter.java @@ -96,7 +96,9 @@ public static void convertor() { try { EvioCompactReader reader = new EvioCompactReader(filename); int nevents = reader.getEventCount(); - Writer writer = new Writer("converted_000810.evio",ByteOrder.LITTLE_ENDIAN); + String userHeader = "File is written with new version=6 format"; + Writer writer = new Writer("converted_000810.evio",userHeader.getBytes()); + System.out.println(" OPENED FILE EVENT COUNT = " + nevents); for(int i = 1; i < nevents; i++){ ByteBuffer buffer = reader.getEventBuffer(i,true); @@ -114,6 +116,13 @@ public static void convertor() { public static void main(String[] args){ + /*Writer writer = new Writer(); + + writer.open("new_header_test.evio",new byte[]{'a','b','c','d','e'}); + writer.close(); */ + + //writer.createHeader(new byte[17]); + TestWriter.convertor(); //TestWriter.writerTest(); diff --git a/java/org/jlab/coda/hipo/Writer.java b/java/org/jlab/coda/hipo/Writer.java index 4aa94aeea..bcbbf319b 100644 --- a/java/org/jlab/coda/hipo/Writer.java +++ b/java/org/jlab/coda/hipo/Writer.java @@ -28,10 +28,10 @@ public class Writer { * Internal constants used in the FILE header */ public final static int FILE_HEADER_LENGTH = 72; - public final static int FILE_UNIQUE_WORD = 0x4849504F; - public final static int FILE_VERSION_WORD = 0x56302E32; + public final static int FILE_UNIQUE_WORD = 0x4F504948;//0x4849504F; + public final static int FILE_VERSION_WORD = 0x322E3056;//0x56302E32; public final static int VERSION_NUMBER = 6; - public final static int MAGIC_WORD_LE = 0xc0da1000; + public final static int MAGIC_WORD_LE = 0xc0da0100; public final static int MAGIC_WORD_BE = 0x00a1dac0; /** @@ -69,6 +69,7 @@ public Writer(String filename, ByteOrder order){ */ public Writer(){ outputRecord = new Record(); + outputRecordStream = new RecordStream(); } /** * constructor with filename, the output file will be initialized. 
@@ -77,6 +78,7 @@ public Writer(){ */ public Writer(String filename){ outputRecord = new Record(); + outputRecordStream = new RecordStream(); this.open(filename); } /** @@ -86,6 +88,7 @@ public Writer(String filename){ */ public Writer(String filename, byte[] header){ outputRecord = new Record(); + outputRecordStream = new RecordStream(); this.writerHeaderBuffer = header; this.open(filename, header); } @@ -132,7 +135,7 @@ public final void open(String filename, byte[] header){ try { //outStream = new FileOutputStream(new File(filename)); outStreamRandom = new RandomAccessFile(filename,"rw"); - + /* byte[] headerBuffer = new byte[Writer.FILE_HEADER_LENGTH+header.length]; ByteBuffer byteBuffer = ByteBuffer.wrap(headerBuffer); @@ -148,8 +151,9 @@ public final void open(String filename, byte[] header){ if(byteOrderFile == ByteOrder.BIG_ENDIAN) byteBuffer.putInt(28, MAGIC_WORD_BE); System.arraycopy(header, 0, headerBuffer, Writer.FILE_HEADER_LENGTH, header.length); - - outStreamRandom.write(headerBuffer); + */ + ByteBuffer headerBuffer = this.createHeader(header); + outStreamRandom.write(headerBuffer.array()); } catch (FileNotFoundException ex) { Logger.getLogger(Writer.class.getName()).log(Level.SEVERE, null, ex); @@ -169,6 +173,38 @@ public final Writer setCompressionType(int compression){ this.compressionType = outputRecord.getCompressionType(); return this; } + + public ByteBuffer createHeader(byte[] userHeader){ + + int size = userHeader.length; + int uhWords = (size)/4; + + if(userHeader.length%4!=0) uhWords++; + + System.out.println(" SIZE = " + size + " words = " + uhWords); + byte[] fileHeader = new byte[64+uhWords*4]; + + System.arraycopy(userHeader, 0, fileHeader, 64, userHeader.length); + ByteBuffer headerBuffer = ByteBuffer.wrap(fileHeader); + headerBuffer.order(ByteOrder.LITTLE_ENDIAN); + + headerBuffer.putInt( 0, 16+uhWords); + headerBuffer.putInt( 4, 0); + headerBuffer.putInt( 8, 16); + headerBuffer.putInt( 12, 0); + headerBuffer.putInt( 16, 0); + headerBuffer.putInt( 20, 6); + headerBuffer.putInt( 24, 0); + + headerBuffer.putInt( 28, MAGIC_WORD_LE); + headerBuffer.putInt( 32, Writer.FILE_UNIQUE_WORD); + headerBuffer.putInt( 36, Writer.FILE_VERSION_WORD); + headerBuffer.putInt( 40, userHeader.length); + headerBuffer.putInt( 44, 0); + + + return headerBuffer; + } /** * Appends the record to the file. * @param record record object