Compress indexes #43
Open
dvryaboy wants to merge 6 commits into twitter:master from dvryaboy:compress_indexes
Changes from 5 commits
Commits:
28919df  convert lzo index writing/reading to use an interface. (dvryaboy)
ebc50ea  Add test for LzoIndexSerdes (dvryaboy)
fdbc1e7  take care of instantiation exceptions for serdes (dvryaboy)
2016eea  Add a compressed index representation. (dvryaboy)
8ee9cea  Added GPL licenses where required. (dvryaboy)
76c4aa0  rewrite LzoTinyOffsets to use VarInt from Mahout. Add NOTICE file. (dvryaboy)
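The last two commits describe the compression scheme itself: LzoTinyOffsets stores block offsets as variable-length integers (using the VarInt utility from Mahout) rather than raw 8-byte longs. That class is not part of the diff shown below, so the following is only a minimal sketch of the general idea, delta-encoding offsets and writing each delta as a protobuf-style varint; the class name, method, and exact layout are illustrative assumptions, not the PR's actual format.

// Illustrative only: delta + varint encoding of block offsets, in the spirit of
// the "compressed index representation" commits above. Not the PR's real code.
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class VarIntOffsetSketch {

  // Encode each offset as the delta from the previous one, using a
  // protobuf-style varint (7 bits per byte, high bit means "more bytes follow").
  public static byte[] encode(long[] offsets) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    long prev = 0;
    for (long offset : offsets) {
      long delta = offset - prev;   // offsets are written in order, so delta >= 0
      prev = offset;
      while ((delta & ~0x7FL) != 0) {
        out.write((int) ((delta & 0x7F) | 0x80));
        delta >>>= 7;
      }
      out.write((int) delta);
    }
    return out.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    // With ~256KB LZO blocks, each delta fits in 3 varint bytes instead of an 8-byte long.
    long[] offsets = { 0L, 262144L, 524288L, 786432L };
    System.out.println("compressed size: " + encode(offsets).length + " bytes");
  }
}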
src/java/com/hadoop/compression/lzo/LzoBasicIndexSerde.java (103 additions, 0 deletions)
@@ -0,0 +1,103 @@
/*
 * This file is part of Hadoop-Gpl-Compression.
 *
 * Hadoop-Gpl-Compression is free software: you can redistribute it
 * and/or modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, either version 3 of
 * the License, or (at your option) any later version.
 *
 * Hadoop-Gpl-Compression is distributed in the hope that it will be
 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Hadoop-Gpl-Compression. If not, see
 * <http://www.gnu.org/licenses/>.
 */

package com.hadoop.compression.lzo;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;

public class LzoBasicIndexSerde implements LzoIndexSerde {

  private static final int BUFFER_CAPACITY = 16 * 1024 * 8; // enough for a 4GB file (with 256KB lzo blocks)

  private DataOutputStream os;
  private DataInputStream is;
  private ByteBuffer bytesIn;
  private long firstLong;
  private int numBlocks = 0;
  private boolean processedFirstLong = false;

  @Override
  public boolean accepts(long firstLong) {
    if (firstLong < 0) {
      return false;
    } else {
      this.firstLong = firstLong;
      return true;
    }
  }

  @Override
  public void prepareToWrite(DataOutputStream os) throws IOException {
    this.os = os;
  }

  @Override
  public void prepareToRead(DataInputStream is) throws IOException {
    this.is = is;
    bytesIn = fillBuffer();
    numBlocks = bytesIn.remaining() / 8 + 1; // plus one for the first long.
    processedFirstLong = false;
  }

  @Override
  public void writeOffset(long offset) throws IOException {
    os.writeLong(offset);
  }

  @Override
  public void finishWriting() throws IOException {
    os.close();
  }

  @Override
  public boolean hasNext() throws IOException {
    return !processedFirstLong || (bytesIn != null && bytesIn.hasRemaining());
  }

  @Override
  public long next() throws IOException {
    if (!processedFirstLong) {
      processedFirstLong = true;
      return firstLong;
    }
    if (bytesIn != null && bytesIn.hasRemaining()) {
      return bytesIn.getLong();
    } else {
      throw new IOException("Attempt to read past the edge of the index.");
    }
  }

  private ByteBuffer fillBuffer() throws IOException {
    DataOutputBuffer bytes = new DataOutputBuffer(BUFFER_CAPACITY);
    // copy the rest of the index stream into the buffer, closing the streams when done
    IOUtils.copyBytes(is, bytes, 4 * 1024, true);
    return ByteBuffer.wrap(bytes.getData(), 0, bytes.getLength());
  }

  @Override
  public int numBlocks() {
    return numBlocks;
  }

}
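To make the contract concrete, here is a minimal round-trip sketch using LzoBasicIndexSerde with in-memory streams. It assumes the code is compiled alongside the classes in this diff (package com.hadoop.compression.lzo) with Hadoop on the classpath; the real caller (presumably LzoIndex, not shown in this excerpt) drives the serde the same way.

// Minimal round-trip sketch for LzoBasicIndexSerde (illustrative, not part of the PR).
package com.hadoop.compression.lzo;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class BasicSerdeRoundTrip {
  public static void main(String[] args) throws IOException {
    // Write a legacy-format index: a plain sequence of raw 8-byte block offsets.
    ByteArrayOutputStream rawIndex = new ByteArrayOutputStream();
    LzoIndexSerde writer = new LzoBasicIndexSerde();
    writer.prepareToWrite(new DataOutputStream(rawIndex));
    for (long offset : new long[] { 0L, 262144L, 524288L }) {
      writer.writeOffset(offset);
    }
    writer.finishWriting();

    // Read it back. The caller reads the first 8 bytes itself and offers them
    // to accepts() before handing the rest of the stream to prepareToRead().
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(rawIndex.toByteArray()));
    long firstLong = in.readLong();
    LzoIndexSerde reader = new LzoBasicIndexSerde();
    if (reader.accepts(firstLong)) {
      reader.prepareToRead(in);
      while (reader.hasNext()) {
        System.out.println(reader.next());   // prints 0, 262144, 524288
      }
    }
  }
}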
src/java/com/hadoop/compression/lzo/LzoIndexSerde.java (72 additions, 0 deletions)
@@ -0,0 +1,72 @@
/*
 * This file is part of Hadoop-Gpl-Compression.
 *
 * Hadoop-Gpl-Compression is free software: you can redistribute it
 * and/or modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, either version 3 of
 * the License, or (at your option) any later version.
 *
 * Hadoop-Gpl-Compression is distributed in the hope that it will be
 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Hadoop-Gpl-Compression. If not, see
 * <http://www.gnu.org/licenses/>.
 */

package com.hadoop.compression.lzo;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public interface LzoIndexSerde {

  /**
   * Serdes will be tried in order until one is found that accepts
   * the offered format. A format is determined from the first 8
   * bytes (represented as a long) written to the index file.
   * <p>
   * The first long is somewhat constrained: the topmost bit should be
   * 1, the next 31 bits are a version number by which the appropriate
   * SerDe is selected, and the remaining 32 bits can hold arbitrary data
   * (a header, the length of a header, an offset, and so on).
   *
   * @param firstLong the first 8 bytes of the index file.
   * @return true if this format is recognized by the SerDe, false otherwise.
   */
  public boolean accepts(long firstLong);

  public void prepareToWrite(DataOutputStream os) throws IOException;

  /**
   * Prepare to read the index. Note that the first 8 bytes will already
   * have been read from this stream and passed to accepts() in the form
   * of a long.
   * @param is InputStream to read.
   */
  public void prepareToRead(DataInputStream is) throws IOException;

  /**
   * Write the next offset into the file. It is expected that
   * the offsets are supplied in order. <code>prepareToWrite()</code>
   * should be called before the first invocation of this method.
   * @param offset the block offset to record.
   */
  public void writeOffset(long offset) throws IOException;

  public void finishWriting() throws IOException;

  public boolean hasNext() throws IOException;

  public long next() throws IOException;

  /**
   * Get the number of blocks expected to be read from this index.
   * Will only be called after prepareToRead().
   * @return number of block offsets that will be read back.
   */
  public int numBlocks();

}

Review comment on numBlocks(): this one is a bit problematic to implement and forces us to process the whole file at once.
Reply: Fixed.
Review comment: as discussed, this copies the whole file until EOF.
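Finally, a sketch of how the versioned first long described in the accepts() javadoc might be built and dispatched on. The serde registry and dispatch code added elsewhere in this PR is not shown in this excerpt, so the helper below is an assumption about how such a selection loop could look, not the actual implementation.

// Illustrative dispatch on the first 8 bytes of an index file (not the PR's real code).
package com.hadoop.compression.lzo;

import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class SerdeDispatchSketch {

  // Build a header long: topmost bit set to 1, next 31 bits a version number,
  // low 32 bits free for format-specific use (per the accepts() javadoc).
  public static long headerLong(int version, int formatSpecific) {
    return (1L << 63)
        | (((long) (version & 0x7FFFFFFF)) << 32)
        | (formatSpecific & 0xFFFFFFFFL);
  }

  // Try each candidate serde in order until one accepts the first 8 bytes.
  public static LzoIndexSerde select(DataInputStream indexIn) throws IOException {
    long firstLong = indexIn.readLong();
    List<LzoIndexSerde> candidates =
        Arrays.<LzoIndexSerde>asList(new LzoBasicIndexSerde());
    for (LzoIndexSerde serde : candidates) {
      if (serde.accepts(firstLong)) {
        serde.prepareToRead(indexIn);
        return serde;
      }
    }
    throw new IOException("Unrecognized index format: " + firstLong);
  }
}

Note that the legacy format is distinguished simply by the sign bit: raw offsets are non-negative, so LzoBasicIndexSerde.accepts() returns true for any non-negative first long, while versioned formats set the top bit.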