From 770f7f08630d9d21626d836eaf044b7cb8105577 Mon Sep 17 00:00:00 2001
From: zhanghaobo
Date: Wed, 25 Dec 2024 17:20:47 +0800
Subject: [PATCH] Add UT for DFSOutputStream packet content size

---
 .../hadoop/hdfs/TestDFSOutputStream.java      | 47 +++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSOutputStream.java
index bdb91f91bc5e4..fe88ee281b3b6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSOutputStream.java
@@ -549,6 +549,53 @@ public void testFirstPacketSizeInNewBlocks() throws IOException {
     fs.delete(new Path("/testfile.dat"), true);
   }
 
+  /**
+   * Verify that packets written after the first one in a block are filled to
+   * the maximum packet content size derived from dfs.client-write-packet-size.
+   */
+  @Test(timeout = 60000)
+  public void testLastPacketSizeInBlocks() throws IOException {
+    final long blockSize = (long) 1024 * 1024;
+    MiniDFSCluster dfsCluster = cluster;
+    DistributedFileSystem fs = dfsCluster.getFileSystem();
+    Configuration dfsConf = fs.getConf();
+
+    EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE);
+    try (FSDataOutputStream fos = fs.create(new Path("/testfile.dat"),
+        FsPermission.getDefault(),
+        flags, 512, (short) 3, blockSize, null)) {
+
+      // NOTE(review): assumes the client default checksum (CRC32C, 512
+      // bytes-per-checksum) -- confirm against the cluster configuration.
+      DataChecksum crc32c = DataChecksum.newDataChecksum(
+          DataChecksum.Type.CRC32C, 512);
+
+      long loop = 0;
+      Random r = new Random();
+      byte[] buf = new byte[(int) blockSize];
+      r.nextBytes(buf);
+      fos.write(buf);
+      fos.hflush();
+
+      // Largest multiple of (chunk data + checksum) that fits in one packet.
+      int chunkSize = crc32c.getBytesPerChecksum() + crc32c.getChecksumSize();
+      int packetContentSize = (dfsConf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
+          DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT) -
+          PacketHeader.PKT_MAX_HEADER_LEN) / chunkSize * chunkSize;
+
+      while (loop < 20) {
+        r.nextBytes(buf);
+        fos.write(buf);
+        fos.hflush();
+        loop++;
+        // JUnit argument order is (expected, actual).
+        Assert.assertEquals(packetContentSize,
+            ((DFSOutputStream) fos.getWrappedStream()).packetSize);
+      }
+    }
+    fs.delete(new Path("/testfile.dat"), true);
+  }
+
   @AfterClass
   public static void tearDown() {
     if (cluster != null) {