Skip to content
This repository has been archived by the owner on Feb 18, 2024. It is now read-only.

Implement writing FixedSizeList to Parquet #1387

Draft
This draft pull request wants to merge 3 commits into the base branch `main` from the author's branch.
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number | Diff line number | Diff line change
Expand Up @@ -170,7 +170,7 @@ io_ipc_compression = ["lz4", "zstd"]
io_flight = ["io_ipc", "arrow-format/flight-data"]

# base64 + io_ipc because arrow schemas are stored as base64-encoded ipc format.
io_parquet = ["parquet2", "io_ipc", "base64", "futures", "streaming-iterator", "fallible-streaming-iterator"]
io_parquet = ["parquet2", "io_ipc", "base64", "futures", "streaming-iterator", "fallible-streaming-iterator", "compute_take"]

io_parquet_compression = [
"io_parquet_zstd",
Expand Down
1 change: 1 addition & 0 deletions arrow-parquet-integration-testing/main.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -73,6 +73,7 @@ def variations():
"generated_datetime",
"generated_decimal",
"generated_interval",
"generated_nested",
# see https://issues.apache.org/jira/browse/ARROW-13486 and
# https://issues.apache.org/jira/browse/ARROW-13487
# "generated_dictionary",
Expand Down
30 changes: 28 additions & 2 deletions src/io/parquet/write/pages.rs
Original file line number | Diff line number | Diff line change
Expand Up @@ -2,9 +2,9 @@ use parquet2::schema::types::{ParquetType, PrimitiveType as ParquetPrimitiveType
use parquet2::{page::Page, write::DynIter};
use std::fmt::Debug;

use crate::array::{ListArray, StructArray};
use crate::array::{FixedSizeListArray, ListArray, PrimitiveArray, StructArray};
use crate::bitmap::Bitmap;
use crate::datatypes::PhysicalType;
use crate::datatypes::{DataType, PhysicalType};
use crate::io::parquet::read::schema::is_nullable;
use crate::offset::Offset;
use crate::{
Expand Down Expand Up @@ -174,6 +174,32 @@ fn to_leaves_recursive<'a>(array: &'a dyn Array, leaves: &mut Vec<&'a dyn Array>
let array = array.as_any().downcast_ref::<ListArray<i64>>().unwrap();
to_leaves_recursive(array.values().as_ref(), leaves);
}
FixedSizeList => {
let indices: Option<Vec<u32>> = array.validity().map(|validity| {
validity
.into_iter()
.enumerate()
.map(|(idx, val)| if val { Some(idx as u32) } else { None })
.flatten()
.collect()
});

if let Some(indices) = indices {
let new_array = crate::compute::take::take(
array,
&PrimitiveArray::new(DataType::UInt32, indices.into(), None),
)
.unwrap();
let new_array = new_array
.as_any()
.downcast_ref::<FixedSizeListArray>()
.unwrap();
to_leaves_recursive(new_array.values().as_ref(), leaves);
} else {
let array = array.as_any().downcast_ref::<FixedSizeListArray>().unwrap();
to_leaves_recursive(array.values().as_ref(), leaves);
}
}
Null | Boolean | Primitive(_) | Binary | FixedSizeBinary | LargeBinary | Utf8
| LargeUtf8 | Dictionary(_) => leaves.push(array),
other => todo!("Writing {:?} to parquet not yet implemented", other),
Expand Down