16 changes: 8 additions & 8 deletions crates/duckdb/src/appender/arrow.rs
@@ -28,15 +28,15 @@ impl Appender<'_> {
     /// Will return `Err` if append column count not the same with the table schema
     #[inline]
     pub fn append_record_batch(&mut self, record_batch: RecordBatch) -> Result<()> {
-        let logical_types: Vec<LogicalTypeHandle> = record_batch
-            .schema()
-            .fields()
-            .iter()
-            .map(|field| {
+        let fields = record_batch.schema().fields();
+        let capacity = fields.len();
+        let mut logical_types = Vec::with_capacity(capacity);
+        for field in fields.iter() {
+            logical_types.push(
                 to_duckdb_logical_type(field.data_type())
-                    .map_err(|_op| Error::ArrowTypeToDuckdbType(field.to_string(), field.data_type().clone()))
-            })
-            .collect::<Result<Vec<_>, _>>()?;
+                    .map_err(|_op| Error::ArrowTypeToDuckdbType(field.to_string(), field.data_type().clone()))?,
+            );
+        }
 
         let vector_size = unsafe { duckdb_vector_size() } as usize;
         let num_rows = record_batch.num_rows();
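Note for reviewers: the hunk above swaps a fallible `.map(...).collect::<Result<Vec<_>, _>>()` chain for a pre-sized `Vec` filled in a `for` loop, with `?` propagating the first conversion error. A minimal, self-contained sketch of that equivalence, using a hypothetical `parse` helper rather than any crate API:

use std::num::ParseIntError;

fn parse(s: &str) -> Result<i32, ParseIntError> {
    s.parse()
}

// Original style: the iterator chain collects into Result<Vec<_>, _> and
// stops at the first Err it encounters.
fn collect_style(inputs: &[&str]) -> Result<Vec<i32>, ParseIntError> {
    inputs.iter().map(|s| parse(s)).collect()
}

// Rewritten style: reserve capacity up front and push inside a loop;
// `?` returns the first Err directly.
fn loop_style(inputs: &[&str]) -> Result<Vec<i32>, ParseIntError> {
    let mut out = Vec::with_capacity(inputs.len());
    for s in inputs {
        out.push(parse(s)?);
    }
    Ok(out)
}

fn main() -> Result<(), ParseIntError> {
    assert_eq!(collect_style(&["1", "2", "3"])?, loop_style(&["1", "2", "3"])?);
    assert!(loop_style(&["1", "x"]).is_err());
    Ok(())
}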
3 changes: 2 additions & 1 deletion crates/duckdb/src/core/data_chunk.rs
@@ -34,7 +34,8 @@ impl DataChunkHandle {
     /// Create a new [DataChunkHandle] with the given [LogicalTypeHandle]s.
     pub fn new(logical_types: &[LogicalTypeHandle]) -> Self {
         let num_columns = logical_types.len();
-        let mut c_types = logical_types.iter().map(|t| t.ptr).collect::<Vec<_>>();
+        let mut c_types = Vec::with_capacity(num_columns);
+        c_types.extend(logical_types.iter().map(|t| t.ptr));
         let ptr = unsafe { duckdb_create_data_chunk(c_types.as_mut_ptr(), num_columns as u64) };
         Self { ptr, owned: true }
     }
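A brief aside on the `Vec::with_capacity` + `extend` pattern this PR applies, sketched under the assumption that the source iterator reports an exact length (as slice iterators do): `collect()` into a `Vec` also pre-allocates from `size_hint()`, so the rewritten form mainly makes the single up-front allocation explicit and ties the capacity to the named `num_columns`. The values below are placeholders, not real type handles.

fn main() {
    // Placeholder values standing in for the raw logical type pointers.
    let logical_type_ptrs: Vec<usize> = vec![0x10, 0x20, 0x30];
    let num_columns = logical_type_ptrs.len();

    // Implicit pre-allocation: FromIterator for Vec uses the iterator's size_hint().
    let collected: Vec<usize> = logical_type_ptrs.iter().copied().collect();

    // Explicit pre-allocation, as in the updated DataChunkHandle::new.
    let mut extended = Vec::with_capacity(num_columns);
    extended.extend(logical_type_ptrs.iter().copied());

    assert_eq!(collected, extended);
    assert!(extended.capacity() >= num_columns);
}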
12 changes: 6 additions & 6 deletions crates/duckdb/src/types/mod.rs
@@ -135,12 +135,12 @@ impl From<&DataType> for Type {
                 Self::Array(Box::new(Self::from(field.data_type())), (*size).try_into().unwrap())
             }
             // DataType::LargeList(_) => Self::LargeList,
-            DataType::Struct(inner) => Self::Struct(
-                inner
-                    .iter()
-                    .map(|f| (f.name().to_owned(), Self::from(f.data_type())))
-                    .collect(),
-            ),
+            DataType::Struct(inner) => {
+                let capacity = inner.len();
+                let mut struct_vec = Vec::with_capacity(capacity);
+                struct_vec.extend(inner.iter().map(|f| (f.name().to_owned(), Self::from(f.data_type()))));
+                Self::Struct(struct_vec)
+            }
             DataType::LargeList(inner) => Self::List(Box::new(Self::from(inner.data_type()))),
             DataType::Union(_, _) => Self::Union,
             DataType::Decimal128(..) => Self::Decimal,
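To exercise the rewritten `DataType::Struct` arm in isolation, here is a sketch assuming a recent arrow-rs (the crate re-exported here as `duckdb::arrow`); the `format!("{:?}", ...)` conversion is a stand-in for the recursive `Type::from(f.data_type())` so the example stays self-contained:

use arrow::datatypes::{DataType, Field, Fields};

fn main() {
    let fields = Fields::from(vec![
        Field::new("id", DataType::Int64, false),
        Field::new("name", DataType::Utf8, true),
    ]);
    let dt = DataType::Struct(fields);

    if let DataType::Struct(inner) = &dt {
        // Mirror of the new branch: reserve one slot per struct field, then
        // extend with (field name, converted type) pairs.
        let mut struct_vec = Vec::with_capacity(inner.len());
        struct_vec.extend(
            inner
                .iter()
                .map(|f| (f.name().to_owned(), format!("{:?}", f.data_type()))),
        );
        assert_eq!(struct_vec.len(), 2);
        assert_eq!(struct_vec[0].0, "id");
    }
}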
56 changes: 28 additions & 28 deletions crates/duckdb/src/types/value_ref.rs
@@ -256,40 +256,41 @@ impl From<ValueRef<'_>> for Value {
                 Self::Enum(dict_values.value(dict_key).to_string())
             }
             ValueRef::Struct(items, idx) => {
-                let value: Vec<(String, Self)> = items
-                    .columns()
-                    .iter()
-                    .zip(items.fields().iter().map(|f| f.name().to_owned()))
-                    .map(|(column, name)| -> (String, Self) {
-                        (name, Row::value_ref_internal(idx, 0, column).to_owned())
-                    })
-                    .collect();
+                let capacity = items.columns().len();
+                let mut value = Vec::with_capacity(capacity);
+                value.extend(
+                    items
+                        .columns()
+                        .iter()
+                        .zip(items.fields().iter().map(|f| f.name().to_owned()))
+                        .map(|(column, name)| -> (String, Self) {
+                            (name, Row::value_ref_internal(idx, 0, column).to_owned())
+                        }),
+                );
                 Self::Struct(OrderedMap::from(value))
             }
             ValueRef::Map(arr, idx) => {
                 let keys = arr.keys();
                 let values = arr.values();
                 let offsets = arr.offsets();
                 let range = offsets[idx]..offsets[idx + 1];
-                Self::Map(OrderedMap::from(
-                    range
-                        .map(|row| {
-                            let row = row.try_into().unwrap();
-                            let key = Row::value_ref_internal(row, idx, keys).to_owned();
-                            let value = Row::value_ref_internal(row, idx, values).to_owned();
-                            (key, value)
-                        })
-                        .collect::<Vec<_>>(),
-                ))
+                let capacity = range.len();
+                let mut map_vec = Vec::with_capacity(capacity);
+                map_vec.extend(range.map(|row| {
+                    let row = row.try_into().unwrap();
+                    let key = Row::value_ref_internal(row, idx, keys).to_owned();
+                    let value = Row::value_ref_internal(row, idx, values).to_owned();
+                    (key, value)
+                }));
+                Self::Map(OrderedMap::from(map_vec))
             }
             ValueRef::Array(items, idx) => {
                 let value_length = usize::try_from(items.value_length()).unwrap();
                 let range = (idx * value_length)..((idx + 1) * value_length);
-                Self::Array(
-                    range
-                        .map(|row| Row::value_ref_internal(row, idx, items.values()).to_owned())
-                        .collect(),
-                )
+                let capacity = value_length;
+                let mut array_vec = Vec::with_capacity(capacity);
+                array_vec.extend(range.map(|row| Row::value_ref_internal(row, idx, items.values()).to_owned()));
+                Self::Array(array_vec)
             }
             ValueRef::Union(column, idx) => {
                 let column = column.as_any().downcast_ref::<UnionArray>().unwrap();
@@ -304,11 +305,10 @@ impl From<ValueRef<'_>> for Value {
 }
 
 fn from_list(start: usize, end: usize, idx: usize, values: &ArrayRef) -> Value {
-    Value::List(
-        (start..end)
-            .map(|row| Row::value_ref_internal(row, idx, values).to_owned())
-            .collect(),
-    )
+    let capacity = end - start;
+    let mut list_vec = Vec::with_capacity(capacity);
+    list_vec.extend((start..end).map(|row| Row::value_ref_internal(row, idx, values).to_owned()));
+    Value::List(list_vec)
 }
 
 impl<'a> From<&'a str> for ValueRef<'a> {
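One detail worth spelling out in the `ValueRef::Map` arm above: the reserved capacity is `range.len()`, where `range = offsets[idx]..offsets[idx + 1]` selects the key/value entries belonging to map row `idx` in the flattened entries array. A sketch with hypothetical offsets (plain integers, no arrow types) showing how that range drives the capacity:

fn main() {
    // Hypothetical MapArray offsets for three map rows: row i's entries live at
    // positions offsets[i]..offsets[i + 1] of the flattened keys/values arrays.
    let offsets: [usize; 4] = [0, 2, 2, 5];

    for idx in 0..3 {
        let range = offsets[idx]..offsets[idx + 1];
        // range.len() is the per-row entry count passed to Vec::with_capacity.
        let mut map_vec: Vec<(usize, usize)> = Vec::with_capacity(range.len());
        // Stand-in for looking up the (key, value) pair at each entry position.
        map_vec.extend(range.map(|row| (row, row * 10)));
        println!("map row {idx}: {} entries -> {:?}", map_vec.len(), map_vec);
    }
}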