Skip to content

Commit

Permalink
Use zero-copy instead
Browse files (browse the repository at this point in the history)
  • Loading branch information
Fokko committed Jul 9, 2024
1 parent deae941 commit 0202b4c
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 21 deletions.
4 changes: 2 additions & 2 deletions cpp/src/arrow/compute/kernels/scalar_cast_temporal.cc
Original file line number Diff line number Diff line change
Expand Up @@ -514,7 +514,7 @@ std::shared_ptr<CastFunction> GetDate32Cast() {
AddCommonCasts(Type::DATE32, out_ty, func.get());

// date32 -> date32
AddCrossUnitCast<Date32Type>(func.get());
AddZeroCopyCast(Type::DATE32, date32(), date32(), func.get());

// int32 -> date32
AddZeroCopyCast(Type::INT32, int32(), date32(), func.get());
Expand All @@ -539,7 +539,7 @@ std::shared_ptr<CastFunction> GetDate64Cast() {
AddCommonCasts(Type::DATE64, out_ty, func.get());

// date64 -> date64
AddCrossUnitCast<Date64Type>(func.get());
AddZeroCopyCast(Type::DATE64, date64(), date64(), func.get());

// int64 -> date64
AddZeroCopyCast(Type::INT64, int64(), date64(), func.get());
Expand Down
20 changes: 20 additions & 0 deletions python/pyarrow/tests/test_compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -1842,6 +1842,26 @@ def test_cast():
assert pc.cast(arr, expected.type) == expected


def test_identity_cast_dates():
    """An identity (no-op) cast of date32/date64 columns must round-trip values.

    Regression test for https://github.com/apache/arrow/issues/43183: casting a
    RecordBatchReader to its own schema should succeed for date types.
    """
    day = datetime.date(1990, 3, 1)

    target_schema = pa.schema([
        ('date32', pa.date32()),
        ('date64', pa.date64()),
    ])

    record_batch = pa.RecordBatch.from_arrays(
        [[day], [day]], schema=target_schema)

    # Cast the stream to the very same schema it already has and materialize it.
    reader = pa.RecordBatchReader.from_batches(target_schema, [record_batch])
    result = reader.cast(target_schema).read_all()

    for column in ('date32', 'date64'):
        assert result[column][0].as_py() == day


@pytest.mark.parametrize('value_type', numerical_arrow_types)
def test_fsl_to_fsl_cast(value_type):
# Different field name and different type.
Expand Down
19 changes: 0 additions & 19 deletions python/pyarrow/tests/test_ipc.py
Original file line number Diff line number Diff line change
Expand Up @@ -1199,25 +1199,6 @@ def make_batches():
reader = pa.RecordBatchReader.from_batches(None, batches)
pass

# https://github.com/apache/arrow/issues/43183
dt = datetime.date(1990, 3, 1)
data = [[dt], [dt]]

schema = pa.schema([
('date32', pa.date32()),
('date64', pa.date64()),
])

batch = pa.RecordBatch.from_arrays(data, schema=schema)

table = pa.RecordBatchReader.from_batches(
schema,
[batch]
).cast(schema).read_all()

assert table['date32'][0].as_py() == dt
assert table['date64'][0].as_py() == dt


def test_record_batch_reader_from_arrow_stream():

Expand Down

0 comments on commit 0202b4c

Please sign in to comment.