Skip to content

Commit

Permalink
lint cpp
Browse files Browse the repository at this point in the history
  • Loading branch information
amol- committed Oct 16, 2024
1 parent ef8dfb2 commit d4fac36
Show file tree
Hide file tree
Showing 7 changed files with 22 additions and 16 deletions.
9 changes: 4 additions & 5 deletions cpp/src/arrow/array/util.cc
Original file line number Diff line number Diff line change
Expand Up @@ -916,18 +916,17 @@ Result<std::shared_ptr<Array>> MakeEmptyArray(std::shared_ptr<DataType> type,
return builder->Finish();
}

Result<std::shared_ptr<Array>> MakeMaskArray(const std::vector<int64_t> &indices, int64_t length,
MemoryPool* memory_pool) {
Result<std::shared_ptr<Array>> MakeMaskArray(const std::vector<int64_t>& indices,
int64_t length, MemoryPool* memory_pool) {
BooleanBuilder builder(memory_pool);
RETURN_NOT_OK(builder.Resize(length));

auto i = indices.begin();
for(int64_t builder_i = 0; builder_i < length; builder_i++) {
for (int64_t builder_i = 0; builder_i < length; builder_i++) {
if (builder_i == *i) {
builder.UnsafeAppend(true);
i++;
}
else {
} else {
builder.UnsafeAppend(false);
}
}
Expand Down
6 changes: 3 additions & 3 deletions cpp/src/arrow/array/util.h
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@ ARROW_EXPORT
Result<std::shared_ptr<Array>> MakeEmptyArray(std::shared_ptr<DataType> type,
MemoryPool* pool = default_memory_pool());


/// \brief Create an Array representing a boolean mask
///
/// The mask will have all elements set to false except for those
Expand All @@ -80,8 +79,9 @@ Result<std::shared_ptr<Array>> MakeEmptyArray(std::shared_ptr<DataType> type,
/// \param[in] pool the memory pool to allocate memory from
/// \return the resulting Array
ARROW_EXPORT
Result<std::shared_ptr<Array>> MakeMaskArray(const std::vector<int64_t> &indices, int64_t length,
MemoryPool* memory_pool = default_memory_pool());
Result<std::shared_ptr<Array>> MakeMaskArray(
const std::vector<int64_t>& indices, int64_t length,
MemoryPool* memory_pool = default_memory_pool());
/// @}

namespace internal {
Expand Down
6 changes: 4 additions & 2 deletions python/pyarrow/array.pxi
Original file line number Diff line number Diff line change
Expand Up @@ -1884,7 +1884,8 @@ cdef class Array(_PandasConvertible):
inner_array = pyarrow_unwrap_array(casted_array)
except ArrowInvalid as e:
raise ValueError(
f"Could not cast {self.type} to requested type {target_type}: {e}"
f"Could not cast {self.type} to requested type {
target_type}: {e}"
)
else:
inner_array = self.sp_array
Expand Down Expand Up @@ -2029,7 +2030,8 @@ cdef class Array(_PandasConvertible):
inner_array = pyarrow_unwrap_array(casted_array)
except ArrowInvalid as e:
raise ValueError(
f"Could not cast {self.type} to requested type {target_type}: {e}"
f"Could not cast {self.type} to requested type {
target_type}: {e}"
)
else:
inner_array = self.sp_array
Expand Down
3 changes: 2 additions & 1 deletion python/pyarrow/lib.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,8 @@ def _ensure_cuda_loaded():
if __cuda_loaded is not True:
raise ImportError(
"Trying to import data on a CUDA device, but PyArrow is not built with "
f"CUDA support.\n(importing 'pyarrow.cuda' resulted in \"{__cuda_loaded}\")."
f"CUDA support.\n(importing 'pyarrow.cuda' resulted in \"{
__cuda_loaded}\")."
)


Expand Down
9 changes: 6 additions & 3 deletions python/pyarrow/table.pxi
Original file line number Diff line number Diff line change
Expand Up @@ -1395,7 +1395,8 @@ cdef class ChunkedArray(_PandasConvertible):
chunked = self.cast(target_type, safe=True)
except ArrowInvalid as e:
raise ValueError(
f"Could not cast {self.type} to requested type {target_type}: {e}"
f"Could not cast {self.type} to requested type {
target_type}: {e}"
)
else:
chunked = self
Expand Down Expand Up @@ -3776,7 +3777,8 @@ cdef class RecordBatch(_Tabular):
inner_batch = pyarrow_unwrap_batch(casted_batch)
except ArrowInvalid as e:
raise ValueError(
f"Could not cast {self.schema} to requested schema {target_schema}: {e}"
f"Could not cast {self.schema} to requested schema {
target_schema}: {e}"
)
else:
inner_batch = self.sp_batch
Expand Down Expand Up @@ -3955,7 +3957,8 @@ cdef class RecordBatch(_Tabular):
inner_batch = pyarrow_unwrap_batch(casted_batch)
except ArrowInvalid as e:
raise ValueError(
f"Could not cast {self.schema} to requested schema {target_schema}: {e}"
f"Could not cast {self.schema} to requested schema {
target_schema}: {e}"
)
else:
inner_batch = self.sp_batch
Expand Down
2 changes: 1 addition & 1 deletion python/pyarrow/tests/test_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -4207,4 +4207,4 @@ def test_non_cpu_array():
def test_mask_array():
    # Verify pa.mask builds a boolean array of the requested length with
    # True only at the supplied indices (here: positions 2 and 4 of 6).
    expected = pa.array([False, False, True, False, True, False])
    mask_array = pa.mask([2, 4], 6)
    assert mask_array.equals(expected)
3 changes: 2 additions & 1 deletion python/pyarrow/types.pxi
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,8 @@ cdef void* _as_c_pointer(v, allow_null=False) except *:
else:
capsule_name_str = capsule_name.decode()
raise ValueError(
f"Can't convert PyCapsule with name '{capsule_name_str}' to pointer address"
f"Can't convert PyCapsule with name '{
capsule_name_str}' to pointer address"
)
else:
raise TypeError(f"Expected a pointer value, got {type(v)!r}")
Expand Down

0 comments on commit d4fac36

Please sign in to comment.