4 changes: 4 additions & 0 deletions src/iceberg/CMakeLists.txt
@@ -23,6 +23,7 @@ set(ICEBERG_SOURCES
expression/expression.cc
expression/literal.cc
file_reader.cc
file_writer.cc
json_internal.cc
manifest_entry.cc
manifest_list.cc
@@ -46,6 +47,8 @@ set(ICEBERG_SOURCES
type.cc
manifest_reader.cc
manifest_reader_internal.cc
manifest_writer.cc
manifest_writer_internal.cc
arrow_c_data_guard_internal.cc
util/murmurhash3_internal.cc
util/timepoint.cc
@@ -107,6 +110,7 @@ if(ICEBERG_BUILD_BUNDLE)
arrow/arrow_fs_file_io.cc
avro/avro_data_util.cc
avro/avro_reader.cc
avro/avro_writer.cc
avro/avro_schema_util.cc
avro/avro_register.cc
avro/avro_stream_internal.cc
159 changes: 159 additions & 0 deletions src/iceberg/avro/avro_writer.cc
@@ -0,0 +1,159 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

#include "iceberg/avro/avro_writer.h"

#include <memory>

#include <arrow/array/builder_base.h>
#include <arrow/c/bridge.h>
#include <arrow/record_batch.h>
#include <arrow/result.h>
#include <avro/DataFile.hh>
#include <avro/GenericDatum.hh>
#include <avro/NodeImpl.hh>

#include "iceberg/arrow/arrow_fs_file_io_internal.h"
#include "iceberg/avro/avro_schema_util_internal.h"
#include "iceberg/avro/avro_stream_internal.h"
#include "iceberg/schema.h"
#include "iceberg/util/checked_cast.h"
#include "iceberg/util/macros.h"

namespace iceberg::avro {

namespace {

Result<std::unique_ptr<AvroOutputStream>> CreateOutputStream(const WriterOptions& options,
int64_t buffer_size) {
auto io = internal::checked_pointer_cast<arrow::ArrowFileSystemFileIO>(options.io);
auto result = io->fs()->OpenOutputStream(options.path);
if (!result.ok()) {
return IOError("Failed to open file {} for {}", options.path,
result.status().message());
}
return std::make_unique<AvroOutputStream>(result.MoveValueUnsafe(), buffer_size);
}

} // namespace

// A stateful context to keep track of the writing progress.
struct WriteContext {};
Reviewer comment:
Do we really need a context? Writer is much simpler than the reader impl.


class AvroWriter::Impl {
public:
Status Open(const WriterOptions& options) {
write_schema_ = options.schema;

auto root = std::make_shared<::avro::NodeRecord>();
ToAvroNodeVisitor visitor;
for (const auto& field : write_schema_->fields()) {
::avro::NodePtr node;
ICEBERG_RETURN_UNEXPECTED(visitor.Visit(field, &node));
root->addLeaf(node);
}
Comment on lines +64 to +70
Suggested change:
- auto root = std::make_shared<::avro::NodeRecord>();
- ToAvroNodeVisitor visitor;
- for (const auto& field : write_schema_->fields()) {
-   ::avro::NodePtr node;
-   ICEBERG_RETURN_UNEXPECTED(visitor.Visit(field, &node));
-   root->addLeaf(node);
- }
+ ::avro::NodePtr root;
+ ICEBERG_RETURN_UNEXPECTED(ToAvroNodeVisitor{}.Visit(*write_schema_, &root));

avro_schema_ = std::make_shared<::avro::ValidSchema>(root);

// Open the output stream and adapt to the avro interface.
constexpr int64_t kDefaultBufferSize = 1024 * 1024;
ICEBERG_ASSIGN_OR_RAISE(auto output_stream,
CreateOutputStream(options, kDefaultBufferSize));

writer_ = std::make_unique<::avro::DataFileWriter<::avro::GenericDatum>>(
std::move(output_stream), *avro_schema_);
return {};
}

Status Write(ArrowArray /*data*/) {
if (!context_) {
ICEBERG_RETURN_UNEXPECTED(InitWriteContext());
}
// TODO(xiao.dong) convert data and write to avro
// total_bytes_+= written_bytes;
return {};
}

Status Close() {
if (writer_ != nullptr) {
writer_->close();
writer_.reset();
}
context_.reset();
return {};
}

bool Closed() const { return writer_ == nullptr; }

int64_t length() { return total_bytes_; }

private:
Status InitWriteContext() { return {}; }

private:
int64_t total_bytes_ = 0;
// The schema to write.
std::shared_ptr<::iceberg::Schema> write_schema_;
// The avro schema to write.
std::shared_ptr<::avro::ValidSchema> avro_schema_;
// The avro writer used to write each record as a GenericDatum.
std::unique_ptr<::avro::DataFileWriter<::avro::GenericDatum>> writer_;
// The context to keep track of the writing progress.
std::unique_ptr<WriteContext> context_;
};

AvroWriter::~AvroWriter() = default;

Status AvroWriter::Write(ArrowArray data) { return impl_->Write(data); }

Status AvroWriter::Open(const WriterOptions& options) {
impl_ = std::make_unique<Impl>();
return impl_->Open(options);
}

Status AvroWriter::Close() {
if (!impl_->Closed()) {
return impl_->Close();
}
return {};
}

std::shared_ptr<Metrics> AvroWriter::metrics() {
if (impl_->Closed()) {
// TODO(xiao.dong) implement metrics
return std::make_shared<Metrics>();
}
return nullptr;
}

int64_t AvroWriter::length() {
if (impl_->Closed()) {
return impl_->length();
}
return 0;
}

std::vector<int64_t> AvroWriter::splitOffsets() { return {}; }

void AvroWriter::Register() {
static WriterFactoryRegistry avro_writer_register(
FileFormatType::kAvro,
[]() -> Result<std::unique_ptr<Writer>> { return std::make_unique<AvroWriter>(); });
}

} // namespace iceberg::avro
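
For context on the TODO inside Impl::Write(): the missing step would presumably build one ::avro::GenericDatum per record and append it through the DataFileWriter member. Below is a minimal sketch of the Avro C++ calls involved, written as if it sat inside Impl::Write(); the Arrow-to-datum conversion itself is not implemented in this PR and is elided here.

// Sketch only: avro_schema_ and writer_ are the Impl members declared above.
::avro::GenericDatum datum(*avro_schema_);  // a datum shaped by the file's ValidSchema
// ... populate datum from the incoming ArrowArray (the actual TODO) ...
writer_->write(datum);   // append one record to the Avro data file
writer_->flush();        // optionally flush buffered blocks to the output stream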
54 changes: 54 additions & 0 deletions src/iceberg/avro/avro_writer.h
@@ -0,0 +1,54 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

#pragma once

#include "iceberg/file_writer.h"
#include "iceberg/iceberg_bundle_export.h"

namespace iceberg::avro {

/// \brief A writer that writes ArrowArray data to Avro files.
class ICEBERG_BUNDLE_EXPORT AvroWriter : public Writer {
public:
AvroWriter() = default;

~AvroWriter() override;

Status Open(const WriterOptions& options) final;

Status Close() final;

Status Write(ArrowArray data) final;

std::shared_ptr<Metrics> metrics() final;

int64_t length() final;

std::vector<int64_t> splitOffsets() final;

/// \brief Register this Avro writer implementation.
static void Register();

private:
class Impl;
std::unique_ptr<Impl> impl_;
};

} // namespace iceberg::avro
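
A hypothetical end-to-end call site for this interface (not taken from the PR's tests): the field names path, schema, and io come from WriterOptions in this diff, while the function name, the file path, and the std::shared_ptr<FileIO> type used for io are assumptions.

#include <memory>

#include "iceberg/file_writer.h"  // Writer, WriterOptions, WriterFactoryRegistry
#include "iceberg/util/macros.h"  // ICEBERG_ASSIGN_OR_RAISE, ICEBERG_RETURN_UNEXPECTED

iceberg::Status WriteOneBatch(std::shared_ptr<iceberg::Schema> schema,
                              std::shared_ptr<iceberg::FileIO> io,
                              ArrowArray data) {
  iceberg::WriterOptions options;
  options.path = "/tmp/demo.avro";  // hypothetical location
  options.schema = std::move(schema);
  options.io = std::move(io);
  // WriterFactoryRegistry::Open creates the writer and calls Open(options) on it.
  ICEBERG_ASSIGN_OR_RAISE(auto writer,
                          iceberg::WriterFactoryRegistry::Open(
                              iceberg::FileFormatType::kAvro, options));
  ICEBERG_RETURN_UNEXPECTED(writer->Write(data));
  ICEBERG_RETURN_UNEXPECTED(writer->Close());
  // Per the review comments above, metrics() and length() are only meaningful
  // after the writer has been closed.
  std::shared_ptr<iceberg::Metrics> metrics = writer->metrics();
  int64_t length = writer->length();
  (void)metrics;
  (void)length;
  return {};
}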
62 changes: 62 additions & 0 deletions src/iceberg/file_writer.cc
@@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

#include "iceberg/file_writer.h"

#include <unordered_map>

#include "iceberg/result.h"
#include "iceberg/util/formatter.h"
#include "iceberg/util/macros.h"

namespace iceberg {

namespace {

WriterFactory GetNotImplementedFactory(FileFormatType format_type) {
return [format_type]() -> Result<std::unique_ptr<Writer>> {
return NotImplemented("Missing writer factory for file format: {}", format_type);
};
}

} // namespace

WriterFactory& WriterFactoryRegistry::GetFactory(FileFormatType format_type) {
static std::unordered_map<FileFormatType, WriterFactory> factories = {
{FileFormatType::kAvro, GetNotImplementedFactory(FileFormatType::kAvro)},
{FileFormatType::kParquet, GetNotImplementedFactory(FileFormatType::kParquet)},
{FileFormatType::kOrc, GetNotImplementedFactory(FileFormatType::kOrc)},
{FileFormatType::kPuffin, GetNotImplementedFactory(FileFormatType::kPuffin)},
};
return factories.at(format_type);
}

WriterFactoryRegistry::WriterFactoryRegistry(FileFormatType format_type,
WriterFactory factory) {
GetFactory(format_type) = std::move(factory);
}

Result<std::unique_ptr<Writer>> WriterFactoryRegistry::Open(
FileFormatType format_type, const WriterOptions& options) {
ICEBERG_ASSIGN_OR_RAISE(auto writer, GetFactory(format_type)());
ICEBERG_RETURN_UNEXPECTED(writer->Open(options));
return writer;
}

} // namespace iceberg
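
A note on the registry above: GetFactory() seeds every known format with a NotImplemented placeholder, and each WriterFactoryRegistry constructed by a concrete writer (such as the static one in AvroWriter::Register()) overwrites its format's slot. A minimal sketch of the resulting behaviour for a format that nothing registers in this PR; the call site and the exact wording of the surfaced error are assumptions.

iceberg::Status TryParquet(const iceberg::WriterOptions& options) {
  // No writer registers itself for kParquet in this PR, so the placeholder factory
  // runs and ICEBERG_ASSIGN_OR_RAISE propagates its NotImplemented error
  // ("Missing writer factory for file format: ...").
  ICEBERG_ASSIGN_OR_RAISE(auto writer,
                          iceberg::WriterFactoryRegistry::Open(
                              iceberg::FileFormatType::kParquet, options));
  (void)writer;
  return {};
}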
17 changes: 16 additions & 1 deletion src/iceberg/file_writer.h
@@ -28,7 +28,9 @@

#include "iceberg/arrow_c_data.h"
#include "iceberg/file_format.h"
#include "iceberg/metrics.h"
#include "iceberg/result.h"
#include "iceberg/schema.h"
#include "iceberg/type_fwd.h"

namespace iceberg {
@@ -38,7 +40,7 @@ struct ICEBERG_EXPORT WriterOptions {
/// \brief The path to the file to write.
std::string path;
/// \brief The schema of the data to write.
- ArrowSchema schema;
+ std::shared_ptr<Schema> schema;
/// \brief FileIO instance to open the file. Writer implementations should down cast it
/// to the specific FileIO implementation. By default, the `iceberg-bundle` library uses
/// `ArrowFileSystemFileIO` as the default implementation.
@@ -65,6 +67,19 @@ class ICEBERG_EXPORT Writer {
///
/// \return Status of write results.
virtual Status Write(ArrowArray data) = 0;

/// \brief Get the file statistics.
Reviewer comment:
Suggested change:
- /// \brief Get the file statistics.
+ /// \brief Get the file statistics.
+ /// Only valid after the file is closed.

virtual std::shared_ptr<Metrics> metrics() = 0;
Reviewer comment:
Suggested change:
- virtual std::shared_ptr<Metrics> metrics() = 0;
+ virtual Metrics metrics() = 0;
Perhaps we can just return a simple struct instead of a shared_ptr?


/// \brief Get the file length.
Reviewer comment:
Suggested change:
- /// \brief Get the file length.
+ /// \brief Get the file length.
+ /// Only valid after the file is closed.

virtual int64_t length() = 0;

/// \brief Get the file length.
/// Returns a list of recommended split locations, if applicable, null otherwise.
/// When available, this information is used for planning scan tasks whose boundaries
/// are determined by these offsets. The returned list must be sorted in ascending order
/// Only valid after the file is closed.
Comment on lines +77 to +81
Suggested change:
- /// \brief Get the file length.
- /// Returns a list of recommended split locations, if applicable, null otherwise.
- /// When available, this information is used for planning scan tasks whose boundaries
- /// are determined by these offsets. The returned list must be sorted in ascending order
- /// Only valid after the file is closed.
+ /// \brief Returns a list of recommended split locations, if applicable, empty otherwise.
+ /// When available, this information is used for planning scan tasks whose boundaries
+ /// are determined by these offsets. The returned list must be sorted in ascending order.
+ /// Only valid after the file is closed.

virtual std::vector<int64_t> splitOffsets() = 0;
Reviewer comment:
Suggested change:
- virtual std::vector<int64_t> splitOffsets() = 0;
+ virtual std::vector<int64_t> split_offsets() = 0;

};

/// \brief Factory function to create a writer of a specific file format.