<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<p align="center">
<a href="https://hudi.apache.org/">
<img src="https://hudi.apache.org/assets/images/hudi_logo_transparent_1400x600.png" alt="Hudi logo" height="120px">
</a>
</p>
<p align="center">
The native Rust implementation for Apache Hudi, with Python API bindings.
<br>
<br>
<a href="https://github.com/apache/hudi-rs/actions/workflows/ci.yml">
<img alt="hudi-rs ci" src="https://github.com/apache/hudi-rs/actions/workflows/ci.yml/badge.svg">
</a>
<a href="https://codecov.io/github/apache/hudi-rs">
<img alt="hudi-rs codecov" src="https://codecov.io/github/apache/hudi-rs/graph/badge.svg">
</a>
<a href="https://join.slack.com/t/apache-hudi/shared_invite/zt-2ggm1fub8-_yt4Reu9djwqqVRFC7X49g">
<img alt="join hudi slack" src="https://img.shields.io/badge/slack-%23hudi-72eff8?logo=slack&color=48c628">
</a>
<a href="https://x.com/apachehudi">
<img alt="follow hudi x/twitter" src="https://img.shields.io/twitter/follow/apachehudi?label=apachehudi">
</a>
<a href="https://www.linkedin.com/company/apache-hudi">
<img alt="follow hudi linkedin" src="https://img.shields.io/badge/apache%E2%80%93hudi-0077B5?logo=linkedin">
</a>
</p>
The `hudi-rs` project aims to broaden the use of [Apache Hudi](https://github.com/apache/hudi) for a diverse range of
users and projects.

| Source | Installation Command |
|---------------|----------------------|
| **PyPI** | `pip install hudi` |
| **Crates.io** | `cargo add hudi` |
## Usage Examples
> [!NOTE]
> These examples assume a Hudi table exists at `/tmp/trips_table`, created using
> the [quick start guide](https://hudi.apache.org/docs/quick-start-guide).
### Snapshot Query
A snapshot query reads the latest version of the data from the table. The table API also accepts partition filters.
#### Python
```python
from hudi import HudiTableBuilder
import pyarrow as pa
hudi_table = HudiTableBuilder.from_base_uri("/tmp/trips_table").build()
batches = hudi_table.read_snapshot(filters=[("city", "=", "san_francisco")])
# convert to PyArrow table
arrow_table = pa.Table.from_batches(batches)
result = arrow_table.select(["rider", "city", "ts", "fare"])
print(result)
```
#### Rust
```rust
use hudi::error::Result;
use hudi::table::builder::TableBuilder as HudiTableBuilder;
use arrow::compute::concat_batches;
#[tokio::main]
async fn main() -> Result<()> {
    let hudi_table = HudiTableBuilder::from_base_uri("/tmp/trips_table").build().await?;
    let batches = hudi_table.read_snapshot(&[("city", "=", "san_francisco")]).await?;
    let batch = concat_batches(&batches[0].schema(), &batches)?;
    let columns = vec!["rider", "city", "ts", "fare"];
    for col_name in columns {
        let idx = batch.schema().index_of(col_name).unwrap();
        println!("{}: {}", col_name, batch.column(idx));
    }
    Ok(())
}
```
To run a read-optimized (RO) query on Merge-on-Read (MOR) tables, set `hoodie.read.use.read_optimized.mode` when creating the table.
#### Python
```python
hudi_table = (
    HudiTableBuilder
    .from_base_uri("/tmp/trips_table")
    .with_option("hoodie.read.use.read_optimized.mode", "true")
    .build()
)
```
#### Rust
```rust
let hudi_table =
    HudiTableBuilder::from_base_uri("/tmp/trips_table")
        .with_option("hoodie.read.use.read_optimized.mode", "true")
        .build().await?;
```
> [!NOTE]
> Currently, reading MOR tables is limited to tables with Parquet data blocks.
### Time-Travel Query
A time-travel query reads the table as of a specific timestamp. The table API also accepts partition filters.
#### Python
```python
batches = (
    hudi_table
    .read_snapshot_as_of("20241231123456789", filters=[("city", "=", "san_francisco")])
)
```
#### Rust
```rust
let batches =
    hudi_table
        .read_snapshot_as_of("20241231123456789", &[("city", "=", "san_francisco")]).await?;
```
### Incremental Query
An incremental query reads records that changed within a given time range.
#### Python
```python
# read the records between t1 (exclusive) and t2 (inclusive)
batches = hudi_table.read_incremental_records(t1, t2)
# read the records after t1
batches = hudi_table.read_incremental_records(t1)
```
#### Rust
```rust
// read the records between t1 (exclusive) and t2 (inclusive)
let batches = hudi_table.read_incremental_records(t1, Some(t2)).await?;
// read the records after t1
let batches = hudi_table.read_incremental_records(t1, None).await?;
```
> [!NOTE]
> Currently, the only supported format for the timestamp arguments is the Hudi Timeline format: `yyyyMMddHHmmssSSS` or `yyyyMMddHHmmss`.
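For illustration, a timestamp in this format can be produced with the [`chrono`](https://crates.io/crates/chrono) crate. This is a hedged sketch: `chrono` is not required by the table API, and the helper name is made up.

```rust
use chrono::Utc;

/// Hypothetical helper: render the current UTC time in the Hudi Timeline
/// format `yyyyMMddHHmmssSSS`, e.g. "20241231123456789".
fn current_timeline_timestamp() -> String {
    // chrono's `%3f` prints milliseconds as three digits without a leading
    // dot, which matches the `SSS` part of the format.
    Utc::now().format("%Y%m%d%H%M%S%3f").to_string()
}

fn main() {
    let ts = current_timeline_timestamp();
    assert_eq!(ts.len(), 17); // 14 digits for yyyyMMddHHmmss + 3 for SSS
    println!("{ts}");
}
```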
## Query Engine Integration
`hudi-rs` provides APIs to support integration with query engines. The sections below highlight some commonly used APIs.
### Table API
Create a Hudi table instance using its constructor or the `TableBuilder` API.

| Stage | API | Description |
|-----------------|-------------------------------------------|--------------------------------------------------------------------------------|
| Query planning | `get_file_slices()` | For snapshot query, get a list of file slices. |
| | `get_file_slices_splits()` | For snapshot query, get a list of file slices in splits. |
| | `get_file_slices_as_of()` | For time-travel query, get a list of file slices at a given time. |
| | `get_file_slices_splits_as_of()` | For time-travel query, get a list of file slices in splits at a given time. |
| | `get_file_slices_between()` | For incremental query, get a list of changed file slices within a time range. |
| Query execution | `create_file_group_reader_with_options()` | Create a file group reader instance with the table instance's configs. |
### File Group API
Create a Hudi file group reader instance using its constructor or the Hudi table API `create_file_group_reader_with_options()`.

| Stage | API | Description |
|-----------------|----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Query execution | `read_file_slice()` | Read records from a given file slice; depending on the configs, read from the base file only, or from the base file and log files, merging records using the configured strategy. |
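Putting the two stages together, a planning-then-execution pass could look like the sketch below. The exact signatures of `get_file_slices()`, `create_file_group_reader_with_options()`, and `read_file_slice()` are assumptions here; consult the crate docs for the precise API.

```rust
use hudi::error::Result;
use hudi::table::builder::TableBuilder as HudiTableBuilder;

#[tokio::main]
async fn main() -> Result<()> {
    let hudi_table = HudiTableBuilder::from_base_uri("/tmp/trips_table").build().await?;

    // Query planning: list the file slices to scan (assumed to accept the
    // same partition filters as read_snapshot()).
    let file_slices = hudi_table.get_file_slices(&[("city", "=", "san_francisco")]).await?;

    // Query execution: create a file group reader configured with the
    // table's options, then read each planned file slice.
    let reader = hudi_table.create_file_group_reader_with_options()?;
    for file_slice in &file_slices {
        let batch = reader.read_file_slice(file_slice).await?;
        println!("read {} rows", batch.num_rows());
    }
    Ok(())
}
```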
### Apache DataFusion
Enabling the `datafusion` feature of the `hudi` crate provides a [DataFusion](https://datafusion.apache.org/)
extension for querying Hudi tables.
<details>
<summary>Add the `hudi` crate with the `datafusion` feature to your application to query a Hudi table.</summary>

```shell
cargo new my_project --bin && cd my_project
cargo add tokio@1 datafusion@43
cargo add hudi --features datafusion
```
Update `src/main.rs` with the code snippet below, then run `cargo run`.
</details>
```rust
use std::sync::Arc;
use datafusion::error::Result;
use datafusion::prelude::{DataFrame, SessionContext};
use hudi::HudiDataSource;
#[tokio::main]
async fn main() -> Result<()> {
    let ctx = SessionContext::new();
    let hudi = HudiDataSource::new_with_options(
        "/tmp/trips_table",
        [("hoodie.read.input.partitions", "5")]).await?;
    ctx.register_table("trips_table", Arc::new(hudi))?;
    let df: DataFrame = ctx.sql("SELECT * FROM trips_table WHERE city = 'san_francisco'").await?;
    df.show().await?;
    Ok(())
}
```
### Other Integrations
Hudi is also integrated with:
- [Daft](https://www.getdaft.io/projects/docs/en/stable/user_guide/integrations/hudi.html)
- [Ray](https://docs.ray.io/en/latest/data/api/doc/ray.data.read_hudi.html#ray.data.read_hudi)
### Work with Cloud Storage
Ensure cloud storage credentials are set as environment variables, e.g., `AWS_*`, `AZURE_*`, or `GOOGLE_*`.
The relevant variables will be picked up automatically, and the target table's base URI with a scheme such
as `s3://`, `az://`, or `gs://` will be processed accordingly.
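For example, with AWS-style credentials, the variables below would normally be exported in your shell or injected by your runtime environment; they are set programmatically here only to keep the sketch self-contained, and all values are placeholders.

```rust
use hudi::error::Result;
use hudi::table::builder::TableBuilder as HudiTableBuilder;

#[tokio::main]
async fn main() -> Result<()> {
    // Placeholder credentials for illustration; in practice, export these
    // in your shell (e.g. `export AWS_ACCESS_KEY_ID=...`) before running.
    std::env::set_var("AWS_ACCESS_KEY_ID", "<access-key-id>");
    std::env::set_var("AWS_SECRET_ACCESS_KEY", "<secret-access-key>");
    std::env::set_var("AWS_REGION", "us-west-2");

    // The `s3://` scheme selects the S3 storage backend, which picks up
    // the `AWS_*` variables above.
    let _hudi_table = HudiTableBuilder::from_base_uri("s3://bucket/trips_table")
        .build()
        .await?;
    Ok(())
}
```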
Alternatively, you can pass the storage configuration as options via Table APIs.
#### Python
```python
from hudi import HudiTableBuilder
hudi_table = (
    HudiTableBuilder
    .from_base_uri("s3://bucket/trips_table")
    .with_option("aws_region", "us-west-2")
    .build()
)
```
#### Rust
```rust
use hudi::error::Result;
use hudi::table::builder::TableBuilder as HudiTableBuilder;

#[tokio::main]
async fn main() -> Result<()> {
    let hudi_table =
        HudiTableBuilder::from_base_uri("s3://bucket/trips_table")
            .with_option("aws_region", "us-west-2")
            .build().await?;
    Ok(())
}
```
## Contributing
Check out the [contributing guide](./CONTRIBUTING.md) for all the details about making contributions to the project.