# Amazon S3 Tables Construct Library
<!--BEGIN STABILITY BANNER-->

---

> The APIs of higher level constructs in this module are experimental and under active development.
> They are subject to non-backward compatible changes or removal in any future version. These are
> not subject to the [Semantic Versioning](https://semver.org/) model and breaking changes will be
> announced in the release notes. This means that while you may use them, you may need to update
> your source code when upgrading to a newer version of this package.
---
<!--END STABILITY BANNER-->
## Amazon S3 Tables
Amazon S3 Tables deliver the first cloud object store with built-in Apache Iceberg support, and streamline storing tabular data at scale.
[Product Page](https://aws.amazon.com/s3/features/tables/) | [User Guide](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-tables.html)
## Usage
### Define an S3 Table Bucket
```python
# Build a Table bucket
sample_table_bucket = TableBucket(scope, "ExampleTableBucket",
table_bucket_name="example-bucket-1",
# optional fields:
unreferenced_file_removal=UnreferencedFileRemoval(
status=UnreferencedFileRemovalStatus.ENABLED,
noncurrent_days=20,
unreferenced_days=20
)
)
```
### Define an S3 Tables Namespace
```python
# Build a namespace
sample_namespace = Namespace(scope, "ExampleNamespace",
namespace_name="example-namespace-1",
table_bucket=table_bucket
)
```
### Define an S3 Table
```python
# Build a table
sample_table = Table(scope, "ExampleTable",
table_name="example_table",
namespace=namespace,
open_table_format=OpenTableFormat.ICEBERG,
without_metadata=True
)
# Build a table with an Iceberg Schema
sample_table_with_schema = Table(scope, "ExampleSchemaTable",
table_name="example_table_with_schema",
namespace=namespace,
open_table_format=OpenTableFormat.ICEBERG,
iceberg_metadata=IcebergMetadataProperty(
iceberg_schema=IcebergSchemaProperty(
schema_field_list=[SchemaFieldProperty(
name="id",
type="int",
required=True
), SchemaFieldProperty(
name="name",
type="string"
)
]
)
),
compaction=CompactionProperty(
status=Status.ENABLED,
target_file_size_mb=128
),
snapshot_management=SnapshotManagementProperty(
status=Status.ENABLED,
max_snapshot_age_hours=48,
min_snapshots_to_keep=5
)
)
```
Learn more about table buckets maintenance operations and default behavior from the [S3 Tables User Guide](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-table-buckets-maintenance.html)
### Controlling Table Bucket Permissions
```python
# Grant the principal read permissions to the bucket and all tables within
account_id = "123456789012"
table_bucket.grant_read(iam.AccountPrincipal(account_id), "*")
# Grant the role write permissions to the bucket and all tables within
role = iam.Role(stack, "MyRole", assumed_by=iam.ServicePrincipal("sample"))
table_bucket.grant_write(role, "*")
# Grant the user read and write permissions to the bucket and all tables within
table_bucket.grant_read_write(iam.User(stack, "MyUser"), "*")
# Grant permissions to the bucket and a particular table within it
table_id = "6ba046b2-26de-44cf-9144-0c7862593a7b"
table_bucket.grant_read_write(iam.AccountPrincipal(account_id), table_id)
# Add custom resource policy statements
permissions = iam.PolicyStatement(
effect=iam.Effect.ALLOW,
actions=["s3tables:*"],
principals=[iam.ServicePrincipal("example.aws.internal")],
resources=["*"]
)
table_bucket.add_to_resource_policy(permissions)
```
### Controlling Table Bucket Encryption Settings
S3 TableBuckets have SSE (server-side encryption with AES-256) enabled by default with S3 managed keys.
You can also bring your own KMS key for KMS-SSE or have S3 create a KMS key for you.
If a bucket is encrypted with KMS, grant functions on the bucket will also grant access
to the TableBucket's associated KMS key.
```python
# Provide a user defined KMS Key:
key = kms.Key(scope, "UserKey")
encrypted_bucket = TableBucket(scope, "EncryptedTableBucket",
table_bucket_name="table-bucket-1",
encryption=TableBucketEncryption.KMS,
encryption_key=key
)
# This account principal will also receive kms:Decrypt access to the KMS key
encrypted_bucket.grant_read(iam.AccountPrincipal("123456789012"), "*")
# Use S3 managed server side encryption (default)
encrypted_bucket_default = TableBucket(scope, "EncryptedTableBucketDefault",
table_bucket_name="table-bucket-3",
encryption=TableBucketEncryption.S3_MANAGED
)
```
When using KMS encryption (`TableBucketEncryption.KMS`), if no encryption key is provided, CDK will automatically create a new KMS key for the table bucket with necessary permissions.
```python
# If no key is provided, one will be created automatically
encrypted_bucket_auto = TableBucket(scope, "EncryptedTableBucketAuto",
table_bucket_name="table-bucket-2",
encryption=TableBucketEncryption.KMS
)
```
### Controlling Table Permissions
```python
# Grant the principal read permissions to the table
account_id = "123456789012"
table.grant_read(iam.AccountPrincipal(account_id))
# Grant the role write permissions to the table
role = iam.Role(stack, "MyRole", assumed_by=iam.ServicePrincipal("sample"))
table.grant_write(role)
# Grant the user read and write permissions to the table
table.grant_read_write(iam.User(stack, "MyUser"))
# Grant an account permissions to the table
table.grant_read_write(iam.AccountPrincipal(account_id))
# Add custom resource policy statements
permissions = iam.PolicyStatement(
effect=iam.Effect.ALLOW,
actions=["s3tables:*"],
principals=[iam.ServicePrincipal("example.aws.internal")],
resources=["*"]
)
table.add_to_resource_policy(permissions)
```
## Coming Soon
L2 Construct support for:
* KMS encryption support for Tables
Raw data
{
"_id": null,
"home_page": "https://github.com/aws/aws-cdk",
"name": "aws-cdk.aws-s3tables-alpha",
"maintainer": null,
"docs_url": null,
"requires_python": "~=3.9",
"maintainer_email": null,
"keywords": null,
"author": "Amazon Web Services",
"author_email": null,
"download_url": "https://files.pythonhosted.org/packages/9a/7f/eb40d2c12f955057a037c8eddd84cc9d7bf196e9ae23a1fb194bf08670a8/aws_cdk_aws_s3tables_alpha-2.214.0a0.tar.gz",
"platform": null,
"description": "# Amazon S3 Tables Construct Library\n\n<!--BEGIN STABILITY BANNER-->---\n\n\n\n\n> The APIs of higher level constructs in this module are experimental and under active development.\n> They are subject to non-backward compatible changes or removal in any future version. These are\n> not subject to the [Semantic Versioning](https://semver.org/) model and breaking changes will be\n> announced in the release notes. This means that while you may use them, you may need to update\n> your source code when upgrading to a newer version of this package.\n\n---\n<!--END STABILITY BANNER-->\n\n## Amazon S3 Tables\n\nAmazon S3 Tables deliver the first cloud object store with built-in Apache Iceberg support and streamline storing tabular data at scale.\n\n[Product Page](https://aws.amazon.com/s3/features/tables/) | [User Guide](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-tables.html)\n\n## Usage\n\n### Define an S3 Table Bucket\n\n```python\n# Build a Table bucket\nsample_table_bucket = TableBucket(scope, \"ExampleTableBucket\",\n table_bucket_name=\"example-bucket-1\",\n # optional fields:\n unreferenced_file_removal=UnreferencedFileRemoval(\n status=UnreferencedFileRemovalStatus.ENABLED,\n noncurrent_days=20,\n unreferenced_days=20\n )\n)\n```\n\n### Define an S3 Tables Namespace\n\n```python\n# Build a namespace\nsample_namespace = Namespace(scope, \"ExampleNamespace\",\n namespace_name=\"example-namespace-1\",\n table_bucket=table_bucket\n)\n```\n\n### Define an S3 Table\n\n```python\n# Build a table\nsample_table = Table(scope, \"ExampleTable\",\n table_name=\"example_table\",\n namespace=namespace,\n open_table_format=OpenTableFormat.ICEBERG,\n without_metadata=True\n)\n\n# Build a table with an Iceberg Schema\nsample_table_with_schema = Table(scope, \"ExampleSchemaTable\",\n table_name=\"example_table_with_schema\",\n namespace=namespace,\n open_table_format=OpenTableFormat.ICEBERG,\n iceberg_metadata=IcebergMetadataProperty(\n 
iceberg_schema=IcebergSchemaProperty(\n schema_field_list=[SchemaFieldProperty(\n name=\"id\",\n type=\"int\",\n required=True\n ), SchemaFieldProperty(\n name=\"name\",\n type=\"string\"\n )\n ]\n )\n ),\n compaction=CompactionProperty(\n status=Status.ENABLED,\n target_file_size_mb=128\n ),\n snapshot_management=SnapshotManagementProperty(\n status=Status.ENABLED,\n max_snapshot_age_hours=48,\n min_snapshots_to_keep=5\n )\n)\n```\n\nLearn more about table buckets maintenance operations and default behavior from the [S3 Tables User Guide](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-table-buckets-maintenance.html)\n\n### Controlling Table Bucket Permissions\n\n```python\n# Grant the principal read permissions to the bucket and all tables within\naccount_id = \"123456789012\"\ntable_bucket.grant_read(iam.AccountPrincipal(account_id), \"*\")\n\n# Grant the role write permissions to the bucket and all tables within\nrole = iam.Role(stack, \"MyRole\", assumed_by=iam.ServicePrincipal(\"sample\"))\ntable_bucket.grant_write(role, \"*\")\n\n# Grant the user read and write permissions to the bucket and all tables within\ntable_bucket.grant_read_write(iam.User(stack, \"MyUser\"), \"*\")\n\n# Grant permissions to the bucket and a particular table within it\ntable_id = \"6ba046b2-26de-44cf-9144-0c7862593a7b\"\ntable_bucket.grant_read_write(iam.AccountPrincipal(account_id), table_id)\n\n# Add custom resource policy statements\npermissions = iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n actions=[\"s3tables:*\"],\n principals=[iam.ServicePrincipal(\"example.aws.internal\")],\n resources=[\"*\"]\n)\n\ntable_bucket.add_to_resource_policy(permissions)\n```\n\n### Controlling Table Bucket Encryption Settings\n\nS3 TableBuckets have SSE (server-side encryption with AES-256) enabled by default with S3 managed keys.\nYou can also bring your own KMS key for KMS-SSE or have S3 create a KMS key for you.\n\nIf a bucket is encrypted with KMS, grant functions on the bucket 
will also grant access\nto the TableBucket's associated KMS key.\n\n```python\n# Provide a user defined KMS Key:\nkey = kms.Key(scope, \"UserKey\")\nencrypted_bucket = TableBucket(scope, \"EncryptedTableBucket\",\n table_bucket_name=\"table-bucket-1\",\n encryption=TableBucketEncryption.KMS,\n encryption_key=key\n)\n# This account principal will also receive kms:Decrypt access to the KMS key\nencrypted_bucket.grant_read(iam.AccountPrincipal(\"123456789012\"), \"*\")\n\n# Use S3 managed server side encryption (default)\nencrypted_bucket_default = TableBucket(scope, \"EncryptedTableBucketDefault\",\n table_bucket_name=\"table-bucket-3\",\n encryption=TableBucketEncryption.S3_MANAGED\n)\n```\n\nWhen using KMS encryption (`TableBucketEncryption.KMS`), if no encryption key is provided, CDK will automatically create a new KMS key for the table bucket with necessary permissions.\n\n```python\n# If no key is provided, one will be created automatically\nencrypted_bucket_auto = TableBucket(scope, \"EncryptedTableBucketAuto\",\n table_bucket_name=\"table-bucket-2\",\n encryption=TableBucketEncryption.KMS\n)\n```\n\n### Controlling Table Permissions\n\n```python\n# Grant the principal read permissions to the table\naccount_id = \"123456789012\"\ntable.grant_read(iam.AccountPrincipal(account_id))\n\n# Grant the role write permissions to the table\nrole = iam.Role(stack, \"MyRole\", assumed_by=iam.ServicePrincipal(\"sample\"))\ntable.grant_write(role)\n\n# Grant the user read and write permissions to the table\ntable.grant_read_write(iam.User(stack, \"MyUser\"))\n\n# Grant an account permissions to the table\ntable.grant_read_write(iam.AccountPrincipal(account_id))\n\n# Add custom resource policy statements\npermissions = iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n actions=[\"s3tables:*\"],\n principals=[iam.ServicePrincipal(\"example.aws.internal\")],\n resources=[\"*\"]\n)\n\ntable.add_to_resource_policy(permissions)\n```\n\n## Coming Soon\n\nL2 Construct support 
for:\n\n* KMS encryption support for Tables\n",
"bugtrack_url": null,
"license": "Apache-2.0",
"summary": "CDK Constructs for S3 Tables",
"version": "2.214.0a0",
"project_urls": {
"Homepage": "https://github.com/aws/aws-cdk",
"Source": "https://github.com/aws/aws-cdk.git"
},
"split_keywords": [],
"urls": [
{
"comment_text": null,
"digests": {
"blake2b_256": "b6f59e9762b6c3bc8e319737bf0ef06b3387b27d3e8c8444283abf27dc056123",
"md5": "df90d1ee9e39acbe3ce2c81269e9d41a",
"sha256": "368034880c783b289798e093276c43799a3b36c3a112ba52b4e85f134a0745a1"
},
"downloads": -1,
"filename": "aws_cdk_aws_s3tables_alpha-2.214.0a0-py3-none-any.whl",
"has_sig": false,
"md5_digest": "df90d1ee9e39acbe3ce2c81269e9d41a",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": "~=3.9",
"size": 119032,
"upload_time": "2025-09-02T12:32:49",
"upload_time_iso_8601": "2025-09-02T12:32:49.053371Z",
"url": "https://files.pythonhosted.org/packages/b6/f5/9e9762b6c3bc8e319737bf0ef06b3387b27d3e8c8444283abf27dc056123/aws_cdk_aws_s3tables_alpha-2.214.0a0-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": null,
"digests": {
"blake2b_256": "9a7feb40d2c12f955057a037c8eddd84cc9d7bf196e9ae23a1fb194bf08670a8",
"md5": "f931ae343cdbd1f40320b65037a9694f",
"sha256": "73c5f002a894ddcfca2c699a77977711f48ad63581d5a2599197f7070989d9e6"
},
"downloads": -1,
"filename": "aws_cdk_aws_s3tables_alpha-2.214.0a0.tar.gz",
"has_sig": false,
"md5_digest": "f931ae343cdbd1f40320b65037a9694f",
"packagetype": "sdist",
"python_version": "source",
"requires_python": "~=3.9",
"size": 119623,
"upload_time": "2025-09-02T12:33:23",
"upload_time_iso_8601": "2025-09-02T12:33:23.145017Z",
"url": "https://files.pythonhosted.org/packages/9a/7f/eb40d2c12f955057a037c8eddd84cc9d7bf196e9ae23a1fb194bf08670a8/aws_cdk_aws_s3tables_alpha-2.214.0a0.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2025-09-02 12:33:23",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "aws",
"github_project": "aws-cdk",
"travis_ci": false,
"coveralls": false,
"github_actions": true,
"lcname": "aws-cdk.aws-s3tables-alpha"
}