tfrecords
## Simplified and transplanted tfrecord and table readers/writers
### Update history
```text
2023-07-01: Add Arrow and Parquet support
2022-10-30: Add LMDB and LevelDB readers and writers; add batched record writes
2022-10-17: Add a shared-memory read mode for faster record reading
2022-02-01: Simplify and transplant the tfrecord dataset
```
### 1. record read and write demo (the with_share_memory flag accelerates reading)
```python
# -*- coding: utf-8 -*-
# @Time : 2022/9/8 15:49
import tfrecords

options = tfrecords.TFRecordOptions(compression_type=tfrecords.TFRecordCompressionType.NONE)


def test_write(filename, N=3, context='aaa'):
    with tfrecords.TFRecordWriter(filename, options=options) as file_writer:
        batch_data = []
        for i in range(N):
            d = context + '____' + str(i)
            batch_data.append(d)
            # flush a full batch every 100 records
            if (i + 1) % 100 == 0:
                file_writer.write_batch(batch_data)
                batch_data.clear()
        # flush any remaining records
        if len(batch_data):
            file_writer.write_batch(batch_data)
            batch_data.clear()


def test_record_iterator(example_paths):
    print('test_record_iterator')
    for example_path in example_paths:
        iterator = tfrecords.tf_record_iterator(example_path, options=options, skip_bytes=0, with_share_memory=True)
        offset_list = iterator.read_offsets(0)  # (offset, length) of every record
        count = iterator.read_count(0)          # total number of records
        print(count)
        num = 0
        for record in iterator:
            num += 1
            print(record)


def test_random_reader(example_paths):
    print('test_random_reader')
    for example_path in example_paths:
        file_reader = tfrecords.tf_record_random_reader(example_path, options=options, with_share_memory=True)
        last_pos = 0
        while True:
            try:
                x, pos = file_reader.read(last_pos)
                print(x, pos)
                last_pos = pos
            except Exception:
                # reading past the last record raises; stop here
                break


def test_random_reader2(example_paths):
    print('test_random_reader2')
    for example_path in example_paths:
        file_reader = tfrecords.tf_record_random_reader(example_path, options=options, with_share_memory=True)
        skip_bytes = 0
        offset_list = file_reader.read_offsets(skip_bytes)
        for offset, length in offset_list:
            x, _ = file_reader.read(offset)
            print(x)


test_write('d:/example.tfrecords0', 3, 'file0')

example_paths = tfrecords.glob('d:/example.tfrecords*')
print(example_paths)
test_record_iterator(example_paths)
print()
test_random_reader(example_paths)
print()
test_random_reader2(example_paths)
print()
```
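Since `read_offsets` returns the byte offset and length of every record, the random reader can also serve records by index, which is the usual way to back a shuffled dataset. Below is a minimal sketch; the `RecordIndexer` wrapper is illustrative and not part of the library, and it relies only on the `tf_record_random_reader`, `read_offsets` and `read` calls shown above.

```python
import tfrecords

options = tfrecords.TFRecordOptions(compression_type=tfrecords.TFRecordCompressionType.NONE)


class RecordIndexer:
    """Illustrative wrapper: index-based access to one tfrecord file."""

    def __init__(self, filename):
        self._reader = tfrecords.tf_record_random_reader(filename, options=options, with_share_memory=True)
        # collect the (offset, length) of every record once up front
        self._offsets = list(self._reader.read_offsets(0))

    def __len__(self):
        return len(self._offsets)

    def __getitem__(self, index):
        offset, _ = self._offsets[index]
        data, _ = self._reader.read(offset)
        return data


# usage: read records out of order without scanning the whole file
indexer = RecordIndexer('d:/example.tfrecords0')
print(len(indexer), indexer[2], indexer[0])
```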
### 2. leveldb read and write demo
```python
# -*- coding: utf-8 -*-
# @Time : 2022/9/8 15:49
from tfrecords import LEVELDB

db_path = 'd:/example_leveldb'


def test_write(db_path):
    options = LEVELDB.LeveldbOptions(create_if_missing=True, error_if_exists=False)
    file_writer = LEVELDB.Leveldb(db_path, options)

    keys, values = [], []
    for i in range(30):
        keys.append(b"input_" + str(i).encode())
        keys.append(b"label_" + str(i).encode())
        values.append(b"xiaoming" + str(i).encode())
        values.append(b"zzs" + str(i).encode())
        # flush a full batch every 1000 samples
        if (i + 1) % 1000 == 0:
            file_writer.put_batch(keys, values)
            keys.clear()
            values.clear()
    # flush any remaining pairs
    if len(keys):
        file_writer.put_batch(keys, values)
        keys.clear()
        values.clear()

    file_writer.close()


def test_read(db_path):
    options = LEVELDB.LeveldbOptions(create_if_missing=False, error_if_exists=False)
    reader = LEVELDB.Leveldb(db_path, options)

    def show():
        it = reader.get_iterater(reverse=False)
        i = 0
        for item in it:
            print(i, item)
            i += 1

    def test_find(key):
        value = reader.get(key)
        print('find', type(value), value)

    show()

    test_find(b'input_0')
    test_find(b'input_5')
    test_find(b'input_10')

    reader.close()


test_write(db_path)
test_read(db_path)
```
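The demo stores each sample under two keys, `input_i` and `label_i`. The sketch below reassembles those pairs on the read side using only the `get` call shown above; the key layout is just the demo's convention, not something LevelDB or this wrapper enforces.

```python
from tfrecords import LEVELDB

db_path = 'd:/example_leveldb'
options = LEVELDB.LeveldbOptions(create_if_missing=False, error_if_exists=False)
reader = LEVELDB.Leveldb(db_path, options)

# rebuild (input, label) pairs from the key convention used by the writer above
samples = []
for i in range(30):  # the writer stored 30 samples
    x = reader.get(b"input_" + str(i).encode())
    y = reader.get(b"label_" + str(i).encode())
    samples.append((x, y))

reader.close()
print(samples[0], samples[29])
```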
### 3. lmdb read and write demo
```python
# -*- coding: utf-8 -*-
# @Time : 2022/9/8 15:49
from tfrecords import LMDB

db_path = 'd:/example_lmdb'


def test_write(db_path):
    options = LMDB.LmdbOptions(env_open_flag=0,
                               env_open_mode=0o664,  # octal file mode
                               txn_flag=0,
                               dbi_flag=0,
                               put_flag=0)
    file_writer = LMDB.Lmdb(db_path, options, map_size=1024 * 1024 * 10)

    keys, values = [], []
    for i in range(30):
        keys.append(b"input_" + str(i).encode())
        keys.append(b"label_" + str(i).encode())
        values.append(b"xiaoming_" + str(i).encode())
        values.append(b"zzs_" + str(i).encode())
        # flush a full batch every 1000 samples
        if (i + 1) % 1000 == 0:
            file_writer.put_batch(keys, values)
            keys.clear()
            values.clear()
    # flush any remaining pairs
    if len(keys):
        file_writer.put_batch(keys, values)
    file_writer.close()


def test_read(db_path):
    options = LMDB.LmdbOptions(env_open_flag=LMDB.LmdbFlag.MDB_RDONLY,
                               env_open_mode=0o664,  # octal file mode
                               txn_flag=0,  # LMDB.LmdbFlag.MDB_RDONLY
                               dbi_flag=0,
                               put_flag=0)
    reader = LMDB.Lmdb(db_path, options, map_size=0)

    def show():
        it = reader.get_iterater(reverse=False)
        i = 0
        for item in it:
            print(i, item)
            i += 1

    def test_find(key):
        value = reader.get(key)
        print('find', type(value), value)

    show()
    test_find(b'input_0')
    test_find(b'input_5')
    test_find(b'input_10')
    reader.close()


test_write(db_path)
test_read(db_path)
```
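For read-only access the demo opens the environment with `MDB_RDONLY` and `map_size=0`, whereas the writer has to reserve a map size large enough for its data (10 MiB above). The sketch below materializes the whole database into a Python dict; it assumes that `get_iterater` yields key/value pairs, as the items printed by `show()` above suggest.

```python
from tfrecords import LMDB

db_path = 'd:/example_lmdb'
options = LMDB.LmdbOptions(env_open_flag=LMDB.LmdbFlag.MDB_RDONLY,
                           env_open_mode=0o664,
                           txn_flag=0,
                           dbi_flag=0,
                           put_flag=0)
reader = LMDB.Lmdb(db_path, options, map_size=0)

# collect every entry into memory; fine for a small demo database
kv = {}
for item in reader.get_iterater(reverse=False):
    key, value = item  # assumes each item is a (key, value) pair
    kv[key] = value
reader.close()

print(len(kv), kv.get(b'input_0'))
```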
### 4. arrow demo
### Stream
```python
from tfrecords.python.io.arrow import IPC_Writer, IPC_StreamReader, arrow

path_file = "d:/tmp/data.arrow"


def test_write():
    schema = arrow.schema([
        arrow.field('id', arrow.int32()),
        arrow.field('text', arrow.utf8())
    ])

    # build the two columns
    a = arrow.Int32Builder()
    a.AppendValues([0, 1, 4])
    a = a.Finish().Value()

    b = arrow.StringBuilder()
    b.AppendValues(["aaaa", "你是谁", "张三"])
    b = b.Finish().Value()

    table = arrow.Table.Make(schema=schema, arrays=[a, b])
    fs = IPC_Writer(path_file, schema, with_stream=True)  # IPC stream format
    fs.write_table(table)
    fs.close()


def test_read():
    fs = IPC_StreamReader(path_file)
    table = fs.read_all()
    fs.close()
    print(table)

    col = table.GetColumnByName('text')
    text_list = col.chunk(0)
    for i in range(text_list.length()):
        x = text_list.Value(i)
        print(type(x), x)


test_write()
test_read()
```
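The builders accept whole Python lists, so a larger table can be assembled the same way before a single `write_table` call. The sketch below generates demo rows in plain Python first and reuses only the API shown above; the file name and row contents are arbitrary.

```python
from tfrecords.python.io.arrow import IPC_Writer, IPC_StreamReader, arrow

path_file = "d:/tmp/data_big.arrow"

schema = arrow.schema([
    arrow.field('id', arrow.int32()),
    arrow.field('text', arrow.utf8())
])

# generate demo rows in plain Python, then hand them to the builders in one go
ids = list(range(1000))
texts = ["sample_" + str(i) for i in ids]

a = arrow.Int32Builder()
a.AppendValues(ids)
a = a.Finish().Value()

b = arrow.StringBuilder()
b.AppendValues(texts)
b = b.Finish().Value()

table = arrow.Table.Make(schema=schema, arrays=[a, b])
fs = IPC_Writer(path_file, schema, with_stream=True)
fs.write_table(table)
fs.close()

# the stream file is read back sequentially, exactly as in test_read above
reader = IPC_StreamReader(path_file)
print(reader.read_all())
reader.close()
```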
### File
```python
from tfrecords.python.io.arrow import IPC_Writer, IPC_StreamReader, IPC_MemoryMappedFileReader, arrow

path_file = "d:/tmp/data.arrow"


def test_write():
    schema = arrow.schema([
        arrow.field('id', arrow.int32()),
        arrow.field('text', arrow.utf8())
    ])

    a = arrow.Int32Builder()
    a.AppendValues([0, 1, 4])
    a = a.Finish().Value()

    b = arrow.StringBuilder()
    b.AppendValues(["aaaa", "你是谁", "张三"])
    b = b.Finish().Value()

    table = arrow.Table.Make(schema=schema, arrays=[a, b])
    fs = IPC_Writer(path_file, schema, with_stream=False)  # IPC file format
    fs.write_table(table)
    fs.close()


def test_read():
    fs = IPC_MemoryMappedFileReader(path_file)
    for i in range(fs.num_record_batches()):
        batch = fs.read_batch(i)
        print(batch)
    fs.close()


test_write()
test_read()
```
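Unlike the stream format, the IPC file format written with `with_stream=False` allows random access: the memory-mapped reader reports how many record batches the file holds and can fetch any batch by index without reading the rest. A short sketch using only the calls shown above:

```python
from tfrecords.python.io.arrow import IPC_MemoryMappedFileReader

path_file = "d:/tmp/data.arrow"

fs = IPC_MemoryMappedFileReader(path_file)
n = fs.num_record_batches()
print('record batches in file:', n)

# jump straight to the last batch; no need to iterate the earlier ones
last_batch = fs.read_batch(n - 1)
print(last_batch)
fs.close()
```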
### 5. parquet demo
```python
from tfrecords.python.io.arrow import ParquetWriter, IPC_StreamReader, ParquetReader, arrow

path_file = "d:/tmp/data.parquet"


def test_write():
    schema = arrow.schema([
        arrow.field('id', arrow.int32()),
        arrow.field('text', arrow.utf8())
    ])

    a = arrow.Int32Builder()
    a.AppendValues([0, 1, 4, 5])
    a = a.Finish().Value()

    b = arrow.StringBuilder()
    b.AppendValues(["aaaa", "你是谁", "张三", "李赛"])
    b = b.Finish().Value()

    table = arrow.Table.Make(schema=schema, arrays=[a, b])

    fs = ParquetWriter(path_file, schema)
    fs.write_table(table)
    fs.close()


def test_read():
    fs = ParquetReader(path_file, options=dict(buffer_size=2))
    table = fs.read_table()
    fs.close()
    table = table.Flatten().Value()
    print(table)

    col = table.GetColumnByName('text')
    text_list = col.chunk(0)
    for i in range(text_list.length()):
        x = text_list.Value(i)
        print(type(x), x)


test_write()
test_read()
```
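To get the data back into plain Python rows, both columns can be walked the same way as the `text` column above. This is a minimal sketch that assumes the `id` chunk exposes the same `length()`/`Value(i)` accessors as the string chunk, and that this small table has a single chunk per column:

```python
from tfrecords.python.io.arrow import ParquetReader

path_file = "d:/tmp/data.parquet"

fs = ParquetReader(path_file, options=dict(buffer_size=2))
table = fs.read_table()
fs.close()
table = table.Flatten().Value()

ids = table.GetColumnByName('id').chunk(0)      # assumed to support length()/Value(i)
texts = table.GetColumnByName('text').chunk(0)

# convert the two columns into a list of row dicts
rows = []
for i in range(texts.length()):
    rows.append({'id': ids.Value(i), 'text': texts.Value(i)})

print(rows)
```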