tfrecords


Name: tfrecords
Version: 0.2.19 (PyPI version JSON)
Downloads: see PyPI download stats
Home page: https://github.com/ssbuild/tfrecords
Summary: tfrecords: fast and simple reader and writer
Upload time: 2024-03-25 18:52:47
Maintainer: None
Docs URL: None
Author: ssbuild
Requires Python: <4,>=3
License: Apache 2.0
keywords tfrecords tfrecords tfrecord records datasets
VCS
bugtrack_url
requirements No requirements were recorded.
Travis-CI No Travis.
coveralls test coverage No coveralls.
            tfrecords
## simplify and transplant the tfrecord and table

### update information
```text
    2023-07-01:  Add arrow parquet
    2022-10-30:  Add lmdb leveldb read and writer and add record batch write
    2022-10-17:  Add shared memory for record to read mode with more accelerated Reading.
    2022-02-01:  simplify and transplant the tfrecord dataset
```

### 1. record read and write demo — the with_share_memory flag accelerates reading

```python
# -*- coding: utf-8 -*-
# @Time    : 2022/9/8 15:49

import tfrecords

# Shared TFRecord I/O options for all demos below: no compression.
options = tfrecords.TFRecordOptions(compression_type=tfrecords.TFRecordCompressionType.NONE)


def test_write(filename, N=3, context='aaa'):
    """Write N records of the form '<context>____<i>' to filename.

    Records are buffered and flushed in batches of 100 via write_batch;
    any remainder is flushed at the end.
    """
    with tfrecords.TFRecordWriter(filename, options=options) as writer:
        pending = []
        for idx in range(N):
            pending.append(context + '____' + str(idx))
            # Flush a full batch every 100 records.
            if (idx + 1) % 100 == 0:
                writer.write_batch(pending)
                pending = []
        # Flush whatever is left over.
        if pending:
            writer.write_batch(pending)
            pending = []


def test_record_iterator(example_paths):
    """Iterate over every record in each tfrecord file and print it.

    Also demonstrates the read_offsets / read_count APIs on the iterator.
    """
    print('test_record_iterator')
    for example_path in example_paths:
        iterator = tfrecords.tf_record_iterator(example_path, options=options, skip_bytes=0, with_share_memory=True)
        offset_list = iterator.read_offsets(0)  # demo of the offsets API; result intentionally unused here
        count = iterator.read_count(0)
        print(count)
        num = 0
        # Fixed: loop variable was named `iter`, shadowing the builtin.
        for record in iterator:
            num += 1
            print(record)


def test_random_reader(example_paths):
    """Sequentially read records via the random reader, following returned positions."""
    print('test_random_reader')
    for example_path in example_paths:
        file_reader = tfrecords.tf_record_random_reader(example_path, options=options, with_share_memory=True)
        last_pos = 0
        while True:
            try:
                x, pos = file_reader.read(last_pos)
                print(x, pos)
                last_pos = pos
            # The reader raises when reading past the last record; treat that as EOF.
            # (Fixed: the exception was bound to an unused name `e`.)
            except Exception:
                break


def test_random_reader2(example_paths):
    """Read every record by absolute offset, using read_offsets() to enumerate them."""
    print('test_random_reader2')
    for example_path in example_paths:
        file_reader = tfrecords.tf_record_random_reader(example_path, options=options, with_share_memory=True)
        skip_bytes = 0
        for offset, _length in file_reader.read_offsets(skip_bytes):
            record, _ = file_reader.read(offset)
            print(record)


# Demo driver: write one tfrecord file, then exercise the three readers.
test_write('d:/example.tfrecords0', 3, 'file0')

example_paths = tfrecords.glob('d:/example.tfrecords*')
print(example_paths)
test_record_iterator(example_paths)
print()
test_random_reader(example_paths)
print()
test_random_reader2(example_paths)
print()
```

### 2. leveldb read and write demo

```python
# -*- coding: utf-8 -*-
# @Time    : 2022/9/8 15:49

from tfrecords import LEVELDB

# Directory for the demo LevelDB database.
db_path = 'd:/example_leveldb'


def test_write(db_path):
    """Populate the demo LevelDB with paired input_*/label_* keys, writing in batches."""
    opts = LEVELDB.LeveldbOptions(create_if_missing=True, error_if_exists=False)
    writer = LEVELDB.Leveldb(db_path, opts)

    keys = []
    values = []
    for i in range(30):
        suffix = str(i).encode()
        keys.append(b"input_" + suffix)
        keys.append(b"label_" + suffix)
        values.append(b"xiaoming" + suffix)
        values.append(b"zzs" + suffix)
        # Flush every 1000 iterations (never triggers for 30; kept to demo batching).
        if (i + 1) % 1000 == 0:
            writer.put_batch(keys, values)
            keys.clear()
            values.clear()
    # Flush the remainder.
    if keys:
        writer.put_batch(keys, values)
        keys.clear()
        values.clear()

    writer.close()


def test_read(db_path):
    """Open the demo LevelDB, dump its contents, and look up a few keys."""
    opts = LEVELDB.LeveldbOptions(create_if_missing=False, error_if_exists=False)
    reader = LEVELDB.Leveldb(db_path, opts)

    def show():
        # Walk the whole database in forward order, printing a running index.
        for idx, item in enumerate(reader.get_iterater(reverse=False)):
            print(idx, item)

    def test_find(key):
        value = reader.get(key)
        print('find', type(value), value)

    show()

    test_find(b'input_0')
    test_find(b'input_5')
    test_find(b'input_10')

    reader.close()


# Run the LevelDB demo: write, then read back.
test_write(db_path)
test_read(db_path)
```


### 3. lmdb read and write demo

```python
# -*- coding: utf-8 -*-
# @Time    : 2022/9/8 15:49

from tfrecords import LMDB

# Directory for the demo LMDB database.
db_path = 'd:/example_lmdb'


def test_write(db_path):
    """Populate the demo LMDB with paired input_*/label_* keys, writing in batches."""
    opts = LMDB.LmdbOptions(env_open_flag=0,
                            env_open_mode=0o664,  # octal file mode
                            txn_flag=0,
                            dbi_flag=0,
                            put_flag=0)
    writer = LMDB.Lmdb(db_path, opts, map_size=1024 * 1024 * 10)
    keys = []
    values = []
    for i in range(30):
        suffix = str(i).encode()
        keys.append(b"input_" + suffix)
        keys.append(b"label_" + suffix)
        values.append(b"xiaoming_" + suffix)
        values.append(b"zzs_" + suffix)
        # Flush every 1000 iterations (never triggers for 30; kept to demo batching).
        if (i + 1) % 1000 == 0:
            writer.put_batch(keys, values)
            keys.clear()
            values.clear()
    # Flush the remainder.
    if keys:
        writer.put_batch(keys, values)
    writer.close()


def test_read(db_path):
    """Open the demo LMDB read-only, dump its contents, and look up a few keys."""
    options = LMDB.LmdbOptions(env_open_flag=LMDB.LmdbFlag.MDB_RDONLY,
                               env_open_mode=0o664,  # octal file mode
                               txn_flag=0,  # LMDB.LmdbFlag.MDB_RDONLY
                               dbi_flag=0,
                               put_flag=0)
    reader = LMDB.Lmdb(db_path, options, map_size=0)

    def show():
        # Walk the whole database in forward order, printing a running index.
        it = reader.get_iterater(reverse=False)
        i = 0
        for item in it:
            print(i, item)
            i += 1

    def test_find(key):
        value = reader.get(key)
        print('find', type(value), value)

    show()
    # Fixed: the writer stores keys such as b"input_0"; the original looked up
    # 'input0' / 'input5' / b'input10', none of which exist in the database
    # (and the leveldb demo above uses the b'input_N' form).
    test_find(b'input_0')
    test_find(b'input_5')
    test_find(b'input_10')
    reader.close()


# Run the LMDB demo: write, then read back.
test_write(db_path)
test_read(db_path)

```


### 4. arrow demo

### Stream
```python

from tfrecords.python.io.arrow import IPC_Writer,IPC_StreamReader,arrow

# Output path for the Arrow IPC stream demo.
path_file = "d:/tmp/data.arrow"

def test_write():
    """Build a two-column (id:int32, text:utf8) table and write it as an Arrow IPC stream."""
    schema = arrow.schema([
        arrow.field('id', arrow.int32()),
        arrow.field('text', arrow.utf8()),
    ])

    id_builder = arrow.Int32Builder()
    id_builder.AppendValues([0, 1, 4])
    id_array = id_builder.Finish().Value()

    text_builder = arrow.StringBuilder()
    text_builder.AppendValues(["aaaa", "你是谁", "张三"])
    text_array = text_builder.Finish().Value()

    table = arrow.Table.Make(schema=schema, arrays=[id_array, text_array])
    writer = IPC_Writer(path_file, schema, with_stream=True)
    writer.write_table(table)
    writer.close()

def test_read():
    """Read the full IPC stream back and print each value in the 'text' column."""
    reader = IPC_StreamReader(path_file)
    table = reader.read_all()
    reader.close()
    print(table)

    text_chunk = table.GetColumnByName('text').chunk(0)
    for idx in range(text_chunk.length()):
        value = text_chunk.Value(idx)
        print(type(value), value)


# Run the Arrow stream demo: write, then read back.
test_write()
test_read()
```

### file
```python
from tfrecords.python.io.arrow import IPC_Writer,IPC_StreamReader,IPC_MemoryMappedFileReader,arrow

# Output path for the Arrow IPC file demo.
path_file = "d:/tmp/data.arrow"

def test_write():
    """Build a two-column (id:int32, text:utf8) table and write it as an Arrow IPC file."""
    schema = arrow.schema([
        arrow.field('id', arrow.int32()),
        arrow.field('text', arrow.utf8()),
    ])

    id_builder = arrow.Int32Builder()
    id_builder.AppendValues([0, 1, 4])
    id_array = id_builder.Finish().Value()

    text_builder = arrow.StringBuilder()
    text_builder.AppendValues(["aaaa", "你是谁", "张三"])
    text_array = text_builder.Finish().Value()

    table = arrow.Table.Make(schema=schema, arrays=[id_array, text_array])
    # with_stream=False selects the random-access file format (vs. the stream format).
    writer = IPC_Writer(path_file, schema, with_stream=False)
    writer.write_table(table)
    writer.close()


def test_read():
    """Read each record batch from the memory-mapped Arrow file and print it."""
    reader = IPC_MemoryMappedFileReader(path_file)
    for batch_index in range(reader.num_record_batches()):
        print(reader.read_batch(batch_index))
    reader.close()


# Run the Arrow file demo: write, then read back.
test_write()
test_read()
```


### 5. parquet demo


```python
from tfrecords.python.io.arrow import ParquetWriter,IPC_StreamReader,ParquetReader,arrow
# Output path for the Parquet demo.
path_file = "d:/tmp/data.parquet"

def test_write():
    """Build a two-column (id:int32, text:utf8) table and write it as a Parquet file."""
    schema = arrow.schema([
        arrow.field('id', arrow.int32()),
        arrow.field('text', arrow.utf8()),
    ])

    id_builder = arrow.Int32Builder()
    id_builder.AppendValues([0, 1, 4, 5])
    id_array = id_builder.Finish().Value()

    text_builder = arrow.StringBuilder()
    text_builder.AppendValues(["aaaa", "你是谁", "张三", "李赛"])
    text_array = text_builder.Finish().Value()

    table = arrow.Table.Make(schema=schema, arrays=[id_array, text_array])

    writer = ParquetWriter(path_file, schema)
    writer.write_table(table)
    writer.close()

def test_read():
    """Read the Parquet file, flatten the table, and print the 'text' column values."""
    reader = ParquetReader(path_file, options=dict(buffer_size=2))
    table = reader.read_table()
    reader.close()
    table = table.Flatten().Value()
    print(table)

    text_chunk = table.GetColumnByName('text').chunk(0)
    for idx in range(text_chunk.length()):
        value = text_chunk.Value(idx)
        print(type(value), value)


# Run the Parquet demo: write, then read back.
test_write()
test_read()
```



            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/ssbuild/tfrecords",
    "name": "tfrecords",
    "maintainer": null,
    "docs_url": null,
    "requires_python": "<4,>=3",
    "maintainer_email": null,
    "keywords": "tfrecords, tfrecords, tfrecord, records, datasets",
    "author": "ssbuild",
    "author_email": "9727464@qq.com",
    "download_url": null,
    "platform": "win32_AMD64",
    "description": "tfrecords\r\n## simplify and transplant the tfrecord and table\r\n\r\n### update information\r\n```text\r\n    2023-07-01:  Add arrow parquet\r\n    2022-10-30:  Add lmdb leveldb read and writer and add record batch write\r\n    2022-10-17:  Add shared memory for record to read mode with more accelerated Reading.\r\n    2022-02-01:  simplify and transplant the tfrecord dataset\r\n```\r\n\r\n### 1. record read and write demo , with_share_memory flags will Accelerated Reading\r\n\r\n```python\r\n# -*- coding: utf-8 -*-\r\n# @Time    : 2022/9/8 15:49\r\n\r\nimport tfrecords\r\n\r\noptions = tfrecords.TFRecordOptions(compression_type=tfrecords.TFRecordCompressionType.NONE)\r\n\r\n\r\ndef test_write(filename, N=3, context='aaa'):\r\n    with tfrecords.TFRecordWriter(filename, options=options) as file_writer:\r\n        batch_data = []\r\n        for i in range(N):\r\n            d = context + '____' + str(i)\r\n            batch_data.append(d)\r\n            if (i + 1) % 100 == 0:\r\n                file_writer.write_batch(batch_data)\r\n                batch_data.clear()\r\n        if len(batch_data):\r\n            file_writer.write_batch(batch_data)\r\n            batch_data.clear()\r\n\r\n\r\ndef test_record_iterator(example_paths):\r\n    print('test_record_iterator')\r\n    for example_path in example_paths:\r\n        iterator = tfrecords.tf_record_iterator(example_path, options=options, skip_bytes=0, with_share_memory=True)\r\n        offset_list = iterator.read_offsets(0)\r\n        count = iterator.read_count(0)\r\n        print(count)\r\n        num = 0\r\n        for iter in iterator:\r\n            num += 1\r\n            print(iter)\r\n\r\n\r\ndef test_random_reader(example_paths):\r\n    print('test_random_reader')\r\n    for example_path in example_paths:\r\n        file_reader = tfrecords.tf_record_random_reader(example_path, options=options, with_share_memory=True)\r\n        last_pos = 0\r\n        while True:\r\n            
try:\r\n                x, pos = file_reader.read(last_pos)\r\n                print(x, pos)\r\n                last_pos = pos\r\n\r\n            except Exception as e:\r\n                break\r\n\r\n\r\ndef test_random_reader2(example_paths):\r\n    print('test_random_reader2')\r\n    for example_path in example_paths:\r\n        file_reader = tfrecords.tf_record_random_reader(example_path, options=options, with_share_memory=True)\r\n        skip_bytes = 0\r\n        offset_list = file_reader.read_offsets(skip_bytes)\r\n        for offset, length in offset_list:\r\n            x, _ = file_reader.read(offset)\r\n            print(x)\r\n\r\n\r\ntest_write('d:/example.tfrecords0', 3, 'file0')\r\n\r\nexample_paths = tfrecords.glob('d:/example.tfrecords*')\r\nprint(example_paths)\r\ntest_record_iterator(example_paths)\r\nprint()\r\ntest_random_reader(example_paths)\r\nprint()\r\ntest_random_reader2(example_paths)\r\nprint()\r\n```\r\n\r\n### 2. leveldb read and write demo\r\n\r\n```python\r\n# -*- coding: utf-8 -*-\r\n# @Time    : 2022/9/8 15:49\r\n\r\nfrom tfrecords import LEVELDB\r\n\r\ndb_path = 'd:/example_leveldb'\r\n\r\n\r\ndef test_write(db_path):\r\n    options = LEVELDB.LeveldbOptions(create_if_missing=True, error_if_exists=False)\r\n    file_writer = LEVELDB.Leveldb(db_path, options)\r\n\r\n    keys, values = [], []\r\n    for i in range(30):\r\n        keys.append(b\"input_\" + str(i).encode())\r\n        keys.append(b\"label_\" + str(i).encode())\r\n        values.append(b\"xiaoming\" + str(i).encode())\r\n        values.append(b\"zzs\" + str(i).encode())\r\n        if (i + 1) % 1000 == 0:\r\n            file_writer.put_batch(keys, values)\r\n            keys.clear()\r\n            values.clear()\r\n    if len(keys):\r\n        file_writer.put_batch(keys, values)\r\n        keys.clear()\r\n        values.clear()\r\n\r\n    file_writer.close()\r\n\r\n\r\ndef test_read(db_path):\r\n    options = LEVELDB.LeveldbOptions(create_if_missing=False, 
error_if_exists=False)\r\n    reader = LEVELDB.Leveldb(db_path, options)\r\n\r\n    def show():\r\n        it = reader.get_iterater(reverse=False)\r\n        i = 0\r\n        for item in it:\r\n            print(i, item)\r\n            i += 1\r\n\r\n    def test_find(key):\r\n        value = reader.get(key)\r\n        print('find', type(value), value)\r\n\r\n    show()\r\n\r\n    test_find(b'input_0')\r\n    test_find(b'input_5')\r\n    test_find(b'input_10')\r\n\r\n    reader.close()\r\n\r\n\r\ntest_write(db_path)\r\ntest_read(db_path)\r\n```\r\n\r\n\r\n### 3. lmdb read and write demo\r\n\r\n```python\r\n# -*- coding: utf-8 -*-\r\n# @Time    : 2022/9/8 15:49\r\n\r\nfrom tfrecords import LMDB\r\n\r\ndb_path = 'd:/example_lmdb'\r\n\r\n\r\ndef test_write(db_path):\r\n    options = LMDB.LmdbOptions(env_open_flag=0,\r\n                               env_open_mode=0o664,  # 8\u8fdb\u5236\u8868\u793a\r\n                               txn_flag=0,\r\n                               dbi_flag=0,\r\n                               put_flag=0)\r\n    file_writer = LMDB.Lmdb(db_path, options, map_size=1024 * 1024 * 10)\r\n    keys, values = [], []\r\n    for i in range(30):\r\n        keys.append(b\"input_\" + str(i).encode())\r\n        keys.append(b\"label_\" + str(i).encode())\r\n        values.append(b\"xiaoming_\" + str(i).encode())\r\n        values.append(b\"zzs_\" + str(i).encode())\r\n        if (i + 1) % 1000 == 0:\r\n            file_writer.put_batch(keys, values)\r\n            keys.clear()\r\n            values.clear()\r\n    if len(keys):\r\n        file_writer.put_batch(keys, values)\r\n    file_writer.close()\r\n\r\n\r\ndef test_read(db_path):\r\n    options = LMDB.LmdbOptions(env_open_flag=LMDB.LmdbFlag.MDB_RDONLY,\r\n                               env_open_mode=0o664,  # 8\u8fdb\u5236\u8868\u793a\r\n                               txn_flag = 0, # LMDB.LmdbFlag.MDB_RDONLY\r\n                               dbi_flag=0,\r\n                               
put_flag=0)\r\n    reader = LMDB.Lmdb(db_path, options, map_size=0)\r\n\r\n    def show():\r\n        it = reader.get_iterater(reverse=False)\r\n        i = 0\r\n        for item in it:\r\n            print(i, item)\r\n            i += 1\r\n\r\n    def test_find(key):\r\n        value = reader.get(key)\r\n        print('find', type(value), value)\r\n\r\n    show()\r\n    test_find('input0')\r\n    test_find('input5')\r\n    test_find(b'input10')\r\n    reader.close()\r\n\r\n\r\ntest_write(db_path)\r\ntest_read(db_path)\r\n\r\n```\r\n\r\n\r\n### 4. arrow demo\r\n\r\n### Stream\r\n```python\r\n\r\nfrom tfrecords.python.io.arrow import IPC_Writer,IPC_StreamReader,arrow\r\n\r\npath_file = \"d:/tmp/data.arrow\"\r\n\r\ndef test_write():\r\n    schema = arrow.schema([\r\n        arrow.field('id', arrow.int32()),\r\n        arrow.field('text', arrow.utf8())\r\n    ])\r\n\r\n    a = arrow.Int32Builder()\r\n    a.AppendValues([0,1,4])\r\n    a = a.Finish().Value()\r\n\r\n    b = arrow.StringBuilder()\r\n    b.AppendValues([\"aaaa\",\"\u4f60\u662f\u8c01\",\"\u5f20\u4e09\"])\r\n    b = b.Finish().Value()\r\n\r\n    table = arrow.Table.Make(schema = schema,arrays=[a,b])\r\n    fs = IPC_Writer(path_file,schema,with_stream = True)\r\n    fs.write_table(table)\r\n    fs.close()\r\n\r\ndef test_read():\r\n    fs = IPC_StreamReader(path_file)\r\n    table = fs.read_all()\r\n    fs.close()\r\n    print(table)\r\n\r\n    col = table.GetColumnByName('text')\r\n    text_list = col.chunk(0)\r\n    for i in range(text_list.length()):\r\n        x = text_list.Value(i)\r\n        print(type(x), x)\r\n\r\n\r\ntest_write()\r\ntest_read()\r\n```\r\n\r\n### file\r\n```python\r\nfrom tfrecords.python.io.arrow import IPC_Writer,IPC_StreamReader,IPC_MemoryMappedFileReader,arrow\r\n\r\npath_file = \"d:/tmp/data.arrow\"\r\n\r\ndef test_write():\r\n    schema = arrow.schema([\r\n        arrow.field('id', arrow.int32()),\r\n        arrow.field('text', arrow.utf8())\r\n    ])\r\n\r\n    a = 
arrow.Int32Builder()\r\n    a.AppendValues([0,1,4])\r\n    a = a.Finish().Value()\r\n\r\n    b = arrow.StringBuilder()\r\n    b.AppendValues([\"aaaa\",\"\u4f60\u662f\u8c01\",\"\u5f20\u4e09\"])\r\n    b = b.Finish().Value()\r\n\r\n    table = arrow.Table.Make(schema = schema,arrays=[a,b])\r\n    fs = IPC_Writer(path_file,schema,with_stream = False)\r\n    fs.write_table(table)\r\n    fs.close()\r\n\r\n\r\ndef test_read():\r\n\r\n    fs = IPC_MemoryMappedFileReader(path_file)\r\n    for i in range(fs.num_record_batches()):\r\n        batch = fs.read_batch(i)\r\n        print(batch)\r\n    fs.close()\r\n\r\n\r\ntest_write()\r\ntest_read()\r\n```\r\n\r\n\r\n### 4. parquet demo\r\n\r\n\r\n```python\r\nfrom tfrecords.python.io.arrow import ParquetWriter,IPC_StreamReader,ParquetReader,arrow\r\npath_file = \"d:/tmp/data.parquet\"\r\n\r\ndef test_write():\r\n    schema = arrow.schema([\r\n        arrow.field('id', arrow.int32()),\r\n        arrow.field('text', arrow.utf8())\r\n    ])\r\n\r\n    a = arrow.Int32Builder()\r\n    a.AppendValues([0, 1, 4, 5])\r\n    a = a.Finish().Value()\r\n\r\n    b = arrow.StringBuilder()\r\n    b.AppendValues([\"aaaa\", \"\u4f60\u662f\u8c01\", \"\u5f20\u4e09\", \"\u674e\u8d5b\"])\r\n    b = b.Finish().Value()\r\n\r\n    table = arrow.Table.Make(schema=schema, arrays=[a, b])\r\n\r\n    fs = ParquetWriter(path_file, schema)\r\n    fs.write_table(table)\r\n    fs.close()\r\n\r\ndef test_read():\r\n\r\n    fs = ParquetReader(path_file,options=dict(buffer_size=2))\r\n    table = fs.read_table()\r\n    fs.close()\r\n    table = table.Flatten().Value()\r\n    print(table)\r\n\r\n    col = table.GetColumnByName('text')\r\n    text_list = col.chunk(0)\r\n    for i in range(text_list.length()):\r\n        x = text_list.Value(i)\r\n        print(type(x),x)\r\n\r\n\r\ntest_write()\r\ntest_read()\r\n```\r\n\r\n\r\n",
    "bugtrack_url": null,
    "license": "Apache 2.0",
    "summary": "tfrecords: fast and simple reader and writer",
    "version": "0.2.19",
    "project_urls": {
        "Homepage": "https://github.com/ssbuild/tfrecords"
    },
    "split_keywords": [
        "tfrecords",
        " tfrecords",
        " tfrecord",
        " records",
        " datasets"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "cf0a1fbba65b87f19f58a60d83039a3f9525bb70cc2058a8efdf62828028027c",
                "md5": "f10781aa13a9da5a82c6f83c42f7a380",
                "sha256": "b75f32f26afa003ce031003e4e0b330ea2e734e98d17225ba9e7aa208bddb30e"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp310-cp310-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "f10781aa13a9da5a82c6f83c42f7a380",
            "packagetype": "bdist_wheel",
            "python_version": "cp310",
            "requires_python": "<4,>=3",
            "size": 8221154,
            "upload_time": "2024-03-25T18:52:47",
            "upload_time_iso_8601": "2024-03-25T18:52:47.246375Z",
            "url": "https://files.pythonhosted.org/packages/cf/0a/1fbba65b87f19f58a60d83039a3f9525bb70cc2058a8efdf62828028027c/tfrecords-0.2.19-cp310-cp310-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "637dac0ad731c0c596247738106c075bfd2f8c557e97753c2c6f0820b059cd34",
                "md5": "7e81a009df39b1d5b70a5b7688df211c",
                "sha256": "c054e70e77b339e9449a906b0a86aff562dc363c6d60e2a8131a94f34534f78d"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp311-cp311-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "7e81a009df39b1d5b70a5b7688df211c",
            "packagetype": "bdist_wheel",
            "python_version": "cp311",
            "requires_python": "<4,>=3",
            "size": 8224162,
            "upload_time": "2024-03-25T18:52:50",
            "upload_time_iso_8601": "2024-03-25T18:52:50.648234Z",
            "url": "https://files.pythonhosted.org/packages/63/7d/ac0ad731c0c596247738106c075bfd2f8c557e97753c2c6f0820b059cd34/tfrecords-0.2.19-cp311-cp311-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "3ce080fd8a275ead5c020a808ca0d7ef2e90f9e4a2cd3428564f779d1fceb1ff",
                "md5": "6edc9f1ecb7a96b5a2328a06d060e1b7",
                "sha256": "a589f244b8832940aa5ceb60337658f9bbf538868a988d2681e1794775569800"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp312-cp312-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "6edc9f1ecb7a96b5a2328a06d060e1b7",
            "packagetype": "bdist_wheel",
            "python_version": "cp312",
            "requires_python": "<4,>=3",
            "size": 8208433,
            "upload_time": "2024-03-25T18:52:54",
            "upload_time_iso_8601": "2024-03-25T18:52:54.233006Z",
            "url": "https://files.pythonhosted.org/packages/3c/e0/80fd8a275ead5c020a808ca0d7ef2e90f9e4a2cd3428564f779d1fceb1ff/tfrecords-0.2.19-cp312-cp312-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "b45204d0090aa3ef4cabec3d519bce3d4c625b87ba8cb1197ec7204a893047bc",
                "md5": "9f46064ac28952dcb8e19053f22f1924",
                "sha256": "6b15ebcf9ba975ac81c519bd0a3e41e56cdff1991405d3d448e9d847782956da"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp36-cp36m-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "9f46064ac28952dcb8e19053f22f1924",
            "packagetype": "bdist_wheel",
            "python_version": "cp36",
            "requires_python": "<4,>=3",
            "size": 8183899,
            "upload_time": "2024-03-25T18:52:57",
            "upload_time_iso_8601": "2024-03-25T18:52:57.325081Z",
            "url": "https://files.pythonhosted.org/packages/b4/52/04d0090aa3ef4cabec3d519bce3d4c625b87ba8cb1197ec7204a893047bc/tfrecords-0.2.19-cp36-cp36m-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "d3bbae79eb8027ffaa7bbf5ae8fc7c3645bdb781d305222d476ea9105444d84d",
                "md5": "c2853b20b3fdef4a8065486bb9a951f3",
                "sha256": "f9baec7388dc086c61340f57d6356470a30c71631b02f6a52d5d23c660315279"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp37-cp37m-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "c2853b20b3fdef4a8065486bb9a951f3",
            "packagetype": "bdist_wheel",
            "python_version": "cp37",
            "requires_python": "<4,>=3",
            "size": 8184146,
            "upload_time": "2024-03-25T18:53:00",
            "upload_time_iso_8601": "2024-03-25T18:53:00.223464Z",
            "url": "https://files.pythonhosted.org/packages/d3/bb/ae79eb8027ffaa7bbf5ae8fc7c3645bdb781d305222d476ea9105444d84d/tfrecords-0.2.19-cp37-cp37m-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "8a189c2a4e436659a02c150e8f38d52c4a11a01fa1a6f4a541211598578e3a91",
                "md5": "f1a8c6cbdc292bd1afe5844063c1c8ff",
                "sha256": "8b76abaa53cb589d0a3109139e3fa9b82b0b48dc588e73878c859d8d8aa02775"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp38-cp38-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "f1a8c6cbdc292bd1afe5844063c1c8ff",
            "packagetype": "bdist_wheel",
            "python_version": "cp38",
            "requires_python": "<4,>=3",
            "size": 8220382,
            "upload_time": "2024-03-25T18:53:04",
            "upload_time_iso_8601": "2024-03-25T18:53:04.010166Z",
            "url": "https://files.pythonhosted.org/packages/8a/18/9c2a4e436659a02c150e8f38d52c4a11a01fa1a6f4a541211598578e3a91/tfrecords-0.2.19-cp38-cp38-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "3882d4adf711a7ee635dae8c24280b8f3ff050ca270e758fa1204b174f67ef8a",
                "md5": "1ee4fd723dff3df2cdc60727f029b5f9",
                "sha256": "ee07188cbb1787d40ce4cb7d3f813fde28d81959a591d2eb823a8642cc6dacc1"
            },
            "downloads": -1,
            "filename": "tfrecords-0.2.19-cp39-cp39-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "1ee4fd723dff3df2cdc60727f029b5f9",
            "packagetype": "bdist_wheel",
            "python_version": "cp39",
            "requires_python": "<4,>=3",
            "size": 8220882,
            "upload_time": "2024-03-25T18:53:07",
            "upload_time_iso_8601": "2024-03-25T18:53:07.271351Z",
            "url": "https://files.pythonhosted.org/packages/38/82/d4adf711a7ee635dae8c24280b8f3ff050ca270e758fa1204b174f67ef8a/tfrecords-0.2.19-cp39-cp39-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-03-25 18:52:47",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "ssbuild",
    "github_project": "tfrecords",
    "github_not_found": true,
    "lcname": "tfrecords"
}
        
Elapsed time: 0.31172s