# Tinygrad Lightning - WIP
PyTorch Lightning clone for tinygrad. Easy data loading, training, logging, and checkpointing.
### Example
```
import tinygrad_lightning as pl
### model ###
class TinyBobNet(pl.LightningModule):
def __init__(self, filters=64):
self.model = ResNet18(num_classes=10)
def forward(self, input: Tensor):
return self.model(input)
def configure_optimizers(self):
return optim.SGD(optim.get_parameters(self), lr=5e-3, momentum=0.9)
def training_step(self, train_batch, batch_idx):
x, y = train_batch
for image in x:
self.log_image("inputs", image)
out = self.forward(x)
cat = np.argmax(out.cpu().numpy(), axis=-1)
accuracy = (cat == y).mean()
loss = sparse_categorical_crossentropy(out, y)
loss_value = loss.detach().cpu().numpy()
# automatically logs to train/loss, ...
self.log("loss", loss_value.mean())
self.log("accuracy", accuracy)
return loss
def validation_step(self, val_batch, val_idx):
x, y = val_batch
out = self.forward(x)
cat = np.argmax(out.cpu().numpy(), axis=-1)
accuracy = (cat == y).mean()
loss = sparse_categorical_crossentropy(out, y)
loss_value = loss.detach().cpu().numpy()
# automatically logs to val/loss, ...
self.log("loss", loss_value.mean())
self.log("accuracy", accuracy)
return loss
batch_size = 4
train_ds = MnistDataset(variant='train') # same as torch dataset
train_loader = pl.DataLoader(train_ds, batch_size, workers=1, shuffle=True)
# define your model
model = TinyBobNet()
callbacks=[pl.TQDMProgressBar(refresh_rate=10), pl.TensorboardLogger("./logdir")]
trainer = pl.Trainer(model, train_loader=train_loader, callbacks=callbacks)
trainer.fit(epochs=1) # train_batches=2, val_batches=4
```
Raw data
{
"_id": null,
"home_page": "https://github.com/baudcode/tinygrad-lightning",
"name": "tinygrad-lightning",
"maintainer": "Malte Koch",
"docs_url": null,
"requires_python": "",
"maintainer_email": "malte-koch@gmx.net",
"keywords": "api,order",
"author": "Malte Koch",
"author_email": "malte-koch@gmx.net",
"download_url": "https://files.pythonhosted.org/packages/bd/15/d0ff0ce512ad9f52ffc6d5a9b8abc1c00d5c7eedcf529272e80bd029a9a9/tinygrad_lightning-0.0.1.tar.gz",
"platform": null,
"description": "# Tinygrad Lightning - WIP\n\nPytorch Lightning clone for tinygrad. Easy data loading, training, logging and checkpointing.\n\n### Example\n\n```\nimport tinygrad_lightning as pl\n\n### model ###\n\nclass TinyBobNet(pl.LightningModule):\n def __init__(self, filters=64):\n self.model = ResNet18(num_classes=10)\n\n def forward(self, input: Tensor):\n return self.model(input)\n\n def configure_optimizers(self):\n return optim.SGD(optim.get_parameters(self), lr=5e-3, momentum=0.9)\n\n def training_step(self, train_batch, batch_idx):\n x, y = train_batch\n\n for image in x:\n self.log_image(\"inputs\", image)\n\n out = self.forward(x)\n\n cat = np.argmax(out.cpu().numpy(), axis=-1)\n accuracy = (cat == y).mean()\n\n loss = sparse_categorical_crossentropy(out, y)\n loss_value = loss.detach().cpu().numpy()\n\n # automatically logs to train/loss, ...\n self.log(\"loss\", loss_value.mean())\n self.log(\"accuracy\", accuracy)\n\n return loss\n\n def validation_step(self, val_batch, val_idx):\n x, y = val_batch\n out = self.forward(x)\n\n cat = np.argmax(out.cpu().numpy(), axis=-1)\n accuracy = (cat == y).mean()\n\n loss = sparse_categorical_crossentropy(out, y)\n loss_value = loss.detach().cpu().numpy()\n\n # automatically logs to val/loss, ...\n self.log(\"loss\", loss_value.mean())\n self.log(\"accuracy\", accuracy)\n\n return loss\n\nbatch_size = 4\n\ntest_ds = MnistDataset(variant='test') # same as torch dataset\ntrain_loader = pl.DataLoader(train_ds, batch_size, workers=1, shuffle=True)\n\n# define your model\nmodel = TinyBobNet()\ncallbacks=[pl.TQDMProgressBar(refresh_rate=10), pl.TensorboardLogger(\"./logdir\")]\n\ntrainer = pl.Trainer(model, train_loader=train_loader, callbacks=callbacks)\ntrainer.fit(epochs=1) # train_batches=2, val_batches=4\n```\n\n",
"bugtrack_url": null,
"license": "MIT",
"summary": "high level interface for tinygrad",
"version": "0.0.1",
"project_urls": {
"Homepage": "https://github.com/baudcode/tinygrad-lightning"
},
"split_keywords": [
"api",
"order"
],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "2543c5046a678db116c31f977c78d6a1b374113e05955e20416fd3038e1f75e3",
"md5": "1fbf55a3b606e96942e989e27dd03713",
"sha256": "4f952659e0830df3c561724dd4b75ee86496821b668ad658a6eddfa4b238be10"
},
"downloads": -1,
"filename": "tinygrad_lightning-0.0.1-py3-none-any.whl",
"has_sig": false,
"md5_digest": "1fbf55a3b606e96942e989e27dd03713",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": null,
"size": 9442,
"upload_time": "2023-06-03T16:29:09",
"upload_time_iso_8601": "2023-06-03T16:29:09.493188Z",
"url": "https://files.pythonhosted.org/packages/25/43/c5046a678db116c31f977c78d6a1b374113e05955e20416fd3038e1f75e3/tinygrad_lightning-0.0.1-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": "",
"digests": {
"blake2b_256": "bd15d0ff0ce512ad9f52ffc6d5a9b8abc1c00d5c7eedcf529272e80bd029a9a9",
"md5": "ef7f621c1647a64acc955258f80c69f8",
"sha256": "765e177141ae3d6fe71a826b601e19cb69d49b15c999a5761c9897c8cb19fff9"
},
"downloads": -1,
"filename": "tinygrad_lightning-0.0.1.tar.gz",
"has_sig": false,
"md5_digest": "ef7f621c1647a64acc955258f80c69f8",
"packagetype": "sdist",
"python_version": "source",
"requires_python": null,
"size": 8188,
"upload_time": "2023-06-03T16:29:11",
"upload_time_iso_8601": "2023-06-03T16:29:11.698518Z",
"url": "https://files.pythonhosted.org/packages/bd/15/d0ff0ce512ad9f52ffc6d5a9b8abc1c00d5c7eedcf529272e80bd029a9a9/tinygrad_lightning-0.0.1.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2023-06-03 16:29:11",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "baudcode",
"github_project": "tinygrad-lightning",
"travis_ci": false,
"coveralls": false,
"github_actions": false,
"requirements": [],
"lcname": "tinygrad-lightning"
}